* tree.h (struct tree_base): Add atomic_flag field.
(TYPE_ATOMIC): New accessor macro.
(enum cv_qualifier): Add TYPE_QUAL_ATOMIC.
(TYPE_QUALS, TYPE_QUALS_NO_ADDR_SPACE): Add TYPE_QUAL_ATOMIC.
(TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC): New macro.
(enum tree_index): Add TI_ATOMIC{QHSDT}I_TYPE.
(atomic{QHSDT}I_type_node): Add new type nodes.
* emit-rtl.c (set_mem_attributes_minus_bitpos): Treat atomics as volatile.
* function.c (assign_stack_temp_for_type): Treat atomics as volatile.
* print-tree.c (print_node): Print atomic qualifier.
* tree-pretty-print.c (dump_generic_node): Print atomic type attribute.
* tree.c (set_type_quals): Set TYPE_ATOMIC.
(find_atomic_core_type): New static function mapping a type's size to the
matching built-in atomic type.
(build_qualified_type): Tweak for atomic qualifier overrides.
(build_atomic_variant): New function to build an atomic variant type node.
(build_common_tree_nodes): Build atomic{QHSDT}I_type_node, allowing
for override with target hook.
* alias.c (objects_must_conflict_p): Treat atomics as volatile.
* calls.c (expand_call): Treat atomics as volatile.
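
(Illustrative note, not part of the patch: TYPE_QUAL_ATOMIC is the qualifier
bit that the C11 _Atomic keyword is intended to map onto once the front end
is wired up.  A minimal sketch of the user code these nodes represent:)

    #include <stdatomic.h>

    _Atomic int counter;           /* qualified variant, TYPE_ATOMIC set    */
    const _Atomic int limit = 8;   /* TYPE_QUAL_CONST | TYPE_QUAL_ATOMIC    */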
===================================================================
*************** struct GTY(()) tree_base {
unsigned packed_flag : 1;
unsigned user_align : 1;
unsigned nameless_flag : 1;
! unsigned spare0 : 4;
unsigned spare1 : 8;
unsigned packed_flag : 1;
unsigned user_align : 1;
unsigned nameless_flag : 1;
! unsigned atomic_flag : 1;
! unsigned spare0 : 3;
unsigned spare1 : 8;
*************** extern enum machine_mode vector_type_mod
/* Nonzero in a type considered volatile as a whole. */
#define TYPE_VOLATILE(NODE) (TYPE_CHECK (NODE)->base.volatile_flag)
+ /* Nonzero in a type considered atomic as a whole. */
+ #define TYPE_ATOMIC(NODE) (TYPE_CHECK (NODE)->base.u.bits.atomic_flag)
+
/* Means this type is const-qualified. */
#define TYPE_READONLY(NODE) (TYPE_CHECK (NODE)->base.readonly_flag)
*************** enum cv_qualifier
TYPE_UNQUALIFIED = 0x0,
TYPE_QUAL_CONST = 0x1,
TYPE_QUAL_VOLATILE = 0x2,
! TYPE_QUAL_RESTRICT = 0x4
};
/* Encode/decode the named memory support as part of the qualifier. If more
TYPE_UNQUALIFIED = 0x0,
TYPE_QUAL_CONST = 0x1,
TYPE_QUAL_VOLATILE = 0x2,
! TYPE_QUAL_RESTRICT = 0x4,
! TYPE_QUAL_ATOMIC = 0x8
};
/* Encode/decode the named memory support as part of the qualifier. If more
*************** enum cv_qualifier
#define TYPE_QUALS(NODE) \
((int) ((TYPE_READONLY (NODE) * TYPE_QUAL_CONST) \
| (TYPE_VOLATILE (NODE) * TYPE_QUAL_VOLATILE) \
+ | (TYPE_ATOMIC (NODE) * TYPE_QUAL_ATOMIC) \
| (TYPE_RESTRICT (NODE) * TYPE_QUAL_RESTRICT) \
| (ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (NODE)))))
*************** enum cv_qualifier
#define TYPE_QUALS_NO_ADDR_SPACE(NODE) \
((int) ((TYPE_READONLY (NODE) * TYPE_QUAL_CONST) \
| (TYPE_VOLATILE (NODE) * TYPE_QUAL_VOLATILE) \
+ | (TYPE_ATOMIC (NODE) * TYPE_QUAL_ATOMIC) \
+ | (TYPE_RESTRICT (NODE) * TYPE_QUAL_RESTRICT)))
+ /* The same as TYPE_QUALS without the address space and atomic
+ qualifications. */
+ #define TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC(NODE) \
+ ((int) ((TYPE_READONLY (NODE) * TYPE_QUAL_CONST) \
+ | (TYPE_VOLATILE (NODE) * TYPE_QUAL_VOLATILE) \
| (TYPE_RESTRICT (NODE) * TYPE_QUAL_RESTRICT)))
+
/* These flags are available for each language front end to use internally. */
#define TYPE_LANG_FLAG_0(NODE) (TYPE_CHECK (NODE)->type_common.lang_flag_0)
#define TYPE_LANG_FLAG_1(NODE) (TYPE_CHECK (NODE)->type_common.lang_flag_1)
*************** enum tree_index
TI_UINTDI_TYPE,
TI_UINTTI_TYPE,
+ TI_ATOMICQI_TYPE,
+ TI_ATOMICHI_TYPE,
+ TI_ATOMICSI_TYPE,
+ TI_ATOMICDI_TYPE,
+ TI_ATOMICTI_TYPE,
+
TI_UINT16_TYPE,
TI_UINT32_TYPE,
TI_UINT64_TYPE,
*************** extern GTY(()) tree global_trees[TI_MAX]
#define unsigned_intDI_type_node global_trees[TI_UINTDI_TYPE]
#define unsigned_intTI_type_node global_trees[TI_UINTTI_TYPE]
+ #define atomicQI_type_node global_trees[TI_ATOMICQI_TYPE]
+ #define atomicHI_type_node global_trees[TI_ATOMICHI_TYPE]
+ #define atomicSI_type_node global_trees[TI_ATOMICSI_TYPE]
+ #define atomicDI_type_node global_trees[TI_ATOMICDI_TYPE]
+ #define atomicTI_type_node global_trees[TI_ATOMICTI_TYPE]
+
#define uint16_type_node global_trees[TI_UINT16_TYPE]
#define uint32_type_node global_trees[TI_UINT32_TYPE]
#define uint64_type_node global_trees[TI_UINT64_TYPE]
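
(Worked example, not part of the patch: the qualifier values are single bits
and compose by OR, so for a "const _Atomic int" TYPE_QUALS yields
TYPE_QUAL_CONST | TYPE_QUAL_ATOMIC == 0x1 | 0x8 == 0x9, while
TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC omits the atomic term and yields 0x1.
A sketch of how a caller might test and strip the new bit:)

    int quals = TYPE_QUALS (type);     /* e.g. 0x9 for const _Atomic int  */
    if (quals & TYPE_QUAL_ATOMIC)      /* atomic-qualified?               */
      quals &= ~TYPE_QUAL_ATOMIC;      /* strip it, leaving 0x1           */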
===================================================================
*************** set_mem_attributes_minus_bitpos (rtx ref
front-end routine) and use it. */
attrs.alias = get_alias_set (t);
! MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
MEM_POINTER (ref) = POINTER_TYPE_P (type);
/* Default values from pre-existing memory attributes if present. */
front-end routine) and use it. */
attrs.alias = get_alias_set (t);
! MEM_VOLATILE_P (ref) |= (TYPE_VOLATILE (type) || TYPE_ATOMIC (type));
MEM_POINTER (ref) = POINTER_TYPE_P (type);
/* Default values from pre-existing memory attributes if present. */
===================================================================
*************** assign_stack_temp_for_type (enum machine
/* If a type is specified, set the relevant flags. */
if (type != 0)
! MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
MEM_NOTRAP_P (slot) = 1;
return slot;
/* If a type is specified, set the relevant flags. */
if (type != 0)
! MEM_VOLATILE_P (slot) = (TYPE_VOLATILE (type) || TYPE_ATOMIC (type));
MEM_NOTRAP_P (slot) = 1;
return slot;
===================================================================
*************** print_node (FILE *file, const char *pref
if (TYPE_P (node) ? TYPE_READONLY (node) : TREE_READONLY (node))
fputs (" readonly", file);
+ if (TYPE_P (node) && TYPE_ATOMIC (node))
+ fputs (" atomic", file);
if (!TYPE_P (node) && TREE_CONSTANT (node))
fputs (" constant", file);
else if (TYPE_P (node) && TYPE_SIZES_GIMPLIFIED (node))
===================================================================
*************** dump_generic_node (pretty_printer *buffe
unsigned int quals = TYPE_QUALS (node);
enum tree_code_class tclass;
+ if (quals & TYPE_QUAL_ATOMIC)
+ pp_string (buffer, "atomic ");
if (quals & TYPE_QUAL_CONST)
pp_string (buffer, "const ");
else if (quals & TYPE_QUAL_VOLATILE)
*************** dump_generic_node (pretty_printer *buffe
{
unsigned int quals = TYPE_QUALS (node);
+ if (quals & TYPE_QUAL_ATOMIC)
+ pp_string (buffer, "atomic ");
if (quals & TYPE_QUAL_CONST)
pp_string (buffer, "const ");
if (quals & TYPE_QUAL_VOLATILE)
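
(Illustrative, not part of the patch: with the two tree-pretty-print.c hunks
above, an atomic-qualified declaration picks up the new "atomic " prefix in
-fdump-tree-* output, e.g.:)

    const _Atomic int x;    /* rendered by dump_generic_node as
                               "atomic const int x;"  */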
===================================================================
*************** set_type_quals (tree type, int type_qual
TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
+ TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
}
*************** check_aligned_type (const_tree cand, con
TYPE_ATTRIBUTES (base)));
}
+ /* Check to see if TYPE matches the size of one of the built-in atomic
+    types, and return that core atomic type if so, otherwise NULL_TREE.  */
+
+ static tree
+ find_atomic_core_type (tree type)
+ {
+ tree base_atomic_type;
+
+ /* Only handle complete types. */
+ if (TYPE_SIZE (type) == NULL_TREE)
+ return NULL_TREE;
+
+ HOST_WIDE_INT type_size = tree_low_cst (TYPE_SIZE (type), 1);
+ switch (type_size)
+ {
+ case 8:
+ base_atomic_type = atomicQI_type_node;
+ break;
+
+ case 16:
+ base_atomic_type = atomicHI_type_node;
+ break;
+
+ case 32:
+ base_atomic_type = atomicSI_type_node;
+ break;
+
+ case 64:
+ base_atomic_type = atomicDI_type_node;
+ break;
+
+ case 128:
+ base_atomic_type = atomicTI_type_node;
+ break;
+
+ default:
+ base_atomic_type = NULL_TREE;
+ }
+
+ return base_atomic_type;
+ }
+
/* Return a version of the TYPE, qualified as indicated by the
TYPE_QUALS, if one exists. If no qualified version exists yet,
return NULL_TREE. */
*************** build_qualified_type (tree type, int typ
t = build_variant_type_copy (type);
set_type_quals (t, type_quals);
+ if (type_quals & TYPE_QUAL_ATOMIC)
+ {
+ /* See if this object can map to a basic atomic type. */
+ tree atomic_type = find_atomic_core_type (type);
+ if (atomic_type)
+ {
+ /* Ensure the alignment of this type is compatible with
+ the required alignment of the atomic type. */
+ if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
+ TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
+ }
+ }
+
if (TYPE_STRUCTURAL_EQUALITY_P (type))
/* Propagate structural equality. */
SET_TYPE_STRUCTURAL_EQUALITY (t);
*************** make_or_reuse_accum_type (unsigned size,
return make_accum_type (size, unsignedp, satp);
}
+
+ /* Create an atomic variant node for TYPE.  This routine is called during
+    initialization of data types to create the 5 basic atomic types.  The
+    generic build_variant_type function requires these to already be set up
+    in order to function properly, so it cannot be called from there.  If
+    ALIGN is non-zero, then ensure alignment is overridden to this value.  */
+
+ static tree
+ build_atomic_variant (tree type, unsigned int align)
+ {
+ tree t;
+
+ /* Make sure it is not already registered.  */
+ if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
+ return t;
+
+ t = build_variant_type_copy (type);
+ set_type_quals (t, TYPE_QUAL_ATOMIC);
+
+ if (align)
+ TYPE_ALIGN (t) = align;
+
+ return t;
+ }
+
/* Create nodes for all integer types (and error_mark_node) using the sizes
of C datatypes. SIGNED_CHAR specifies whether char is signed,
SHORT_DOUBLE specifies whether double should be of the same precision
*************** build_common_tree_nodes (bool signed_cha
unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
+ /* Don't call build_qualified_type for atomics.  That routine does special
+    processing for atomics, and until they are initialized it's better not
+    to make that call.
+
+    Check to see if there is a target override for atomic types.  */
+
+ #define SET_ATOMIC_TYPE_NODE(TYPE, MODE, DEFAULT) \
+ (TYPE) = build_atomic_variant (DEFAULT, targetm.atomic_align_for_mode (MODE))
+
+ SET_ATOMIC_TYPE_NODE (atomicQI_type_node, QImode, unsigned_intQI_type_node);
+ SET_ATOMIC_TYPE_NODE (atomicHI_type_node, HImode, unsigned_intHI_type_node);
+ SET_ATOMIC_TYPE_NODE (atomicSI_type_node, SImode, unsigned_intSI_type_node);
+ SET_ATOMIC_TYPE_NODE (atomicDI_type_node, DImode, unsigned_intDI_type_node);
+ SET_ATOMIC_TYPE_NODE (atomicTI_type_node, TImode, unsigned_intTI_type_node);
+
access_public_node = get_identifier ("public");
access_protected_node = get_identifier ("protected");
access_private_node = get_identifier ("private");
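
(Illustrative sketch, not part of the patch: routing the default nodes through
build_atomic_variant with targetm.atomic_align_for_mode lets a target give an
atomic variant stricter alignment than the plain type, and build_qualified_type
then propagates that alignment to user types via find_atomic_core_type.  On a
hypothetical 32-bit target where the hook returns 64 for DImode, C11 code
would observe:)

    /* Hypothetical target: plain long long aligned to 4 bytes, but the
       atomic DImode variant realigned to 8 by the target hook.  */
    _Static_assert (_Alignof (long long) == 4, "plain alignment");
    _Static_assert (_Alignof (_Atomic long long) == 8, "atomic alignment");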
===================================================================
*************** objects_must_conflict_p (tree t1, tree t
/* If they are the same type, they must conflict. */
if (t1 == t2
! /* Likewise if both are volatile. */
! || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
return 1;
set1 = t1 ? get_alias_set (t1) : 0;
/* If they are the same type, they must conflict. */
if (t1 == t2
! /* Likewise if both are volatile or atomic. */
! || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2))
! || (t1 != 0 && TYPE_ATOMIC (t1) && t2 != 0 && TYPE_ATOMIC (t2)))
return 1;
set1 = t1 ? get_alias_set (t1) : 0;
===================================================================
*************** expand_call (tree exp, rtx target, int i
optimized. */
|| (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
|| TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
+ || TYPE_ATOMIC (TREE_TYPE (TREE_TYPE (addr)))
/* If the called function is nested in the current one, it might access
some of the caller's arguments, but could clobber them beforehand if
the argument areas are shared. */
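
(Context, not part of the patch: the alias.c and calls.c hunks extend the
existing volatile conservatism to atomics — two atomic-qualified types are
treated as conflicting by objects_must_conflict_p, and a call whose return
type is atomic-qualified takes the same conservative path in expand_call as
a volatile one.  A sketch of the latter case in C11:)

    _Atomic int g;
    _Atomic int get (void);

    void
    set (void)
    {
      g = get ();   /* The atomic-qualified return type is now handled as
                       conservatively here as a volatile-qualified one.  */
    }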