@@ -1259,7 +1259,9 @@ extern void omp_clause_range_check_faile
/* True if NODE, a FIELD_DECL, is to be processed as a bitfield for
constructor output purposes. */
#define CONSTRUCTOR_BITFIELD_P(NODE) \
- (DECL_BIT_FIELD (FIELD_DECL_CHECK (NODE)) && DECL_MODE (NODE) != BLKmode)
+ (DECL_BIT_FIELD (FIELD_DECL_CHECK (NODE)) \
+ && (DECL_MODE (NODE) != BLKmode \
+ || TREE_CODE (TREE_TYPE (NODE)) == BITINT_TYPE))
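+/* For example (an illustration, not part of this change), a field like
+     unsigned _BitInt(135) b : 135;
+   is wider than any scalar integer mode, so its DECL_MODE is BLKmode;
+   being a BITINT_TYPE it is nevertheless still emitted as a bitfield
+   in constructor output.  */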
/* True if NODE is a clobber right hand side, an expression of indeterminate
value that clobbers the LHS in a copy instruction. We use a volatile
@@ -2148,6 +2148,22 @@ finish_bitfield_representative (tree rep
|| GET_MODE_BITSIZE (mode) > maxbitsize
|| GET_MODE_BITSIZE (mode) > MAX_FIXED_MODE_SIZE)
{
+ if (TREE_CODE (TREE_TYPE (field)) == BITINT_TYPE)
+ {
+ struct bitint_info info;
+ unsigned prec = TYPE_PRECISION (TREE_TYPE (field));
+ bool ok = targetm.c.bitint_type_info (prec, &info);
+ gcc_assert (ok);
+ scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
+ unsigned lprec = GET_MODE_PRECISION (limb_mode);
+ if (prec > lprec)
+ {
+ /* For middle/large/huge _BitInt prefer the bitsize to be a
+ multiple of the limb precision. */
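+ /* E.g. with 64-bit limbs (illustrative numbers), a bitsize of
+ 135 becomes CEIL (135, 64) * 64 = 192, used only if it still
+ fits in maxbitsize. */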
+ unsigned HOST_WIDE_INT bsz = CEIL (bitsize, lprec) * lprec;
+ if (bsz <= maxbitsize)
+ bitsize = bsz;
+ }
+ }
/* We really want a BLKmode representative only as a last resort,
considering the member b in
struct { int a : 7; int b : 17; int c; } __attribute__((packed));
@@ -418,6 +418,7 @@ struct bitint_large_huge
tree handle_plus_minus (tree_code, tree, tree, tree);
tree handle_lshift (tree, tree, tree);
tree handle_cast (tree, tree, tree);
+ tree handle_load (gimple *, tree);
tree handle_stmt (gimple *, tree);
tree handle_operand_addr (tree, gimple *, int *, int *);
tree create_loop (tree, tree *);
@@ -483,7 +484,9 @@ struct bitint_large_huge
iteration handles 2 limbs, plus there can be up to one full limb
and one partial limb processed after the loop, where handle_operand
and/or handle_stmt are called with constant idx. m_upwards_2limb
- is set for this case, false otherwise.
+ is set for this case, false otherwise.  m_upwards is true
+ whenever the limb indexes only increase, i.e. for large or huge
+ _BitInt lowered by lower_mergeable_stmt or lower_addsub_overflow.
Another way is used by lower_comparison_stmt, which walks limbs
from most significant to least significant, partial limb if any
@@ -511,10 +514,22 @@ struct bitint_large_huge
just needs to bump m_data_cnt by the same amount as when it was
called with m_first set. The toplevel calls to
handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
- m_data vector when setting m_first to true. */
+ m_data vector when setting m_first to true.
+
+ m_cast_conditional and m_bitfld_load are used when handling a
+ bit-field load inside of a widening cast.  handle_cast sometimes
+ needs to do runtime comparisons and call handle_operand only
+ conditionally, or even in two separate conditional blocks for one
+ idx (once with a constant index after comparing the runtime one
+ for equality with the constant).  In these cases, m_cast_conditional
+ is set to true and the bit-field load then communicates its
+ m_data_cnt to handle_cast using m_bitfld_load. */
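+
+/* Illustration (a hypothetical example, not from this patch): given
+     struct S { unsigned _BitInt(135) b : 135; } *p;
+     _BitInt(256) x = p->b;
+   some limbs of p->b may only be loaded in conditional blocks inside
+   the widening cast, so the state the load keeps in m_data has to
+   flow through the PHIs that handle_cast creates.  */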
bool m_first;
bool m_var_msb;
unsigned m_upwards_2limb;
+ bool m_upwards;
+ bool m_cast_conditional;
+ unsigned m_bitfld_load;
vec<tree> m_data;
unsigned int m_data_cnt;
};
@@ -598,7 +613,7 @@ bitint_large_huge::limb_access (tree typ
TREE_TYPE (TREE_TYPE (var))))
{
unsigned HOST_WIDE_INT nelts
- = tree_to_uhwi (TYPE_SIZE (type)) / limb_prec;
+ = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
tree atype = build_array_type_nelts (m_limb_type, nelts);
var = build1 (VIEW_CONVERT_EXPR, atype, var);
}
@@ -1142,6 +1157,11 @@ bitint_large_huge::handle_cast (tree lhs
if (TYPE_UNSIGNED (rhs_type))
/* No need to keep state between iterations. */
;
+ else if (m_upwards && !m_upwards_2limb)
+ /* We need to keep state between iterations, but
+ not within any loop; everything is straight-line
+ code with only increasing indexes. */
+ ;
else if (!m_upwards_2limb)
{
unsigned save_data_cnt = m_data_cnt;
@@ -1225,16 +1245,26 @@ bitint_large_huge::handle_cast (tree lhs
e2 = find_edge (e2->dest, e3->dest);
}
m_gsi = gsi_after_labels (e2->src);
+ bool save_cast_conditional = m_cast_conditional;
+ m_cast_conditional = true;
+ m_bitfld_load = 0;
tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
if (m_first)
m_data[save_data_cnt + 2]
= build_int_cst (NULL_TREE, m_data_cnt);
tree ext = NULL_TREE;
+ tree bitfld = NULL_TREE;
if (!single_comparison)
{
m_gsi = gsi_after_labels (e4->src);
m_first = false;
m_data_cnt = save_data_cnt + 3;
+ if (m_bitfld_load)
+ {
+ bitfld = m_data[m_bitfld_load];
+ m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
+ m_bitfld_load = 0;
+ }
t2 = handle_operand (rhs1, size_int (low));
if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
t2 = add_cast (m_limb_type, t2);
@@ -1274,6 +1304,25 @@ bitint_large_huge::handle_cast (tree lhs
g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
insert_before (g);
}
+ if (m_bitfld_load)
+ {
+ tree t4;
+ if (!m_first)
+ t4 = m_data[m_bitfld_load + 1];
+ else
+ t4 = make_ssa_name (m_limb_type);
+ phi = create_phi_node (t4, e2->dest);
+ add_phi_arg (phi, e4 ? bitfld : m_data[m_bitfld_load],
+ e2, UNKNOWN_LOCATION);
+ add_phi_arg (phi, m_data[m_bitfld_load + 2],
+ e3, UNKNOWN_LOCATION);
+ if (e4)
+ add_phi_arg (phi, m_data[m_bitfld_load], e4, UNKNOWN_LOCATION);
+ m_data[m_bitfld_load] = t4;
+ m_data[m_bitfld_load + 2] = t4;
+ m_bitfld_load = 0;
+ }
+ m_cast_conditional = save_cast_conditional;
m_first = save_first;
return t;
}
@@ -1295,7 +1344,7 @@ bitint_large_huge::handle_cast (tree lhs
if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
t = add_cast (m_limb_type, t);
tree ext = NULL_TREE;
- if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
+ if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
{
ext = add_cast (signed_type_for (m_limb_type), t);
tree lpm1 = build_int_cst (unsigned_type_node,
@@ -1473,54 +1522,277 @@ bitint_large_huge::handle_cast (tree lhs
return NULL_TREE;
}
-/* Return a limb IDX from a mergeable statement STMT. */
+/* Helper function for the handle_stmt method: handle a load from memory. */
tree
-bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
+bitint_large_huge::handle_load (gimple *stmt, tree idx)
{
- tree lhs, rhs1, rhs2 = NULL_TREE;
+ tree rhs1 = gimple_assign_rhs1 (stmt);
+ tree rhs_type = TREE_TYPE (rhs1);
+ bool eh = stmt_ends_bb_p (stmt);
+ edge eh_edge = NULL;
gimple *g;
- switch (gimple_code (stmt))
+
+ if (eh)
{
- case GIMPLE_ASSIGN:
- if (gimple_assign_load_p (stmt))
+ edge_iterator ei;
+ basic_block bb = gimple_bb (stmt);
+
+ FOR_EACH_EDGE (eh_edge, ei, bb->succs)
+ if (eh_edge->flags & EDGE_EH)
+ break;
+ }
+
+ if (TREE_CODE (rhs1) == COMPONENT_REF
+ && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
+ {
+ tree fld = TREE_OPERAND (rhs1, 1);
+ /* For little-endian, we can allow as inputs bit-fields
+ which start at a limb boundary. */
+ gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
+ if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
+ && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
+ goto normal_load;
+ /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of BITS_PER_UNIT,
+ handle it normally for now. */
+ if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
+ goto normal_load;
+ tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
+ poly_int64 bitoffset;
+ poly_uint64 field_offset, repr_offset;
+ bool var_field_off = false;
+ if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
+ && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
+ bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
+ else
{
- rhs1 = gimple_assign_rhs1 (stmt);
- tree rhs_type = TREE_TYPE (rhs1);
- bool eh = stmt_ends_bb_p (stmt);
- /* Use write_p = true for loads with EH edges to make
- sure limb_access doesn't add a cast as separate
- statement after it. */
- rhs1 = limb_access (rhs_type, rhs1, idx, eh);
- lhs = make_ssa_name (TREE_TYPE (rhs1));
- g = gimple_build_assign (lhs, rhs1);
+ bitoffset = 0;
+ var_field_off = true;
+ }
+ bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
+ - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
+ tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
+ TREE_OPERAND (rhs1, 0), repr,
+ var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
+ HOST_WIDE_INT bo = bitoffset.to_constant ();
+ unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
+ unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
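+ /* E.g. (illustrative numbers) with 64-bit limbs and a field
+ starting 130 bits into its representative, bo_idx is 2 and
+ bo_bit is 2; limb idx of the field is then composed from
+ representative limbs idx + bo_idx and idx + bo_idx + 1. */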
+ if (m_first)
+ {
+ if (m_upwards)
+ {
+ gimple_stmt_iterator save_gsi = m_gsi;
+ m_gsi = m_init_gsi;
+ if (gsi_end_p (m_gsi))
+ m_gsi = gsi_after_labels (gsi_bb (m_gsi));
+ else
+ gsi_next (&m_gsi);
+ tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
+ tree iv = make_ssa_name (m_limb_type);
+ g = gimple_build_assign (iv, t);
+ insert_before (g);
+ if (eh)
+ {
+ maybe_duplicate_eh_stmt (g, stmt);
+ if (eh_edge)
+ {
+ edge e = split_block (gsi_bb (m_gsi), g);
+ make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
+ = profile_probability::very_unlikely ();
+ m_init_gsi.bb = e->dest;
+ }
+ }
+ m_gsi = save_gsi;
+ tree out;
+ prepare_data_in_out (iv, idx, &out);
+ out = m_data[m_data_cnt];
+ m_data.safe_push (out);
+ }
+ else
+ {
+ m_data.safe_push (NULL_TREE);
+ m_data.safe_push (NULL_TREE);
+ m_data.safe_push (NULL_TREE);
+ }
+ }
+
+ tree nidx0 = NULL_TREE, nidx1;
+ tree iv = m_data[m_data_cnt];
+ if (m_cast_conditional && iv)
+ {
+ gcc_assert (!m_bitfld_load);
+ m_bitfld_load = m_data_cnt;
+ }
+ if (tree_fits_uhwi_p (idx))
+ {
+ unsigned prec = TYPE_PRECISION (rhs_type);
+ unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
+ gcc_assert (i * limb_prec < prec);
+ nidx1 = size_int (i + bo_idx + 1);
+ if ((i + 1) * limb_prec > prec)
+ {
+ prec %= limb_prec;
+ if (prec + bo_bit <= (unsigned) limb_prec)
+ nidx1 = NULL_TREE;
+ }
+ if (!iv)
+ nidx0 = size_int (i + bo_idx);
+ }
+ else
+ {
+ if (!iv)
+ {
+ if (bo_idx == 0)
+ nidx0 = idx;
+ else
+ {
+ nidx0 = make_ssa_name (sizetype);
+ g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
+ size_int (bo_idx));
+ insert_before (g);
+ }
+ }
+ nidx1 = make_ssa_name (sizetype);
+ g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
+ size_int (bo_idx + 1));
+ insert_before (g);
+ }
+
+ tree iv2 = NULL_TREE;
+ if (nidx0)
+ {
+ tree t = limb_access (rhs_type, nrhs1, nidx0, true);
+ iv = make_ssa_name (m_limb_type);
+ g = gimple_build_assign (iv, t);
+ insert_before (g);
+ gcc_assert (!eh);
+ }
+ if (nidx1)
+ {
+ bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
+ unsigned prec = TYPE_PRECISION (rhs_type);
+ if (conditional)
+ {
+ if ((prec % limb_prec) == 0
+ || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
+ conditional = false;
+ }
+ edge e1 = NULL, e2 = NULL, e3 = NULL;
+ if (conditional)
+ {
+ g = gimple_build_cond (EQ_EXPR, idx,
+ size_int (prec / limb_prec),
+ NULL_TREE, NULL_TREE);
+ insert_before (g);
+ e1 = split_block (gsi_bb (m_gsi), g);
+ e2 = split_block (e1->dest, (gimple *) NULL);
+ e3 = make_edge (e1->src, e2->dest, EDGE_TRUE_VALUE);
+ e3->probability = profile_probability::unlikely ();
+ e1->flags = EDGE_FALSE_VALUE;
+ e1->probability = e3->probability.invert ();
+ set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
+ m_gsi = gsi_after_labels (e1->dest);
+ }
+ tree t = limb_access (rhs_type, nrhs1, nidx1, true);
+ if (m_upwards_2limb
+ && !m_first
+ && !m_bitfld_load
+ && !tree_fits_uhwi_p (idx))
+ iv2 = m_data[m_data_cnt + 1];
+ else
+ iv2 = make_ssa_name (m_limb_type);
+ g = gimple_build_assign (iv2, t);
insert_before (g);
if (eh)
{
maybe_duplicate_eh_stmt (g, stmt);
- edge e1;
- edge_iterator ei;
- basic_block bb = gimple_bb (stmt);
-
- FOR_EACH_EDGE (e1, ei, bb->succs)
- if (e1->flags & EDGE_EH)
- break;
- if (e1)
+ if (eh_edge)
{
- edge e2 = split_block (gsi_bb (m_gsi), g);
- m_gsi = gsi_after_labels (e2->dest);
- make_edge (e2->src, e1->dest, EDGE_EH)->probability
+ edge e = split_block (gsi_bb (m_gsi), g);
+ m_gsi = gsi_after_labels (e->dest);
+ make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
= profile_probability::very_unlikely ();
}
- if (tree_fits_uhwi_p (idx))
- {
- tree atype = limb_access_type (rhs_type, idx);
- if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
- lhs = add_cast (atype, lhs);
- }
}
- return lhs;
+ if (conditional)
+ {
+ tree iv3 = make_ssa_name (m_limb_type);
+ if (eh)
+ e2 = find_edge (gsi_bb (m_gsi), e3->dest);
+ gphi *phi = create_phi_node (iv3, e2->dest);
+ add_phi_arg (phi, iv2, e2, UNKNOWN_LOCATION);
+ add_phi_arg (phi, build_zero_cst (m_limb_type),
+ e3, UNKNOWN_LOCATION);
+ m_gsi = gsi_after_labels (e2->dest);
+ }
}
+ g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
+ iv, build_int_cst (unsigned_type_node, bo_bit));
+ insert_before (g);
+ iv = gimple_assign_lhs (g);
+ if (iv2)
+ {
+ g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
+ iv2, build_int_cst (unsigned_type_node,
+ limb_prec - bo_bit));
+ insert_before (g);
+ g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
+ gimple_assign_lhs (g), iv);
+ insert_before (g);
+ iv = gimple_assign_lhs (g);
+ if (m_data[m_data_cnt])
+ m_data[m_data_cnt] = iv2;
+ }
+ if (tree_fits_uhwi_p (idx))
+ {
+ tree atype = limb_access_type (rhs_type, idx);
+ if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
+ iv = add_cast (atype, iv);
+ }
+ m_data_cnt += 3;
+ return iv;
+ }
+
+normal_load:
+ /* Use write_p = true for loads with EH edges to make
+ sure limb_access doesn't add a cast as a separate
+ statement after it. */
+ rhs1 = limb_access (rhs_type, rhs1, idx, eh);
+ tree ret = make_ssa_name (TREE_TYPE (rhs1));
+ g = gimple_build_assign (ret, rhs1);
+ insert_before (g);
+ if (eh)
+ {
+ maybe_duplicate_eh_stmt (g, stmt);
+ if (eh_edge)
+ {
+ edge e = split_block (gsi_bb (m_gsi), g);
+ m_gsi = gsi_after_labels (e->dest);
+ make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
+ = profile_probability::very_unlikely ();
+ }
+ if (tree_fits_uhwi_p (idx))
+ {
+ tree atype = limb_access_type (rhs_type, idx);
+ if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
+ ret = add_cast (atype, ret);
+ }
+ }
+ return ret;
+}
+
+/* Return a limb IDX from a mergeable statement STMT. */
+
+tree
+bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
+{
+ tree lhs, rhs1, rhs2 = NULL_TREE;
+ gimple *g;
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_ASSIGN:
+ if (gimple_assign_load_p (stmt))
+ return handle_load (stmt, idx);
switch (gimple_assign_rhs_code (stmt))
{
case BIT_AND_EXPR:
@@ -1899,6 +2171,10 @@ bitint_large_huge::lower_mergeable_stmt
tree ext = NULL_TREE, store_operand = NULL_TREE;
bool eh = false;
basic_block eh_pad = NULL;
+ tree nlhs = NULL_TREE;
+ unsigned HOST_WIDE_INT bo_idx = 0;
+ unsigned HOST_WIDE_INT bo_bit = 0;
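+ /* For bit-field stores, bf_cur tracks the limb whose most
+ significant bo_bit bits are still waiting to be stored and
+ bf_next, in the loop case, carries that value to the next
+ iteration. */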
+ tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
if (gimple_store_p (stmt))
{
store_operand = gimple_assign_rhs1 (stmt);
@@ -1916,6 +2192,38 @@ bitint_large_huge::lower_mergeable_stmt
break;
}
}
+ if (TREE_CODE (lhs) == COMPONENT_REF
+ && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
+ {
+ tree fld = TREE_OPERAND (lhs, 1);
+ gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
+ tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
+ poly_int64 bitoffset;
+ poly_uint64 field_offset, repr_offset;
+ if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
+ nlhs = lhs;
+ else
+ {
+ bool var_field_off = false;
+ if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
+ && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
+ bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
+ else
+ {
+ bitoffset = 0;
+ var_field_off = true;
+ }
+ bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
+ - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
+ nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
+ TREE_OPERAND (lhs, 0), repr,
+ var_field_off
+ ? TREE_OPERAND (lhs, 2) : NULL_TREE);
+ HOST_WIDE_INT bo = bitoffset.to_constant ();
+ bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
+ bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
+ }
+ }
}
if ((store_operand
&& TREE_CODE (store_operand) == SSA_NAME
@@ -1976,6 +2284,12 @@ bitint_large_huge::lower_mergeable_stmt
m_after_stmt = stmt;
if (kind != bitint_prec_large)
m_upwards_2limb = end;
+ m_upwards = true;
+
+ bool separate_ext
+ = (prec != (unsigned) TYPE_PRECISION (type)
+ && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
+ > CEIL (prec, limb_prec)));
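+ /* E.g. (illustrative numbers) storing a value with 135 bits of
+ known precision into a _BitInt(195) lhs with 64-bit limbs needs
+ CEIL (195, 64) = 4 limbs while only CEIL (135, 64) = 3 are
+ computed, so the sign/zero extension is emitted separately
+ after the main loop. */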
for (unsigned i = 0; i < cnt; i++)
{
@@ -2005,24 +2319,156 @@ bitint_large_huge::lower_mergeable_stmt
rhs1 = handle_operand (store_operand, idx);
else
rhs1 = handle_stmt (stmt, idx);
- tree l = limb_access (lhs_type, lhs, idx, true);
- if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs1)))
- rhs1 = add_cast (TREE_TYPE (l), rhs1);
+ if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
+ rhs1 = add_cast (m_limb_type, rhs1);
if (sext && i == cnt - 1)
ext = rhs1;
- g = gimple_build_assign (l, rhs1);
- insert_before (g);
- if (eh)
+ tree nidx = idx;
+ if (bo_idx)
{
- maybe_duplicate_eh_stmt (g, stmt);
- if (eh_pad)
+ if (tree_fits_uhwi_p (idx))
+ nidx = size_int (tree_to_uhwi (idx) + bo_idx);
+ else
{
- edge e = split_block (gsi_bb (m_gsi), g);
- m_gsi = gsi_after_labels (e->dest);
- make_edge (e->src, eh_pad, EDGE_EH)->probability
- = profile_probability::very_unlikely ();
+ nidx = make_ssa_name (sizetype);
+ g = gimple_build_assign (nidx, PLUS_EXPR, idx,
+ size_int (bo_idx));
+ insert_before (g);
}
}
+ bool done = false;
+ basic_block new_bb = NULL;
+ /* Handle stores into bit-fields. */
+ if (bo_bit)
+ {
+ if (i == 0)
+ {
+ edge e2 = NULL;
+ if (kind != bitint_prec_large)
+ {
+ prepare_data_in_out (build_zero_cst (m_limb_type),
+ idx, &bf_next);
+ bf_next = m_data.pop ();
+ bf_cur = m_data.pop ();
+ g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
+ NULL_TREE, NULL_TREE);
+ insert_before (g);
+ edge e1 = split_block (gsi_bb (m_gsi), g);
+ e2 = split_block (e1->dest, (gimple *) NULL);
+ basic_block bb = create_empty_bb (e1->dest);
+ add_bb_to_loop (bb, e1->dest->loop_father);
+ edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
+ e1->flags = EDGE_FALSE_VALUE;
+ e1->probability = profile_probability::likely ();
+ e3->probability = e1->probability.invert ();
+ set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
+ set_immediate_dominator (CDI_DOMINATORS, e2->dest,
+ e1->src);
+ make_edge (bb, e2->dest, EDGE_FALLTHRU);
+ m_gsi = gsi_after_labels (bb);
+ new_bb = e2->dest;
+ }
+ tree ftype
+ = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
+ tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
+ bitsize_int (limb_prec - bo_bit),
+ bitsize_int (bo_idx * limb_prec + bo_bit));
+ tree t = add_cast (ftype, rhs1);
+ g = gimple_build_assign (bfr, t);
+ insert_before (g);
+ if (eh)
+ {
+ maybe_duplicate_eh_stmt (g, stmt);
+ if (eh_pad)
+ {
+ edge e = split_block (gsi_bb (m_gsi), g);
+ m_gsi = gsi_after_labels (e->dest);
+ make_edge (e->src, eh_pad, EDGE_EH)->probability
+ = profile_probability::very_unlikely ();
+ }
+ }
+ if (kind == bitint_prec_large)
+ {
+ bf_cur = rhs1;
+ done = true;
+ }
+ else if (e2)
+ m_gsi = gsi_after_labels (e2->src);
+ }
+ if (!done)
+ {
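+ /* Merge the pending most significant bo_bit bits of the
+ previous limb with the low bits of the current one. */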
+ tree t1 = make_ssa_name (m_limb_type);
+ tree t2 = make_ssa_name (m_limb_type);
+ tree t3 = make_ssa_name (m_limb_type);
+ g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
+ build_int_cst (unsigned_type_node,
+ limb_prec - bo_bit));
+ insert_before (g);
+ g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
+ build_int_cst (unsigned_type_node,
+ bo_bit));
+ insert_before (g);
+ bf_cur = rhs1;
+ g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
+ insert_before (g);
+ rhs1 = t3;
+ if (bf_next && i == 1)
+ {
+ g = gimple_build_assign (bf_next, bf_cur);
+ insert_before (g);
+ }
+ }
+ }
+ if (!done)
+ {
+ /* Handle bit-field access to partial last limb if needed. */
+ if (nlhs
+ && i == cnt - 1
+ && !separate_ext
+ && tree_fits_uhwi_p (idx))
+ {
+ unsigned int tprec = TYPE_PRECISION (type);
+ unsigned int rprec = tprec % limb_prec;
+ if (rprec + bo_bit < (unsigned) limb_prec)
+ {
+ tree ftype
+ = build_nonstandard_integer_type (rprec + bo_bit, 1);
+ tree bfr = build3 (BIT_FIELD_REF, ftype,
+ unshare_expr (nlhs),
+ bitsize_int (rprec + bo_bit),
+ bitsize_int ((bo_idx
+ + tprec / limb_prec)
+ * limb_prec));
+ tree t = add_cast (ftype, rhs1);
+ g = gimple_build_assign (bfr, t);
+ done = true;
+ bf_cur = NULL_TREE;
+ }
+ else if (rprec + bo_bit == (unsigned) limb_prec)
+ bf_cur = NULL_TREE;
+ }
+ /* Otherwise, stores to any other lhs. */
+ if (!done)
+ {
+ tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
+ nidx, true);
+ g = gimple_build_assign (l, rhs1);
+ }
+ insert_before (g);
+ if (eh)
+ {
+ maybe_duplicate_eh_stmt (g, stmt);
+ if (eh_pad)
+ {
+ edge e = split_block (gsi_bb (m_gsi), g);
+ m_gsi = gsi_after_labels (e->dest);
+ make_edge (e->src, eh_pad, EDGE_EH)->probability
+ = profile_probability::very_unlikely ();
+ }
+ }
+ if (new_bb)
+ m_gsi = gsi_after_labels (new_bb);
+ }
}
m_first = false;
if (kind == bitint_prec_huge && i <= 1)
@@ -2050,9 +2496,7 @@ bitint_large_huge::lower_mergeable_stmt
}
}
- if (prec != (unsigned) TYPE_PRECISION (type)
- && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
- > CEIL (prec, limb_prec)))
+ if (separate_ext)
{
if (sext)
{
@@ -2070,7 +2514,7 @@ bitint_large_huge::lower_mergeable_stmt
unsigned start = CEIL (prec, limb_prec);
prec = TYPE_PRECISION (type);
idx = idx_first = idx_next = NULL_TREE;
- if (prec <= (start + 2) * limb_prec)
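+ /* If the store doesn't start at a limb boundary (bo_bit != 0),
+ the first extension limb is handled separately before the
+ loop (see cnt below), so one more limb fits the straight-line
+ large path. */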
+ if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
kind = bitint_prec_large;
if (kind == bitint_prec_large)
cnt = CEIL (prec, limb_prec) - start;
@@ -2078,22 +2522,94 @@ bitint_large_huge::lower_mergeable_stmt
{
rem = prec % limb_prec;
end = (prec - rem) / limb_prec;
- cnt = 1 + (rem != 0);
- idx = create_loop (size_int (start), &idx_next);
+ cnt = (bo_bit != 0) + 1 + (rem != 0);
}
for (unsigned i = 0; i < cnt; i++)
{
- if (kind == bitint_prec_large)
+ if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
idx = size_int (start + i);
- else if (i == 1)
+ else if (i == cnt - 1)
idx = size_int (end);
+ else if (i == (bo_bit != 0))
+ idx = create_loop (size_int (start + i), &idx_next);
rhs1 = ext;
- tree l = limb_access (lhs_type, lhs, idx, true);
- if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs1)))
- rhs1 = add_cast (TREE_TYPE (l), rhs1);
- g = gimple_build_assign (l, rhs1);
+ if (bf_cur != NULL_TREE && bf_cur != ext)
+ {
+ tree t1 = make_ssa_name (m_limb_type);
+ g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
+ build_int_cst (unsigned_type_node,
+ limb_prec - bo_bit));
+ insert_before (g);
+ if (integer_zerop (ext))
+ rhs1 = t1;
+ else
+ {
+ tree t2 = make_ssa_name (m_limb_type);
+ rhs1 = make_ssa_name (m_limb_type);
+ g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
+ build_int_cst (unsigned_type_node,
+ bo_bit));
+ insert_before (g);
+ g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
+ insert_before (g);
+ }
+ bf_cur = ext;
+ }
+ tree nidx = idx;
+ if (bo_idx)
+ {
+ if (tree_fits_uhwi_p (idx))
+ nidx = size_int (tree_to_uhwi (idx) + bo_idx);
+ else
+ {
+ nidx = make_ssa_name (sizetype);
+ g = gimple_build_assign (nidx, PLUS_EXPR, idx,
+ size_int (bo_idx));
+ insert_before (g);
+ }
+ }
+ bool done = false;
+ /* Handle bit-field access to partial last limb if needed. */
+ if (nlhs && i == cnt - 1)
+ {
+ unsigned int tprec = TYPE_PRECISION (type);
+ unsigned int rprec = tprec % limb_prec;
+ if (rprec + bo_bit < (unsigned) limb_prec)
+ {
+ tree ftype
+ = build_nonstandard_integer_type (rprec + bo_bit, 1);
+ tree bfr = build3 (BIT_FIELD_REF, ftype,
+ unshare_expr (nlhs),
+ bitsize_int (rprec + bo_bit),
+ bitsize_int ((bo_idx + tprec / limb_prec)
+ * limb_prec));
+ tree t = add_cast (ftype, rhs1);
+ g = gimple_build_assign (bfr, t);
+ done = true;
+ bf_cur = NULL_TREE;
+ }
+ else if (rprec + bo_bit == (unsigned) limb_prec)
+ bf_cur = NULL_TREE;
+ }
+ /* Otherwise, stores to any other lhs. */
+ if (!done)
+ {
+ tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
+ g = gimple_build_assign (l, rhs1);
+ }
insert_before (g);
- if (kind == bitint_prec_huge && i == 0)
+ if (eh)
+ {
+ maybe_duplicate_eh_stmt (g, stmt);
+ if (eh_pad)
+ {
+ edge e = split_block (gsi_bb (m_gsi), g);
+ m_gsi = gsi_after_labels (e->dest);
+ make_edge (e->src, eh_pad, EDGE_EH)->probability
+ = profile_probability::very_unlikely ();
+ }
+ }
+ if (kind == bitint_prec_huge && i == (bo_bit != 0))
{
g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
size_one_node);
@@ -2105,6 +2621,39 @@ bitint_large_huge::lower_mergeable_stmt
}
}
}
+ if (bf_cur != NULL_TREE)
+ {
+ unsigned int tprec = TYPE_PRECISION (type);
+ unsigned int rprec = tprec % limb_prec;
+ tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
+ tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
+ bitsize_int (rprec + bo_bit),
+ bitsize_int ((bo_idx + tprec / limb_prec)
+ * limb_prec));
+ rhs1 = bf_cur;
+ if (bf_cur != ext)
+ {
+ rhs1 = make_ssa_name (TREE_TYPE (rhs1));
+ g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
+ build_int_cst (unsigned_type_node,
+ limb_prec - bo_bit));
+ insert_before (g);
+ }
+ rhs1 = add_cast (ftype, rhs1);
+ g = gimple_build_assign (bfr, rhs1);
+ insert_before (g);
+ if (eh)
+ {
+ maybe_duplicate_eh_stmt (g, stmt);
+ if (eh_pad)
+ {
+ edge e = split_block (gsi_bb (m_gsi), g);
+ m_gsi = gsi_after_labels (e->dest);
+ make_edge (e->src, eh_pad, EDGE_EH)->probability
+ = profile_probability::very_unlikely ();
+ }
+ }
+ }
if (gimple_store_p (stmt))
{
@@ -3294,6 +3843,7 @@ bitint_large_huge::lower_addsub_overflow
if (kind == bitint_prec_huge)
m_upwards_2limb = fin;
+ m_upwards = true;
tree type0 = TREE_TYPE (arg0);
tree type1 = TREE_TYPE (arg1);
@@ -4047,7 +4597,10 @@ bitint_large_huge::lower_stmt (gimple *s
gsi_prev (&m_init_gsi);
m_preheader_bb = NULL;
m_upwards_2limb = 0;
+ m_upwards = false;
m_var_msb = false;
+ m_cast_conditional = false;
+ m_bitfld_load = 0;
m_loc = gimple_location (stmt);
if (is_gimple_call (stmt))
{
@@ -4276,6 +4829,30 @@ vuse_eq (ao_ref *, tree vuse1, void *dat
return NULL;
}
+/* Return true if STMT uses a library function and so needs to take
+   the address of its inputs.  We need to avoid bit-fields in those
+   cases.  */
+
+bool
+stmt_needs_operand_addr (gimple *stmt)
+{
+ if (is_gimple_assign (stmt))
+ switch (gimple_assign_rhs_code (stmt))
+ {
+ case MULT_EXPR:
+ case TRUNC_DIV_EXPR:
+ case TRUNC_MOD_EXPR:
+ case FLOAT_EXPR:
+ return true;
+ default:
+ break;
+ }
+ else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
+ || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
+ return true;
+ return false;
+}
+
/* Dominator walker used to discover which large/huge _BitInt
loads could be sunk into all their uses. */
@@ -4341,14 +4918,16 @@ bitint_dom_walker::before_dom_children (
worklist.safe_push (s);
}
+ bool needs_operand_addr = stmt_needs_operand_addr (stmt);
while (worklist.length () > 0)
{
tree s = worklist.pop ();
if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
{
- FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
- oi, SSA_OP_USE)
+ gimple *g = SSA_NAME_DEF_STMT (s);
+ needs_operand_addr |= stmt_needs_operand_addr (g);
+ FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
{
tree s2 = USE_FROM_PTR (use_p);
if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
@@ -4371,8 +4950,28 @@ bitint_dom_walker::before_dom_children (
else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
continue;
+ tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
+ if (needs_operand_addr
+ && TREE_CODE (rhs1) == COMPONENT_REF
+ && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
+ {
+ tree fld = TREE_OPERAND (rhs1, 1);
+ /* For little-endian, we can allow as inputs bit-fields
+ which start at a limb boundary. */
+ if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
+ && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
+ && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
+ % limb_prec) == 0)
+ ;
+ else
+ {
+ bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
+ continue;
+ }
+ }
+
ao_ref ref;
- ao_ref_init (&ref, gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s)));
+ ao_ref_init (&ref, rhs1);
tree lvop = gimple_vuse (SSA_NAME_DEF_STMT (s));
unsigned limit = 64;
tree vuse = cvop;
@@ -4878,7 +5477,20 @@ gimple_lower_bitint (void)
&& is_gimple_assign (use_stmt)
&& !gimple_has_volatile_ops (use_stmt)
&& !stmt_ends_bb_p (use_stmt))
- continue;
+ {
+ tree lhs = gimple_assign_lhs (use_stmt);
+ /* As multiplication/division passes the address of
+ the lhs to a library function which assumes it can
+ extend it to a whole number of limbs, avoid merging
+ those with bit-field stores. Don't allow it for
+ shifts etc. either, so that the bit-field store
+ handling doesn't have to be done everywhere. */
+ if (TREE_CODE (lhs) == COMPONENT_REF
+ && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
+ break;
+ continue;
+ }
+ break;
default:
break;
}
@@ -6382,7 +6382,8 @@ check_bitfield_type_and_width (location_
/* Detect invalid bit-field type. */
if (TREE_CODE (*type) != INTEGER_TYPE
&& TREE_CODE (*type) != BOOLEAN_TYPE
- && TREE_CODE (*type) != ENUMERAL_TYPE)
+ && TREE_CODE (*type) != ENUMERAL_TYPE
+ && TREE_CODE (*type) != BITINT_TYPE)
{
error_at (loc, "bit-field %qs has invalid type", name);
*type = unsigned_type_node;
@@ -9322,8 +9323,14 @@ finish_struct (location_t loc, tree t, t
tree type = TREE_TYPE (field);
if (width != TYPE_PRECISION (type))
{
- TREE_TYPE (field)
- = c_build_bitfield_integer_type (width, TYPE_UNSIGNED (type));
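+ /* A signed _BitInt must have a precision of at least 2,
+ so a one-bit signed _BitInt bit-field keeps using the
+ c_build_bitfield_integer_type path below. */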
+ if (TREE_CODE (type) == BITINT_TYPE
+ && (width > 1 || TYPE_UNSIGNED (type)))
+ TREE_TYPE (field)
+ = build_bitint_type (width, TYPE_UNSIGNED (type));
+ else
+ TREE_TYPE (field)
+ = c_build_bitfield_integer_type (width,
+ TYPE_UNSIGNED (type));
SET_DECL_MODE (field, TYPE_MODE (TREE_TYPE (field)));
}
DECL_INITIAL (field) = NULL_TREE;
@@ -2279,12 +2279,17 @@ perform_integral_promotions (tree exp)
/* ??? This should no longer be needed now bit-fields have their
proper types. */
if (TREE_CODE (exp) == COMPONENT_REF
- && DECL_C_BIT_FIELD (TREE_OPERAND (exp, 1))
+ && DECL_C_BIT_FIELD (TREE_OPERAND (exp, 1)))
+ {
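+ /* A _BitInt bit-field promotes to its declared _BitInt type
+ rather than to int; e.g. for _BitInt(195) a : 63, a + 1uwb
+ has type _BitInt(195) (cf. the expr_has_type asserts in the
+ new tests). */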
+ if (TREE_CODE (DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1)))
+ == BITINT_TYPE)
+ return convert (DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1)), exp);
/* If it's thinner than an int, promote it like a
c_promoting_integer_type_p, otherwise leave it alone. */
- && compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)),
- TYPE_PRECISION (integer_type_node)) < 0)
- return convert (integer_type_node, exp);
+ if (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)),
+ TYPE_PRECISION (integer_type_node)) < 0)
+ return convert (integer_type_node, exp);
+ }
if (c_promoting_integer_type_p (type))
{
@@ -0,0 +1,47 @@
+/* PR c/102989 */
+/* { dg-do compile { target bitint } } */
+/* { dg-options "-O2 -std=c2x -pedantic-errors" } */
+
+#define expr_has_type(e, t) _Generic (e, default : 0, t : 1)
+
+struct S1 { char x; char : 0; char y; };
+struct S2 { char x; int : 0; char y; };
+#if __BITINT_MAXWIDTH__ >= 575
+struct S3 { char x; _BitInt(575) : 0; char y; };
+#endif
+#if __BITINT_MAXWIDTH__ >= 389
+struct S4 { char x; _BitInt(195) a : 63; _BitInt(282) b : 280; _BitInt(389) c : 23; _BitInt(2) d : 1; char y; };
+#endif
+#if __BITINT_MAXWIDTH__ >= 192
+struct S5 { char x; _BitInt(192) a : 191; unsigned _BitInt(192) b : 190; _BitInt(192) c : 189; char y; };
+#endif
+struct S6 { _BitInt(2) a : 1; };
+#if __BITINT_MAXWIDTH__ >= 389
+struct S4 s4;
+static_assert (expr_has_type (s4.a + 1uwb, _BitInt(195)));
+static_assert (expr_has_type (s4.b + 1uwb, _BitInt(282)));
+static_assert (expr_has_type (s4.c + 1uwb, _BitInt(389)));
+static_assert (expr_has_type (s4.d * 0wb, _BitInt(2)));
+#endif
+#if __BITINT_MAXWIDTH__ >= 192
+struct S5 s5;
+static_assert (expr_has_type (s5.a + 1uwb, _BitInt(192)));
+static_assert (expr_has_type (s5.b + 1wb, unsigned _BitInt(192)));
+static_assert (expr_has_type (s5.c + 1uwb, _BitInt(192)));
+#endif
+struct S6 s6;
+static_assert (expr_has_type (s6.a + 0wb, _BitInt(2)));
+#if defined(__x86_64__) && __LP64__ && __BITINT_MAXWIDTH__ >= 575
+static_assert (sizeof (struct S1) == 2);
+static_assert (sizeof (struct S2) == 5);
+static_assert (sizeof (struct S3) == 9);
+static_assert (sizeof (struct S4) == 48);
+static_assert (sizeof (struct S5) == 88);
+static_assert (sizeof (struct S6) == 1);
+static_assert (alignof (struct S1) == 1);
+static_assert (alignof (struct S2) == 1);
+static_assert (alignof (struct S3) == 1);
+static_assert (alignof (struct S4) == 8);
+static_assert (alignof (struct S5) == 8);
+static_assert (alignof (struct S6) == 1);
+#endif
@@ -0,0 +1,184 @@
+/* PR c/102989 */
+/* { dg-do run { target bitint } } */
+/* { dg-options "-std=c2x -pedantic-errors" } */
+/* { dg-skip-if "" { ! run_expensive_tests } { "*" } { "-O0" "-O2" } } */
+/* { dg-skip-if "" { ! run_expensive_tests } { "-flto" } { "" } } */
+
+#if __BITINT_MAXWIDTH__ >= 156
+struct S156 { unsigned _BitInt(156) a : 135; _BitInt(156) b : 2; };
+struct T156 { _BitInt(156) a : 2; unsigned _BitInt(156) b : 135; _BitInt(156) c : 2; };
+
+__attribute__((noipa)) _BitInt(156)
+test156 (struct S156 *p, struct T156 *q, struct T156 *r, int n)
+{
+ r[0].b = p[0].a + q[0].b;
+ r[1].b = p[1].a * q[1].b;
+ r[2].a = p[2].a == q[2].b;
+ r[3].a = p[3].a < q[3].b;
+ r[4].b = p[4].a << n;
+ return p[5].a + q[5].b;
+}
+#endif
+
+#if __BITINT_MAXWIDTH__ >= 495
+struct S495 { unsigned _BitInt(495) a : 471; _BitInt(495) b : 2; };
+struct T495 { _BitInt(495) a : 2; unsigned _BitInt(495) b : 471; _BitInt(495) c : 2; };
+
+__attribute__((noipa)) _BitInt(495)
+test495 (struct S495 *p, struct T495 *q, struct T495 *r, int n)
+{
+ r[0].b = p[0].a + q[0].b;
+ r[1].b = p[1].a * q[1].b;
+ r[2].a = p[2].a == q[2].b;
+ r[3].a = p[3].a < q[3].b;
+ r[4].b = p[4].a << n;
+ return p[5].a + q[5].b;
+}
+#endif
+
+#if __BITINT_MAXWIDTH__ >= 575
+struct T575 { _BitInt(575) a : 2; _BitInt(575) b : 382; _BitInt(575) c : 2; };
+
+__attribute__((noipa)) _BitInt(575)
+test575 (struct T575 *q, _BitInt(575) x)
+{
+ return q->b + x;
+}
+#endif
+
+int
+main ()
+{
+#if __BITINT_MAXWIDTH__ >= 156
+ static struct S156 p156[] = {
+ { 6086908847973295618751425718189955476089uwb, 0wb },
+ { 22782605434509677806291360175224474025979uwb, 1wb },
+ { 37470702016132547913133822619863997855935uwb, -1wb },
+ { 35506973813062189967442086745146532460068uwb, -2wb },
+ { 30541197841349399254932135707861871578815uwb, -2wb },
+ { 42800615576623455179183425579556615564998uwb, 1wb },
+ { 83771790414073692613953222993102430597uwb, 0wb },
+ { 36412564746565547536452588666710660799602uwb, 1wb },
+ { 33301340481967309644890101787523367044846uwb, -1wb },
+ { 12245185680611854260212331160624126801841uwb, -2wb },
+ { 35828279900949208432533857460683046288424uwb, -1wb },
+ { 4123278359205456806488911776768785478962uwb, 1wb }
+ };
+ static struct T156 q156[] = {
+ { -2wb, 20935792668463008606182638244678610336415uwb, 0wb },
+ { -1wb, 209336580249060835473242979959042853484uwb, -1wb },
+ { 0wb, 26553091584512375040647771207085289684805uwb, -2wb },
+ { 1wb, 8584102019879028804166913978503267690027uwb, 1wb },
+ { -2wb, 3364986457594728242491236670969190237662uwb, 0wb },
+ { 1wb, 28487958103578401823549712846248514887291uwb, -1wb },
+ { 1wb, 30323438060061607929914857363700386658179uwb, -2wb },
+ { -2wb, 38436123658875864126535628489050345446219uwb, 1wb },
+ { -1wb, 33301340481967309644890101787523367044846uwb, -1wb },
+ { 0wb, 18081372790322879821963779970917788200718uwb, 0wb },
+ { 1wb, 35310198378318023568520640435634029391465uwb, -2wb },
+ { -2wb, 16532060682830030166332649597929082719904uwb, -1wb }
+ };
+ struct T156 r156[12];
+ static unsigned _BitInt(135) e156[] = {
+ 27022701516436304224934063962868565812504uwb,
+ 29613894605882195495382184794066465590244uwb,
+ 0uwb,
+ 0uwb,
+ 25008039169844990156837660987808592822272uwb,
+ 27732430714321733679421188674538799385921uwb,
+ 30407209850475681622528810586693489088776uwb,
+ 9125928536800138281422179636318960655206uwb,
+ 1uwb,
+ 1uwb,
+ 27059094310193532424702800326126974533632uwb,
+ 20655339042035486972821561374697868198866uwb
+ };
+ for (int i = 0; i < 12; ++i)
+ {
+ r156[i].a = (i & 1) ? 1wb : -2wb;
+ r156[i].b = (i & 1) ? 14518714321960041107770649917088777022122uwb : 29037428643920082215541299834177554044245uwb;
+ r156[i].c = (i & 1) ? -2wb : 1wb;
+ }
+ r156[5].b = test156 (&p156[0], &q156[0], &r156[0], 17);
+ r156[11].b = test156 (&p156[6], &q156[6], &r156[6], 117);
+ for (int i = 0; i < 12; ++i)
+ if ((((i % 6) - 2U <= 1U) ? r156[i].a : r156[i].b) != e156[i])
+ __builtin_abort ();
+ else if ((((i % 6) - 2U > 1U) && r156[i].a != ((i & 1) ? 1wb : -2wb))
+ || (((i % 6) - 2U <= 1U) && r156[i].b != ((i & 1) ? 14518714321960041107770649917088777022122uwb : 29037428643920082215541299834177554044245uwb))
+ || r156[i].c != ((i & 1) ? -2wb : 1wb))
+ __builtin_abort ();
+#endif
+#if __BITINT_MAXWIDTH__ >= 495
+ static struct S495 p495[] = {
+ { 5900641461698162830220261443910312286116186030711054026202132317287171989449954433727605565187406942318472276126021616097079737645240098454013uwb, 0wb },
+ { 5619335266199392590390116416034736345432323405939461237257863293465693217703812518219356211176818397041537700744525965483063880249177661765822uwb, -1wb },
+ { 744675643612988167093151199551285556694415176163561012938968935653355128760309622135847398882788196739068398576440472762370028310765535983503uwb, 1wb },
+ { 4710120587609940729927891635083547264060530096467790505615232580303577049694742313824211264116793595223025831432779325089965241897060863692416uwb, -2wb },
+ { 4700240735957362687308898816111021174982406591648489926246999776223031821022674194534613241738369983608058217658021289655774135981827101958355uwb, -1wb },
+ { 1876262100946684033144524627883805711239772124030259629263615724983251183045150901527994496938087455223125779829665212386349414296844947524925uwb, 0wb },
+ { 2635441776228543412090459941414006837629084474712903013295503578947861806271451688519107975488661564459243720565422264844084425272156107387136uwb, 1wb },
+ { 2630153258749552262848995430888671592867309014306367216498408256861265344131972108995527284590063838830515851856437258905217521364301962720643uwb, -2wb },
+ { 2745338434240621337939928605056928843351970485626602387969150293308450866745407501724727739848971206349186529783705440219203189970551219879381uwb, 1wb },
+ { 2688974451781009079406742432598239379065348006788221659938456041627730645258351669705421830999330571339577238631004906071154604486839621759962uwb, -1wb },
+ { 4974200409958136921019163732486769139044513968318459321751943678326303994594283963911023171617076265195547773578798591476134329493900866070930uwb, -2wb },
+ { 5308892348567193242289576342108929297198516978578910066291782408432432101971103786646801744963140659914127252602243854634890888430591500677669uwb, 0wb }
+ };
+ static struct T495 q495[] = {
+ { 0wb, 4301604554146816407338627211447119428992719156607107928889754501636100741327050147838502142108071735464603549268761292458205988617578325763173uwb, -2wb },
+ { -1wb, 2715194601730000102297343743781623149283369741076005412784114057373119142888076403665408178274328506558203084450300859444566357959934633829946uwb, -1wb },
+ { -2wb, 744675643612988167093151199551285556694415176163561012938968935653355128760309622135847398882788196739068398576440472762370028310765535983503uwb, 0wb },
+ { 1wb, 2905575297593112682176271367301360286079539486548283169761776505152778266677295498111968142984117916873706310434203454796302860902084876845657uwb, 1wb },
+ { -2wb, 1837917528781539509450058185492923533367227689379651913582946492506072644598679412452807791857307066254676899775684162126897712525287848351976uwb, -2wb },
+ { -1wb, 2319680246150988952074978951387405463389161562155916967834748191217991286707290863021474691040798411643120909475843121856853198824610064919029uwb, -1wb },
+ { 0wb, 995091385198857217634636856758679675740255626224651591597840191331701102225958567078470825514618566121501749811346910563358603344922083332530uwb, 0wb },
+ { 1wb, 1935767072657166967374130326547364176008146414333487504728787607207566058414790635912675474199093488415105292641061350133643468776770358124726uwb, 1wb },
+ { -1wb, 2323578462079768493720340989036535936419394491913499982954052044299468738290318204298367303716254084159730145465632640799602734853085685711959uwb, -1wb },
+ { 0wb, 3134743387837581735890167538039234659353143305621277824366534871456819272038615182289564667918939704822338586693965843434870517084569520632192uwb, 1wb },
+ { -2wb, 6069214031750054027161180881734548627596253190931156326304738913835931214674310539698411833298960086513572699541120829870803305974299213978797uwb, -2wb },
+ { 1wb, 3475118701665891960802877416318617212624733636229950713368672781946851382777667645205965216548373630879140490108440746796343158443948723601215uwb, 0wb }
+ };
+ struct T495 r495[12];
+ static unsigned _BitInt(471) e495[] = {
+ 4105080878509056910641706565917653774193674439925640176070095882154968553394649854768634849749595921611538850548285598212613898764208491978338uwb,
+ 4272455060900825442658035799123612713461581925816375155025420465351224978751666594799676382302564435552275519560187020810679530912731598360332uwb,
+ 1uwb,
+ 0uwb,
+ 193936707178394586072944129724741337251602515686017126954836162236154016065720970574348980545042390967692432385246117380757295497134607826944uwb,
+ 4195942347097672985219503579271211174628933686186176597098363916201242469752441764549469187978885866866246689305508334243202613121455012443954uwb,
+ 3630533161427400629725096798172686513369340100937554604893343770279562908497410255597578801003280130580745470376769175407443028617078190719666uwb,
+ 3017199903252810774645258484587879229328468140025245420287647204420639096996138928094942159316263828975261069022192893745856107126530044652322uwb,
+ 0uwb,
+ 1uwb,
+ 4881485695834897810379845656011652834144895329454510745024612553531749469753415260371487385803388022234443037087409795804654957238790836453376uwb,
+ 2686845912897162876175271668987768568908019867416339000638664253610979307366416705055294103965631534621730767864187291088562219375930292040036uwb
+ };
+ for (int i = 0; i < 12; ++i)
+ {
+ r495[i].a = (i & 1) ? 1wb : -2wb;
+ r495[i].b = (i & 1) ? 4064776758223948217944788059626518627276820498261681186014527291178869451588236484531648571697255170781024649897664873561781218332406621492565uwb : 2032388379111974108972394029813259313638410249130840593007263645589434725794118242265824285848627585390512324948832436780890609166203310746282uwb;
+ r495[i].c = (i & 1) ? -2wb : 1wb;
+ }
+ r495[5].b = test495 (&p495[0], &q495[0], &r495[0], 17);
+ r495[11].b = test495 (&p495[6], &q495[6], &r495[6], 117);
+ for (int i = 0; i < 12; ++i)
+ if ((((i % 6) - 2U <= 1U) ? r495[i].a : r495[i].b) != e495[i])
+ __builtin_abort ();
+ else if ((((i % 6) - 2U > 1U) && r495[i].a != ((i & 1) ? 1wb : -2wb))
+ || (((i % 6) - 2U <= 1U) && r495[i].b != ((i & 1) ? 4064776758223948217944788059626518627276820498261681186014527291178869451588236484531648571697255170781024649897664873561781218332406621492565uwb : 2032388379111974108972394029813259313638410249130840593007263645589434725794118242265824285848627585390512324948832436780890609166203310746282uwb))
+ || r495[i].c != ((i & 1) ? -2wb : 1wb))
+ __builtin_abort ();
+#endif
+#if __BITINT_MAXWIDTH__ >= 575
+ struct T575 q575[] = {
+ { 0wb, 96684809381001318096256993724350755663760586347837309196134430400012751231961429238828670682891585656560169817843wb, -2wb },
+ { -1wb, -2736587102891263842950610428227571747319762162345429601728737031966764668220310874246707348170368842425752759862563wb, 1wb }
+ };
+ if (test575 (&q575[0], -20620110861486347200204305994458440394552720887062830768778333216240622422772611435913937143052282943196256683484434522946545384240670168537704102522242938522404309878852322wb)
+ != -20620110861486347200204305994458440394552720887062830768778236531431241421454515178920212792296619182609908846175238388516145371489438207108465273851560046936747749709034479wb)
+ __builtin_abort ();
+ if (test575 (&q575[1], 42621052848920155042866550876502212956983884683310087431575669593007353224147609487103322505341199223508939436382180596125666953651582557283994756434373621037386194691552808wb)
+ != 42621052848920155042866550876502212956983884683310087431572933005904461960304658876675094933593879461346594006780451859093700188983362246409748049086203252194960441931690245wb)
+ __builtin_abort ();
+#endif
+}