lower-bitint: Encode address space qualifiers in VIEW_CONVERT_EXPRs [PR113736]

Message ID: ZcIHhEEHSVIBGwF9@tucnak
State: Unresolved

Checks

snail/gcc-patch-check: warning (Git am fail log)

Commit Message

Jakub Jelinek Feb. 6, 2024, 10:18 a.m. UTC
  Hi!

As discussed in the PR, e.g. build_fold_addr_expr needs TYPE_ADDR_SPACE
on the outermost reference rather than just on the base, so the
following patch makes sure to propagate the address space from the
accessed variable to the MEM_REFs and/or VIEW_CONVERT_EXPRs used to
access it.
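
For illustration, here is a minimal sketch of the kind of access this is
about, reduced from the new testcase below (the function name limb_store
is made up, and __seg_gs assumes an x86_64 target):

  struct S { _BitInt(710) a; };

  void
  limb_store (__seg_gs struct S *p, _BitInt(710) x)
  {
    /* The 710-bit store is lowered into limb-sized MEM_REFs; with this
       patch each of those references carries the __seg_gs address space,
       so build_fold_addr_expr on them yields a __seg_gs pointer rather
       than a generic one.  */
    p->a = x;
  }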

Bootstrapped/regtested on x86_64-linux and i686-linux, ok for trunk?

2024-02-06  Jakub Jelinek  <jakub@redhat.com>

	PR tree-optimization/113736
	* gimple-lower-bitint.cc (bitint_large_huge::limb_access): Use
	var's address space for MEM_REF or VIEW_CONVERT_EXPRs.

	* gcc.dg/bitint-86.c: New test.


	Jakub
  

Comments

Richard Biener Feb. 6, 2024, 11:43 a.m. UTC | #1
On Tue, 6 Feb 2024, Jakub Jelinek wrote:

> Hi!
> 
> As discussed in the PR, e.g. build_fold_addr_expr needs TYPE_ADDR_SPACE
> on the outermost reference rather than just on the base, so the
> following patch makes sure to propagate the address space from the
> accessed variable to the MEM_REFs and/or VIEW_CONVERT_EXPRs used to
> access it.
> 
> Bootstrapped/regtested on x86_64-linux and i686-linux, ok for trunk?

OK.

Thanks,
Richard.

Patch

--- gcc/gimple-lower-bitint.cc.jj	2024-02-05 10:57:32.946941767 +0100
+++ gcc/gimple-lower-bitint.cc	2024-02-05 11:41:28.352436669 +0100
@@ -601,12 +601,17 @@  bitint_large_huge::limb_access (tree typ
 {
   tree atype = (tree_fits_uhwi_p (idx)
 		? limb_access_type (type, idx) : m_limb_type);
+  tree ltype = m_limb_type;
+  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (var));
+  if (as != TYPE_ADDR_SPACE (ltype))
+    ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
+					 | ENCODE_QUAL_ADDR_SPACE (as));
   tree ret;
   if (DECL_P (var) && tree_fits_uhwi_p (idx))
     {
       tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
       unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
-      ret = build2 (MEM_REF, m_limb_type,
+      ret = build2 (MEM_REF, ltype,
 		    build_fold_addr_expr (var),
 		    build_int_cst (ptype, off));
       TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
@@ -615,7 +620,7 @@  bitint_large_huge::limb_access (tree typ
   else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
     {
       ret
-	= build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
+	= build2 (MEM_REF, ltype, TREE_OPERAND (var, 0),
 		  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
 			      build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
 					     tree_to_uhwi (idx)
@@ -633,10 +638,10 @@  bitint_large_huge::limb_access (tree typ
 	{
 	  unsigned HOST_WIDE_INT nelts
 	    = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
-	  tree atype = build_array_type_nelts (m_limb_type, nelts);
+	  tree atype = build_array_type_nelts (ltype, nelts);
 	  var = build1 (VIEW_CONVERT_EXPR, atype, var);
 	}
-      ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
+      ret = build4 (ARRAY_REF, ltype, var, idx, NULL_TREE, NULL_TREE);
     }
   if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
     {
--- gcc/testsuite/gcc.dg/bitint-86.c.jj	2024-02-05 12:11:03.582868774 +0100
+++ gcc/testsuite/gcc.dg/bitint-86.c	2024-02-05 12:15:14.322401544 +0100
@@ -0,0 +1,40 @@ 
+/* PR tree-optimization/113736 */
+/* { dg-do compile { target bitint } } */
+/* { dg-options "-O2 -std=gnu23 -w" } */
+
+#if __BITINT_MAXWIDTH__ >= 710
+struct S { _BitInt(710) a; };
+struct T { struct S b[4]; };
+
+#ifdef __x86_64__
+#define SEG __seg_gs
+#elif defined __i386__
+#define SEG __seg_fs
+#else
+#define SEG
+#endif
+
+void
+foo (__seg_gs struct T *p)
+{
+  struct S s;
+  p->b[0] = s;
+}
+
+void
+bar (__seg_gs struct T *p, _BitInt(710) x, int y, double z)
+{
+  p->b[0].a = x + 42;
+  p->b[1].a = x << y;
+  p->b[2].a = x >> y;
+  p->b[3].a = z;
+}
+
+int
+baz (__seg_gs struct T *p, _BitInt(710) x, _BitInt(710) y)
+{
+  return __builtin_add_overflow (x, y, &p->b[1].a);
+}
+#else
+int i;
+#endif