[2/2] c++: speculative constexpr and is_constant_evaluated [PR108243]

Message ID 20230127220250.1896137-2-ppalka@redhat.com
State Accepted
Series: [1/2] c++: make manifestly_const_eval tri-state

Checks

snail/gcc-patch-check: success (Github commit url)

Commit Message

Patrick Palka Jan. 27, 2023, 10:02 p.m. UTC
This PR illustrates that __builtin_is_constant_evaluated currently acts
as an optimization barrier for our speculative constexpr evaluation,
since we don't want to prematurely fold the builtin to false if the
expression in question would be later manifestly constant evaluated (in
which case it must be folded to true).
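
For a minimal illustration, reduced from the new testcase below:

  constexpr int foo() {
    return __builtin_is_constant_evaluated() + 1;
  }

  int p;

  int main() {
    p = foo();  // not manifestly constant evaluated, so ideally this
                // folds to p = 1 at -O; before this patch the builtin
                // blocks speculative folding of the call entirely
  }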

This patch fixes this by permitting __builtin_is_constant_evaluated
to get folded as false during cp_fold_function, since at that point
we're sure we're doing manifestly constant evaluation.  To that end
we add a flags parameter to cp_fold that controls what mce_value the
CALL_EXPR case passes to maybe_constant_value.

Bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK for
trunk?

	PR c++/108243

gcc/cp/ChangeLog:

	* cp-gimplify.cc (enum fold_flags): Define.
	(cp_fold_data::genericize): Replace this data member with ...
	(cp_fold_data::fold_flags): ... this.
	(cp_fold_r): Adjust cp_fold_data use and cp_fold calls.
	(cp_fold_function): Likewise.
	(cp_fold_maybe_rvalue): Likewise.
	(cp_fully_fold_init): Likewise.
	(cp_fold): Add fold_flags parameter.  Don't cache if flags
	isn't empty.
	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
	if ff_genericize is set.

gcc/testsuite/ChangeLog:

	* g++.dg/opt/pr108243.C: New test.
---
 gcc/cp/cp-gimplify.cc               | 76 ++++++++++++++++++-----------
 gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
 2 files changed, 76 insertions(+), 29 deletions(-)
 create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
  

Comments

Patrick Palka Jan. 27, 2023, 10:05 p.m. UTC | #1
On Fri, 27 Jan 2023, Patrick Palka wrote:

> This PR illustrates that __builtin_is_constant_evaluated currently acts
> as an optimization barrier for our speculative constexpr evaluation,
> since we don't want to prematurely fold the builtin to false if the
> expression in question would be later manifestly constant evaluated (in
> which case it must be folded to true).
> 
> This patch fixes this by permitting __builtin_is_constant_evaluated
> to get folded as false during cp_fold_function, since at that point
> we're sure we're doing manifestly constant evaluation.  To that end

"we're sure we're done with manifestly constant evaluation" rather

> we add a flags parameter to cp_fold that controls what mce_value the
> CALL_EXPR case passes to maybe_constant_value.
> 
> Bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK for
> trunk?
> 
> 	PR c++/108243
> 
> gcc/cp/ChangeLog:
> 
> 	* cp-gimplify.cc (enum fold_flags): Define.
> 	(cp_fold_data::genericize): Replace this data member with ...
> 	(cp_fold_data::fold_flags): ... this.
> 	(cp_fold_r): Adjust cp_fold_data use and cp_fold calls.
> 	(cp_fold_function): Likewise.
> 	(cp_fold_maybe_rvalue): Likewise.
> 	(cp_fully_fold_init): Likewise.
> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> 	isn't empty.
> 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
> 	if ff_genericize is set.
> 
> gcc/testsuite/ChangeLog:
> 
> 	* g++.dg/opt/pr108243.C: New test.
> ---
>  gcc/cp/cp-gimplify.cc               | 76 ++++++++++++++++++-----------
>  gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
>  2 files changed, 76 insertions(+), 29 deletions(-)
>  create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index a35cedd05cc..d023a63768f 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not see
>  #include "omp-general.h"
>  #include "opts.h"
>  
> +/* Flags for cp_fold and cp_fold_r.  */
> +
> +enum fold_flags {
> +  ff_none = 0,
> +  /* Whether we're being called from cp_fold_function.  */
> +  ff_genericize = 1 << 0,
> +};
> +
>  /* Forward declarations.  */
>  
>  static tree cp_genericize_r (tree *, int *, void *);
>  static tree cp_fold_r (tree *, int *, void *);
>  static void cp_genericize_tree (tree*, bool);
> -static tree cp_fold (tree);
> +static tree cp_fold (tree, fold_flags);
>  
>  /* Genericize a TRY_BLOCK.  */
>  
> @@ -996,9 +1004,8 @@ struct cp_genericize_data
>  struct cp_fold_data
>  {
>    hash_set<tree> pset;
> -  bool genericize; // called from cp_fold_function?
> -
> -  cp_fold_data (bool g): genericize (g) {}
> +  fold_flags flags;
> +  cp_fold_data (fold_flags flags): flags (flags) {}
>  };
>  
>  static tree
> @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>        break;
>      }
>  
> -  *stmt_p = stmt = cp_fold (*stmt_p);
> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>  
>    if (data->pset.add (stmt))
>      {
> @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>  	 here rather than in cp_genericize to avoid problems with the invisible
>  	 reference transition.  */
>      case INIT_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>  	cp_genericize_init_expr (stmt_p);
>        break;
>  
>      case TARGET_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>  	cp_genericize_target_expr (stmt_p);
>  
>        /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>  void
>  cp_fold_function (tree fndecl)
>  {
> -  cp_fold_data data (/*genericize*/true);
> +  cp_fold_data data (ff_genericize);
>    cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>  }
>  
> @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>  {
>    while (true)
>      {
> -      x = cp_fold (x);
> +      x = cp_fold (x, ff_none);
>        if (rval)
>  	x = mark_rvalue_use (x);
>        if (rval && DECL_P (x)
> @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
>    if (processing_template_decl)
>      return x;
>    x = cp_fully_fold (x);
> -  cp_fold_data data (/*genericize*/false);
> +  cp_fold_data data (ff_none);
>    cp_walk_tree (&x, cp_fold_r, &data, NULL);
>    return x;
>  }
> @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
>      Function returns X or its folded variant.  */
>  
>  static tree
> -cp_fold (tree x)
> +cp_fold (tree x, fold_flags flags)
>  {
>    tree op0, op1, op2, op3;
>    tree org_x = x, r = NULL_TREE;
> @@ -2490,8 +2497,11 @@ cp_fold (tree x)
>    if (fold_cache == NULL)
>      fold_cache = hash_map<tree, tree>::create_ggc (101);
>  
> -  if (tree *cached = fold_cache->get (x))
> -    return *cached;
> +  bool cache_p = (flags == ff_none);
> +
> +  if (cache_p)
> +    if (tree *cached = fold_cache->get (x))
> +      return *cached;
>  
>    uid_sensitive_constexpr_evaluation_checker c;
>  
> @@ -2526,7 +2536,7 @@ cp_fold (tree x)
>  	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>  	     folding of the operand should be in the caches and if in cp_fold_r
>  	     it will modify it in place.  */
> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>  	  if (op0 == error_mark_node)
>  	    x = error_mark_node;
>  	  break;
> @@ -2571,7 +2581,7 @@ cp_fold (tree x)
>  	{
>  	  tree p = maybe_undo_parenthesized_ref (x);
>  	  if (p != x)
> -	    return cp_fold (p);
> +	    return cp_fold (p, flags);
>  	}
>        goto unary;
>  
> @@ -2763,8 +2773,8 @@ cp_fold (tree x)
>      case COND_EXPR:
>        loc = EXPR_LOCATION (x);
>        op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>  
>        if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>  	{
> @@ -2854,7 +2864,7 @@ cp_fold (tree x)
>  	      {
>  		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>  		  r = build_nop (TREE_TYPE (x), r);
> -		x = cp_fold (r);
> +		x = cp_fold (r, flags);
>  		break;
>  	      }
>  	  }
> @@ -2908,7 +2918,7 @@ cp_fold (tree x)
>  	int m = call_expr_nargs (x);
>  	for (int i = 0; i < m; i++)
>  	  {
> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>  	    if (r != CALL_EXPR_ARG (x, i))
>  	      {
>  		if (r == error_mark_node)
> @@ -2931,7 +2941,7 @@ cp_fold (tree x)
>  
>  	if (TREE_CODE (r) != CALL_EXPR)
>  	  {
> -	    x = cp_fold (r);
> +	    x = cp_fold (r, flags);
>  	    break;
>  	  }
>  
> @@ -2944,7 +2954,15 @@ cp_fold (tree x)
>  	   constant, but the call followed by an INDIRECT_REF is.  */
>  	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>  	    && !flag_no_inline)
> -	  r = maybe_constant_value (x);
> +	  {
> +	    mce_value manifestly_const_eval = mce_unknown;
> +	    if (flags & ff_genericize)
> +	      /* At genericization time it's safe to fold
> +		 __builtin_is_constant_evaluated to false.  */
> +	      manifestly_const_eval = mce_false;
> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> +				      manifestly_const_eval);
> +	  }
>  	optimize = sv;
>  
>          if (TREE_CODE (r) != CALL_EXPR)
> @@ -2971,7 +2989,7 @@ cp_fold (tree x)
>  	vec<constructor_elt, va_gc> *nelts = NULL;
>  	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>  	  {
> -	    tree op = cp_fold (p->value);
> +	    tree op = cp_fold (p->value, flags);
>  	    if (op != p->value)
>  	      {
>  		if (op == error_mark_node)
> @@ -3002,7 +3020,7 @@ cp_fold (tree x)
>  
>  	for (int i = 0; i < n; i++)
>  	  {
> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>  	    if (op != TREE_VEC_ELT (x, i))
>  	      {
>  		if (!changed)
> @@ -3019,10 +3037,10 @@ cp_fold (tree x)
>      case ARRAY_RANGE_REF:
>  
>        loc = EXPR_LOCATION (x);
> -      op0 = cp_fold (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> -      op3 = cp_fold (TREE_OPERAND (x, 3));
> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>  
>        if (op0 != TREE_OPERAND (x, 0)
>  	  || op1 != TREE_OPERAND (x, 1)
> @@ -3050,7 +3068,7 @@ cp_fold (tree x)
>        /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
>  	 folding, evaluates to an invariant.  In that case no need to wrap
>  	 this folded tree with a SAVE_EXPR.  */
> -      r = cp_fold (TREE_OPERAND (x, 0));
> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>        if (tree_invariant_p (r))
>  	x = r;
>        break;
> @@ -3069,7 +3087,7 @@ cp_fold (tree x)
>        copy_warning (x, org_x);
>      }
>  
> -  if (!c.evaluation_restricted_p ())
> +  if (cache_p && !c.evaluation_restricted_p ())
>      {
>        fold_cache->put (org_x, x);
>        /* Prevent that we try to fold an already folded result again.  */
> diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C b/gcc/testsuite/g++.dg/opt/pr108243.C
> new file mode 100644
> index 00000000000..4c45dbba13c
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> @@ -0,0 +1,29 @@
> +// PR c++/108243
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +constexpr int foo() {
> +  return __builtin_is_constant_evaluated() + 1;
> +}
> +
> +#if __cpp_if_consteval
> +constexpr int bar() {
> +  if consteval {
> +    return 5;
> +  } else {
> +    return 4;
> +  }
> +}
> +#endif
> +
> +int p, q;
> +
> +int main() {
> +  p = foo();
> +#if __cpp_if_consteval
> +  q = bar();
> +#endif
> +}
> +
> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> +// { dg-final { scan-tree-dump-not "= bar" "original" } }
> -- 
> 2.39.1.348.g5dec958dcf
> 
>
  
Jason Merrill Jan. 30, 2023, 8:05 p.m. UTC | #2
On 1/27/23 17:02, Patrick Palka wrote:
> [...]
> diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C b/gcc/testsuite/g++.dg/opt/pr108243.C
> new file mode 100644
> index 00000000000..4c45dbba13c
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> @@ -0,0 +1,29 @@
> +// PR c++/108243
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +constexpr int foo() {
> +  return __builtin_is_constant_evaluated() + 1;
> +}
> +
> +#if __cpp_if_consteval
> +constexpr int bar() {
> +  if consteval {
> +    return 5;
> +  } else {
> +    return 4;
> +  }
> +}
> +#endif
> +
> +int p, q;
> +
> +int main() {
> +  p = foo();
> +#if __cpp_if_consteval
> +  q = bar();
> +#endif
> +}
> +
> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> +// { dg-final { scan-tree-dump-not "= bar" "original" } }

Let's also test a static initializer that can't be fully constant-evaluated.
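
For concreteness, such a test might look like the following (essentially
what the revised patch downthread adds as is_constant_evaluated1.C):
since the initializer depends on the parameter n, it can't be fully
constant-evaluated, and the builtin should then fold to false in the
runtime initialization code:

  struct A {
    constexpr A(int n, int m) : n(n), m(m) { }
    int n, m;
  };

  void f(int n) {
    static A a = {n, __builtin_is_constant_evaluated()};
  }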

Jason
  
Patrick Palka Feb. 3, 2023, 8:51 p.m. UTC | #3
On Mon, 30 Jan 2023, Jason Merrill wrote:

> On 1/27/23 17:02, Patrick Palka wrote:
> > [...]
> 
> Let's also test a static initializer that can't be fully constant-evaluated.

D'oh, doing so revealed that cp_fold_function doesn't reach static
initializers; that's taken care of by cp_fully_fold_init.  So it seems
we need to make cp_fold, when called from the latter entry point, also
assume m_c_e is false.  We can't re-use ff_genericize here because that
flag has additional effects in cp_fold_r, so it seems we need another
flag that only affects the manifestly constant-eval stuff; I called
it ff_mce_false.  How does the following look?

-- >8 --

Subject: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated
 [PR108243]

This PR illustrates that __builtin_is_constant_evaluated currently acts
as an optimization barrier for our speculative constexpr evaluation,
since we don't want to prematurely fold the builtin to false if the
expression in question would be later manifestly constant evaluated (in
which case it must be folded to true).

This patch fixes this by permitting __builtin_is_constant_evaluated
to get folded as false during cp_fold_function and cp_fully_fold_init,
since at these points we're sure we're done with manifestly constant
evaluation.  To that end we add a flags parameter to cp_fold that
controls whether we pass mce_false or mce_unknown to maybe_constant_value
when folding a CALL_EXPR.

	PR c++/108243
	PR c++/97553

gcc/cp/ChangeLog:

	* cp-gimplify.cc (enum fold_flags): Define.
	(cp_fold_data::genericize): Replace this data member with ...
	(cp_fold_data::fold_flags): ... this.
	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
	(cp_fold_function): Likewise.
	(cp_fold_maybe_rvalue): Likewise.
	(cp_fully_fold_init): Likewise.
	(cp_fold): Add fold_flags parameter.  Don't cache if flags
	isn't empty.
	<case CALL_EXPR>: If ff_mce_false is set, fold
	__builtin_is_constant_evaluated to false and pass mce_false to
	maybe_constant_value.

gcc/testsuite/ChangeLog:

	* g++.dg/opt/is_constant_evaluated1.C: New test.
	* g++.dg/opt/is_constant_evaluated2.C: New test.
---
 gcc/cp/cp-gimplify.cc                         | 88 ++++++++++++-------
 .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
 .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
 3 files changed, 104 insertions(+), 30 deletions(-)
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C

diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
index 9929d29981a..590ed787997 100644
--- a/gcc/cp/cp-gimplify.cc
+++ b/gcc/cp/cp-gimplify.cc
@@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
 #include "omp-general.h"
 #include "opts.h"
 
+/* Flags for cp_fold and cp_fold_r.  */
+
+enum fold_flags {
+  ff_none = 0,
+  /* Whether we're being called from cp_fold_function.  */
+  ff_genericize = 1 << 0,
+  /* Whether we're folding late enough that we could assume
+     we're definitely not in a manifestly constant-evaluated
+     context.  */
+  ff_mce_false = 1 << 1,
+};
+
+using fold_flags_t = int;
+
 /* Forward declarations.  */
 
 static tree cp_genericize_r (tree *, int *, void *);
 static tree cp_fold_r (tree *, int *, void *);
 static void cp_genericize_tree (tree*, bool);
-static tree cp_fold (tree);
+static tree cp_fold (tree, fold_flags_t);
 
 /* Genericize a TRY_BLOCK.  */
 
@@ -1012,9 +1026,8 @@ struct cp_genericize_data
 struct cp_fold_data
 {
   hash_set<tree> pset;
-  bool genericize; // called from cp_fold_function?
-
-  cp_fold_data (bool g): genericize (g) {}
+  fold_flags_t flags;
+  cp_fold_data (fold_flags_t flags): flags (flags) {}
 };
 
 static tree
@@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
       break;
     }
 
-  *stmt_p = stmt = cp_fold (*stmt_p);
+  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
 
   if (data->pset.add (stmt))
     {
@@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 	 here rather than in cp_genericize to avoid problems with the invisible
 	 reference transition.  */
     case INIT_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_init_expr (stmt_p);
       break;
 
     case TARGET_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_target_expr (stmt_p);
 
       /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
@@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 void
 cp_fold_function (tree fndecl)
 {
-  cp_fold_data data (/*genericize*/true);
+  cp_fold_data data (ff_genericize | ff_mce_false);
   cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
 }
 
@@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
 {
   while (true)
     {
-      x = cp_fold (x);
+      x = cp_fold (x, ff_none);
       if (rval)
 	x = mark_rvalue_use (x);
       if (rval && DECL_P (x)
@@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
   if (processing_template_decl)
     return x;
   x = cp_fully_fold (x);
-  cp_fold_data data (/*genericize*/false);
+  cp_fold_data data (ff_mce_false);
   cp_walk_tree (&x, cp_fold_r, &data, NULL);
   return x;
 }
@@ -2485,7 +2498,7 @@ clear_fold_cache (void)
     Function returns X or its folded variant.  */
 
 static tree
-cp_fold (tree x)
+cp_fold (tree x, fold_flags_t flags)
 {
   tree op0, op1, op2, op3;
   tree org_x = x, r = NULL_TREE;
@@ -2506,8 +2519,11 @@ cp_fold (tree x)
   if (fold_cache == NULL)
     fold_cache = hash_map<tree, tree>::create_ggc (101);
 
-  if (tree *cached = fold_cache->get (x))
-    return *cached;
+  bool cache_p = (flags == ff_none);
+
+  if (cache_p)
+    if (tree *cached = fold_cache->get (x))
+      return *cached;
 
   uid_sensitive_constexpr_evaluation_checker c;
 
@@ -2542,7 +2558,7 @@ cp_fold (tree x)
 	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
 	     folding of the operand should be in the caches and if in cp_fold_r
 	     it will modify it in place.  */
-	  op0 = cp_fold (TREE_OPERAND (x, 0));
+	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
 	  if (op0 == error_mark_node)
 	    x = error_mark_node;
 	  break;
@@ -2587,7 +2603,7 @@ cp_fold (tree x)
 	{
 	  tree p = maybe_undo_parenthesized_ref (x);
 	  if (p != x)
-	    return cp_fold (p);
+	    return cp_fold (p, flags);
 	}
       goto unary;
 
@@ -2779,8 +2795,8 @@ cp_fold (tree x)
     case COND_EXPR:
       loc = EXPR_LOCATION (x);
       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
 
       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
 	{
@@ -2870,7 +2886,7 @@ cp_fold (tree x)
 	      {
 		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
 		  r = build_nop (TREE_TYPE (x), r);
-		x = cp_fold (r);
+		x = cp_fold (r, flags);
 		break;
 	      }
 	  }
@@ -2890,8 +2906,12 @@ cp_fold (tree x)
 	  {
 	    switch (DECL_FE_FUNCTION_CODE (callee))
 	      {
-		/* Defer folding __builtin_is_constant_evaluated.  */
 	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
+		/* Defer folding __builtin_is_constant_evaluated unless
+		   we can assume this isn't a manifestly constant-evaluated
+		   context.  */
+		if (flags & ff_mce_false)
+		  x = boolean_false_node;
 		break;
 	      case CP_BUILT_IN_SOURCE_LOCATION:
 		x = fold_builtin_source_location (x);
@@ -2924,7 +2944,7 @@ cp_fold (tree x)
 	int m = call_expr_nargs (x);
 	for (int i = 0; i < m; i++)
 	  {
-	    r = cp_fold (CALL_EXPR_ARG (x, i));
+	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
 	    if (r != CALL_EXPR_ARG (x, i))
 	      {
 		if (r == error_mark_node)
@@ -2947,7 +2967,7 @@ cp_fold (tree x)
 
 	if (TREE_CODE (r) != CALL_EXPR)
 	  {
-	    x = cp_fold (r);
+	    x = cp_fold (r, flags);
 	    break;
 	  }
 
@@ -2960,7 +2980,15 @@ cp_fold (tree x)
 	   constant, but the call followed by an INDIRECT_REF is.  */
 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
 	    && !flag_no_inline)
-	  r = maybe_constant_value (x);
+	  {
+	    mce_value manifestly_const_eval = mce_unknown;
+	    if (flags & ff_mce_false)
+	      /* Allow folding __builtin_is_constant_evaluated to false during
+		 constexpr evaluation of this call.  */
+	      manifestly_const_eval = mce_false;
+	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
+				      manifestly_const_eval);
+	  }
 	optimize = sv;
 
         if (TREE_CODE (r) != CALL_EXPR)
@@ -2987,7 +3015,7 @@ cp_fold (tree x)
 	vec<constructor_elt, va_gc> *nelts = NULL;
 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
 	  {
-	    tree op = cp_fold (p->value);
+	    tree op = cp_fold (p->value, flags);
 	    if (op != p->value)
 	      {
 		if (op == error_mark_node)
@@ -3018,7 +3046,7 @@ cp_fold (tree x)
 
 	for (int i = 0; i < n; i++)
 	  {
-	    tree op = cp_fold (TREE_VEC_ELT (x, i));
+	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
 	    if (op != TREE_VEC_ELT (x, i))
 	      {
 		if (!changed)
@@ -3035,10 +3063,10 @@ cp_fold (tree x)
     case ARRAY_RANGE_REF:
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
-      op3 = cp_fold (TREE_OPERAND (x, 3));
+      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
+      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
 
       if (op0 != TREE_OPERAND (x, 0)
 	  || op1 != TREE_OPERAND (x, 1)
@@ -3066,7 +3094,7 @@ cp_fold (tree x)
       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
 	 folding, evaluates to an invariant.  In that case no need to wrap
 	 this folded tree with a SAVE_EXPR.  */
-      r = cp_fold (TREE_OPERAND (x, 0));
+      r = cp_fold (TREE_OPERAND (x, 0), flags);
       if (tree_invariant_p (r))
 	x = r;
       break;
@@ -3085,7 +3113,7 @@ cp_fold (tree x)
       copy_warning (x, org_x);
     }
 
-  if (!c.evaluation_restricted_p ())
+  if (cache_p && !c.evaluation_restricted_p ())
     {
       fold_cache->put (org_x, x);
       /* Prevent that we try to fold an already folded result again.  */
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
new file mode 100644
index 00000000000..ee05cbab785
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
@@ -0,0 +1,14 @@
+// PR c++/108243
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+struct A {
+  constexpr A(int n, int m) : n(n), m(m) { }
+  int n, m;
+};
+
+void f(int n) {
+  static A a = {n, __builtin_is_constant_evaluated()};
+}
+
+// { dg-final { scan-tree-dump-not "is_constant_evaluated" "original" } }
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
new file mode 100644
index 00000000000..ed964e20a7a
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
@@ -0,0 +1,32 @@
+// PR c++/97553
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+constexpr int foo() {
+  return __builtin_is_constant_evaluated() + 1;
+}
+
+#if __cpp_if_consteval
+constexpr int bar() {
+  if consteval {
+    return 5;
+  } else {
+    return 4;
+  }
+}
+#endif
+
+int p, q;
+
+int main() {
+  p = foo();
+#if __cpp_if_consteval
+  q = bar();
+#endif
+}
+
+// { dg-final { scan-tree-dump "p = 1" "original" } }
+// { dg-final { scan-tree-dump-not "= foo" "original" } }
+
+// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
+// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
  
Patrick Palka Feb. 3, 2023, 8:57 p.m. UTC | #4
On Fri, 3 Feb 2023, Patrick Palka wrote:

> On Mon, 30 Jan 2023, Jason Merrill wrote:
> 
> > On 1/27/23 17:02, Patrick Palka wrote:
> > > [...]
> > 
> > Let's also test a static initializer that can't be fully constant-evaluated.
> 
> D'oh, doing so revealed that cp_fold_function doesn't reach static
> initializers; that's taken care of by cp_fully_fold_init.  So it seems
> we need to make cp_fold, when called from the latter entry point, also
> assume m_c_e is false.  We can't re-use ff_genericize here because that
> flag has additional effects in cp_fold_r, so it seems we need another
> flag that only affects the manifestly constant-eval stuff; I called
> it ff_mce_false.  How does the following look?

N.B. cp_fully_fold_init is called only from three places:

  * from store_init_value shortly after manifestly-constant evualation of the
  initializer
  * from split_nonconstant_init
  * and from check_for_mismatched_contracts

So it seems to always be called late enough that we can safely assume
m_c_e is false as in cp_fold_function.
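
For concreteness, a minimal example of the kind of initializer at
issue (this is essentially what the new is_constant_evaluated1.C test
below boils down to):

  struct A {
    constexpr A(int n, int m) : n(n), m(m) { }
    int n, m;
  };

  void f(int n) {
    // Not fully constant: the manifestly constant evaluation attempted
    // by store_init_value fails, and the residual initializer is only
    // folded later by cp_fully_fold_init, where the builtin can safely
    // become false.
    static A a = {n, __builtin_is_constant_evaluated()};
  }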

> 
> -- >8 --
> 
> Subject: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated
>  [PR108243]
> 
> This PR illustrates that __builtin_is_constant_evaluated currently acts
> as an optimization barrier for our speculative constexpr evaluation,
> since we don't want to prematurely fold the builtin to false if the
> expression in question would be later manifestly constant evaluated (in
> which case it must be folded to true).
> 
> This patch fixes this by permitting __builtin_is_constant_evaluated
> to get folded as false during cp_fold_function and cp_fully_fold_init,
> since at these points we're sure we're done with manifestly constant
> evaluation.  To that end we add a flags parameter to cp_fold that
> controls whether we pass mce_false or mce_unknown to maybe_constant_value
> when folding a CALL_EXPR.
> 
> 	PR c++/108243
> 	PR c++/97553
> 
> gcc/cp/ChangeLog:
> 
> 	* cp-gimplify.cc (enum fold_flags): Define.
> 	(cp_fold_data::genericize): Replace this data member with ...
> 	(cp_fold_data::fold_flags): ... this.
> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> 	(cp_fold_function): Likewise.
> 	(cp_fold_maybe_rvalue): Likewise.
> 	(cp_fully_fold_init): Likewise.
> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> 	isn't empty.
> 	<case CALL_EXPR>: If ff_genericize is set, fold
> 	__builtin_is_constant_evaluated to false and pass mce_false to
> 	maybe_constant_value.
> 
> gcc/testsuite/ChangeLog:
> 
> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> ---
>  gcc/cp/cp-gimplify.cc                         | 88 ++++++++++++-------
>  .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
>  .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
>  3 files changed, 104 insertions(+), 30 deletions(-)
>  create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>  create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index 9929d29981a..590ed787997 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>  #include "omp-general.h"
>  #include "opts.h"
>  
> +/* Flags for cp_fold and cp_fold_r.  */
> +
> +enum fold_flags {
> +  ff_none = 0,
> +  /* Whether we're being called from cp_fold_function.  */
> +  ff_genericize = 1 << 0,
> +  /* Whether we're folding late enough that we could assume
> +     we're definitely not in a manifestly constant-evaluated
> +     context.  */
> +  ff_mce_false = 1 << 1,
> +};
> +
> +using fold_flags_t = int;
> +
>  /* Forward declarations.  */
>  
>  static tree cp_genericize_r (tree *, int *, void *);
>  static tree cp_fold_r (tree *, int *, void *);
>  static void cp_genericize_tree (tree*, bool);
> -static tree cp_fold (tree);
> +static tree cp_fold (tree, fold_flags_t);
>  
>  /* Genericize a TRY_BLOCK.  */
>  
> @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>  struct cp_fold_data
>  {
>    hash_set<tree> pset;
> -  bool genericize; // called from cp_fold_function?
> -
> -  cp_fold_data (bool g): genericize (g) {}
> +  fold_flags_t flags;
> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>  };
>  
>  static tree
> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>        break;
>      }
>  
> -  *stmt_p = stmt = cp_fold (*stmt_p);
> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>  
>    if (data->pset.add (stmt))
>      {
> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>  	 here rather than in cp_genericize to avoid problems with the invisible
>  	 reference transition.  */
>      case INIT_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>  	cp_genericize_init_expr (stmt_p);
>        break;
>  
>      case TARGET_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>  	cp_genericize_target_expr (stmt_p);
>  
>        /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>  void
>  cp_fold_function (tree fndecl)
>  {
> -  cp_fold_data data (/*genericize*/true);
> +  cp_fold_data data (ff_genericize | ff_mce_false);
>    cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>  }
>  
> @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>  {
>    while (true)
>      {
> -      x = cp_fold (x);
> +      x = cp_fold (x, ff_none);
>        if (rval)
>  	x = mark_rvalue_use (x);
>        if (rval && DECL_P (x)
> @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
>    if (processing_template_decl)
>      return x;
>    x = cp_fully_fold (x);
> -  cp_fold_data data (/*genericize*/false);
> +  cp_fold_data data (ff_mce_false);
>    cp_walk_tree (&x, cp_fold_r, &data, NULL);
>    return x;
>  }
> @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
>      Function returns X or its folded variant.  */
>  
>  static tree
> -cp_fold (tree x)
> +cp_fold (tree x, fold_flags_t flags)
>  {
>    tree op0, op1, op2, op3;
>    tree org_x = x, r = NULL_TREE;
> @@ -2506,8 +2519,11 @@ cp_fold (tree x)
>    if (fold_cache == NULL)
>      fold_cache = hash_map<tree, tree>::create_ggc (101);
>  
> -  if (tree *cached = fold_cache->get (x))
> -    return *cached;
> +  bool cache_p = (flags == ff_none);
> +
> +  if (cache_p)
> +    if (tree *cached = fold_cache->get (x))
> +      return *cached;
>  
>    uid_sensitive_constexpr_evaluation_checker c;
>  
> @@ -2542,7 +2558,7 @@ cp_fold (tree x)
>  	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>  	     folding of the operand should be in the caches and if in cp_fold_r
>  	     it will modify it in place.  */
> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>  	  if (op0 == error_mark_node)
>  	    x = error_mark_node;
>  	  break;
> @@ -2587,7 +2603,7 @@ cp_fold (tree x)
>  	{
>  	  tree p = maybe_undo_parenthesized_ref (x);
>  	  if (p != x)
> -	    return cp_fold (p);
> +	    return cp_fold (p, flags);
>  	}
>        goto unary;
>  
> @@ -2779,8 +2795,8 @@ cp_fold (tree x)
>      case COND_EXPR:
>        loc = EXPR_LOCATION (x);
>        op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>  
>        if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>  	{
> @@ -2870,7 +2886,7 @@ cp_fold (tree x)
>  	      {
>  		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>  		  r = build_nop (TREE_TYPE (x), r);
> -		x = cp_fold (r);
> +		x = cp_fold (r, flags);
>  		break;
>  	      }
>  	  }
> @@ -2890,8 +2906,12 @@ cp_fold (tree x)
>  	  {
>  	    switch (DECL_FE_FUNCTION_CODE (callee))
>  	      {
> -		/* Defer folding __builtin_is_constant_evaluated.  */
>  	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> +		/* Defer folding __builtin_is_constant_evaluated unless
> +		   we can assume this isn't a manifestly constant-evaluated
> +		   context.  */
> +		if (flags & ff_mce_false)
> +		  x = boolean_false_node;
>  		break;
>  	      case CP_BUILT_IN_SOURCE_LOCATION:
>  		x = fold_builtin_source_location (x);
> @@ -2924,7 +2944,7 @@ cp_fold (tree x)
>  	int m = call_expr_nargs (x);
>  	for (int i = 0; i < m; i++)
>  	  {
> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>  	    if (r != CALL_EXPR_ARG (x, i))
>  	      {
>  		if (r == error_mark_node)
> @@ -2947,7 +2967,7 @@ cp_fold (tree x)
>  
>  	if (TREE_CODE (r) != CALL_EXPR)
>  	  {
> -	    x = cp_fold (r);
> +	    x = cp_fold (r, flags);
>  	    break;
>  	  }
>  
> @@ -2960,7 +2980,15 @@ cp_fold (tree x)
>  	   constant, but the call followed by an INDIRECT_REF is.  */
>  	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>  	    && !flag_no_inline)
> -	  r = maybe_constant_value (x);
> +	  {
> +	    mce_value manifestly_const_eval = mce_unknown;
> +	    if (flags & ff_mce_false)
> +	      /* Allow folding __builtin_is_constant_evaluated to false during
> +		 constexpr evaluation of this call.  */
> +	      manifestly_const_eval = mce_false;
> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> +				      manifestly_const_eval);
> +	  }
>  	optimize = sv;
>  
>          if (TREE_CODE (r) != CALL_EXPR)
> @@ -2987,7 +3015,7 @@ cp_fold (tree x)
>  	vec<constructor_elt, va_gc> *nelts = NULL;
>  	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>  	  {
> -	    tree op = cp_fold (p->value);
> +	    tree op = cp_fold (p->value, flags);
>  	    if (op != p->value)
>  	      {
>  		if (op == error_mark_node)
> @@ -3018,7 +3046,7 @@ cp_fold (tree x)
>  
>  	for (int i = 0; i < n; i++)
>  	  {
> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>  	    if (op != TREE_VEC_ELT (x, i))
>  	      {
>  		if (!changed)
> @@ -3035,10 +3063,10 @@ cp_fold (tree x)
>      case ARRAY_RANGE_REF:
>  
>        loc = EXPR_LOCATION (x);
> -      op0 = cp_fold (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> -      op3 = cp_fold (TREE_OPERAND (x, 3));
> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>  
>        if (op0 != TREE_OPERAND (x, 0)
>  	  || op1 != TREE_OPERAND (x, 1)
> @@ -3066,7 +3094,7 @@ cp_fold (tree x)
>        /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
>  	 folding, evaluates to an invariant.  In that case no need to wrap
>  	 this folded tree with a SAVE_EXPR.  */
> -      r = cp_fold (TREE_OPERAND (x, 0));
> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>        if (tree_invariant_p (r))
>  	x = r;
>        break;
> @@ -3085,7 +3113,7 @@ cp_fold (tree x)
>        copy_warning (x, org_x);
>      }
>  
> -  if (!c.evaluation_restricted_p ())
> +  if (cache_p && !c.evaluation_restricted_p ())
>      {
>        fold_cache->put (org_x, x);
>        /* Prevent that we try to fold an already folded result again.  */
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> new file mode 100644
> index 00000000000..ee05cbab785
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> @@ -0,0 +1,14 @@
> +// PR c++/108243
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +struct A {
> +  constexpr A(int n, int m) : n(n), m(m) { }
> +  int n, m;
> +};
> +
> +void f(int n) {
> +  static A a = {n, __builtin_is_constant_evaluated()};
> +}
> +
> +// { dg-final { scan-tree-dump-not "is_constant_evaluated" "original" } }
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> new file mode 100644
> index 00000000000..ed964e20a7a
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> @@ -0,0 +1,32 @@
> +// PR c++/97553
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +constexpr int foo() {
> +  return __builtin_is_constant_evaluated() + 1;
> +}
> +
> +#if __cpp_if_consteval
> +constexpr int bar() {
> +  if consteval {
> +    return 5;
> +  } else {
> +    return 4;
> +  }
> +}
> +#endif
> +
> +int p, q;
> +
> +int main() {
> +  p = foo();
> +#if __cpp_if_consteval
> +  q = bar();
> +#endif
> +}
> +
> +// { dg-final { scan-tree-dump "p = 1" "original" } }
> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> +
> +// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
> +// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
> -- 
> 2.39.1.388.g2fc9e9ca3c
> 
>
  
Jason Merrill Feb. 5, 2023, 8:11 p.m. UTC | #5
On 2/3/23 15:51, Patrick Palka wrote:
> On Mon, 30 Jan 2023, Jason Merrill wrote:
> 
>> On 1/27/23 17:02, Patrick Palka wrote:
>>> This PR illustrates that __builtin_is_constant_evaluated currently acts
>>> as an optimization barrier for our speculative constexpr evaluation,
>>> since we don't want to prematurely fold the builtin to false if the
>>> expression in question would be later manifestly constant evaluated (in
>>> which case it must be folded to true).
>>>
>>> This patch fixes this by permitting __builtin_is_constant_evaluated
>>> to get folded as false during cp_fold_function, since at that point
>>> we're sure we're doing manifestly constant evaluation.  To that end
>>> we add a flags parameter to cp_fold that controls what mce_value the
>>> CALL_EXPR case passes to maybe_constant_value.
>>>
> > > > bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK for
>>> trunk?
>>>
>>> 	PR c++/108243
>>>
>>> gcc/cp/ChangeLog:
>>>
>>> 	* cp-gimplify.cc (enum fold_flags): Define.
>>> 	(cp_fold_data::genericize): Replace this data member with ...
>>> 	(cp_fold_data::fold_flags): ... this.
>>> 	(cp_fold_r): Adjust cp_fold_data use and cp_fold calls.
>>> 	(cp_fold_function): Likewise.
>>> 	(cp_fold_maybe_rvalue): Likewise.
>>> 	(cp_fully_fold_init): Likewise.
>>> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
>>> 	isn't empty.
>>> 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
>>> 	if ff_genericize is set.
>>>
>>> gcc/testsuite/ChangeLog:
>>>
>>> 	* g++.dg/opt/pr108243.C: New test.
>>> ---
>>>    gcc/cp/cp-gimplify.cc               | 76 ++++++++++++++++++-----------
>>>    gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
>>>    2 files changed, 76 insertions(+), 29 deletions(-)
>>>    create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
>>>
>>> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
>>> index a35cedd05cc..d023a63768f 100644
>>> --- a/gcc/cp/cp-gimplify.cc
>>> +++ b/gcc/cp/cp-gimplify.cc
>>> @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not see
>>>    #include "omp-general.h"
>>>    #include "opts.h"
>>>    +/* Flags for cp_fold and cp_fold_r.  */
>>> +
>>> +enum fold_flags {
>>> +  ff_none = 0,
>>> +  /* Whether we're being called from cp_fold_function.  */
>>> +  ff_genericize = 1 << 0,
>>> +};
>>> +
>>>    /* Forward declarations.  */
>>>      static tree cp_genericize_r (tree *, int *, void *);
>>>    static tree cp_fold_r (tree *, int *, void *);
>>>    static void cp_genericize_tree (tree*, bool);
>>> -static tree cp_fold (tree);
>>> +static tree cp_fold (tree, fold_flags);
>>>      /* Genericize a TRY_BLOCK.  */
>>>    @@ -996,9 +1004,8 @@ struct cp_genericize_data
>>>    struct cp_fold_data
>>>    {
>>>      hash_set<tree> pset;
>>> -  bool genericize; // called from cp_fold_function?
>>> -
>>> -  cp_fold_data (bool g): genericize (g) {}
>>> +  fold_flags flags;
>>> +  cp_fold_data (fold_flags flags): flags (flags) {}
>>>    };
>>>      static tree
>>> @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>> *data_)
>>>          break;
>>>        }
>>>    -  *stmt_p = stmt = cp_fold (*stmt_p);
>>> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>>>        if (data->pset.add (stmt))
>>>        {
>>> @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>> *data_)
>>>    	 here rather than in cp_genericize to avoid problems with the
>>> invisible
>>>    	 reference transition.  */
>>>        case INIT_EXPR:
>>> -      if (data->genericize)
>>> +      if (data->flags & ff_genericize)
>>>    	cp_genericize_init_expr (stmt_p);
>>>          break;
>>>          case TARGET_EXPR:
>>> -      if (data->genericize)
>>> +      if (data->flags & ff_genericize)
>>>    	cp_genericize_target_expr (stmt_p);
>>>            /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
>>> @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>> *data_)
>>>    void
>>>    cp_fold_function (tree fndecl)
>>>    {
>>> -  cp_fold_data data (/*genericize*/true);
>>> +  cp_fold_data data (ff_genericize);
>>>      cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>>>    }
>>>    @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>>>    {
>>>      while (true)
>>>        {
>>> -      x = cp_fold (x);
>>> +      x = cp_fold (x, ff_none);
>>>          if (rval)
>>>    	x = mark_rvalue_use (x);
>>>          if (rval && DECL_P (x)
>>> @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
>>>      if (processing_template_decl)
>>>        return x;
>>>      x = cp_fully_fold (x);
>>> -  cp_fold_data data (/*genericize*/false);
>>> +  cp_fold_data data (ff_none);
>>>      cp_walk_tree (&x, cp_fold_r, &data, NULL);
>>>      return x;
>>>    }
>>> @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
>>>        Function returns X or its folded variant.  */
>>>      static tree
>>> -cp_fold (tree x)
>>> +cp_fold (tree x, fold_flags flags)
>>>    {
>>>      tree op0, op1, op2, op3;
>>>      tree org_x = x, r = NULL_TREE;
>>> @@ -2490,8 +2497,11 @@ cp_fold (tree x)
>>>      if (fold_cache == NULL)
>>>        fold_cache = hash_map<tree, tree>::create_ggc (101);
>>>    -  if (tree *cached = fold_cache->get (x))
>>> -    return *cached;
>>> +  bool cache_p = (flags == ff_none);
>>> +
>>> +  if (cache_p)
>>> +    if (tree *cached = fold_cache->get (x))
>>> +      return *cached;
>>>        uid_sensitive_constexpr_evaluation_checker c;
>>>    @@ -2526,7 +2536,7 @@ cp_fold (tree x)
>>>    	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>>>    	     folding of the operand should be in the caches and if in
>>> cp_fold_r
>>>    	     it will modify it in place.  */
>>> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
>>> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>    	  if (op0 == error_mark_node)
>>>    	    x = error_mark_node;
>>>    	  break;
>>> @@ -2571,7 +2581,7 @@ cp_fold (tree x)
>>>    	{
>>>    	  tree p = maybe_undo_parenthesized_ref (x);
>>>    	  if (p != x)
>>> -	    return cp_fold (p);
>>> +	    return cp_fold (p, flags);
>>>    	}
>>>          goto unary;
>>>    @@ -2763,8 +2773,8 @@ cp_fold (tree x)
>>>        case COND_EXPR:
>>>          loc = EXPR_LOCATION (x);
>>>          op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>            if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>>>    	{
>>> @@ -2854,7 +2864,7 @@ cp_fold (tree x)
>>>    	      {
>>>    		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>>>    		  r = build_nop (TREE_TYPE (x), r);
>>> -		x = cp_fold (r);
>>> +		x = cp_fold (r, flags);
>>>    		break;
>>>    	      }
>>>    	  }
>>> @@ -2908,7 +2918,7 @@ cp_fold (tree x)
>>>    	int m = call_expr_nargs (x);
>>>    	for (int i = 0; i < m; i++)
>>>    	  {
>>> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
>>> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>>>    	    if (r != CALL_EXPR_ARG (x, i))
>>>    	      {
>>>    		if (r == error_mark_node)
>>> @@ -2931,7 +2941,7 @@ cp_fold (tree x)
>>>      	if (TREE_CODE (r) != CALL_EXPR)
>>>    	  {
>>> -	    x = cp_fold (r);
>>> +	    x = cp_fold (r, flags);
>>>    	    break;
>>>    	  }
>>>    @@ -2944,7 +2954,15 @@ cp_fold (tree x)
>>>    	   constant, but the call followed by an INDIRECT_REF is.  */
>>>    	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>>>    	    && !flag_no_inline)
>>> -	  r = maybe_constant_value (x);
>>> +	  {
>>> +	    mce_value manifestly_const_eval = mce_unknown;
>>> +	    if (flags & ff_genericize)
>>> +	      /* At genericization time it's safe to fold
>>> +		 __builtin_is_constant_evaluated to false.  */
>>> +	      manifestly_const_eval = mce_false;
>>> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
>>> +				      manifestly_const_eval);
>>> +	  }
>>>    	optimize = sv;
>>>              if (TREE_CODE (r) != CALL_EXPR)
>>> @@ -2971,7 +2989,7 @@ cp_fold (tree x)
>>>    	vec<constructor_elt, va_gc> *nelts = NULL;
>>>    	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>>>    	  {
>>> -	    tree op = cp_fold (p->value);
>>> +	    tree op = cp_fold (p->value, flags);
>>>    	    if (op != p->value)
>>>    	      {
>>>    		if (op == error_mark_node)
>>> @@ -3002,7 +3020,7 @@ cp_fold (tree x)
>>>      	for (int i = 0; i < n; i++)
>>>    	  {
>>> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
>>> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>>>    	    if (op != TREE_VEC_ELT (x, i))
>>>    	      {
>>>    		if (!changed)
>>> @@ -3019,10 +3037,10 @@ cp_fold (tree x)
>>>        case ARRAY_RANGE_REF:
>>>            loc = EXPR_LOCATION (x);
>>> -      op0 = cp_fold (TREE_OPERAND (x, 0));
>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>> -      op3 = cp_fold (TREE_OPERAND (x, 3));
>>> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>>>            if (op0 != TREE_OPERAND (x, 0)
>>>    	  || op1 != TREE_OPERAND (x, 1)
>>> @@ -3050,7 +3068,7 @@ cp_fold (tree x)
>>>          /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
>>>    	 folding, evaluates to an invariant.  In that case no need to wrap
>>>    	 this folded tree with a SAVE_EXPR.  */
>>> -      r = cp_fold (TREE_OPERAND (x, 0));
>>> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>>>          if (tree_invariant_p (r))
>>>    	x = r;
>>>          break;
>>> @@ -3069,7 +3087,7 @@ cp_fold (tree x)
>>>          copy_warning (x, org_x);
>>>        }
>>>    -  if (!c.evaluation_restricted_p ())
>>> +  if (cache_p && !c.evaluation_restricted_p ())
>>>        {
>>>          fold_cache->put (org_x, x);
>>>          /* Prevent that we try to fold an already folded result again.  */
>>> diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
>>> b/gcc/testsuite/g++.dg/opt/pr108243.C
>>> new file mode 100644
>>> index 00000000000..4c45dbba13c
>>> --- /dev/null
>>> +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
>>> @@ -0,0 +1,29 @@
>>> +// PR c++/108243
>>> +// { dg-do compile { target c++11 } }
>>> +// { dg-additional-options "-O -fdump-tree-original" }
>>> +
>>> +constexpr int foo() {
>>> +  return __builtin_is_constant_evaluated() + 1;
>>> +}
>>> +
>>> +#if __cpp_if_consteval
>>> +constexpr int bar() {
>>> +  if consteval {
>>> +    return 5;
>>> +  } else {
>>> +    return 4;
>>> +  }
>>> +}
>>> +#endif
>>> +
>>> +int p, q;
>>> +
>>> +int main() {
>>> +  p = foo();
>>> +#if __cpp_if_consteval
>>> +  q = bar();
>>> +#endif
>>> +}
>>> +
>>> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
>>> +// { dg-final { scan-tree-dump-not "= bar" "original" } }
>>
>> Let's also test a static initializer that can't be fully constant-evaluated.
> 
> D'oh, doing so revealed that cp_fold_function doesn't reach static
> initializers; that's taken care of by cp_fully_fold_init.  So it seems
> we need to make cp_fold, when called from the latter entry point, also
> assume m_c_e is false.  We can't re-use ff_genericize here because that
> flag has additional effects in cp_fold_r, so it seems we need another
> flag that only affects the manifestly constant-eval stuff; I called
> it ff_mce_false.  How does the following look?
> 
> -- >8 --
> 
> Subject: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated
>   [PR108243]
> 
> This PR illustrates that __builtin_is_constant_evaluated currently acts
> as an optimization barrier for our speculative constexpr evaluation,
> since we don't want to prematurely fold the builtin to false if the
> expression in question would be later manifestly constant evaluated (in
> which case it must be folded to true).
> 
> This patch fixes this by permitting __builtin_is_constant_evaluated
> to get folded as false during cp_fold_function and cp_fully_fold_init,
> since at these points we're sure we're done with manifestly constant
> evaluation.  To that end we add a flags parameter to cp_fold that
> controls whether we pass mce_false or mce_unknown to maybe_constant_value
> when folding a CALL_EXPR.
> 
> 	PR c++/108243
> 	PR c++/97553
> 
> gcc/cp/ChangeLog:
> 
> 	* cp-gimplify.cc (enum fold_flags): Define.
> 	(cp_fold_data::genericize): Replace this data member with ...
> 	(cp_fold_data::fold_flags): ... this.
> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> 	(cp_fold_function): Likewise.
> 	(cp_fold_maybe_rvalue): Likewise.
> 	(cp_fully_fold_init): Likewise.
> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> 	isn't empty.
> 	<case CALL_EXPR>: If ff_genericize is set, fold
> 	__builtin_is_constant_evaluated to false and pass mce_false to
> 	maybe_constant_value.
> 
> gcc/testsuite/ChangeLog:
> 
> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> ---
>   gcc/cp/cp-gimplify.cc                         | 88 ++++++++++++-------
>   .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
>   .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
>   3 files changed, 104 insertions(+), 30 deletions(-)
>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index 9929d29981a..590ed787997 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>   #include "omp-general.h"
>   #include "opts.h"
>   
> +/* Flags for cp_fold and cp_fold_r.  */
> +
> +enum fold_flags {
> +  ff_none = 0,
> +  /* Whether we're being called from cp_fold_function.  */
> +  ff_genericize = 1 << 0,
> +  /* Whether we're folding late enough that we could assume
> +     we're definitely not in a manifestly constant-evaluated
> +     context.  */

It's not necessarily a matter of late enough; we could fold sooner and 
still know that, as in cp_fully_fold_init.  We could do the same at 
other full-expression points, but we don't because we want to delay 
folding as much as possible.  So let's say "folding at a point where we 
know we're..."
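
That is, something like (wording suggestion only):

  /* Whether we're folding at a point where we know we're
     definitely not in a manifestly constant-evaluated
     context.  */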

> +  ff_mce_false = 1 << 1,
> +};
> +
> +using fold_flags_t = int;
> +
>   /* Forward declarations.  */
>   
>   static tree cp_genericize_r (tree *, int *, void *);
>   static tree cp_fold_r (tree *, int *, void *);
>   static void cp_genericize_tree (tree*, bool);
> -static tree cp_fold (tree);
> +static tree cp_fold (tree, fold_flags_t);
>   
>   /* Genericize a TRY_BLOCK.  */
>   
> @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>   struct cp_fold_data
>   {
>     hash_set<tree> pset;
> -  bool genericize; // called from cp_fold_function?
> -
> -  cp_fold_data (bool g): genericize (g) {}
> +  fold_flags_t flags;
> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>   };
>   
>   static tree
> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>         break;
>       }
>   
> -  *stmt_p = stmt = cp_fold (*stmt_p);
> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>   
>     if (data->pset.add (stmt))
>       {
> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   	 here rather than in cp_genericize to avoid problems with the invisible
>   	 reference transition.  */
>       case INIT_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>   	cp_genericize_init_expr (stmt_p);
>         break;
>   
>       case TARGET_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>   	cp_genericize_target_expr (stmt_p);
>   
>         /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   void
>   cp_fold_function (tree fndecl)
>   {
> -  cp_fold_data data (/*genericize*/true);
> +  cp_fold_data data (ff_genericize | ff_mce_false);

Here would be a good place for a comment about passing mce_false because 
all manifestly-constant-evaluated expressions will have been 
constant-evaluated already if possible.
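
For instance (just a sketch of the wording):

  /* Pass ff_mce_false: by now all manifestly-constant-evaluated
     expressions will have been constant-evaluated already if
     possible.  */
  cp_fold_data data (ff_genericize | ff_mce_false);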

>     cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>   }
>   
> @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>   {
>     while (true)
>       {
> -      x = cp_fold (x);
> +      x = cp_fold (x, ff_none);
>         if (rval)
>   	x = mark_rvalue_use (x);
>         if (rval && DECL_P (x)
> @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
>     if (processing_template_decl)
>       return x;
>     x = cp_fully_fold (x);
> -  cp_fold_data data (/*genericize*/false);
> +  cp_fold_data data (ff_mce_false);
>     cp_walk_tree (&x, cp_fold_r, &data, NULL);
>     return x;
>   }
> @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
>       Function returns X or its folded variant.  */
>   
>   static tree
> -cp_fold (tree x)
> +cp_fold (tree x, fold_flags_t flags)
>   {
>     tree op0, op1, op2, op3;
>     tree org_x = x, r = NULL_TREE;
> @@ -2506,8 +2519,11 @@ cp_fold (tree x)
>     if (fold_cache == NULL)
>       fold_cache = hash_map<tree, tree>::create_ggc (101);
>   
> -  if (tree *cached = fold_cache->get (x))
> -    return *cached;
> +  bool cache_p = (flags == ff_none);
> +
> +  if (cache_p)
> +    if (tree *cached = fold_cache->get (x))
> +      return *cached;
>   
>     uid_sensitive_constexpr_evaluation_checker c;
>   
> @@ -2542,7 +2558,7 @@ cp_fold (tree x)
>   	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>   	     folding of the operand should be in the caches and if in cp_fold_r
>   	     it will modify it in place.  */
> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>   	  if (op0 == error_mark_node)
>   	    x = error_mark_node;
>   	  break;
> @@ -2587,7 +2603,7 @@ cp_fold (tree x)
>   	{
>   	  tree p = maybe_undo_parenthesized_ref (x);
>   	  if (p != x)
> -	    return cp_fold (p);
> +	    return cp_fold (p, flags);
>   	}
>         goto unary;
>   
> @@ -2779,8 +2795,8 @@ cp_fold (tree x)
>       case COND_EXPR:
>         loc = EXPR_LOCATION (x);
>         op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>   
>         if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>   	{
> @@ -2870,7 +2886,7 @@ cp_fold (tree x)
>   	      {
>   		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>   		  r = build_nop (TREE_TYPE (x), r);
> -		x = cp_fold (r);
> +		x = cp_fold (r, flags);
>   		break;
>   	      }
>   	  }
> @@ -2890,8 +2906,12 @@ cp_fold (tree x)
>   	  {
>   	    switch (DECL_FE_FUNCTION_CODE (callee))
>   	      {
> -		/* Defer folding __builtin_is_constant_evaluated.  */
>   	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> +		/* Defer folding __builtin_is_constant_evaluated unless
> +		   we can assume this isn't a manifestly constant-evaluated

s/can assume/know/

OK with those comment changes.

> +		   context.  */
> +		if (flags & ff_mce_false)
> +		  x = boolean_false_node;
>   		break;
>   	      case CP_BUILT_IN_SOURCE_LOCATION:
>   		x = fold_builtin_source_location (x);
> @@ -2924,7 +2944,7 @@ cp_fold (tree x)
>   	int m = call_expr_nargs (x);
>   	for (int i = 0; i < m; i++)
>   	  {
> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>   	    if (r != CALL_EXPR_ARG (x, i))
>   	      {
>   		if (r == error_mark_node)
> @@ -2947,7 +2967,7 @@ cp_fold (tree x)
>   
>   	if (TREE_CODE (r) != CALL_EXPR)
>   	  {
> -	    x = cp_fold (r);
> +	    x = cp_fold (r, flags);
>   	    break;
>   	  }
>   
> @@ -2960,7 +2980,15 @@ cp_fold (tree x)
>   	   constant, but the call followed by an INDIRECT_REF is.  */
>   	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>   	    && !flag_no_inline)
> -	  r = maybe_constant_value (x);
> +	  {
> +	    mce_value manifestly_const_eval = mce_unknown;
> +	    if (flags & ff_mce_false)
> +	      /* Allow folding __builtin_is_constant_evaluated to false during
> +		 constexpr evaluation of this call.  */
> +	      manifestly_const_eval = mce_false;
> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> +				      manifestly_const_eval);
> +	  }
>   	optimize = sv;
>   
>           if (TREE_CODE (r) != CALL_EXPR)
> @@ -2987,7 +3015,7 @@ cp_fold (tree x)
>   	vec<constructor_elt, va_gc> *nelts = NULL;
>   	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>   	  {
> -	    tree op = cp_fold (p->value);
> +	    tree op = cp_fold (p->value, flags);
>   	    if (op != p->value)
>   	      {
>   		if (op == error_mark_node)
> @@ -3018,7 +3046,7 @@ cp_fold (tree x)
>   
>   	for (int i = 0; i < n; i++)
>   	  {
> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>   	    if (op != TREE_VEC_ELT (x, i))
>   	      {
>   		if (!changed)
> @@ -3035,10 +3063,10 @@ cp_fold (tree x)
>       case ARRAY_RANGE_REF:
>   
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> -      op3 = cp_fold (TREE_OPERAND (x, 3));
> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>   
>         if (op0 != TREE_OPERAND (x, 0)
>   	  || op1 != TREE_OPERAND (x, 1)
> @@ -3066,7 +3094,7 @@ cp_fold (tree x)
>         /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
>   	 folding, evaluates to an invariant.  In that case no need to wrap
>   	 this folded tree with a SAVE_EXPR.  */
> -      r = cp_fold (TREE_OPERAND (x, 0));
> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>         if (tree_invariant_p (r))
>   	x = r;
>         break;
> @@ -3085,7 +3113,7 @@ cp_fold (tree x)
>         copy_warning (x, org_x);
>       }
>   
> -  if (!c.evaluation_restricted_p ())
> +  if (cache_p && !c.evaluation_restricted_p ())
>       {
>         fold_cache->put (org_x, x);
>         /* Prevent that we try to fold an already folded result again.  */
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> new file mode 100644
> index 00000000000..ee05cbab785
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> @@ -0,0 +1,14 @@
> +// PR c++/108243
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +struct A {
> +  constexpr A(int n, int m) : n(n), m(m) { }
> +  int n, m;
> +};
> +
> +void f(int n) {
> +  static A a = {n, __builtin_is_constant_evaluated()};
> +}
> +
> +// { dg-final { scan-tree-dump-not "is_constant_evaluated" "original" } }
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> new file mode 100644
> index 00000000000..ed964e20a7a
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> @@ -0,0 +1,32 @@
> +// PR c++/97553
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +constexpr int foo() {
> +  return __builtin_is_constant_evaluated() + 1;
> +}
> +
> +#if __cpp_if_consteval
> +constexpr int bar() {
> +  if consteval {
> +    return 5;
> +  } else {
> +    return 4;
> +  }
> +}
> +#endif
> +
> +int p, q;
> +
> +int main() {
> +  p = foo();
> +#if __cpp_if_consteval
> +  q = bar();
> +#endif
> +}
> +
> +// { dg-final { scan-tree-dump "p = 1" "original" } }
> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> +
> +// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
> +// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
  
Patrick Palka Feb. 9, 2023, 5:36 p.m. UTC | #6
On Sun, 5 Feb 2023, Jason Merrill wrote:

> On 2/3/23 15:51, Patrick Palka wrote:
> > On Mon, 30 Jan 2023, Jason Merrill wrote:
> > 
> > > On 1/27/23 17:02, Patrick Palka wrote:
> > > > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > since we don't want to prematurely fold the builtin to false if the
> > > > expression in question would be later manifestly constant evaluated (in
> > > > which case it must be folded to true).
> > > > 
> > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > to get folded as false during cp_fold_function, since at that point
> > > > we're sure we're doing manifestly constant evaluation.  To that end
> > > > we add a flags parameter to cp_fold that controls what mce_value the
> > > > CALL_EXPR case passes to maybe_constant_value.
> > > > 
> > > > bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK for
> > > > trunk?
> > > > 
> > > > 	PR c++/108243
> > > > 
> > > > gcc/cp/ChangeLog:
> > > > 
> > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > 	(cp_fold_data::fold_flags): ... this.
> > > > 	(cp_fold_r): Adjust cp_fold_data use and cp_fold calls.
> > > > 	(cp_fold_function): Likewise.
> > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > 	(cp_fully_fold_init): Likewise.
> > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > 	isn't empty.
> > > > 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
> > > > 	if ff_genericize is set.
> > > > 
> > > > gcc/testsuite/ChangeLog:
> > > > 
> > > > 	* g++.dg/opt/pr108243.C: New test.
> > > > ---
> > > >    gcc/cp/cp-gimplify.cc               | 76
> > > > ++++++++++++++++++-----------
> > > >    gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
> > > >    2 files changed, 76 insertions(+), 29 deletions(-)
> > > >    create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
> > > > 
> > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > index a35cedd05cc..d023a63768f 100644
> > > > --- a/gcc/cp/cp-gimplify.cc
> > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not see
> > > >    #include "omp-general.h"
> > > >    #include "opts.h"
> > > >    +/* Flags for cp_fold and cp_fold_r.  */
> > > > +
> > > > +enum fold_flags {
> > > > +  ff_none = 0,
> > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > +  ff_genericize = 1 << 0,
> > > > +};
> > > > +
> > > >    /* Forward declarations.  */
> > > >      static tree cp_genericize_r (tree *, int *, void *);
> > > >    static tree cp_fold_r (tree *, int *, void *);
> > > >    static void cp_genericize_tree (tree*, bool);
> > > > -static tree cp_fold (tree);
> > > > +static tree cp_fold (tree, fold_flags);
> > > >      /* Genericize a TRY_BLOCK.  */
> > > >    @@ -996,9 +1004,8 @@ struct cp_genericize_data
> > > >    struct cp_fold_data
> > > >    {
> > > >      hash_set<tree> pset;
> > > > -  bool genericize; // called from cp_fold_function?
> > > > -
> > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > +  fold_flags flags;
> > > > +  cp_fold_data (fold_flags flags): flags (flags) {}
> > > >    };
> > > >      static tree
> > > > @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > *data_)
> > > >          break;
> > > >        }
> > > >    -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > >        if (data->pset.add (stmt))
> > > >        {
> > > > @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > void
> > > > *data_)
> > > >    	 here rather than in cp_genericize to avoid problems with the
> > > > invisible
> > > >    	 reference transition.  */
> > > >        case INIT_EXPR:
> > > > -      if (data->genericize)
> > > > +      if (data->flags & ff_genericize)
> > > >    	cp_genericize_init_expr (stmt_p);
> > > >          break;
> > > >          case TARGET_EXPR:
> > > > -      if (data->genericize)
> > > > +      if (data->flags & ff_genericize)
> > > >    	cp_genericize_target_expr (stmt_p);
> > > >            /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR;
> > > > in
> > > > @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > *data_)
> > > >    void
> > > >    cp_fold_function (tree fndecl)
> > > >    {
> > > > -  cp_fold_data data (/*genericize*/true);
> > > > +  cp_fold_data data (ff_genericize);
> > > >      cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
> > > >    }
> > > >    @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > >    {
> > > >      while (true)
> > > >        {
> > > > -      x = cp_fold (x);
> > > > +      x = cp_fold (x, ff_none);
> > > >          if (rval)
> > > >    	x = mark_rvalue_use (x);
> > > >          if (rval && DECL_P (x)
> > > > @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
> > > >      if (processing_template_decl)
> > > >        return x;
> > > >      x = cp_fully_fold (x);
> > > > -  cp_fold_data data (/*genericize*/false);
> > > > +  cp_fold_data data (ff_none);
> > > >      cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > >      return x;
> > > >    }
> > > > @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
> > > >        Function returns X or its folded variant.  */
> > > >      static tree
> > > > -cp_fold (tree x)
> > > > +cp_fold (tree x, fold_flags flags)
> > > >    {
> > > >      tree op0, op1, op2, op3;
> > > >      tree org_x = x, r = NULL_TREE;
> > > > @@ -2490,8 +2497,11 @@ cp_fold (tree x)
> > > >      if (fold_cache == NULL)
> > > >        fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > >    -  if (tree *cached = fold_cache->get (x))
> > > > -    return *cached;
> > > > +  bool cache_p = (flags == ff_none);
> > > > +
> > > > +  if (cache_p)
> > > > +    if (tree *cached = fold_cache->get (x))
> > > > +      return *cached;
> > > >        uid_sensitive_constexpr_evaluation_checker c;
> > > >    @@ -2526,7 +2536,7 @@ cp_fold (tree x)
> > > >    	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > the
> > > >    	     folding of the operand should be in the caches and if in
> > > > cp_fold_r
> > > >    	     it will modify it in place.  */
> > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > >    	  if (op0 == error_mark_node)
> > > >    	    x = error_mark_node;
> > > >    	  break;
> > > > @@ -2571,7 +2581,7 @@ cp_fold (tree x)
> > > >    	{
> > > >    	  tree p = maybe_undo_parenthesized_ref (x);
> > > >    	  if (p != x)
> > > > -	    return cp_fold (p);
> > > > +	    return cp_fold (p, flags);
> > > >    	}
> > > >          goto unary;
> > > >    @@ -2763,8 +2773,8 @@ cp_fold (tree x)
> > > >        case COND_EXPR:
> > > >          loc = EXPR_LOCATION (x);
> > > >          op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > >            if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > >    	{
> > > > @@ -2854,7 +2864,7 @@ cp_fold (tree x)
> > > >    	      {
> > > >    		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > >    		  r = build_nop (TREE_TYPE (x), r);
> > > > -		x = cp_fold (r);
> > > > +		x = cp_fold (r, flags);
> > > >    		break;
> > > >    	      }
> > > >    	  }
> > > > @@ -2908,7 +2918,7 @@ cp_fold (tree x)
> > > >    	int m = call_expr_nargs (x);
> > > >    	for (int i = 0; i < m; i++)
> > > >    	  {
> > > > -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> > > > +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
> > > >    	    if (r != CALL_EXPR_ARG (x, i))
> > > >    	      {
> > > >    		if (r == error_mark_node)
> > > > @@ -2931,7 +2941,7 @@ cp_fold (tree x)
> > > >      	if (TREE_CODE (r) != CALL_EXPR)
> > > >    	  {
> > > > -	    x = cp_fold (r);
> > > > +	    x = cp_fold (r, flags);
> > > >    	    break;
> > > >    	  }
> > > >    @@ -2944,7 +2954,15 @@ cp_fold (tree x)
> > > >    	   constant, but the call followed by an INDIRECT_REF is.  */
> > > >    	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> > > >    	    && !flag_no_inline)
> > > > -	  r = maybe_constant_value (x);
> > > > +	  {
> > > > +	    mce_value manifestly_const_eval = mce_unknown;
> > > > +	    if (flags & ff_genericize)
> > > > +	      /* At genericization time it's safe to fold
> > > > +		 __builtin_is_constant_evaluated to false.  */
> > > > +	      manifestly_const_eval = mce_false;
> > > > +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > > > +				      manifestly_const_eval);
> > > > +	  }
> > > >    	optimize = sv;
> > > >              if (TREE_CODE (r) != CALL_EXPR)
> > > > @@ -2971,7 +2989,7 @@ cp_fold (tree x)
> > > >    	vec<constructor_elt, va_gc> *nelts = NULL;
> > > >    	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
> > > >    	  {
> > > > -	    tree op = cp_fold (p->value);
> > > > +	    tree op = cp_fold (p->value, flags);
> > > >    	    if (op != p->value)
> > > >    	      {
> > > >    		if (op == error_mark_node)
> > > > @@ -3002,7 +3020,7 @@ cp_fold (tree x)
> > > >      	for (int i = 0; i < n; i++)
> > > >    	  {
> > > > -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> > > > +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
> > > >    	    if (op != TREE_VEC_ELT (x, i))
> > > >    	      {
> > > >    		if (!changed)
> > > > @@ -3019,10 +3037,10 @@ cp_fold (tree x)
> > > >        case ARRAY_RANGE_REF:
> > > >            loc = EXPR_LOCATION (x);
> > > > -      op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > -      op3 = cp_fold (TREE_OPERAND (x, 3));
> > > > +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
> > > >            if (op0 != TREE_OPERAND (x, 0)
> > > >    	  || op1 != TREE_OPERAND (x, 1)
> > > > @@ -3050,7 +3068,7 @@ cp_fold (tree x)
> > > >          /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which,
> > > > after
> > > >    	 folding, evaluates to an invariant.  In that case no need to
> > > > wrap
> > > >    	 this folded tree with a SAVE_EXPR.  */
> > > > -      r = cp_fold (TREE_OPERAND (x, 0));
> > > > +      r = cp_fold (TREE_OPERAND (x, 0), flags);
> > > >          if (tree_invariant_p (r))
> > > >    	x = r;
> > > >          break;
> > > > @@ -3069,7 +3087,7 @@ cp_fold (tree x)
> > > >          copy_warning (x, org_x);
> > > >        }
> > > >    -  if (!c.evaluation_restricted_p ())
> > > > +  if (cache_p && !c.evaluation_restricted_p ())
> > > >        {
> > > >          fold_cache->put (org_x, x);
> > > >          /* Prevent that we try to fold an already folded result again.
> > > > */
> > > > diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > new file mode 100644
> > > > index 00000000000..4c45dbba13c
> > > > --- /dev/null
> > > > +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > @@ -0,0 +1,29 @@
> > > > +// PR c++/108243
> > > > +// { dg-do compile { target c++11 } }
> > > > +// { dg-additional-options "-O -fdump-tree-original" }
> > > > +
> > > > +constexpr int foo() {
> > > > +  return __builtin_is_constant_evaluated() + 1;
> > > > +}
> > > > +
> > > > +#if __cpp_if_consteval
> > > > +constexpr int bar() {
> > > > +  if consteval {
> > > > +    return 5;
> > > > +  } else {
> > > > +    return 4;
> > > > +  }
> > > > +}
> > > > +#endif
> > > > +
> > > > +int p, q;
> > > > +
> > > > +int main() {
> > > > +  p = foo();
> > > > +#if __cpp_if_consteval
> > > > +  q = bar();
> > > > +#endif
> > > > +}
> > > > +
> > > > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > > > +// { dg-final { scan-tree-dump-not "= bar" "original" } }
> > > 
> > > Let's also test a static initializer that can't be fully
> > > constant-evaluated.
> > 
> > D'oh, doing so revealed that cp_fold_function doesn't reach static
> > initializers; that's taken care of by cp_fully_fold_init.  So it seems
> > we need to make cp_fold, when called from the latter entry point, also
> > assume m_c_e is false.  We can't re-use ff_genericize here because that
> > flag has additional effects in cp_fold_r, so it seems we need another
> > flag that only affects the manifestly constant-eval stuff; I called
> > it ff_mce_false.  How does the following look?
> > 
> > -- >8 --
> > 
> > Subject: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated
> >   [PR108243]
> > 
> > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > as an optimization barrier for our speculative constexpr evaluation,
> > since we don't want to prematurely fold the builtin to false if the
> > expression in question would be later manifestly constant evaluated (in
> > which case it must be folded to true).
> > 
> > This patch fixes this by permitting __builtin_is_constant_evaluated
> > to get folded as false during cp_fold_function and cp_fully_fold_init,
> > since at these points we're sure we're done with manifestly constant
> > evaluation.  To that end we add a flags parameter to cp_fold that
> > controls whether we pass mce_false or mce_unknown to maybe_constant_value
> > when folding a CALL_EXPR.
> > 
> > 	PR c++/108243
> > 	PR c++/97553
> > 
> > gcc/cp/ChangeLog:
> > 
> > 	* cp-gimplify.cc (enum fold_flags): Define.
> > 	(cp_fold_data::genericize): Replace this data member with ...
> > 	(cp_fold_data::fold_flags): ... this.
> > 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> > 	(cp_fold_function): Likewise.
> > 	(cp_fold_maybe_rvalue): Likewise.
> > 	(cp_fully_fold_init): Likewise.
> > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > 	isn't empty.
> > 	<case CALL_EXPR>: If ff_genericize is set, fold
> > 	__builtin_is_constant_evaluated to false and pass mce_false to
> > 	maybe_constant_value.
> > 
> > gcc/testsuite/ChangeLog:
> > 
> > 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> > 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> > ---
> >   gcc/cp/cp-gimplify.cc                         | 88 ++++++++++++-------
> >   .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
> >   .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
> >   3 files changed, 104 insertions(+), 30 deletions(-)
> >   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> >   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > 
> > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > index 9929d29981a..590ed787997 100644
> > --- a/gcc/cp/cp-gimplify.cc
> > +++ b/gcc/cp/cp-gimplify.cc
> > @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
> >   #include "omp-general.h"
> >   #include "opts.h"
> >   +/* Flags for cp_fold and cp_fold_r.  */
> > +
> > +enum fold_flags {
> > +  ff_none = 0,
> > +  /* Whether we're being called from cp_fold_function.  */
> > +  ff_genericize = 1 << 0,
> > +  /* Whether we're folding late enough that we could assume
> > +     we're definitely not in a manifestly constant-evaluated
> > +     context.  */
> 
> It's not necessarily a matter of late enough; we could fold sooner and still
> know that, as in cp_fully_fold_init.  We could do the same at other
> full-expression points, but we don't because we want to delay folding as much
> as possible.  So let's say "folding at a point where we know we're..."
> 
> > +  ff_mce_false = 1 << 1,
> > +};
> > +
> > +using fold_flags_t = int;
> > +
> >   /* Forward declarations.  */
> >     static tree cp_genericize_r (tree *, int *, void *);
> >   static tree cp_fold_r (tree *, int *, void *);
> >   static void cp_genericize_tree (tree*, bool);
> > -static tree cp_fold (tree);
> > +static tree cp_fold (tree, fold_flags_t);
> >     /* Genericize a TRY_BLOCK.  */
> >   @@ -1012,9 +1026,8 @@ struct cp_genericize_data
> >   struct cp_fold_data
> >   {
> >     hash_set<tree> pset;
> > -  bool genericize; // called from cp_fold_function?
> > -
> > -  cp_fold_data (bool g): genericize (g) {}
> > +  fold_flags_t flags;
> > +  cp_fold_data (fold_flags_t flags): flags (flags) {}
> >   };
> >     static tree
> > @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >         break;
> >       }
> >   -  *stmt_p = stmt = cp_fold (*stmt_p);
> > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> >       if (data->pset.add (stmt))
> >       {
> > @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >   	 here rather than in cp_genericize to avoid problems with the
> > invisible
> >   	 reference transition.  */
> >       case INIT_EXPR:
> > -      if (data->genericize)
> > +      if (data->flags & ff_genericize)
> >   	cp_genericize_init_expr (stmt_p);
> >         break;
> >         case TARGET_EXPR:
> > -      if (data->genericize)
> > +      if (data->flags & ff_genericize)
> >   	cp_genericize_target_expr (stmt_p);
> >           /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> > @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >   void
> >   cp_fold_function (tree fndecl)
> >   {
> > -  cp_fold_data data (/*genericize*/true);
> > +  cp_fold_data data (ff_genericize | ff_mce_false);
> 
> Here would be a good place for a comment about passing mce_false because all
> manifestly-constant-evaluated expressions will have been constant-evaluated
> already if possible.
> 
> >     cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
> >   }
> >   @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> >   {
> >     while (true)
> >       {
> > -      x = cp_fold (x);
> > +      x = cp_fold (x, ff_none);
> >         if (rval)
> >   	x = mark_rvalue_use (x);
> >         if (rval && DECL_P (x)
> > @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
> >     if (processing_template_decl)
> >       return x;
> >     x = cp_fully_fold (x);
> > -  cp_fold_data data (/*genericize*/false);
> > +  cp_fold_data data (ff_mce_false);
> >     cp_walk_tree (&x, cp_fold_r, &data, NULL);
> >     return x;
> >   }
> > @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
> >       Function returns X or its folded variant.  */
> >     static tree
> > -cp_fold (tree x)
> > +cp_fold (tree x, fold_flags_t flags)
> >   {
> >     tree op0, op1, op2, op3;
> >     tree org_x = x, r = NULL_TREE;
> > @@ -2506,8 +2519,11 @@ cp_fold (tree x)
> >     if (fold_cache == NULL)
> >       fold_cache = hash_map<tree, tree>::create_ggc (101);
> >   -  if (tree *cached = fold_cache->get (x))
> > -    return *cached;
> > +  bool cache_p = (flags == ff_none);
> > +
> > +  if (cache_p)
> > +    if (tree *cached = fold_cache->get (x))
> > +      return *cached;
> >       uid_sensitive_constexpr_evaluation_checker c;
> >   @@ -2542,7 +2558,7 @@ cp_fold (tree x)
> >   	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
> >   	     folding of the operand should be in the caches and if in
> > cp_fold_r
> >   	     it will modify it in place.  */
> > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> >   	  if (op0 == error_mark_node)
> >   	    x = error_mark_node;
> >   	  break;
> > @@ -2587,7 +2603,7 @@ cp_fold (tree x)
> >   	{
> >   	  tree p = maybe_undo_parenthesized_ref (x);
> >   	  if (p != x)
> > -	    return cp_fold (p);
> > +	    return cp_fold (p, flags);
> >   	}
> >         goto unary;
> >   @@ -2779,8 +2795,8 @@ cp_fold (tree x)
> >       case COND_EXPR:
> >         loc = EXPR_LOCATION (x);
> >         op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> >           if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> >   	{
> > @@ -2870,7 +2886,7 @@ cp_fold (tree x)
> >   	      {
> >   		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> >   		  r = build_nop (TREE_TYPE (x), r);
> > -		x = cp_fold (r);
> > +		x = cp_fold (r, flags);
> >   		break;
> >   	      }
> >   	  }
> > @@ -2890,8 +2906,12 @@ cp_fold (tree x)
> >   	  {
> >   	    switch (DECL_FE_FUNCTION_CODE (callee))
> >   	      {
> > -		/* Defer folding __builtin_is_constant_evaluated.  */
> >   	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> > +		/* Defer folding __builtin_is_constant_evaluated unless
> > +		   we can assume this isn't a manifestly constant-evaluated
> 
> s/can assume/know/
> 
> OK with those comment changes.

Thanks a lot.  Unfortunately I think the patch has a significant problem
that only just occurred to me -- disabling the cp_fold cache when the
flag ff_mce_false is set effectively makes cp_fold_function and
cp_fully_fold_init quadratic in the size of the expression (since
cp_fold_r calls cp_fold on each subtree, and cp_fold when the cache is
disabled will end up fully walking each subtree).  Note that the reason
we must disable the cache is that cp_fold with ff_mce_false might
give a different folded result than without that flag if the expression
contains a suitable CALL_EXPR subexpression.
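
To make the blow-up concrete, here's a minimal standalone sketch (plain
C++, not GCC code; fold and walk_r are just stand-ins for the
cache-disabled cp_fold and for cp_fold_r):

  #include <cstddef>
  #include <vector>

  struct node { std::vector<node *> ops; };

  static std::size_t work = 0;

  /* Stand-in for cp_fold with the cache disabled: folding a node
     re-walks its entire subtree.  */
  static void
  fold (node *n)
  {
    ++work;
    for (node *op : n->ops)
      fold (op);
  }

  /* Stand-in for cp_fold_r: visits every node and folds each one.  */
  static void
  walk_r (node *n)
  {
    fold (n);
    for (node *op : n->ops)
      walk_r (op);
  }

  int
  main ()
  {
    /* A degenerate chain of N nodes costs N + (N-1) + ... + 1 fold
       steps, i.e. O(N^2); with the cache each subtree is folded only
       once, so the walk stays O(N).  */
    std::vector<node> pool (1000);
    for (std::size_t i = 0; i + 1 < pool.size (); ++i)
      pool[i].ops.push_back (&pool[i + 1]);
    walk_r (&pool[0]);
  }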

One approach to fix this complexity issue would be to parameterize the
cache according to the flags that were passed to cp_fold, which would
allow us to keep the cache enabled when ff_mce_false is set.  A downside
to this approach is that the size of the cp_fold cache would essentially
double since for each tree we'd now have two cache entries, one for
flags=ff_none and another for flags=ff_mce_false.
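
Roughly, that parameterization might look like this (a hypothetical
sketch, not part of either patch -- the fold_caches array and the
get_fold_cache helper are made up for illustration):

  /* One cache per flag setting that can change the folded result, so
     a result computed with ff_mce_false is never returned for an
     ff_none lookup.  ff_genericize only affects cp_fold_r, not the
     folded value, so it needn't participate in the key.  */
  static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];

  static hash_map<tree, tree> *&
  get_fold_cache (fold_flags_t flags)
  {
    return fold_caches[(flags & ff_mce_false) ? 1 : 0];
  }

cp_fold would then consult get_fold_cache (flags) for both the lookup
and the insertion, at the cost of the doubled footprint mentioned
above.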

Another approach would be to split out the trial constexpr evaluation
part of cp_fold's CALL_EXPR handling, parameterize that, and call it
directly from cp_fold_r.  With this approach we wouldn't perform as much
folding, e.g.

  int n = 41 + !std::is_constant_evaluated();

would get folded to 1 + 41 rather than 42.  But I suspect this would
give us 95% of the attainable benefits of the above approach.

I think I'm leaning towards this second approach, which the below patch
implements instead.  What do you think?  Bootstrapped and regtested on
x86_64-pc-linux-gnu.

-- >8 --

Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
 [PR108243]

This PR illustrates that __builtin_is_constant_evaluated currently acts
as an optimization barrier for our speculative constexpr evaluation,
since we don't want to prematurely fold the builtin to false before the
expression in question undergoes constant evaluation in a manifestly
constant-evaluated context (in which case the builtin must instead be
folded to true).

This patch fixes this by permitting __builtin_is_constant_evaluated
to get folded to false from cp_fold_r, where we know we're done with
proper constant evaluation (of manifestly constant-evaluated contexts).

	PR c++/108243
	PR c++/97553

gcc/cp/ChangeLog:

	* cp-gimplify.cc
	(cp_fold_r): Remove redundant *stmt_p assignments.  After
	calling cp_fold, call maybe_fold_constexpr_call with mce_false.
	(cp_fold) <case CALL_EXPR>: Split out trial constexpr evaluation
	into ...
	(maybe_fold_constexpr_call): ... here.

gcc/testsuite/ChangeLog:

	* g++.dg/opt/is_constant_evaluated1.C: New test.
	* g++.dg/opt/is_constant_evaluated2.C: New test.
---
 gcc/cp/cp-gimplify.cc                         | 55 +++++++++++++++----
 .../g++.dg/opt/is_constant_evaluated1.C       | 20 +++++++
 .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++++++
 3 files changed, 95 insertions(+), 12 deletions(-)
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C

diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
index 9929d29981a..dca55056b2c 100644
--- a/gcc/cp/cp-gimplify.cc
+++ b/gcc/cp/cp-gimplify.cc
@@ -49,6 +49,7 @@ static tree cp_genericize_r (tree *, int *, void *);
 static tree cp_fold_r (tree *, int *, void *);
 static void cp_genericize_tree (tree*, bool);
 static tree cp_fold (tree);
+static tree maybe_fold_constexpr_call (tree, mce_value);
 
 /* Genericize a TRY_BLOCK.  */
 
@@ -1034,7 +1035,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 	    error_at (PTRMEM_CST_LOCATION (stmt),
 		      "taking address of an immediate function %qD",
 		      PTRMEM_CST_MEMBER (stmt));
-	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
+	  stmt = build_zero_cst (TREE_TYPE (stmt));
 	  break;
 	}
       break;
@@ -1046,7 +1047,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 	  error_at (EXPR_LOCATION (stmt),
 		    "taking address of an immediate function %qD",
 		    TREE_OPERAND (stmt, 0));
-	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
+	  stmt = build_zero_cst (TREE_TYPE (stmt));
 	  break;
 	}
       break;
@@ -1055,7 +1056,17 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
       break;
     }
 
-  *stmt_p = stmt = cp_fold (*stmt_p);
+  stmt = cp_fold (stmt);
+
+  if (TREE_CODE (stmt) == CALL_EXPR)
+    /* Since cp_fold_r is called (from cp_genericize, cp_fold_function
+       and cp_fully_fold_init) only after the overall expression has been
+       considered for constant-evaluation, we can by now safely fold any
+       remaining __builtin_is_constant_evaluated calls to false, so try
+       constexpr expansion with mce_false.  */
+    stmt = maybe_fold_constexpr_call (stmt, mce_false);
+
+  *stmt_p = stmt;
 
   if (data->pset.add (stmt))
     {
@@ -2952,15 +2963,10 @@ cp_fold (tree x)
 	  }
 
 	optimize = nw;
-
-	/* Invoke maybe_constant_value for functions declared
-	   constexpr and not called with AGGR_INIT_EXPRs.
-	   TODO:
-	   Do constexpr expansion of expressions where the call itself is not
-	   constant, but the call followed by an INDIRECT_REF is.  */
-	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
-	    && !flag_no_inline)
-	  r = maybe_constant_value (x);
+	/* Pass mce_unknown to defer folding __builtin_is_constant_evaluated
+	   since we don't know if we're in a manifestly constant-evaluated
+	   context that hasn't yet been constant-evaluated.  */
+	r = maybe_fold_constexpr_call (x, mce_unknown);
 	optimize = sv;
 
         if (TREE_CODE (r) != CALL_EXPR)
@@ -3096,6 +3102,31 @@ cp_fold (tree x)
   return x;
 }
 
+/* If the CALL_EXPR X calls a constexpr function, try expanding it via
+   constexpr evaluation.  Returns the expanded result or X if constexpr
+   evaluation wasn't possible.
+
+   TODO: Do constexpr expansion of expressions where the call itself
+   is not constant, but the call followed by an INDIRECT_REF is.  */
+
+static tree
+maybe_fold_constexpr_call (tree x, mce_value manifestly_const_eval)
+{
+  if (flag_no_inline)
+    return x;
+  tree callee = get_callee_fndecl (x);
+  if (!callee)
+    return x;
+  if (DECL_DECLARED_CONSTEXPR_P (callee))
+    {
+      tree r = maybe_constant_value (x, /*decl=*/NULL_TREE,
+				     manifestly_const_eval);
+      if (TREE_CODE (r) != CALL_EXPR)
+	return r;
+    }
+  return x;
+}
+
 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.  */
 
 tree
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
new file mode 100644
index 00000000000..2123f20e3e5
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
@@ -0,0 +1,20 @@
+// PR c++/108243
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+struct A {
+  constexpr A(int n, int m) : n(n), m(m) { }
+  int n, m;
+};
+
+constexpr int foo(int n) {
+  return n + !__builtin_is_constant_evaluated();
+}
+
+A* f(int n) {
+  static A a = {n, foo(41)};
+  return &a;
+}
+
+// { dg-final { scan-tree-dump "42" "original" } }
+// { dg-final { scan-tree-dump-not "foo \\(41\\)" "original" } }
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
new file mode 100644
index 00000000000..ed964e20a7a
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
@@ -0,0 +1,32 @@
+// PR c++/97553
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+constexpr int foo() {
+  return __builtin_is_constant_evaluated() + 1;
+}
+
+#if __cpp_if_consteval
+constexpr int bar() {
+  if consteval {
+    return 5;
+  } else {
+    return 4;
+  }
+}
+#endif
+
+int p, q;
+
+int main() {
+  p = foo();
+#if __cpp_if_consteval
+  q = bar();
+#endif
+}
+
+// { dg-final { scan-tree-dump "p = 1" "original" } }
+// { dg-final { scan-tree-dump-not "= foo" "original" } }
+
+// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
+// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
  
Jason Merrill Feb. 9, 2023, 11:36 p.m. UTC | #7
On 2/9/23 09:36, Patrick Palka wrote:
> On Sun, 5 Feb 2023, Jason Merrill wrote:
> 
>> On 2/3/23 15:51, Patrick Palka wrote:
>>> On Mon, 30 Jan 2023, Jason Merrill wrote:
>>>
>>>> On 1/27/23 17:02, Patrick Palka wrote:
>>>>> This PR illustrates that __builtin_is_constant_evaluated currently acts
>>>>> as an optimization barrier for our speculative constexpr evaluation,
>>>>> since we don't want to prematurely fold the builtin to false if the
>>>>> expression in question would be later manifestly constant evaluated (in
>>>>> which case it must be folded to true).
>>>>>
>>>>> This patch fixes this by permitting __builtin_is_constant_evaluated
>>>>> to get folded as false during cp_fold_function, since at that point
>>>>> we're sure we're doing manifestly constant evaluation.  To that end
>>>>> we add a flags parameter to cp_fold that controls what mce_value the
>>>>> CALL_EXPR case passes to maybe_constant_value.
>>>>>
>>>>> Bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK for
>>>>> trunk?
>>>>>
>>>>> 	PR c++/108243
>>>>>
>>>>> gcc/cp/ChangeLog:
>>>>>
>>>>> 	* cp-gimplify.cc (enum fold_flags): Define.
>>>>> 	(cp_fold_data::genericize): Replace this data member with ...
>>>>> 	(cp_fold_data::fold_flags): ... this.
>>>>> 	(cp_fold_r): Adjust cp_fold_data use and cp_fold calls.
>>>>> 	(cp_fold_function): Likewise.
>>>>> 	(cp_fold_maybe_rvalue): Likewise.
>>>>> 	(cp_fully_fold_init): Likewise.
>>>>> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
>>>>> 	isn't empty.
>>>>> 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
>>>>> 	if ff_genericize is set.
>>>>>
>>>>> gcc/testsuite/ChangeLog:
>>>>>
>>>>> 	* g++.dg/opt/pr108243.C: New test.
>>>>> ---
>>>>>     gcc/cp/cp-gimplify.cc               | 76
>>>>> ++++++++++++++++++-----------
>>>>>     gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
>>>>>     2 files changed, 76 insertions(+), 29 deletions(-)
>>>>>     create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
>>>>>
>>>>> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
>>>>> index a35cedd05cc..d023a63768f 100644
>>>>> --- a/gcc/cp/cp-gimplify.cc
>>>>> +++ b/gcc/cp/cp-gimplify.cc
>>>>> @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not see
>>>>>     #include "omp-general.h"
>>>>>     #include "opts.h"
>>>>>     +/* Flags for cp_fold and cp_fold_r.  */
>>>>> +
>>>>> +enum fold_flags {
>>>>> +  ff_none = 0,
>>>>> +  /* Whether we're being called from cp_fold_function.  */
>>>>> +  ff_genericize = 1 << 0,
>>>>> +};
>>>>> +
>>>>>     /* Forward declarations.  */
>>>>>       static tree cp_genericize_r (tree *, int *, void *);
>>>>>     static tree cp_fold_r (tree *, int *, void *);
>>>>>     static void cp_genericize_tree (tree*, bool);
>>>>> -static tree cp_fold (tree);
>>>>> +static tree cp_fold (tree, fold_flags);
>>>>>       /* Genericize a TRY_BLOCK.  */
>>>>>     @@ -996,9 +1004,8 @@ struct cp_genericize_data
>>>>>     struct cp_fold_data
>>>>>     {
>>>>>       hash_set<tree> pset;
>>>>> -  bool genericize; // called from cp_fold_function?
>>>>> -
>>>>> -  cp_fold_data (bool g): genericize (g) {}
>>>>> +  fold_flags flags;
>>>>> +  cp_fold_data (fold_flags flags): flags (flags) {}
>>>>>     };
>>>>>       static tree
>>>>> @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>>>> *data_)
>>>>>           break;
>>>>>         }
>>>>>     -  *stmt_p = stmt = cp_fold (*stmt_p);
>>>>> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>>>>>         if (data->pset.add (stmt))
>>>>>         {
>>>>> @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
>>>>> void
>>>>> *data_)
>>>>>     	 here rather than in cp_genericize to avoid problems with the
>>>>> invisible
>>>>>     	 reference transition.  */
>>>>>         case INIT_EXPR:
>>>>> -      if (data->genericize)
>>>>> +      if (data->flags & ff_genericize)
>>>>>     	cp_genericize_init_expr (stmt_p);
>>>>>           break;
>>>>>           case TARGET_EXPR:
>>>>> -      if (data->genericize)
>>>>> +      if (data->flags & ff_genericize)
>>>>>     	cp_genericize_target_expr (stmt_p);
>>>>>             /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR;
>>>>> in
>>>>> @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>>>> *data_)
>>>>>     void
>>>>>     cp_fold_function (tree fndecl)
>>>>>     {
>>>>> -  cp_fold_data data (/*genericize*/true);
>>>>> +  cp_fold_data data (ff_genericize);
>>>>>       cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>>>>>     }
>>>>>     @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>>>>>     {
>>>>>       while (true)
>>>>>         {
>>>>> -      x = cp_fold (x);
>>>>> +      x = cp_fold (x, ff_none);
>>>>>           if (rval)
>>>>>     	x = mark_rvalue_use (x);
>>>>>           if (rval && DECL_P (x)
>>>>> @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
>>>>>       if (processing_template_decl)
>>>>>         return x;
>>>>>       x = cp_fully_fold (x);
>>>>> -  cp_fold_data data (/*genericize*/false);
>>>>> +  cp_fold_data data (ff_none);
>>>>>       cp_walk_tree (&x, cp_fold_r, &data, NULL);
>>>>>       return x;
>>>>>     }
>>>>> @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
>>>>>         Function returns X or its folded variant.  */
>>>>>       static tree
>>>>> -cp_fold (tree x)
>>>>> +cp_fold (tree x, fold_flags flags)
>>>>>     {
>>>>>       tree op0, op1, op2, op3;
>>>>>       tree org_x = x, r = NULL_TREE;
>>>>> @@ -2490,8 +2497,11 @@ cp_fold (tree x)
>>>>>       if (fold_cache == NULL)
>>>>>         fold_cache = hash_map<tree, tree>::create_ggc (101);
>>>>>     -  if (tree *cached = fold_cache->get (x))
>>>>> -    return *cached;
>>>>> +  bool cache_p = (flags == ff_none);
>>>>> +
>>>>> +  if (cache_p)
>>>>> +    if (tree *cached = fold_cache->get (x))
>>>>> +      return *cached;
>>>>>         uid_sensitive_constexpr_evaluation_checker c;
>>>>>     @@ -2526,7 +2536,7 @@ cp_fold (tree x)
>>>>>     	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
>>>>> the
>>>>>     	     folding of the operand should be in the caches and if in
>>>>> cp_fold_r
>>>>>     	     it will modify it in place.  */
>>>>> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
>>>>> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>>     	  if (op0 == error_mark_node)
>>>>>     	    x = error_mark_node;
>>>>>     	  break;
>>>>> @@ -2571,7 +2581,7 @@ cp_fold (tree x)
>>>>>     	{
>>>>>     	  tree p = maybe_undo_parenthesized_ref (x);
>>>>>     	  if (p != x)
>>>>> -	    return cp_fold (p);
>>>>> +	    return cp_fold (p, flags);
>>>>>     	}
>>>>>           goto unary;
>>>>>     @@ -2763,8 +2773,8 @@ cp_fold (tree x)
>>>>>         case COND_EXPR:
>>>>>           loc = EXPR_LOCATION (x);
>>>>>           op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
>>>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>>>             if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>>>>>     	{
>>>>> @@ -2854,7 +2864,7 @@ cp_fold (tree x)
>>>>>     	      {
>>>>>     		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>>>>>     		  r = build_nop (TREE_TYPE (x), r);
>>>>> -		x = cp_fold (r);
>>>>> +		x = cp_fold (r, flags);
>>>>>     		break;
>>>>>     	      }
>>>>>     	  }
>>>>> @@ -2908,7 +2918,7 @@ cp_fold (tree x)
>>>>>     	int m = call_expr_nargs (x);
>>>>>     	for (int i = 0; i < m; i++)
>>>>>     	  {
>>>>> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
>>>>> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>>>>>     	    if (r != CALL_EXPR_ARG (x, i))
>>>>>     	      {
>>>>>     		if (r == error_mark_node)
>>>>> @@ -2931,7 +2941,7 @@ cp_fold (tree x)
>>>>>       	if (TREE_CODE (r) != CALL_EXPR)
>>>>>     	  {
>>>>> -	    x = cp_fold (r);
>>>>> +	    x = cp_fold (r, flags);
>>>>>     	    break;
>>>>>     	  }
>>>>>     @@ -2944,7 +2954,15 @@ cp_fold (tree x)
>>>>>     	   constant, but the call followed by an INDIRECT_REF is.  */
>>>>>     	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>>>>>     	    && !flag_no_inline)
>>>>> -	  r = maybe_constant_value (x);
>>>>> +	  {
>>>>> +	    mce_value manifestly_const_eval = mce_unknown;
>>>>> +	    if (flags & ff_genericize)
>>>>> +	      /* At genericization time it's safe to fold
>>>>> +		 __builtin_is_constant_evaluated to false.  */
>>>>> +	      manifestly_const_eval = mce_false;
>>>>> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
>>>>> +				      manifestly_const_eval);
>>>>> +	  }
>>>>>     	optimize = sv;
>>>>>               if (TREE_CODE (r) != CALL_EXPR)
>>>>> @@ -2971,7 +2989,7 @@ cp_fold (tree x)
>>>>>     	vec<constructor_elt, va_gc> *nelts = NULL;
>>>>>     	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>>>>>     	  {
>>>>> -	    tree op = cp_fold (p->value);
>>>>> +	    tree op = cp_fold (p->value, flags);
>>>>>     	    if (op != p->value)
>>>>>     	      {
>>>>>     		if (op == error_mark_node)
>>>>> @@ -3002,7 +3020,7 @@ cp_fold (tree x)
>>>>>       	for (int i = 0; i < n; i++)
>>>>>     	  {
>>>>> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
>>>>> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>>>>>     	    if (op != TREE_VEC_ELT (x, i))
>>>>>     	      {
>>>>>     		if (!changed)
>>>>> @@ -3019,10 +3037,10 @@ cp_fold (tree x)
>>>>>         case ARRAY_RANGE_REF:
>>>>>             loc = EXPR_LOCATION (x);
>>>>> -      op0 = cp_fold (TREE_OPERAND (x, 0));
>>>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>>>> -      op3 = cp_fold (TREE_OPERAND (x, 3));
>>>>> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>>> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>>>>>             if (op0 != TREE_OPERAND (x, 0)
>>>>>     	  || op1 != TREE_OPERAND (x, 1)
>>>>> @@ -3050,7 +3068,7 @@ cp_fold (tree x)
>>>>>           /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which,
>>>>> after
>>>>>     	 folding, evaluates to an invariant.  In that case no need to
>>>>> wrap
>>>>>     	 this folded tree with a SAVE_EXPR.  */
>>>>> -      r = cp_fold (TREE_OPERAND (x, 0));
>>>>> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>>           if (tree_invariant_p (r))
>>>>>     	x = r;
>>>>>           break;
>>>>> @@ -3069,7 +3087,7 @@ cp_fold (tree x)
>>>>>           copy_warning (x, org_x);
>>>>>         }
>>>>>     -  if (!c.evaluation_restricted_p ())
>>>>> +  if (cache_p && !c.evaluation_restricted_p ())
>>>>>         {
>>>>>           fold_cache->put (org_x, x);
>>>>>           /* Prevent that we try to fold an already folded result again.
>>>>> */
>>>>> diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
>>>>> b/gcc/testsuite/g++.dg/opt/pr108243.C
>>>>> new file mode 100644
>>>>> index 00000000000..4c45dbba13c
>>>>> --- /dev/null
>>>>> +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
>>>>> @@ -0,0 +1,29 @@
>>>>> +// PR c++/108243
>>>>> +// { dg-do compile { target c++11 } }
>>>>> +// { dg-additional-options "-O -fdump-tree-original" }
>>>>> +
>>>>> +constexpr int foo() {
>>>>> +  return __builtin_is_constant_evaluated() + 1;
>>>>> +}
>>>>> +
>>>>> +#if __cpp_if_consteval
>>>>> +constexpr int bar() {
>>>>> +  if consteval {
>>>>> +    return 5;
>>>>> +  } else {
>>>>> +    return 4;
>>>>> +  }
>>>>> +}
>>>>> +#endif
>>>>> +
>>>>> +int p, q;
>>>>> +
>>>>> +int main() {
>>>>> +  p = foo();
>>>>> +#if __cpp_if_consteval
>>>>> +  q = bar();
>>>>> +#endif
>>>>> +}
>>>>> +
>>>>> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
>>>>> +// { dg-final { scan-tree-dump-not "= bar" "original" } }
>>>>
>>>> Let's also test a static initializer that can't be fully
>>>> constant-evaluated.
>>>
>>> D'oh, doing so revealed that cp_fold_function doesn't reach static
>>> initializers; that's taken care of by cp_fully_fold_init.  So it seems
>>> we need cp_fold, when called from the latter entry point, to also
>>> assume m_c_e is false.  We can't re-use ff_genericize here because that
>>> flag has additional effects in cp_fold_r, so it seems we need another
>>> flag that only affects the manifestly constant-eval stuff; I called
>>> it ff_mce_false.  How does the following look?
>>>
>>> -- >8 --
>>>
>>> Subject: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated
>>>    [PR108243]
>>>
>>> This PR illustrates that __builtin_is_constant_evaluated currently acts
>>> as an optimization barrier for our speculative constexpr evaluation,
>>> since we don't want to prematurely fold the builtin to false if the
>>> expression in question would be later manifestly constant evaluated (in
>>> which case it must be folded to true).
>>>
>>> This patch fixes this by permitting __builtin_is_constant_evaluated
>>> to get folded as false during cp_fold_function and cp_fully_fold_init,
>>> since at these points we're sure we're done with manifestly constant
>>> evaluation.  To that end we add a flags parameter to cp_fold that
>>> controls whether we pass mce_false or mce_unknown to maybe_constant_value
>>> when folding a CALL_EXPR.
>>>
>>> 	PR c++/108243
>>> 	PR c++/97553
>>>
>>> gcc/cp/ChangeLog:
>>>
>>> 	* cp-gimplify.cc (enum fold_flags): Define.
>>> 	(cp_fold_data::genericize): Replace this data member with ...
>>> 	(cp_fold_data::fold_flags): ... this.
>>> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
>>> 	(cp_fold_function): Likewise.
>>> 	(cp_fold_maybe_rvalue): Likewise.
>>> 	(cp_fully_fold_init): Likewise.
>>> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
>>> 	isn't empty.
>>> 	<case CALL_EXPR>: If ff_genericize is set, fold
>>> 	__builtin_is_constant_evaluated to false and pass mce_false to
>>> 	maybe_constant_value.
>>>
>>> gcc/testsuite/ChangeLog:
>>>
>>> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
>>> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
>>> ---
>>>    gcc/cp/cp-gimplify.cc                         | 88 ++++++++++++-------
>>>    .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
>>>    .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
>>>    3 files changed, 104 insertions(+), 30 deletions(-)
>>>    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>>>    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
>>>
>>> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
>>> index 9929d29981a..590ed787997 100644
>>> --- a/gcc/cp/cp-gimplify.cc
>>> +++ b/gcc/cp/cp-gimplify.cc
>>> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>>>    #include "omp-general.h"
>>>    #include "opts.h"
>>>    +/* Flags for cp_fold and cp_fold_r.  */
>>> +
>>> +enum fold_flags {
>>> +  ff_none = 0,
>>> +  /* Whether we're being called from cp_fold_function.  */
>>> +  ff_genericize = 1 << 0,
>>> +  /* Whether we're folding late enough that we could assume
>>> +     we're definitely not in a manifestly constant-evaluated
>>> +     context.  */
>>
>> It's not necessarily a matter of late enough; we could fold sooner and still
>> know that, as in cp_fully_fold_init.  We could do the same at other
>> full-expression points, but we don't because we want to delay folding as much
>> as possible.  So let's say "folding at a point where we know we're..."
>>
>>> +  ff_mce_false = 1 << 1,
>>> +};
>>> +
>>> +using fold_flags_t = int;
>>> +
>>>    /* Forward declarations.  */
>>>      static tree cp_genericize_r (tree *, int *, void *);
>>>    static tree cp_fold_r (tree *, int *, void *);
>>>    static void cp_genericize_tree (tree*, bool);
>>> -static tree cp_fold (tree);
>>> +static tree cp_fold (tree, fold_flags_t);
>>>      /* Genericize a TRY_BLOCK.  */
>>>    @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>>>    struct cp_fold_data
>>>    {
>>>      hash_set<tree> pset;
>>> -  bool genericize; // called from cp_fold_function?
>>> -
>>> -  cp_fold_data (bool g): genericize (g) {}
>>> +  fold_flags_t flags;
>>> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>>>    };
>>>      static tree
>>> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>> *data_)
>>>          break;
>>>        }
>>>    -  *stmt_p = stmt = cp_fold (*stmt_p);
>>> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>>>        if (data->pset.add (stmt))
>>>        {
>>> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>> *data_)
>>>    	 here rather than in cp_genericize to avoid problems with the
>>> invisible
>>>    	 reference transition.  */
>>>        case INIT_EXPR:
>>> -      if (data->genericize)
>>> +      if (data->flags & ff_genericize)
>>>    	cp_genericize_init_expr (stmt_p);
>>>          break;
>>>          case TARGET_EXPR:
>>> -      if (data->genericize)
>>> +      if (data->flags & ff_genericize)
>>>    	cp_genericize_target_expr (stmt_p);
>>>            /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
>>> @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>> *data_)
>>>    void
>>>    cp_fold_function (tree fndecl)
>>>    {
>>> -  cp_fold_data data (/*genericize*/true);
>>> +  cp_fold_data data (ff_genericize | ff_mce_false);
>>
>> Here would be a good place for a comment about passing mce_false because all
>> manifestly-constant-evaluated expressions will have been constant-evaluated
>> already if possible.
>>
>>>      cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>>>    }
>>>    @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>>>    {
>>>      while (true)
>>>        {
>>> -      x = cp_fold (x);
>>> +      x = cp_fold (x, ff_none);
>>>          if (rval)
>>>    	x = mark_rvalue_use (x);
>>>          if (rval && DECL_P (x)
>>> @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
>>>      if (processing_template_decl)
>>>        return x;
>>>      x = cp_fully_fold (x);
>>> -  cp_fold_data data (/*genericize*/false);
>>> +  cp_fold_data data (ff_mce_false);
>>>      cp_walk_tree (&x, cp_fold_r, &data, NULL);
>>>      return x;
>>>    }
>>> @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
>>>        Function returns X or its folded variant.  */
>>>      static tree
>>> -cp_fold (tree x)
>>> +cp_fold (tree x, fold_flags_t flags)
>>>    {
>>>      tree op0, op1, op2, op3;
>>>      tree org_x = x, r = NULL_TREE;
>>> @@ -2506,8 +2519,11 @@ cp_fold (tree x)
>>>      if (fold_cache == NULL)
>>>        fold_cache = hash_map<tree, tree>::create_ggc (101);
>>>    -  if (tree *cached = fold_cache->get (x))
>>> -    return *cached;
>>> +  bool cache_p = (flags == ff_none);
>>> +
>>> +  if (cache_p)
>>> +    if (tree *cached = fold_cache->get (x))
>>> +      return *cached;
>>>        uid_sensitive_constexpr_evaluation_checker c;
>>>    @@ -2542,7 +2558,7 @@ cp_fold (tree x)
>>>    	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>>>    	     folding of the operand should be in the caches and if in
>>> cp_fold_r
>>>    	     it will modify it in place.  */
>>> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
>>> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>    	  if (op0 == error_mark_node)
>>>    	    x = error_mark_node;
>>>    	  break;
>>> @@ -2587,7 +2603,7 @@ cp_fold (tree x)
>>>    	{
>>>    	  tree p = maybe_undo_parenthesized_ref (x);
>>>    	  if (p != x)
>>> -	    return cp_fold (p);
>>> +	    return cp_fold (p, flags);
>>>    	}
>>>          goto unary;
>>>    @@ -2779,8 +2795,8 @@ cp_fold (tree x)
>>>        case COND_EXPR:
>>>          loc = EXPR_LOCATION (x);
>>>          op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>            if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>>>    	{
>>> @@ -2870,7 +2886,7 @@ cp_fold (tree x)
>>>    	      {
>>>    		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>>>    		  r = build_nop (TREE_TYPE (x), r);
>>> -		x = cp_fold (r);
>>> +		x = cp_fold (r, flags);
>>>    		break;
>>>    	      }
>>>    	  }
>>> @@ -2890,8 +2906,12 @@ cp_fold (tree x)
>>>    	  {
>>>    	    switch (DECL_FE_FUNCTION_CODE (callee))
>>>    	      {
>>> -		/* Defer folding __builtin_is_constant_evaluated.  */
>>>    	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
>>> +		/* Defer folding __builtin_is_constant_evaluated unless
>>> +		   we can assume this isn't a manifestly constant-evaluated
>>
>> s/can assume/know/
>>
>> OK with those comment changes.
> 
> Thanks a lot.  Unfortunately I think the patch has a significant problem
> that only just occurred to me -- disabling the cp_fold cache when the
> flag ff_mce_false is set effectively makes cp_fold_function and
> cp_fully_fold_init quadratic in the size of the expression (since
> cp_fold_r calls cp_fold on each subtree, and cp_fold when the cache is
> disabled will end up fully walking each subtree).  Note that the reason
> we must disable the cache is that cp_fold with ff_mce_false might
> give a different folded result than without that flag if the expression
> contains a suitable CALL_EXPR subexpression.

Good point.

> One approach to fix this complexity issue would be to parameterize the
> cache according to the flags that were passed to cp_fold, which would
> allow us to keep the cache enabled when ff_mce_false is set.  A downside
> to this approach is that the size of the cp_fold cache would essentially
> double since for each tree we'd now have two cache entries, one for
> flags=ff_none and another for flags=ff_mce_false.

We could also clear the cache before cp_fold_function since the two 
folds shouldn't overlap (much).
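
Something like this, say (just a sketch against the earlier flags-based
version, reusing the existing clear_fold_cache helper; I haven't
measured the compile-time impact of the flush):

  void
  cp_fold_function (tree fndecl)
  {
    /* Drop entries cached under different flags so a stale ff_none
       result can't be returned during this mce_false walk.  */
    clear_fold_cache ();
    cp_fold_data data (ff_genericize | ff_mce_false);
    cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
  }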

> Another approach would be to split out the trial constexpr evaluation
> part of cp_fold's CALL_EXPR handling, parameterize that, and call it
> directly from cp_fold_r.  With this approach we wouldn't perform as much
> folding, e.g.
> 
>    int n = 41 + !std::is_constant_evaluated();
> 
> would get folded to 1 + 41 rather than 42.  But I suspect this would
> give us 95% of the attainable benefits of the above approach.
> 
> I think I'm leaning towards this second approach, which the below patch
> implements instead.  What do you think?  Bootstrapped and regtested on
> x86_64-pc-linux-gnu.

That sounds reasonable, but...

> -- >8 --
> 
> Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
>   [PR108243]
> 
> This PR illustrates that __builtin_is_constant_evaluated currently acts
> as an optimization barrier for our speculative constexpr evaluation,
> since we don't want to prematurely fold the builtin to false before the
> expression in question undergoes constant evaluation in a manifestly
> constant-evaluated context (in which case the builtin must instead be
> folded to true).
> 
> This patch fixes this by permitting __builtin_is_constant_evaluated
> to get folded to false from cp_fold_r, where we know we're done with
> proper constant evaluation (of manifestly constant-evaluated contexts).
> 
> 	PR c++/108243
> 	PR c++/97553
> 
> gcc/cp/ChangeLog:
> 
> 	* cp-gimplify.cc
> 	(cp_fold_r): Remove redundant *stmt_p assignments.  After
> 	calling cp_fold, call maybe_fold_constexpr_call with mce_false.
> 	(cp_fold) <case CALL_EXPR>: Split out trial constexpr evaluation
> 	into ...
> 	(maybe_fold_constexpr_call): ... here.
> 
> gcc/testsuite/ChangeLog:
> 
> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> ---
>   gcc/cp/cp-gimplify.cc                         | 55 +++++++++++++++----
>   .../g++.dg/opt/is_constant_evaluated1.C       | 20 +++++++
>   .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++++++
>   3 files changed, 95 insertions(+), 12 deletions(-)
>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index 9929d29981a..dca55056b2c 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -49,6 +49,7 @@ static tree cp_genericize_r (tree *, int *, void *);
>   static tree cp_fold_r (tree *, int *, void *);
>   static void cp_genericize_tree (tree*, bool);
>   static tree cp_fold (tree);
> +static tree maybe_fold_constexpr_call (tree, mce_value);
>   
>   /* Genericize a TRY_BLOCK.  */
>   
> @@ -1034,7 +1035,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   	    error_at (PTRMEM_CST_LOCATION (stmt),
>   		      "taking address of an immediate function %qD",
>   		      PTRMEM_CST_MEMBER (stmt));
> -	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
> +	  stmt = build_zero_cst (TREE_TYPE (stmt));
>   	  break;
>   	}
>         break;
> @@ -1046,7 +1047,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   	  error_at (EXPR_LOCATION (stmt),
>   		    "taking address of an immediate function %qD",
>   		    TREE_OPERAND (stmt, 0));
> -	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
> +	  stmt = build_zero_cst (TREE_TYPE (stmt));
>   	  break;
>   	}
>         break;
> @@ -1055,7 +1056,17 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>         break;
>       }
>   
> -  *stmt_p = stmt = cp_fold (*stmt_p);
> +  stmt = cp_fold (stmt);
> +
> +  if (TREE_CODE (stmt) == CALL_EXPR)
> +    /* Since cp_fold_r is called (from cp_genericize, cp_fold_function
> +       and cp_fully_fold_init) only after the overall expression has been
> +       considered for constant-evaluation, we can by now safely fold any
> +       remaining __builtin_is_constant_evaluated calls to false, so try
> +       constexpr expansion with mce_false.  */
> +    stmt = maybe_fold_constexpr_call (stmt, mce_false);
> +
> +  *stmt_p = stmt;
>   
>     if (data->pset.add (stmt))
>       {
> @@ -2952,15 +2963,10 @@ cp_fold (tree x)
>   	  }
>   
>   	optimize = nw;
> -
> -	/* Invoke maybe_constant_value for functions declared
> -	   constexpr and not called with AGGR_INIT_EXPRs.
> -	   TODO:
> -	   Do constexpr expansion of expressions where the call itself is not
> -	   constant, but the call followed by an INDIRECT_REF is.  */
> -	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> -	    && !flag_no_inline)
> -	  r = maybe_constant_value (x);
> +	/* Pass mce_unknown to defer folding __builtin_is_constant_evaluated
> +	   since we don't know if we're in a manifestly constant-evaluated
> +	   context that hasn't yet been constant-evaluated.  */
> +	r = maybe_fold_constexpr_call (x, mce_unknown);

It seems unfortunate to try to fold both here and in cp_fold_r.

Does this patch still fold __builtin_is_constant_evaluated() even though 
it no longer touches the cp_fold builtin handling?

>   	optimize = sv;
>   
>           if (TREE_CODE (r) != CALL_EXPR)
> @@ -3096,6 +3102,31 @@ cp_fold (tree x)
>     return x;
>   }
>   
> +/* If the CALL_EXPR X calls a constexpr function, try expanding it via
> +   constexpr evaluation.  Returns the expanded result or X if constexpr
> +   evaluation wasn't possible.
> +
> +   TODO: Do constexpr expansion of expressions where the call itself
> +   is not constant, but the call followed by an INDIRECT_REF is.  */
> +
> +static tree
> +maybe_fold_constexpr_call (tree x, mce_value manifestly_const_eval)
> +{
> +  if (flag_no_inline)
> +    return x;
> +  tree callee = get_callee_fndecl (x);
> +  if (!callee)
> +    return x;
> +  if (DECL_DECLARED_CONSTEXPR_P (callee))
> +    {
> +      tree r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> +				     manifestly_const_eval);
> +      if (TREE_CODE (r) != CALL_EXPR)
> +	return r;
> +    }
> +  return x;
> +}
> +
>   /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.  */
>   
>   tree
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> new file mode 100644
> index 00000000000..2123f20e3e5
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> @@ -0,0 +1,20 @@
> +// PR c++/108243
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +struct A {
> +  constexpr A(int n, int m) : n(n), m(m) { }
> +  int n, m;
> +};
> +
> +constexpr int foo(int n) {
> +  return n + !__builtin_is_constant_evaluated();
> +}
> +
> +A* f(int n) {
> +  static A a = {n, foo(41)};
> +  return &a;
> +}
> +
> +// { dg-final { scan-tree-dump "42" "original" } }
> +// { dg-final { scan-tree-dump-not "foo \\(41\\)" "original" } }
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> new file mode 100644
> index 00000000000..ed964e20a7a
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> @@ -0,0 +1,32 @@
> +// PR c++/97553
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +constexpr int foo() {
> +  return __builtin_is_constant_evaluated() + 1;
> +}
> +
> +#if __cpp_if_consteval
> +constexpr int bar() {
> +  if consteval {
> +    return 5;
> +  } else {
> +    return 4;
> +  }
> +}
> +#endif
> +
> +int p, q;
> +
> +int main() {
> +  p = foo();
> +#if __cpp_if_consteval
> +  q = bar();
> +#endif
> +}
> +
> +// { dg-final { scan-tree-dump "p = 1" "original" } }
> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> +
> +// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
> +// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
  
Patrick Palka Feb. 10, 2023, 1:32 a.m. UTC | #8
On Thu, 9 Feb 2023, Jason Merrill wrote:

> On 2/9/23 09:36, Patrick Palka wrote:
> > On Sun, 5 Feb 2023, Jason Merrill wrote:
> > 
> > > On 2/3/23 15:51, Patrick Palka wrote:
> > > > On Mon, 30 Jan 2023, Jason Merrill wrote:
> > > > 
> > > > > On 1/27/23 17:02, Patrick Palka wrote:
> > > > > > This PR illustrates that __builtin_is_constant_evaluated currently
> > > > > > acts
> > > > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > > > since we don't want to prematurely fold the builtin to false if the
> > > > > > expression in question would be later manifestly constant evaluated
> > > > > > (in
> > > > > > which case it must be folded to true).
> > > > > > 
> > > > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > > > to get folded as false during cp_fold_function, since at that point
> > > > > > we're sure we're doing manifestly constant evaluation.  To that end
> > > > > > we add a flags parameter to cp_fold that controls what mce_value the
> > > > > > CALL_EXPR case passes to maybe_constant_value.
> > > > > > 
> > > > > > Bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK
> > > > > > for
> > > > > > trunk?
> > > > > > 
> > > > > > 	PR c++/108243
> > > > > > 
> > > > > > gcc/cp/ChangeLog:
> > > > > > 
> > > > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > > > 	(cp_fold_data::fold_flags): ... this.
> > > > > > 	(cp_fold_r): Adjust cp_fold_data use and cp_fold calls.
> > > > > > 	(cp_fold_function): Likewise.
> > > > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > > > 	(cp_fully_fold_init): Likewise.
> > > > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > > > 	isn't empty.
> > > > > > 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
> > > > > > 	if ff_genericize is set.
> > > > > > 
> > > > > > gcc/testsuite/ChangeLog:
> > > > > > 
> > > > > > 	* g++.dg/opt/pr108243.C: New test.
> > > > > > ---
> > > > > >     gcc/cp/cp-gimplify.cc               | 76
> > > > > > ++++++++++++++++++-----------
> > > > > >     gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
> > > > > >     2 files changed, 76 insertions(+), 29 deletions(-)
> > > > > >     create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > 
> > > > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > > > index a35cedd05cc..d023a63768f 100644
> > > > > > --- a/gcc/cp/cp-gimplify.cc
> > > > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > > > @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not
> > > > > > see
> > > > > >     #include "omp-general.h"
> > > > > >     #include "opts.h"
> > > > > >     +/* Flags for cp_fold and cp_fold_r.  */
> > > > > > +
> > > > > > +enum fold_flags {
> > > > > > +  ff_none = 0,
> > > > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > > > +  ff_genericize = 1 << 0,
> > > > > > +};
> > > > > > +
> > > > > >     /* Forward declarations.  */
> > > > > >       static tree cp_genericize_r (tree *, int *, void *);
> > > > > >     static tree cp_fold_r (tree *, int *, void *);
> > > > > >     static void cp_genericize_tree (tree*, bool);
> > > > > > -static tree cp_fold (tree);
> > > > > > +static tree cp_fold (tree, fold_flags);
> > > > > >       /* Genericize a TRY_BLOCK.  */
> > > > > >     @@ -996,9 +1004,8 @@ struct cp_genericize_data
> > > > > >     struct cp_fold_data
> > > > > >     {
> > > > > >       hash_set<tree> pset;
> > > > > > -  bool genericize; // called from cp_fold_function?
> > > > > > -
> > > > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > > > +  fold_flags flags;
> > > > > > +  cp_fold_data (fold_flags flags): flags (flags) {}
> > > > > >     };
> > > > > >       static tree
> > > > > > @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > void
> > > > > > *data_)
> > > > > >           break;
> > > > > >         }
> > > > > >     -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > > > >         if (data->pset.add (stmt))
> > > > > >         {
> > > > > > @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > void
> > > > > > *data_)
> > > > > >     	 here rather than in cp_genericize to avoid problems with the
> > > > > > invisible
> > > > > >     	 reference transition.  */
> > > > > >         case INIT_EXPR:
> > > > > > -      if (data->genericize)
> > > > > > +      if (data->flags & ff_genericize)
> > > > > >     	cp_genericize_init_expr (stmt_p);
> > > > > >           break;
> > > > > >           case TARGET_EXPR:
> > > > > > -      if (data->genericize)
> > > > > > +      if (data->flags & ff_genericize)
> > > > > >     	cp_genericize_target_expr (stmt_p);
> > > > > >             /* Folding might replace e.g. a COND_EXPR with a
> > > > > > TARGET_EXPR;
> > > > > > in
> > > > > > @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > void
> > > > > > *data_)
> > > > > >     void
> > > > > >     cp_fold_function (tree fndecl)
> > > > > >     {
> > > > > > -  cp_fold_data data (/*genericize*/true);
> > > > > > +  cp_fold_data data (ff_genericize);
> > > > > >       cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data,
> > > > > > NULL);
> > > > > >     }
> > > > > >     @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > > > >     {
> > > > > >       while (true)
> > > > > >         {
> > > > > > -      x = cp_fold (x);
> > > > > > +      x = cp_fold (x, ff_none);
> > > > > >           if (rval)
> > > > > >     	x = mark_rvalue_use (x);
> > > > > >           if (rval && DECL_P (x)
> > > > > > @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
> > > > > >       if (processing_template_decl)
> > > > > >         return x;
> > > > > >       x = cp_fully_fold (x);
> > > > > > -  cp_fold_data data (/*genericize*/false);
> > > > > > +  cp_fold_data data (ff_none);
> > > > > >       cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > > > >       return x;
> > > > > >     }
> > > > > > @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
> > > > > >         Function returns X or its folded variant.  */
> > > > > >       static tree
> > > > > > -cp_fold (tree x)
> > > > > > +cp_fold (tree x, fold_flags flags)
> > > > > >     {
> > > > > >       tree op0, op1, op2, op3;
> > > > > >       tree org_x = x, r = NULL_TREE;
> > > > > > @@ -2490,8 +2497,11 @@ cp_fold (tree x)
> > > > > >       if (fold_cache == NULL)
> > > > > >         fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > > > >     -  if (tree *cached = fold_cache->get (x))
> > > > > > -    return *cached;
> > > > > > +  bool cache_p = (flags == ff_none);
> > > > > > +
> > > > > > +  if (cache_p)
> > > > > > +    if (tree *cached = fold_cache->get (x))
> > > > > > +      return *cached;
> > > > > >         uid_sensitive_constexpr_evaluation_checker c;
> > > > > >     @@ -2526,7 +2536,7 @@ cp_fold (tree x)
> > > > > >     	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > > > the
> > > > > >     	     folding of the operand should be in the caches and if in
> > > > > > cp_fold_r
> > > > > >     	     it will modify it in place.  */
> > > > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > >     	  if (op0 == error_mark_node)
> > > > > >     	    x = error_mark_node;
> > > > > >     	  break;
> > > > > > @@ -2571,7 +2581,7 @@ cp_fold (tree x)
> > > > > >     	{
> > > > > >     	  tree p = maybe_undo_parenthesized_ref (x);
> > > > > >     	  if (p != x)
> > > > > > -	    return cp_fold (p);
> > > > > > +	    return cp_fold (p, flags);
> > > > > >     	}
> > > > > >           goto unary;
> > > > > >     @@ -2763,8 +2773,8 @@ cp_fold (tree x)
> > > > > >         case COND_EXPR:
> > > > > >           loc = EXPR_LOCATION (x);
> > > > > >           op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > >             if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > > > >     	{
> > > > > > @@ -2854,7 +2864,7 @@ cp_fold (tree x)
> > > > > >     	      {
> > > > > >     		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > > > >     		  r = build_nop (TREE_TYPE (x), r);
> > > > > > -		x = cp_fold (r);
> > > > > > +		x = cp_fold (r, flags);
> > > > > >     		break;
> > > > > >     	      }
> > > > > >     	  }
> > > > > > @@ -2908,7 +2918,7 @@ cp_fold (tree x)
> > > > > >     	int m = call_expr_nargs (x);
> > > > > >     	for (int i = 0; i < m; i++)
> > > > > >     	  {
> > > > > > -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> > > > > > +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
> > > > > >     	    if (r != CALL_EXPR_ARG (x, i))
> > > > > >     	      {
> > > > > >     		if (r == error_mark_node)
> > > > > > @@ -2931,7 +2941,7 @@ cp_fold (tree x)
> > > > > >       	if (TREE_CODE (r) != CALL_EXPR)
> > > > > >     	  {
> > > > > > -	    x = cp_fold (r);
> > > > > > +	    x = cp_fold (r, flags);
> > > > > >     	    break;
> > > > > >     	  }
> > > > > >     @@ -2944,7 +2954,15 @@ cp_fold (tree x)
> > > > > >     	   constant, but the call followed by an INDIRECT_REF is.  */
> > > > > >     	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> > > > > >     	    && !flag_no_inline)
> > > > > > -	  r = maybe_constant_value (x);
> > > > > > +	  {
> > > > > > +	    mce_value manifestly_const_eval = mce_unknown;
> > > > > > +	    if (flags & ff_genericize)
> > > > > > +	      /* At genericization time it's safe to fold
> > > > > > +		 __builtin_is_constant_evaluated to false.  */
> > > > > > +	      manifestly_const_eval = mce_false;
> > > > > > +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > > > > > +				      manifestly_const_eval);
> > > > > > +	  }
> > > > > >     	optimize = sv;
> > > > > >               if (TREE_CODE (r) != CALL_EXPR)
> > > > > > @@ -2971,7 +2989,7 @@ cp_fold (tree x)
> > > > > >     	vec<constructor_elt, va_gc> *nelts = NULL;
> > > > > >     	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
> > > > > >     	  {
> > > > > > -	    tree op = cp_fold (p->value);
> > > > > > +	    tree op = cp_fold (p->value, flags);
> > > > > >     	    if (op != p->value)
> > > > > >     	      {
> > > > > >     		if (op == error_mark_node)
> > > > > > @@ -3002,7 +3020,7 @@ cp_fold (tree x)
> > > > > >       	for (int i = 0; i < n; i++)
> > > > > >     	  {
> > > > > > -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> > > > > > +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
> > > > > >     	    if (op != TREE_VEC_ELT (x, i))
> > > > > >     	      {
> > > > > >     		if (!changed)
> > > > > > @@ -3019,10 +3037,10 @@ cp_fold (tree x)
> > > > > >         case ARRAY_RANGE_REF:
> > > > > >             loc = EXPR_LOCATION (x);
> > > > > > -      op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > -      op3 = cp_fold (TREE_OPERAND (x, 3));
> > > > > > +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > > +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
> > > > > >             if (op0 != TREE_OPERAND (x, 0)
> > > > > >     	  || op1 != TREE_OPERAND (x, 1)
> > > > > > @@ -3050,7 +3068,7 @@ cp_fold (tree x)
> > > > > >           /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j),
> > > > > > which,
> > > > > > after
> > > > > >     	 folding, evaluates to an invariant.  In that case no need to
> > > > > > wrap
> > > > > >     	 this folded tree with a SAVE_EXPR.  */
> > > > > > -      r = cp_fold (TREE_OPERAND (x, 0));
> > > > > > +      r = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > >           if (tree_invariant_p (r))
> > > > > >     	x = r;
> > > > > >           break;
> > > > > > @@ -3069,7 +3087,7 @@ cp_fold (tree x)
> > > > > >           copy_warning (x, org_x);
> > > > > >         }
> > > > > >     -  if (!c.evaluation_restricted_p ())
> > > > > > +  if (cache_p && !c.evaluation_restricted_p ())
> > > > > >         {
> > > > > >           fold_cache->put (org_x, x);
> > > > > >           /* Prevent that we try to fold an already folded result
> > > > > > again.
> > > > > > */
> > > > > > diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > new file mode 100644
> > > > > > index 00000000000..4c45dbba13c
> > > > > > --- /dev/null
> > > > > > +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > @@ -0,0 +1,29 @@
> > > > > > +// PR c++/108243
> > > > > > +// { dg-do compile { target c++11 } }
> > > > > > +// { dg-additional-options "-O -fdump-tree-original" }
> > > > > > +
> > > > > > +constexpr int foo() {
> > > > > > +  return __builtin_is_constant_evaluated() + 1;
> > > > > > +}
> > > > > > +
> > > > > > +#if __cpp_if_consteval
> > > > > > +constexpr int bar() {
> > > > > > +  if consteval {
> > > > > > +    return 5;
> > > > > > +  } else {
> > > > > > +    return 4;
> > > > > > +  }
> > > > > > +}
> > > > > > +#endif
> > > > > > +
> > > > > > +int p, q;
> > > > > > +
> > > > > > +int main() {
> > > > > > +  p = foo();
> > > > > > +#if __cpp_if_consteval
> > > > > > +  q = bar();
> > > > > > +#endif
> > > > > > +}
> > > > > > +
> > > > > > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > > > > > +// { dg-final { scan-tree-dump-not "= bar" "original" } }
> > > > > 
> > > > > Let's also test a static initializer that can't be fully
> > > > > constant-evaluated.
> > > > 
> > > > D'oh, doing so revealed that cp_fold_function doesn't reach static
> > > > initializers; that's taken care of by cp_fully_fold_init.  So it seems
> > > > we need cp_fold, when called from the latter entry point, to also
> > > > assume m_c_e is false.  We can't re-use ff_genericize here because that
> > > > flag has additional effects in cp_fold_r, so it seems we need another
> > > > flag that only affects the manifestly constant-eval stuff; I called
> > > > it ff_mce_false.  How does the following look?
> > > > 
> > > > -- >8 --
> > > > 
> > > > Subject: [PATCH 2/2] c++: speculative constexpr and
> > > > is_constant_evaluated
> > > >    [PR108243]
> > > > 
> > > > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > since we don't want to prematurely fold the builtin to false if the
> > > > expression in question would be later manifestly constant evaluated (in
> > > > which case it must be folded to true).
> > > > 
> > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > to get folded as false during cp_fold_function and cp_fully_fold_init,
> > > > since at these points we're sure we're done with manifestly constant
> > > > evaluation.  To that end we add a flags parameter to cp_fold that
> > > > controls whether we pass mce_false or mce_unknown to
> > > > maybe_constant_value
> > > > when folding a CALL_EXPR.
> > > > 
> > > > 	PR c++/108243
> > > > 	PR c++/97553
> > > > 
> > > > gcc/cp/ChangeLog:
> > > > 
> > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > 	(cp_fold_data::fold_flags): ... this.
> > > > 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> > > > 	(cp_fold_function): Likewise.
> > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > 	(cp_fully_fold_init): Likewise.
> > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > 	isn't empty.
> > > > 	<case CALL_EXPR>: If ff_genericize is set, fold
> > > > 	__builtin_is_constant_evaluated to false and pass mce_false to
> > > > 	maybe_constant_value.
> > > > 
> > > > gcc/testsuite/ChangeLog:
> > > > 
> > > > 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> > > > 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> > > > ---
> > > >    gcc/cp/cp-gimplify.cc                         | 88
> > > > ++++++++++++-------
> > > >    .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
> > > >    .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
> > > >    3 files changed, 104 insertions(+), 30 deletions(-)
> > > >    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > > >    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > > 
> > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > index 9929d29981a..590ed787997 100644
> > > > --- a/gcc/cp/cp-gimplify.cc
> > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
> > > >    #include "omp-general.h"
> > > >    #include "opts.h"
> > > >    +/* Flags for cp_fold and cp_fold_r.  */
> > > > +
> > > > +enum fold_flags {
> > > > +  ff_none = 0,
> > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > +  ff_genericize = 1 << 0,
> > > > +  /* Whether we're folding late enough that we could assume
> > > > +     we're definitely not in a manifestly constant-evaluated
> > > > +     context.  */
> > > 
> > > It's not necessarily a matter of late enough; we could fold sooner and
> > > still
> > > know that, as in cp_fully_fold_init.  We could do the same at other
> > > full-expression points, but we don't because we want to delay folding as
> > > much
> > > as possible.  So let's say "folding at a point where we know we're..."
> > > 
> > > > +  ff_mce_false = 1 << 1,
> > > > +};
> > > > +
> > > > +using fold_flags_t = int;
> > > > +
> > > >    /* Forward declarations.  */
> > > >      static tree cp_genericize_r (tree *, int *, void *);
> > > >    static tree cp_fold_r (tree *, int *, void *);
> > > >    static void cp_genericize_tree (tree*, bool);
> > > > -static tree cp_fold (tree);
> > > > +static tree cp_fold (tree, fold_flags_t);
> > > >      /* Genericize a TRY_BLOCK.  */
> > > >    @@ -1012,9 +1026,8 @@ struct cp_genericize_data
> > > >    struct cp_fold_data
> > > >    {
> > > >      hash_set<tree> pset;
> > > > -  bool genericize; // called from cp_fold_function?
> > > > -
> > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > +  fold_flags_t flags;
> > > > +  cp_fold_data (fold_flags_t flags): flags (flags) {}
> > > >    };
> > > >      static tree
> > > > @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > *data_)
> > > >          break;
> > > >        }
> > > >    -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > >        if (data->pset.add (stmt))
> > > >        {
> > > > @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > void
> > > > *data_)
> > > >    	 here rather than in cp_genericize to avoid problems with the
> > > > invisible
> > > >    	 reference transition.  */
> > > >        case INIT_EXPR:
> > > > -      if (data->genericize)
> > > > +      if (data->flags & ff_genericize)
> > > >    	cp_genericize_init_expr (stmt_p);
> > > >          break;
> > > >          case TARGET_EXPR:
> > > > -      if (data->genericize)
> > > > +      if (data->flags & ff_genericize)
> > > >    	cp_genericize_target_expr (stmt_p);
> > > >            /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR;
> > > > in
> > > > @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > *data_)
> > > >    void
> > > >    cp_fold_function (tree fndecl)
> > > >    {
> > > > -  cp_fold_data data (/*genericize*/true);
> > > > +  cp_fold_data data (ff_genericize | ff_mce_false);
> > > 
> > > Here would be a good place for a comment about passing mce_false because
> > > all
> > > manifestly-constant-evaluated expressions will have been
> > > constant-evaluated
> > > already if possible.
> > > 
> > > >      cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
> > > >    }
> > > >    @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > >    {
> > > >      while (true)
> > > >        {
> > > > -      x = cp_fold (x);
> > > > +      x = cp_fold (x, ff_none);
> > > >          if (rval)
> > > >    	x = mark_rvalue_use (x);
> > > >          if (rval && DECL_P (x)
> > > > @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
> > > >      if (processing_template_decl)
> > > >        return x;
> > > >      x = cp_fully_fold (x);
> > > > -  cp_fold_data data (/*genericize*/false);
> > > > +  cp_fold_data data (ff_mce_false);
> > > >      cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > >      return x;
> > > >    }
> > > > @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
> > > >        Function returns X or its folded variant.  */
> > > >      static tree
> > > > -cp_fold (tree x)
> > > > +cp_fold (tree x, fold_flags_t flags)
> > > >    {
> > > >      tree op0, op1, op2, op3;
> > > >      tree org_x = x, r = NULL_TREE;
> > > > @@ -2506,8 +2519,11 @@ cp_fold (tree x)
> > > >      if (fold_cache == NULL)
> > > >        fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > >    -  if (tree *cached = fold_cache->get (x))
> > > > -    return *cached;
> > > > +  bool cache_p = (flags == ff_none);
> > > > +
> > > > +  if (cache_p)
> > > > +    if (tree *cached = fold_cache->get (x))
> > > > +      return *cached;
> > > >        uid_sensitive_constexpr_evaluation_checker c;
> > > >    @@ -2542,7 +2558,7 @@ cp_fold (tree x)
> > > >    	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > the
> > > >    	     folding of the operand should be in the caches and if in
> > > > cp_fold_r
> > > >    	     it will modify it in place.  */
> > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > >    	  if (op0 == error_mark_node)
> > > >    	    x = error_mark_node;
> > > >    	  break;
> > > > @@ -2587,7 +2603,7 @@ cp_fold (tree x)
> > > >    	{
> > > >    	  tree p = maybe_undo_parenthesized_ref (x);
> > > >    	  if (p != x)
> > > > -	    return cp_fold (p);
> > > > +	    return cp_fold (p, flags);
> > > >    	}
> > > >          goto unary;
> > > >    @@ -2779,8 +2795,8 @@ cp_fold (tree x)
> > > >        case COND_EXPR:
> > > >          loc = EXPR_LOCATION (x);
> > > >          op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > >            if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > >    	{
> > > > @@ -2870,7 +2886,7 @@ cp_fold (tree x)
> > > >    	      {
> > > >    		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > >    		  r = build_nop (TREE_TYPE (x), r);
> > > > -		x = cp_fold (r);
> > > > +		x = cp_fold (r, flags);
> > > >    		break;
> > > >    	      }
> > > >    	  }
> > > > @@ -2890,8 +2906,12 @@ cp_fold (tree x)
> > > >    	  {
> > > >    	    switch (DECL_FE_FUNCTION_CODE (callee))
> > > >    	      {
> > > > -		/* Defer folding __builtin_is_constant_evaluated.  */
> > > >    	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> > > > +		/* Defer folding __builtin_is_constant_evaluated unless
> > > > +		   we can assume this isn't a manifestly constant-evaluated
> > > 
> > > s/can assume/know/
> > > 
> > > OK with those comment changes.
> > 
> > Thanks a lot.  Unfortunately I think the patch has a significant problem
> > that only just occurred to me -- disabling the cp_fold cache when the
> > flag ff_mce_false is set effectively makes cp_fold_function and
> > cp_fully_fold_init quadratic in the size of the expression (since
> > cp_fold_r calls cp_fold on each subtree, and cp_fold when the cache is
> > disabled will end up fully walking each subtree).  Note that the reason
> > we must disable the cache is because cp_fold with ff_mce_false might
> > give a different folded result than without that flag if the expression
> > contains a suitable CALL_EXPR subexpression.
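> > 
> > (For concreteness: for a chain like a1 + a2 + ... + aN, cp_fold_r
> > calls cp_fold on each of the N additions, and with the cache disabled
> > each such call re-walks its entire subtree, so we'd end up doing
> > N + (N-1) + ... + 1 = O(N^2) work overall.)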
> 
> Good point.
> 
> > One approach to fix this complexity issue would be to parameterize the
> > cache according to the flags that were passed to cp_fold, which would
> > allow us to keep the cache enabled when ff_mce_false is set.  A downside
> > to this approach is that the size of the cp_fold cache would essentially
> > double since for each tree we'd now have two cache entries, one for
> > flags=ff_none and another for flags=ff_mce_false.
> 
> We could also clear the cache before cp_fold_function since the two folds
> shouldn't overlap (much).

Makes sense, but IIUC we'd also have to clear it before (and after)
cp_fully_fold_init too, which unlike cp_fold_function may get called
in the middle of a function body.
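
Something like the following, I guess (just a sketch of that variant,
reusing ff_mce_false from the first patch):

  tree
  cp_fully_fold_init (tree x)
  {
    if (processing_template_decl)
      return x;
    x = cp_fully_fold (x);
    /* Flush entries folded with mce_unknown before the mce_false walk,
       and flush again afterwards so later folds don't pick up the
       mce_false results.  */
    clear_fold_cache ();
    cp_fold_data data (ff_mce_false);
    cp_walk_tree (&x, cp_fold_r, &data, NULL);
    clear_fold_cache ();
    return x;
  }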

> 
> > Another approach would be to split out the trial constexpr evaluation
> > part of cp_fold's CALL_EXPR handling, parameterize that, and call it
> > directly from cp_fold_r.  With this approach we wouldn't perform as much
> > folding, e.g.
> > 
> >    int n = 41 + !std::is_constant_evaluated();
> > 
> > would get folded to 1 + 41 rather than 42.  But I suspect this would
> > give us 95% of the attainable benefits of the above approach.
> > 
> > I think I'm leaning towards this second approach, which the below patch
> > implements instead.  What do you think?  Bootstrapped and regtested on
> > x86_64-pc-linux-gnu.
> 
> That sounds reasonable, but...
> 
> > -- >8 --
> > 
> > Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
> >   [PR108243]
> > 
> > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > as an optimization barrier for our speculative constexpr evaluation,
> > since we don't want to prematurely fold the builtin to false before the
> > expression in question undergoes constant evaluation, as it would in a
> > manifestly constant-evaluated context (in which case the builtin must
> > instead be folded to true).
> > 
> > This patch fixes this by permitting __builtin_is_constant_evaluated
> > to get folded to false from cp_fold_r, where we know we're done with
> > proper constant evaluation (of manifestly constant-evaluated contexts).
> > 
> > 	PR c++/108243
> > 	PR c++/97553
> > 
> > gcc/cp/ChangeLog:
> > 
> > 	* cp-gimplify.cc
> > 	(cp_fold_r): Remove redundant *stmt_p assignments.  After
> > 	calling cp_fold, call maybe_fold_constexpr_call with mce_false.
> > 	(cp_fold) <case CALL_EXPR>: Split out trial constexpr evaluation
> > 	into ...
> > 	(maybe_fold_constexpr_call): ... here.
> > 
> > gcc/testsuite/ChangeLog:
> > 
> > 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> > 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> > ---
> >   gcc/cp/cp-gimplify.cc                         | 55 +++++++++++++++----
> >   .../g++.dg/opt/is_constant_evaluated1.C       | 20 +++++++
> >   .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++++++
> >   3 files changed, 95 insertions(+), 12 deletions(-)
> >   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> >   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > 
> > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > index 9929d29981a..dca55056b2c 100644
> > --- a/gcc/cp/cp-gimplify.cc
> > +++ b/gcc/cp/cp-gimplify.cc
> > @@ -49,6 +49,7 @@ static tree cp_genericize_r (tree *, int *, void *);
> >   static tree cp_fold_r (tree *, int *, void *);
> >   static void cp_genericize_tree (tree*, bool);
> >   static tree cp_fold (tree);
> > +static tree maybe_fold_constexpr_call (tree, mce_value);
> >     /* Genericize a TRY_BLOCK.  */
> >   @@ -1034,7 +1035,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >   	    error_at (PTRMEM_CST_LOCATION (stmt),
> >   		      "taking address of an immediate function %qD",
> >   		      PTRMEM_CST_MEMBER (stmt));
> > -	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
> > +	  stmt = build_zero_cst (TREE_TYPE (stmt));
> >   	  break;
> >   	}
> >         break;
> > @@ -1046,7 +1047,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >   	  error_at (EXPR_LOCATION (stmt),
> >   		    "taking address of an immediate function %qD",
> >   		    TREE_OPERAND (stmt, 0));
> > -	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
> > +	  stmt = build_zero_cst (TREE_TYPE (stmt));
> >   	  break;
> >   	}
> >         break;
> > @@ -1055,7 +1056,17 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >         break;
> >       }
> >   -  *stmt_p = stmt = cp_fold (*stmt_p);
> > +  stmt = cp_fold (stmt);
> > +
> > +  if (TREE_CODE (stmt) == CALL_EXPR)
> > +    /* Since cp_fold_r is called (from cp_genericize, cp_fold_function
> > +       and cp_fully_fold_init) only after the overall expression has been
> > +       considered for constant-evaluation, we can by now safely fold any
> > +       remaining __builtin_is_constant_evaluated calls to false, so try
> > +       constexpr expansion with mce_false.  */
> > +    stmt = maybe_fold_constexpr_call (stmt, mce_false);
> > +
> > +  *stmt_p = stmt;
> >       if (data->pset.add (stmt))
> >       {
> > @@ -2952,15 +2963,10 @@ cp_fold (tree x)
> >   	  }
> >     	optimize = nw;
> > -
> > -	/* Invoke maybe_constant_value for functions declared
> > -	   constexpr and not called with AGGR_INIT_EXPRs.
> > -	   TODO:
> > -	   Do constexpr expansion of expressions where the call itself is not
> > -	   constant, but the call followed by an INDIRECT_REF is.  */
> > -	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> > -	    && !flag_no_inline)
> > -	  r = maybe_constant_value (x);
> > +	/* Pass mce_unknown to defer folding __builtin_is_constant_evaluated
> > +	   since we don't know if we're in a manifestly constant-evaluated
> > +	   context that hasn't yet been constant-evaluated.  */
> > +	r = maybe_fold_constexpr_call (x, mce_unknown);
> 
> It seems unfortunate to try to fold both here and in cp_fold_r.

Yes, though I'm afraid some duplication of work is pretty much
unavoidable.  Even if in cp_fold_r we did something like

  if (TREE_CODE (stmt) == CALL_EXPR)
    /* cp_fold_call_expr is the entire CALL_EXPR case of cp_fold.  */
    stmt = cp_fold_call_expr (stmt, mce_false);
  else
    stmt = cp_fold (stmt);

instead of

  stmt = cp_fold (stmt);
  if (TREE_CODE (stmt) == CALL_EXPR)
    stmt = maybe_fold_constexpr_call (stmt, mce_false);

we would still end up doing cp_fold on the CALL_EXPR if it's a
subexpression of some larger expression (since cp_fold is recursive).

And even if we went with the original approach of parameterizing cp_fold
entirely according to manifestly_const_eval, we would still end up trying
to fold some constexpr calls twice, first with flags=ff_none (during
some on-the-spot folding) and again with flags=ff_mce_false (during
cp_fold_function), I think.

> 
> Does this patch still fold __builtin_is_constant_evaluated() even though it no
> longer touches the cp_fold builtin handling?

Indeed it doesn't; instead __builtin_is_constant_evaluated() will
continue to get folded during gimplification.  I thought folding it
might not be beneficial with this approach, but on second thought we
should do it for consistency at least.  The following incremental
patch seems to work:

diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
index dca55056b2c..250147bde0f 100644
--- a/gcc/cp/cp-gimplify.cc
+++ b/gcc/cp/cp-gimplify.cc
@@ -3124,6 +3124,14 @@ maybe_fold_constexpr_call (tree x, mce_value manifestly_const_eval)
       if (TREE_CODE (r) != CALL_EXPR)
 	return r;
     }
+  if (fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
+			 BUILT_IN_FRONTEND))
+    {
+      if (manifestly_const_eval == mce_true)
+	return boolean_true_node;
+      else if (manifestly_const_eval == mce_false)
+	return boolean_false_node;
+    }
   return x;
 }
 

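For the record, with that a bare use such as

  int f() { return __builtin_is_constant_evaluated(); }

should now get folded to 'return 0;' already during cp_fold_r (at -O
anyway, since maybe_fold_constexpr_call returns early under
-fno-inline) rather than waiting for gimplification.
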
> 
> >   	optimize = sv;
> >             if (TREE_CODE (r) != CALL_EXPR)
> > @@ -3096,6 +3102,31 @@ cp_fold (tree x)
> >     return x;
> >   }
> >   +/* If the CALL_EXPR X calls a constexpr function, try expanding it via
> > +   constexpr evaluation.  Returns the expanded result or X if constexpr
> > +   evaluation wasn't possible.
> > +
> > +   TODO: Do constexpr expansion of expressions where the call itself
> > +   is not constant, but the call followed by an INDIRECT_REF is.  */
> > +
> > +static tree
> > +maybe_fold_constexpr_call (tree x, mce_value manifestly_const_eval)
> > +{
> > +  if (flag_no_inline)
> > +    return x;
> > +  tree callee = get_callee_fndecl (x);
> > +  if (!callee)
> > +    return x;
> > +  if (DECL_DECLARED_CONSTEXPR_P (callee))
> > +    {
> > +      tree r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > +				     manifestly_const_eval);
> > +      if (TREE_CODE (r) != CALL_EXPR)
> > +	return r;
> > +    }
> > +  return x;
> > +}
> > +
> >   /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.
> > */
> >     tree
> > diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > new file mode 100644
> > index 00000000000..2123f20e3e5
> > --- /dev/null
> > +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > @@ -0,0 +1,20 @@
> > +// PR c++/108243
> > +// { dg-do compile { target c++11 } }
> > +// { dg-additional-options "-O -fdump-tree-original" }
> > +
> > +struct A {
> > +  constexpr A(int n, int m) : n(n), m(m) { }
> > +  int n, m;
> > +};
> > +
> > +constexpr int foo(int n) {
> > +  return n + !__builtin_is_constant_evaluated();
> > +}
> > +
> > +A* f(int n) {
> > +  static A a = {n, foo(41)};
> > +  return &a;
> > +}
> > +
> > +// { dg-final { scan-tree-dump "42" "original" } }
> > +// { dg-final { scan-tree-dump-not "foo \\(41\\)" "original" } }
> > diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > new file mode 100644
> > index 00000000000..ed964e20a7a
> > --- /dev/null
> > +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > @@ -0,0 +1,32 @@
> > +// PR c++/97553
> > +// { dg-do compile { target c++11 } }
> > +// { dg-additional-options "-O -fdump-tree-original" }
> > +
> > +constexpr int foo() {
> > +  return __builtin_is_constant_evaluated() + 1;
> > +}
> > +
> > +#if __cpp_if_consteval
> > +constexpr int bar() {
> > +  if consteval {
> > +    return 5;
> > +  } else {
> > +    return 4;
> > +  }
> > +}
> > +#endif
> > +
> > +int p, q;
> > +
> > +int main() {
> > +  p = foo();
> > +#if __cpp_if_consteval
> > +  q = bar();
> > +#endif
> > +}
> > +
> > +// { dg-final { scan-tree-dump "p = 1" "original" } }
> > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > +
> > +// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
> > +// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
> 
>
  
Patrick Palka Feb. 10, 2023, 2:48 p.m. UTC | #9
On Thu, 9 Feb 2023, Patrick Palka wrote:

> On Thu, 9 Feb 2023, Jason Merrill wrote:
> 
> > On 2/9/23 09:36, Patrick Palka wrote:
> > > On Sun, 5 Feb 2023, Jason Merrill wrote:
> > > 
> > > > On 2/3/23 15:51, Patrick Palka wrote:
> > > > > On Mon, 30 Jan 2023, Jason Merrill wrote:
> > > > > 
> > > > > > On 1/27/23 17:02, Patrick Palka wrote:
> > > > > > > This PR illustrates that __builtin_is_constant_evaluated currently
> > > > > > > acts
> > > > > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > > > > since we don't want to prematurely fold the builtin to false if the
> > > > > > > expression in question would be later manifestly constant evaluated
> > > > > > > (in
> > > > > > > which case it must be folded to true).
> > > > > > > 
> > > > > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > > > > to get folded as false during cp_fold_function, since at that point
> > > > > > > we're sure we're doing manifestly constant evaluation.  To that end
> > > > > > > we add a flags parameter to cp_fold that controls what mce_value the
> > > > > > > CALL_EXPR case passes to maybe_constant_value.
> > > > > > > 
> > > > > > > bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK
> > > > > > > for
> > > > > > > trunk?
> > > > > > > 
> > > > > > > 	PR c++/108243
> > > > > > > 
> > > > > > > gcc/cp/ChangeLog:
> > > > > > > 
> > > > > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > > > > 	(cp_fold_data::fold_flags): ... this.
> > > > > > > 	(cp_fold_r): Adjust cp_fold_data use and cp_fold calls.
> > > > > > > 	(cp_fold_function): Likewise.
> > > > > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > > > > 	(cp_fully_fold_init): Likewise.
> > > > > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > > > > 	isn't empty.
> > > > > > > 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
> > > > > > > 	if ff_genericize is set.
> > > > > > > 
> > > > > > > gcc/testsuite/ChangeLog:
> > > > > > > 
> > > > > > > 	* g++.dg/opt/pr108243.C: New test.
> > > > > > > ---
> > > > > > >     gcc/cp/cp-gimplify.cc               | 76
> > > > > > > ++++++++++++++++++-----------
> > > > > > >     gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
> > > > > > >     2 files changed, 76 insertions(+), 29 deletions(-)
> > > > > > >     create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > 
> > > > > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > > > > index a35cedd05cc..d023a63768f 100644
> > > > > > > --- a/gcc/cp/cp-gimplify.cc
> > > > > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > > > > @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not
> > > > > > > see
> > > > > > >     #include "omp-general.h"
> > > > > > >     #include "opts.h"
> > > > > > >     +/* Flags for cp_fold and cp_fold_r.  */
> > > > > > > +
> > > > > > > +enum fold_flags {
> > > > > > > +  ff_none = 0,
> > > > > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > > > > +  ff_genericize = 1 << 0,
> > > > > > > +};
> > > > > > > +
> > > > > > >     /* Forward declarations.  */
> > > > > > >       static tree cp_genericize_r (tree *, int *, void *);
> > > > > > >     static tree cp_fold_r (tree *, int *, void *);
> > > > > > >     static void cp_genericize_tree (tree*, bool);
> > > > > > > -static tree cp_fold (tree);
> > > > > > > +static tree cp_fold (tree, fold_flags);
> > > > > > >       /* Genericize a TRY_BLOCK.  */
> > > > > > >     @@ -996,9 +1004,8 @@ struct cp_genericize_data
> > > > > > >     struct cp_fold_data
> > > > > > >     {
> > > > > > >       hash_set<tree> pset;
> > > > > > > -  bool genericize; // called from cp_fold_function?
> > > > > > > -
> > > > > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > > > > +  fold_flags flags;
> > > > > > > +  cp_fold_data (fold_flags flags): flags (flags) {}
> > > > > > >     };
> > > > > > >       static tree
> > > > > > > @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > > void
> > > > > > > *data_)
> > > > > > >           break;
> > > > > > >         }
> > > > > > >     -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > > > > >         if (data->pset.add (stmt))
> > > > > > >         {
> > > > > > > @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > > void
> > > > > > > *data_)
> > > > > > >     	 here rather than in cp_genericize to avoid problems with the
> > > > > > > invisible
> > > > > > >     	 reference transition.  */
> > > > > > >         case INIT_EXPR:
> > > > > > > -      if (data->genericize)
> > > > > > > +      if (data->flags & ff_genericize)
> > > > > > >     	cp_genericize_init_expr (stmt_p);
> > > > > > >           break;
> > > > > > >           case TARGET_EXPR:
> > > > > > > -      if (data->genericize)
> > > > > > > +      if (data->flags & ff_genericize)
> > > > > > >     	cp_genericize_target_expr (stmt_p);
> > > > > > >             /* Folding might replace e.g. a COND_EXPR with a
> > > > > > > TARGET_EXPR;
> > > > > > > in
> > > > > > > @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > > void
> > > > > > > *data_)
> > > > > > >     void
> > > > > > >     cp_fold_function (tree fndecl)
> > > > > > >     {
> > > > > > > -  cp_fold_data data (/*genericize*/true);
> > > > > > > +  cp_fold_data data (ff_genericize);
> > > > > > >       cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data,
> > > > > > > NULL);
> > > > > > >     }
> > > > > > >     @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > > > > >     {
> > > > > > >       while (true)
> > > > > > >         {
> > > > > > > -      x = cp_fold (x);
> > > > > > > +      x = cp_fold (x, ff_none);
> > > > > > >           if (rval)
> > > > > > >     	x = mark_rvalue_use (x);
> > > > > > >           if (rval && DECL_P (x)
> > > > > > > @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
> > > > > > >       if (processing_template_decl)
> > > > > > >         return x;
> > > > > > >       x = cp_fully_fold (x);
> > > > > > > -  cp_fold_data data (/*genericize*/false);
> > > > > > > +  cp_fold_data data (ff_none);
> > > > > > >       cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > > > > >       return x;
> > > > > > >     }
> > > > > > > @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
> > > > > > >         Function returns X or its folded variant.  */
> > > > > > >       static tree
> > > > > > > -cp_fold (tree x)
> > > > > > > +cp_fold (tree x, fold_flags flags)
> > > > > > >     {
> > > > > > >       tree op0, op1, op2, op3;
> > > > > > >       tree org_x = x, r = NULL_TREE;
> > > > > > > @@ -2490,8 +2497,11 @@ cp_fold (tree x)
> > > > > > >       if (fold_cache == NULL)
> > > > > > >         fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > > > > >     -  if (tree *cached = fold_cache->get (x))
> > > > > > > -    return *cached;
> > > > > > > +  bool cache_p = (flags == ff_none);
> > > > > > > +
> > > > > > > +  if (cache_p)
> > > > > > > +    if (tree *cached = fold_cache->get (x))
> > > > > > > +      return *cached;
> > > > > > >         uid_sensitive_constexpr_evaluation_checker c;
> > > > > > >     @@ -2526,7 +2536,7 @@ cp_fold (tree x)
> > > > > > >     	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > > > > the
> > > > > > >     	     folding of the operand should be in the caches and if in
> > > > > > > cp_fold_r
> > > > > > >     	     it will modify it in place.  */
> > > > > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > >     	  if (op0 == error_mark_node)
> > > > > > >     	    x = error_mark_node;
> > > > > > >     	  break;
> > > > > > > @@ -2571,7 +2581,7 @@ cp_fold (tree x)
> > > > > > >     	{
> > > > > > >     	  tree p = maybe_undo_parenthesized_ref (x);
> > > > > > >     	  if (p != x)
> > > > > > > -	    return cp_fold (p);
> > > > > > > +	    return cp_fold (p, flags);
> > > > > > >     	}
> > > > > > >           goto unary;
> > > > > > >     @@ -2763,8 +2773,8 @@ cp_fold (tree x)
> > > > > > >         case COND_EXPR:
> > > > > > >           loc = EXPR_LOCATION (x);
> > > > > > >           op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > > >             if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > > > > >     	{
> > > > > > > @@ -2854,7 +2864,7 @@ cp_fold (tree x)
> > > > > > >     	      {
> > > > > > >     		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > > > > >     		  r = build_nop (TREE_TYPE (x), r);
> > > > > > > -		x = cp_fold (r);
> > > > > > > +		x = cp_fold (r, flags);
> > > > > > >     		break;
> > > > > > >     	      }
> > > > > > >     	  }
> > > > > > > @@ -2908,7 +2918,7 @@ cp_fold (tree x)
> > > > > > >     	int m = call_expr_nargs (x);
> > > > > > >     	for (int i = 0; i < m; i++)
> > > > > > >     	  {
> > > > > > > -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> > > > > > > +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
> > > > > > >     	    if (r != CALL_EXPR_ARG (x, i))
> > > > > > >     	      {
> > > > > > >     		if (r == error_mark_node)
> > > > > > > @@ -2931,7 +2941,7 @@ cp_fold (tree x)
> > > > > > >       	if (TREE_CODE (r) != CALL_EXPR)
> > > > > > >     	  {
> > > > > > > -	    x = cp_fold (r);
> > > > > > > +	    x = cp_fold (r, flags);
> > > > > > >     	    break;
> > > > > > >     	  }
> > > > > > >     @@ -2944,7 +2954,15 @@ cp_fold (tree x)
> > > > > > >     	   constant, but the call followed by an INDIRECT_REF is.  */
> > > > > > >     	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> > > > > > >     	    && !flag_no_inline)
> > > > > > > -	  r = maybe_constant_value (x);
> > > > > > > +	  {
> > > > > > > +	    mce_value manifestly_const_eval = mce_unknown;
> > > > > > > +	    if (flags & ff_genericize)
> > > > > > > +	      /* At genericization time it's safe to fold
> > > > > > > +		 __builtin_is_constant_evaluated to false.  */
> > > > > > > +	      manifestly_const_eval = mce_false;
> > > > > > > +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > > > > > > +				      manifestly_const_eval);
> > > > > > > +	  }
> > > > > > >     	optimize = sv;
> > > > > > >               if (TREE_CODE (r) != CALL_EXPR)
> > > > > > > @@ -2971,7 +2989,7 @@ cp_fold (tree x)
> > > > > > >     	vec<constructor_elt, va_gc> *nelts = NULL;
> > > > > > >     	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
> > > > > > >     	  {
> > > > > > > -	    tree op = cp_fold (p->value);
> > > > > > > +	    tree op = cp_fold (p->value, flags);
> > > > > > >     	    if (op != p->value)
> > > > > > >     	      {
> > > > > > >     		if (op == error_mark_node)
> > > > > > > @@ -3002,7 +3020,7 @@ cp_fold (tree x)
> > > > > > >       	for (int i = 0; i < n; i++)
> > > > > > >     	  {
> > > > > > > -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> > > > > > > +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
> > > > > > >     	    if (op != TREE_VEC_ELT (x, i))
> > > > > > >     	      {
> > > > > > >     		if (!changed)
> > > > > > > @@ -3019,10 +3037,10 @@ cp_fold (tree x)
> > > > > > >         case ARRAY_RANGE_REF:
> > > > > > >             loc = EXPR_LOCATION (x);
> > > > > > > -      op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > > -      op3 = cp_fold (TREE_OPERAND (x, 3));
> > > > > > > +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > > > +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
> > > > > > >             if (op0 != TREE_OPERAND (x, 0)
> > > > > > >     	  || op1 != TREE_OPERAND (x, 1)
> > > > > > > @@ -3050,7 +3068,7 @@ cp_fold (tree x)
> > > > > > >           /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j),
> > > > > > > which,
> > > > > > > after
> > > > > > >     	 folding, evaluates to an invariant.  In that case no need to
> > > > > > > wrap
> > > > > > >     	 this folded tree with a SAVE_EXPR.  */
> > > > > > > -      r = cp_fold (TREE_OPERAND (x, 0));
> > > > > > > +      r = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > >           if (tree_invariant_p (r))
> > > > > > >     	x = r;
> > > > > > >           break;
> > > > > > > @@ -3069,7 +3087,7 @@ cp_fold (tree x)
> > > > > > >           copy_warning (x, org_x);
> > > > > > >         }
> > > > > > >     -  if (!c.evaluation_restricted_p ())
> > > > > > > +  if (cache_p && !c.evaluation_restricted_p ())
> > > > > > >         {
> > > > > > >           fold_cache->put (org_x, x);
> > > > > > >           /* Prevent that we try to fold an already folded result
> > > > > > > again.
> > > > > > > */
> > > > > > > diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > new file mode 100644
> > > > > > > index 00000000000..4c45dbba13c
> > > > > > > --- /dev/null
> > > > > > > +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > @@ -0,0 +1,29 @@
> > > > > > > +// PR c++/108243
> > > > > > > +// { dg-do compile { target c++11 } }
> > > > > > > +// { dg-additional-options "-O -fdump-tree-original" }
> > > > > > > +
> > > > > > > +constexpr int foo() {
> > > > > > > +  return __builtin_is_constant_evaluated() + 1;
> > > > > > > +}
> > > > > > > +
> > > > > > > +#if __cpp_if_consteval
> > > > > > > +constexpr int bar() {
> > > > > > > +  if consteval {
> > > > > > > +    return 5;
> > > > > > > +  } else {
> > > > > > > +    return 4;
> > > > > > > +  }
> > > > > > > +}
> > > > > > > +#endif
> > > > > > > +
> > > > > > > +int p, q;
> > > > > > > +
> > > > > > > +int main() {
> > > > > > > +  p = foo();
> > > > > > > +#if __cpp_if_consteval
> > > > > > > +  q = bar();
> > > > > > > +#endif
> > > > > > > +}
> > > > > > > +
> > > > > > > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > > > > > > +// { dg-final { scan-tree-dump-not "= bar" "original" } }
> > > > > > 
> > > > > > Let's also test a static initializer that can't be fully
> > > > > > constant-evaluated.
> > > > > 
> > > > > D'oh, doing so revealed that cp_fold_function doesn't reach static
> > > > > initializers; that's taken care of by cp_fully_fold_init.  So it seems
> > > > > we need to make cp_fold, when called from the latter entry point, also
> > > > > assume m_c_e is false.  We can't re-use ff_genericize here because that
> > > > > flag has additional effects in cp_fold_r, so it seems we need another
> > > > > flag that only affects the manifestly constant-eval stuff; I called
> > > > > it ff_mce_false.  How does the following look?
> > > > > 
> > > > > -- >8 --
> > > > > 
> > > > > Subject: [PATCH 2/2] c++: speculative constexpr and
> > > > > is_constant_evaluated
> > > > >    [PR108243]
> > > > > 
> > > > > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > > since we don't want to prematurely fold the builtin to false if the
> > > > > expression in question would be later manifestly constant evaluated (in
> > > > > which case it must be folded to true).
> > > > > 
> > > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > > to get folded as false during cp_fold_function and cp_fully_fold_init,
> > > > > since at these points we're sure we're done with manifestly constant
> > > > > evaluation.  To that end we add a flags parameter to cp_fold that
> > > > > controls whether we pass mce_false or mce_unknown to
> > > > > maybe_constant_value
> > > > > when folding a CALL_EXPR.
> > > > > 
> > > > > 	PR c++/108243
> > > > > 	PR c++/97553
> > > > > 
> > > > > gcc/cp/ChangeLog:
> > > > > 
> > > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > > 	(cp_fold_data::fold_flags): ... this.
> > > > > 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> > > > > 	(cp_fold_function): Likewise.
> > > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > > 	(cp_fully_fold_init): Likewise.
> > > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > > 	isn't empty.
> > > > > 	<case CALL_EXPR>: If ff_genericize is set, fold
> > > > > 	__builtin_is_constant_evaluated to false and pass mce_false to
> > > > > 	maybe_constant_value.
> > > > > 
> > > > > gcc/testsuite/ChangeLog:
> > > > > 
> > > > > 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> > > > > 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> > > > > ---
> > > > >    gcc/cp/cp-gimplify.cc                         | 88
> > > > > ++++++++++++-------
> > > > >    .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
> > > > >    .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
> > > > >    3 files changed, 104 insertions(+), 30 deletions(-)
> > > > >    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > > > >    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > > > 
> > > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > > index 9929d29981a..590ed787997 100644
> > > > > --- a/gcc/cp/cp-gimplify.cc
> > > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > > @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
> > > > >    #include "omp-general.h"
> > > > >    #include "opts.h"
> > > > >    +/* Flags for cp_fold and cp_fold_r.  */
> > > > > +
> > > > > +enum fold_flags {
> > > > > +  ff_none = 0,
> > > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > > +  ff_genericize = 1 << 0,
> > > > > +  /* Whether we're folding late enough that we could assume
> > > > > +     we're definitely not in a manifestly constant-evaluated
> > > > > +     context.  */
> > > > 
> > > > It's not necessarily a matter of late enough; we could fold sooner and
> > > > still
> > > > know that, as in cp_fully_fold_init.  We could do the same at other
> > > > full-expression points, but we don't because we want to delay folding as
> > > > much
> > > > as possible.  So let's say "folding at a point where we know we're..."
> > > > 
> > > > > +  ff_mce_false = 1 << 1,
> > > > > +};
> > > > > +
> > > > > +using fold_flags_t = int;
> > > > > +
> > > > >    /* Forward declarations.  */
> > > > >      static tree cp_genericize_r (tree *, int *, void *);
> > > > >    static tree cp_fold_r (tree *, int *, void *);
> > > > >    static void cp_genericize_tree (tree*, bool);
> > > > > -static tree cp_fold (tree);
> > > > > +static tree cp_fold (tree, fold_flags_t);
> > > > >      /* Genericize a TRY_BLOCK.  */
> > > > >    @@ -1012,9 +1026,8 @@ struct cp_genericize_data
> > > > >    struct cp_fold_data
> > > > >    {
> > > > >      hash_set<tree> pset;
> > > > > -  bool genericize; // called from cp_fold_function?
> > > > > -
> > > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > > +  fold_flags_t flags;
> > > > > +  cp_fold_data (fold_flags_t flags): flags (flags) {}
> > > > >    };
> > > > >      static tree
> > > > > @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > > *data_)
> > > > >          break;
> > > > >        }
> > > > >    -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > > >        if (data->pset.add (stmt))
> > > > >        {
> > > > > @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > void
> > > > > *data_)
> > > > >    	 here rather than in cp_genericize to avoid problems with the
> > > > > invisible
> > > > >    	 reference transition.  */
> > > > >        case INIT_EXPR:
> > > > > -      if (data->genericize)
> > > > > +      if (data->flags & ff_genericize)
> > > > >    	cp_genericize_init_expr (stmt_p);
> > > > >          break;
> > > > >          case TARGET_EXPR:
> > > > > -      if (data->genericize)
> > > > > +      if (data->flags & ff_genericize)
> > > > >    	cp_genericize_target_expr (stmt_p);
> > > > >            /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR;
> > > > > in
> > > > > @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > > *data_)
> > > > >    void
> > > > >    cp_fold_function (tree fndecl)
> > > > >    {
> > > > > -  cp_fold_data data (/*genericize*/true);
> > > > > +  cp_fold_data data (ff_genericize | ff_mce_false);
> > > > 
> > > > Here would be a good place for a comment about passing mce_false because
> > > > all
> > > > manifestly-constant-evaluated expressions will have been
> > > > constant-evaluated
> > > > already if possible.
> > > > 
> > > > >      cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
> > > > >    }
> > > > >    @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > > >    {
> > > > >      while (true)
> > > > >        {
> > > > > -      x = cp_fold (x);
> > > > > +      x = cp_fold (x, ff_none);
> > > > >          if (rval)
> > > > >    	x = mark_rvalue_use (x);
> > > > >          if (rval && DECL_P (x)
> > > > > @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
> > > > >      if (processing_template_decl)
> > > > >        return x;
> > > > >      x = cp_fully_fold (x);
> > > > > -  cp_fold_data data (/*genericize*/false);
> > > > > +  cp_fold_data data (ff_mce_false);
> > > > >      cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > > >      return x;
> > > > >    }
> > > > > @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
> > > > >        Function returns X or its folded variant.  */
> > > > >      static tree
> > > > > -cp_fold (tree x)
> > > > > +cp_fold (tree x, fold_flags_t flags)
> > > > >    {
> > > > >      tree op0, op1, op2, op3;
> > > > >      tree org_x = x, r = NULL_TREE;
> > > > > @@ -2506,8 +2519,11 @@ cp_fold (tree x)
> > > > >      if (fold_cache == NULL)
> > > > >        fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > > >    -  if (tree *cached = fold_cache->get (x))
> > > > > -    return *cached;
> > > > > +  bool cache_p = (flags == ff_none);
> > > > > +
> > > > > +  if (cache_p)
> > > > > +    if (tree *cached = fold_cache->get (x))
> > > > > +      return *cached;
> > > > >        uid_sensitive_constexpr_evaluation_checker c;
> > > > >    @@ -2542,7 +2558,7 @@ cp_fold (tree x)
> > > > >    	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > > the
> > > > >    	     folding of the operand should be in the caches and if in
> > > > > cp_fold_r
> > > > >    	     it will modify it in place.  */
> > > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > >    	  if (op0 == error_mark_node)
> > > > >    	    x = error_mark_node;
> > > > >    	  break;
> > > > > @@ -2587,7 +2603,7 @@ cp_fold (tree x)
> > > > >    	{
> > > > >    	  tree p = maybe_undo_parenthesized_ref (x);
> > > > >    	  if (p != x)
> > > > > -	    return cp_fold (p);
> > > > > +	    return cp_fold (p, flags);
> > > > >    	}
> > > > >          goto unary;
> > > > >    @@ -2779,8 +2795,8 @@ cp_fold (tree x)
> > > > >        case COND_EXPR:
> > > > >          loc = EXPR_LOCATION (x);
> > > > >          op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > >            if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > > >    	{
> > > > > @@ -2870,7 +2886,7 @@ cp_fold (tree x)
> > > > >    	      {
> > > > >    		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > > >    		  r = build_nop (TREE_TYPE (x), r);
> > > > > -		x = cp_fold (r);
> > > > > +		x = cp_fold (r, flags);
> > > > >    		break;
> > > > >    	      }
> > > > >    	  }
> > > > > @@ -2890,8 +2906,12 @@ cp_fold (tree x)
> > > > >    	  {
> > > > >    	    switch (DECL_FE_FUNCTION_CODE (callee))
> > > > >    	      {
> > > > > -		/* Defer folding __builtin_is_constant_evaluated.  */
> > > > >    	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> > > > > +		/* Defer folding __builtin_is_constant_evaluated unless
> > > > > +		   we can assume this isn't a manifestly constant-evaluated
> > > > 
> > > > s/can assume/know/
> > > > 
> > > > OK with those comment changes.
> > > 
> > > Thanks a lot.  Unfortunately I think the patch has a significant problem
> > > that only just occurred to me -- disabling the cp_fold cache when the
> > > flag ff_mce_false is set effectively makes cp_fold_function and
> > > cp_fully_fold_init quadratic in the size of the expression (since
> > > cp_fold_r calls cp_fold on each subtree, and cp_fold when the cache is
> > > disabled will end up fully walking each subtree).  Note that the reason
> > > we must disable the cache is because cp_fold with ff_mce_false might
> > > give a different folded result than without that flag if the expression
> > > contains a suitable CALL_EXPR subexpression.
> > 
> > Good point.
> > 
> > > One approach to fix this complexity issue would be to parameterize the
> > > cache according to the flags that were passed to cp_fold, which would
> > > allow us to keep the cache enabled when ff_mce_false is set.  A downside
> > > to this approach is that the size of the cp_fold cache would essentially
> > > double since for each tree we'd now have two cache entries, one for
> > > flags=ff_none and another for flags=ff_mce_false.
> > 
> > We could also clear the cache before cp_fold_function since the two folds
> > shouldn't overlap (much).
> 
> Makes sense, but IIUC we'd also have to clear it before (and after)
> cp_fully_fold_init too, which unlike cp_fold_function may get called
> in the middle of a function body.

Ah sorry, I think I misunderstood your idea.  Clearing the cache between
cp_fold_function calls would definitely help with controlling the size of the
cache, and indeed there shouldn't be much overlap because there isn't
much sharing of expression trees across function bodies.

However, I was curious about how big the fold_cache gets in practice,
and it turns out it doesn't get very big at all since we regularly clear
the fold_cache via clear_cv_and_fold_caches anyway.  According to my
experiments it doesn't get larger than about 10k elements.  So a
doubling of that is pretty much insignificant.

So ISTM parameterizing the cache is the way to go.  How does the
following look?

-- >8 --

Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
 [PR108243]

	PR c++/108243
	PR c++/97553

gcc/cp/ChangeLog:

	* cp-gimplify.cc (enum fold_flags): Define.
	(fold_flags_t): Declare.
	(cp_fold_data::genericize): Replace this data member with ...
	(cp_fold_data::fold_flags): ... this.
	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
	(cp_fold_function): Likewise.
	(cp_fold_maybe_rvalue): Likewise.
	(cp_fully_fold_init): Likewise.
	(fold_cache): Replace with ...
	(fold_caches): ... this 2-element array of caches.
	(get_fold_cache): Define.
	(clear_fold_cache): Adjust.
	(cp_fold): Add flags parameter.  Call get_fold_cache.
	<case CALL_EXPR>: If ff_mce_false is set, fold
	__builtin_is_constant_evaluated to false and pass mce_false to
	maybe_constant_value.

gcc/testsuite/ChangeLog:

	* g++.dg/opt/is_constant_evaluated1.C: New test.
	* g++.dg/opt/is_constant_evaluated2.C: New test.
---
 gcc/cp/cp-gimplify.cc                         | 103 +++++++++++++-----
 .../g++.dg/opt/is_constant_evaluated1.C       |  15 +++
 .../g++.dg/opt/is_constant_evaluated2.C       |  32 ++++++
 3 files changed, 120 insertions(+), 30 deletions(-)
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C

diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
index 9929d29981a..01e624bc9de 100644
--- a/gcc/cp/cp-gimplify.cc
+++ b/gcc/cp/cp-gimplify.cc
@@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
 #include "omp-general.h"
 #include "opts.h"
 
+/* Flags for cp_fold and cp_fold_r.  */
+
+enum fold_flags {
+  ff_none = 0,
+  /* Whether we're being called from cp_fold_function.  */
+  ff_genericize = 1 << 0,
+  /* Whether we're folding at a point where we know we're
+     definitely not in a manifestly constant-evaluated
+     context.  */
+  ff_mce_false = 1 << 1,
+};
+
+using fold_flags_t = int;
+
 /* Forward declarations.  */
 
 static tree cp_genericize_r (tree *, int *, void *);
 static tree cp_fold_r (tree *, int *, void *);
 static void cp_genericize_tree (tree*, bool);
-static tree cp_fold (tree);
+static tree cp_fold (tree, fold_flags_t);
 
 /* Genericize a TRY_BLOCK.  */
 
@@ -1012,9 +1026,8 @@ struct cp_genericize_data
 struct cp_fold_data
 {
   hash_set<tree> pset;
-  bool genericize; // called from cp_fold_function?
-
-  cp_fold_data (bool g): genericize (g) {}
+  fold_flags_t flags;
+  cp_fold_data (fold_flags_t flags): flags (flags) {}
 };
 
 static tree
@@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
       break;
     }
 
-  *stmt_p = stmt = cp_fold (*stmt_p);
+  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
 
   if (data->pset.add (stmt))
     {
@@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 	 here rather than in cp_genericize to avoid problems with the invisible
 	 reference transition.  */
     case INIT_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_init_expr (stmt_p);
       break;
 
     case TARGET_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_target_expr (stmt_p);
 
       /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
@@ -1173,7 +1186,10 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 void
 cp_fold_function (tree fndecl)
 {
-  cp_fold_data data (/*genericize*/true);
+  /* By now all manifestly-constant-evaluated expressions will have
+     been constant-evaluated already if possible, so we can safely
+     pass ff_mce_false.  */
+  cp_fold_data data (ff_genericize | ff_mce_false);
   cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
 }
 
@@ -2391,7 +2407,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
 {
   while (true)
     {
-      x = cp_fold (x);
+      x = cp_fold (x, ff_none);
       if (rval)
 	x = mark_rvalue_use (x);
       if (rval && DECL_P (x)
@@ -2450,7 +2466,7 @@ cp_fully_fold_init (tree x)
   if (processing_template_decl)
     return x;
   x = cp_fully_fold (x);
-  cp_fold_data data (/*genericize*/false);
+  cp_fold_data data (ff_mce_false);
   cp_walk_tree (&x, cp_fold_r, &data, NULL);
   return x;
 }
@@ -2466,15 +2482,29 @@ c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
   return cp_fold_maybe_rvalue (x, !lval);
 }
 
-static GTY((deletable)) hash_map<tree, tree> *fold_cache;
+static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
+
+/* Subroutine of cp_fold.  Returns which fold cache to use according
+   to the given flags.  We need multiple caches since the result of
+   folding may depend on which flags are used.  */
+
+static hash_map<tree, tree> *&
+get_fold_cache (fold_flags_t flags)
+{
+  if (flags & ff_mce_false)
+    return fold_caches[1];
+  else
+    return fold_caches[0];
+}
 
 /* Dispose of the whole FOLD_CACHE.  */
 
 void
 clear_fold_cache (void)
 {
-  if (fold_cache != NULL)
-    fold_cache->empty ();
+  for (auto& fold_cache : fold_caches)
+    if (fold_cache != NULL)
+      fold_cache->empty ();
 }
 
 /*  This function tries to fold an expression X.
@@ -2485,7 +2515,7 @@ clear_fold_cache (void)
     Function returns X or its folded variant.  */
 
 static tree
-cp_fold (tree x)
+cp_fold (tree x, fold_flags_t flags)
 {
   tree op0, op1, op2, op3;
   tree org_x = x, r = NULL_TREE;
@@ -2503,6 +2533,7 @@ cp_fold (tree x)
   if (DECL_P (x) || CONSTANT_CLASS_P (x))
     return x;
 
+  auto& fold_cache = get_fold_cache (flags);
   if (fold_cache == NULL)
     fold_cache = hash_map<tree, tree>::create_ggc (101);
 
@@ -2542,7 +2573,7 @@ cp_fold (tree x)
 	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
 	     folding of the operand should be in the caches and if in cp_fold_r
 	     it will modify it in place.  */
-	  op0 = cp_fold (TREE_OPERAND (x, 0));
+	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
 	  if (op0 == error_mark_node)
 	    x = error_mark_node;
 	  break;
@@ -2587,7 +2618,7 @@ cp_fold (tree x)
 	{
 	  tree p = maybe_undo_parenthesized_ref (x);
 	  if (p != x)
-	    return cp_fold (p);
+	    return cp_fold (p, flags);
 	}
       goto unary;
 
@@ -2779,8 +2810,8 @@ cp_fold (tree x)
     case COND_EXPR:
       loc = EXPR_LOCATION (x);
       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
 
       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
 	{
@@ -2870,7 +2901,7 @@ cp_fold (tree x)
 	      {
 		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
 		  r = build_nop (TREE_TYPE (x), r);
-		x = cp_fold (r);
+		x = cp_fold (r, flags);
 		break;
 	      }
 	  }
@@ -2890,8 +2921,12 @@ cp_fold (tree x)
 	  {
 	    switch (DECL_FE_FUNCTION_CODE (callee))
 	      {
-		/* Defer folding __builtin_is_constant_evaluated.  */
 	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
+		/* Defer folding __builtin_is_constant_evaluated unless
+		   we know this isn't a manifestly constant-evaluated
+		   context.  */
+		if (flags & ff_mce_false)
+		  x = boolean_false_node;
 		break;
 	      case CP_BUILT_IN_SOURCE_LOCATION:
 		x = fold_builtin_source_location (x);
@@ -2924,7 +2959,7 @@ cp_fold (tree x)
 	int m = call_expr_nargs (x);
 	for (int i = 0; i < m; i++)
 	  {
-	    r = cp_fold (CALL_EXPR_ARG (x, i));
+	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
 	    if (r != CALL_EXPR_ARG (x, i))
 	      {
 		if (r == error_mark_node)
@@ -2947,7 +2982,7 @@ cp_fold (tree x)
 
 	if (TREE_CODE (r) != CALL_EXPR)
 	  {
-	    x = cp_fold (r);
+	    x = cp_fold (r, flags);
 	    break;
 	  }
 
@@ -2960,7 +2995,15 @@ cp_fold (tree x)
 	   constant, but the call followed by an INDIRECT_REF is.  */
 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
 	    && !flag_no_inline)
-	  r = maybe_constant_value (x);
+	  {
+	    mce_value manifestly_const_eval = mce_unknown;
+	    if (flags & ff_mce_false)
+	      /* Allow folding __builtin_is_constant_evaluated to false during
+		 constexpr evaluation of this call.  */
+	      manifestly_const_eval = mce_false;
+	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
+				      manifestly_const_eval);
+	  }
 	optimize = sv;
 
         if (TREE_CODE (r) != CALL_EXPR)
@@ -2987,7 +3030,7 @@ cp_fold (tree x)
 	vec<constructor_elt, va_gc> *nelts = NULL;
 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
 	  {
-	    tree op = cp_fold (p->value);
+	    tree op = cp_fold (p->value, flags);
 	    if (op != p->value)
 	      {
 		if (op == error_mark_node)
@@ -3018,7 +3061,7 @@ cp_fold (tree x)
 
 	for (int i = 0; i < n; i++)
 	  {
-	    tree op = cp_fold (TREE_VEC_ELT (x, i));
+	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
 	    if (op != TREE_VEC_ELT (x, i))
 	      {
 		if (!changed)
@@ -3035,10 +3078,10 @@ cp_fold (tree x)
     case ARRAY_RANGE_REF:
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
-      op3 = cp_fold (TREE_OPERAND (x, 3));
+      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
+      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
 
       if (op0 != TREE_OPERAND (x, 0)
 	  || op1 != TREE_OPERAND (x, 1)
@@ -3066,7 +3109,7 @@ cp_fold (tree x)
       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
 	 folding, evaluates to an invariant.  In that case no need to wrap
 	 this folded tree with a SAVE_EXPR.  */
-      r = cp_fold (TREE_OPERAND (x, 0));
+      r = cp_fold (TREE_OPERAND (x, 0), flags);
       if (tree_invariant_p (r))
 	x = r;
       break;
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
new file mode 100644
index 00000000000..983410b9e83
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
@@ -0,0 +1,15 @@
+// PR c++/108243
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+struct A {
+  constexpr A(int n, int m) : n(n), m(m) { }
+  int n, m;
+};
+
+A* f(int n) {
+  static A a = {n, __builtin_is_constant_evaluated()};
+  return &a;
+}
+
+// { dg-final { scan-tree-dump-not "is_constant_evaluated" "original" } }
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
new file mode 100644
index 00000000000..ed964e20a7a
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
@@ -0,0 +1,32 @@
+// PR c++/97553
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+constexpr int foo() {
+  return __builtin_is_constant_evaluated() + 1;
+}
+
+#if __cpp_if_consteval
+constexpr int bar() {
+  if consteval {
+    return 5;
+  } else {
+    return 4;
+  }
+}
+#endif
+
+int p, q;
+
+int main() {
+  p = foo();
+#if __cpp_if_consteval
+  q = bar();
+#endif
+}
+
+// { dg-final { scan-tree-dump "p = 1" "original" } }
+// { dg-final { scan-tree-dump-not "= foo" "original" } }
+
+// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
+// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
  
Patrick Palka Feb. 10, 2023, 4:51 p.m. UTC | #10
On Fri, 10 Feb 2023, Patrick Palka wrote:

> On Thu, 9 Feb 2023, Patrick Palka wrote:
> 
> > On Thu, 9 Feb 2023, Jason Merrill wrote:
> > 
> > > On 2/9/23 09:36, Patrick Palka wrote:
> > > > On Sun, 5 Feb 2023, Jason Merrill wrote:
> > > > 
> > > > > On 2/3/23 15:51, Patrick Palka wrote:
> > > > > > On Mon, 30 Jan 2023, Jason Merrill wrote:
> > > > > > 
> > > > > > > On 1/27/23 17:02, Patrick Palka wrote:
> > > > > > > > This PR illustrates that __builtin_is_constant_evaluated currently
> > > > > > > > acts
> > > > > > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > > > > > since we don't want to prematurely fold the builtin to false if the
> > > > > > > > expression in question would be later manifestly constant evaluated
> > > > > > > > (in
> > > > > > > > which case it must be folded to true).
> > > > > > > > 
> > > > > > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > > > > > to get folded as false during cp_fold_function, since at that point
> > > > > > > > we're sure we're doing manifestly constant evaluation.  To that end
> > > > > > > > we add a flags parameter to cp_fold that controls what mce_value the
> > > > > > > > CALL_EXPR case passes to maybe_constant_value.
> > > > > > > > 
> > > > > > > > bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK
> > > > > > > > for
> > > > > > > > trunk?
> > > > > > > > 
> > > > > > > > 	PR c++/108243
> > > > > > > > 
> > > > > > > > gcc/cp/ChangeLog:
> > > > > > > > 
> > > > > > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > > > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > > > > > 	(cp_fold_data::fold_flags): ... this.
> > > > > > > > 	(cp_fold_r): Adjust cp_fold_data use and cp_fold calls.
> > > > > > > > 	(cp_fold_function): Likewise.
> > > > > > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > > > > > 	(cp_fully_fold_init): Likewise.
> > > > > > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > > > > > 	isn't empty.
> > > > > > > > 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
> > > > > > > > 	if ff_genericize is set.
> > > > > > > > 
> > > > > > > > gcc/testsuite/ChangeLog:
> > > > > > > > 
> > > > > > > > 	* g++.dg/opt/pr108243.C: New test.
> > > > > > > > ---
> > > > > > > >     gcc/cp/cp-gimplify.cc               | 76
> > > > > > > > ++++++++++++++++++-----------
> > > > > > > >     gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
> > > > > > > >     2 files changed, 76 insertions(+), 29 deletions(-)
> > > > > > > >     create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > > 
> > > > > > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > > > > > index a35cedd05cc..d023a63768f 100644
> > > > > > > > --- a/gcc/cp/cp-gimplify.cc
> > > > > > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > > > > > @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not
> > > > > > > > see
> > > > > > > >     #include "omp-general.h"
> > > > > > > >     #include "opts.h"
> > > > > > > >     +/* Flags for cp_fold and cp_fold_r.  */
> > > > > > > > +
> > > > > > > > +enum fold_flags {
> > > > > > > > +  ff_none = 0,
> > > > > > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > > > > > +  ff_genericize = 1 << 0,
> > > > > > > > +};
> > > > > > > > +
> > > > > > > >     /* Forward declarations.  */
> > > > > > > >       static tree cp_genericize_r (tree *, int *, void *);
> > > > > > > >     static tree cp_fold_r (tree *, int *, void *);
> > > > > > > >     static void cp_genericize_tree (tree*, bool);
> > > > > > > > -static tree cp_fold (tree);
> > > > > > > > +static tree cp_fold (tree, fold_flags);
> > > > > > > >       /* Genericize a TRY_BLOCK.  */
> > > > > > > >     @@ -996,9 +1004,8 @@ struct cp_genericize_data
> > > > > > > >     struct cp_fold_data
> > > > > > > >     {
> > > > > > > >       hash_set<tree> pset;
> > > > > > > > -  bool genericize; // called from cp_fold_function?
> > > > > > > > -
> > > > > > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > > > > > +  fold_flags flags;
> > > > > > > > +  cp_fold_data (fold_flags flags): flags (flags) {}
> > > > > > > >     };
> > > > > > > >       static tree
> > > > > > > > @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > > > void
> > > > > > > > *data_)
> > > > > > > >           break;
> > > > > > > >         }
> > > > > > > >     -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > > > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > > > > > >         if (data->pset.add (stmt))
> > > > > > > >         {
> > > > > > > > @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > > > void
> > > > > > > > *data_)
> > > > > > > >     	 here rather than in cp_genericize to avoid problems with the
> > > > > > > > invisible
> > > > > > > >     	 reference transition.  */
> > > > > > > >         case INIT_EXPR:
> > > > > > > > -      if (data->genericize)
> > > > > > > > +      if (data->flags & ff_genericize)
> > > > > > > >     	cp_genericize_init_expr (stmt_p);
> > > > > > > >           break;
> > > > > > > >           case TARGET_EXPR:
> > > > > > > > -      if (data->genericize)
> > > > > > > > +      if (data->flags & ff_genericize)
> > > > > > > >     	cp_genericize_target_expr (stmt_p);
> > > > > > > >             /* Folding might replace e.g. a COND_EXPR with a
> > > > > > > > TARGET_EXPR;
> > > > > > > > in
> > > > > > > > @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > > > void
> > > > > > > > *data_)
> > > > > > > >     void
> > > > > > > >     cp_fold_function (tree fndecl)
> > > > > > > >     {
> > > > > > > > -  cp_fold_data data (/*genericize*/true);
> > > > > > > > +  cp_fold_data data (ff_genericize);
> > > > > > > >       cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data,
> > > > > > > > NULL);
> > > > > > > >     }
> > > > > > > >     @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > > > > > >     {
> > > > > > > >       while (true)
> > > > > > > >         {
> > > > > > > > -      x = cp_fold (x);
> > > > > > > > +      x = cp_fold (x, ff_none);
> > > > > > > >           if (rval)
> > > > > > > >     	x = mark_rvalue_use (x);
> > > > > > > >           if (rval && DECL_P (x)
> > > > > > > > @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
> > > > > > > >       if (processing_template_decl)
> > > > > > > >         return x;
> > > > > > > >       x = cp_fully_fold (x);
> > > > > > > > -  cp_fold_data data (/*genericize*/false);
> > > > > > > > +  cp_fold_data data (ff_none);
> > > > > > > >       cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > > > > > >       return x;
> > > > > > > >     }
> > > > > > > > @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
> > > > > > > >         Function returns X or its folded variant.  */
> > > > > > > >       static tree
> > > > > > > > -cp_fold (tree x)
> > > > > > > > +cp_fold (tree x, fold_flags flags)
> > > > > > > >     {
> > > > > > > >       tree op0, op1, op2, op3;
> > > > > > > >       tree org_x = x, r = NULL_TREE;
> > > > > > > > @@ -2490,8 +2497,11 @@ cp_fold (tree x)
> > > > > > > >       if (fold_cache == NULL)
> > > > > > > >         fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > > > > > >     -  if (tree *cached = fold_cache->get (x))
> > > > > > > > -    return *cached;
> > > > > > > > +  bool cache_p = (flags == ff_none);
> > > > > > > > +
> > > > > > > > +  if (cache_p)
> > > > > > > > +    if (tree *cached = fold_cache->get (x))
> > > > > > > > +      return *cached;
> > > > > > > >         uid_sensitive_constexpr_evaluation_checker c;
> > > > > > > >     @@ -2526,7 +2536,7 @@ cp_fold (tree x)
> > > > > > > >     	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > > > > > the
> > > > > > > >     	     folding of the operand should be in the caches and if in
> > > > > > > > cp_fold_r
> > > > > > > >     	     it will modify it in place.  */
> > > > > > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > > >     	  if (op0 == error_mark_node)
> > > > > > > >     	    x = error_mark_node;
> > > > > > > >     	  break;
> > > > > > > > @@ -2571,7 +2581,7 @@ cp_fold (tree x)
> > > > > > > >     	{
> > > > > > > >     	  tree p = maybe_undo_parenthesized_ref (x);
> > > > > > > >     	  if (p != x)
> > > > > > > > -	    return cp_fold (p);
> > > > > > > > +	    return cp_fold (p, flags);
> > > > > > > >     	}
> > > > > > > >           goto unary;
> > > > > > > >     @@ -2763,8 +2773,8 @@ cp_fold (tree x)
> > > > > > > >         case COND_EXPR:
> > > > > > > >           loc = EXPR_LOCATION (x);
> > > > > > > >           op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > > > >             if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > > > > > >     	{
> > > > > > > > @@ -2854,7 +2864,7 @@ cp_fold (tree x)
> > > > > > > >     	      {
> > > > > > > >     		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > > > > > >     		  r = build_nop (TREE_TYPE (x), r);
> > > > > > > > -		x = cp_fold (r);
> > > > > > > > +		x = cp_fold (r, flags);
> > > > > > > >     		break;
> > > > > > > >     	      }
> > > > > > > >     	  }
> > > > > > > > @@ -2908,7 +2918,7 @@ cp_fold (tree x)
> > > > > > > >     	int m = call_expr_nargs (x);
> > > > > > > >     	for (int i = 0; i < m; i++)
> > > > > > > >     	  {
> > > > > > > > -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> > > > > > > > +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
> > > > > > > >     	    if (r != CALL_EXPR_ARG (x, i))
> > > > > > > >     	      {
> > > > > > > >     		if (r == error_mark_node)
> > > > > > > > @@ -2931,7 +2941,7 @@ cp_fold (tree x)
> > > > > > > >       	if (TREE_CODE (r) != CALL_EXPR)
> > > > > > > >     	  {
> > > > > > > > -	    x = cp_fold (r);
> > > > > > > > +	    x = cp_fold (r, flags);
> > > > > > > >     	    break;
> > > > > > > >     	  }
> > > > > > > >     @@ -2944,7 +2954,15 @@ cp_fold (tree x)
> > > > > > > >     	   constant, but the call followed by an INDIRECT_REF is.  */
> > > > > > > >     	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> > > > > > > >     	    && !flag_no_inline)
> > > > > > > > -	  r = maybe_constant_value (x);
> > > > > > > > +	  {
> > > > > > > > +	    mce_value manifestly_const_eval = mce_unknown;
> > > > > > > > +	    if (flags & ff_genericize)
> > > > > > > > +	      /* At genericization time it's safe to fold
> > > > > > > > +		 __builtin_is_constant_evaluated to false.  */
> > > > > > > > +	      manifestly_const_eval = mce_false;
> > > > > > > > +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > > > > > > > +				      manifestly_const_eval);
> > > > > > > > +	  }
> > > > > > > >     	optimize = sv;
> > > > > > > >               if (TREE_CODE (r) != CALL_EXPR)
> > > > > > > > @@ -2971,7 +2989,7 @@ cp_fold (tree x)
> > > > > > > >     	vec<constructor_elt, va_gc> *nelts = NULL;
> > > > > > > >     	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
> > > > > > > >     	  {
> > > > > > > > -	    tree op = cp_fold (p->value);
> > > > > > > > +	    tree op = cp_fold (p->value, flags);
> > > > > > > >     	    if (op != p->value)
> > > > > > > >     	      {
> > > > > > > >     		if (op == error_mark_node)
> > > > > > > > @@ -3002,7 +3020,7 @@ cp_fold (tree x)
> > > > > > > >       	for (int i = 0; i < n; i++)
> > > > > > > >     	  {
> > > > > > > > -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> > > > > > > > +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
> > > > > > > >     	    if (op != TREE_VEC_ELT (x, i))
> > > > > > > >     	      {
> > > > > > > >     		if (!changed)
> > > > > > > > @@ -3019,10 +3037,10 @@ cp_fold (tree x)
> > > > > > > >         case ARRAY_RANGE_REF:
> > > > > > > >             loc = EXPR_LOCATION (x);
> > > > > > > > -      op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > > > -      op3 = cp_fold (TREE_OPERAND (x, 3));
> > > > > > > > +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > > > > +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
> > > > > > > >             if (op0 != TREE_OPERAND (x, 0)
> > > > > > > >     	  || op1 != TREE_OPERAND (x, 1)
> > > > > > > > @@ -3050,7 +3068,7 @@ cp_fold (tree x)
> > > > > > > >           /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j),
> > > > > > > > which,
> > > > > > > > after
> > > > > > > >     	 folding, evaluates to an invariant.  In that case no need to
> > > > > > > > wrap
> > > > > > > >     	 this folded tree with a SAVE_EXPR.  */
> > > > > > > > -      r = cp_fold (TREE_OPERAND (x, 0));
> > > > > > > > +      r = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > > >           if (tree_invariant_p (r))
> > > > > > > >     	x = r;
> > > > > > > >           break;
> > > > > > > > @@ -3069,7 +3087,7 @@ cp_fold (tree x)
> > > > > > > >           copy_warning (x, org_x);
> > > > > > > >         }
> > > > > > > >     -  if (!c.evaluation_restricted_p ())
> > > > > > > > +  if (cache_p && !c.evaluation_restricted_p ())
> > > > > > > >         {
> > > > > > > >           fold_cache->put (org_x, x);
> > > > > > > >           /* Prevent that we try to fold an already folded result
> > > > > > > > again.
> > > > > > > > */
> > > > > > > > diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > > b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > > new file mode 100644
> > > > > > > > index 00000000000..4c45dbba13c
> > > > > > > > --- /dev/null
> > > > > > > > +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > > @@ -0,0 +1,29 @@
> > > > > > > > +// PR c++/108243
> > > > > > > > +// { dg-do compile { target c++11 } }
> > > > > > > > +// { dg-additional-options "-O -fdump-tree-original" }
> > > > > > > > +
> > > > > > > > +constexpr int foo() {
> > > > > > > > +  return __builtin_is_constant_evaluated() + 1;
> > > > > > > > +}
> > > > > > > > +
> > > > > > > > +#if __cpp_if_consteval
> > > > > > > > +constexpr int bar() {
> > > > > > > > +  if consteval {
> > > > > > > > +    return 5;
> > > > > > > > +  } else {
> > > > > > > > +    return 4;
> > > > > > > > +  }
> > > > > > > > +}
> > > > > > > > +#endif
> > > > > > > > +
> > > > > > > > +int p, q;
> > > > > > > > +
> > > > > > > > +int main() {
> > > > > > > > +  p = foo();
> > > > > > > > +#if __cpp_if_consteval
> > > > > > > > +  q = bar();
> > > > > > > > +#endif
> > > > > > > > +}
> > > > > > > > +
> > > > > > > > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > > > > > > > +// { dg-final { scan-tree-dump-not "= bar" "original" } }
> > > > > > > 
> > > > > > > Let's also test a static initializer that can't be fully
> > > > > > > constant-evaluated.
> > > > > > 
> > > > > > D'oh, doing so revealed that cp_fold_function doesn't reach static
> > > > > > initializers; that's taken care of by cp_fully_fold_init.  So it seems
> > > > > > we need to make cp_fold also assume m_c_e is false when called from
> > > > > > the latter entry point.  We can't re-use ff_genericize here because that
> > > > > > flag has additional effects in cp_fold_r, so we need another
> > > > > > flag that only affects the manifestly constant-eval stuff; I called
> > > > > > it ff_mce_false.  How does the following look?
> > > > > > 
> > > > > > -- >8 --
> > > > > > 
> > > > > > Subject: [PATCH 2/2] c++: speculative constexpr and
> > > > > > is_constant_evaluated
> > > > > >    [PR108243]
> > > > > > 
> > > > > > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > > > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > > > since we don't want to prematurely fold the builtin to false if the
> > > > > > expression in question would be later manifestly constant evaluated (in
> > > > > > which case it must be folded to true).
> > > > > > 
> > > > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > > > to get folded as false during cp_fold_function and cp_fully_fold_init,
> > > > > > since at these points we're sure we're done with manifestly constant
> > > > > > evaluation.  To that end we add a flags parameter to cp_fold that
> > > > > > controls whether we pass mce_false or mce_unknown to
> > > > > > maybe_constant_value
> > > > > > when folding a CALL_EXPR.
> > > > > > 
> > > > > > 	PR c++/108243
> > > > > > 	PR c++/97553
> > > > > > 
> > > > > > gcc/cp/ChangeLog:
> > > > > > 
> > > > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > > > 	(cp_fold_data::fold_flags): ... this.
> > > > > > 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> > > > > > 	(cp_fold_function): Likewise.
> > > > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > > > 	(cp_fully_fold_init): Likewise.
> > > > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > > > 	isn't empty.
> > > > > > 	<case CALL_EXPR>: If ff_genericize is set, fold
> > > > > > 	__builtin_is_constant_evaluated to false and pass mce_false to
> > > > > > 	maybe_constant_value.
> > > > > > 
> > > > > > gcc/testsuite/ChangeLog:
> > > > > > 
> > > > > > 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> > > > > > 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> > > > > > ---
> > > > > >    gcc/cp/cp-gimplify.cc                         | 88
> > > > > > ++++++++++++-------
> > > > > >    .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
> > > > > >    .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
> > > > > >    3 files changed, 104 insertions(+), 30 deletions(-)
> > > > > >    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > > > > >    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > > > > 
> > > > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > > > index 9929d29981a..590ed787997 100644
> > > > > > --- a/gcc/cp/cp-gimplify.cc
> > > > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > > > @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
> > > > > >    #include "omp-general.h"
> > > > > >    #include "opts.h"
> > > > > >    +/* Flags for cp_fold and cp_fold_r.  */
> > > > > > +
> > > > > > +enum fold_flags {
> > > > > > +  ff_none = 0,
> > > > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > > > +  ff_genericize = 1 << 0,
> > > > > > +  /* Whether we're folding late enough that we could assume
> > > > > > +     we're definitely not in a manifestly constant-evaluated
> > > > > > +     context.  */
> > > > > 
> > > > > It's not necessarily a matter of late enough; we could fold sooner and
> > > > > still
> > > > > know that, as in cp_fully_fold_init.  We could do the same at other
> > > > > full-expression points, but we don't because we want to delay folding as
> > > > > much
> > > > > as possible.  So let's say "folding at a point where we know we're..."
> > > > > 
> > > > > > +  ff_mce_false = 1 << 1,
> > > > > > +};
> > > > > > +
> > > > > > +using fold_flags_t = int;
> > > > > > +
> > > > > >    /* Forward declarations.  */
> > > > > >      static tree cp_genericize_r (tree *, int *, void *);
> > > > > >    static tree cp_fold_r (tree *, int *, void *);
> > > > > >    static void cp_genericize_tree (tree*, bool);
> > > > > > -static tree cp_fold (tree);
> > > > > > +static tree cp_fold (tree, fold_flags_t);
> > > > > >      /* Genericize a TRY_BLOCK.  */
> > > > > >    @@ -1012,9 +1026,8 @@ struct cp_genericize_data
> > > > > >    struct cp_fold_data
> > > > > >    {
> > > > > >      hash_set<tree> pset;
> > > > > > -  bool genericize; // called from cp_fold_function?
> > > > > > -
> > > > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > > > +  fold_flags_t flags;
> > > > > > +  cp_fold_data (fold_flags_t flags): flags (flags) {}
> > > > > >    };
> > > > > >      static tree
> > > > > > @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > > > *data_)
> > > > > >          break;
> > > > > >        }
> > > > > >    -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > > > >        if (data->pset.add (stmt))
> > > > > >        {
> > > > > > @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > void
> > > > > > *data_)
> > > > > >    	 here rather than in cp_genericize to avoid problems with the
> > > > > > invisible
> > > > > >    	 reference transition.  */
> > > > > >        case INIT_EXPR:
> > > > > > -      if (data->genericize)
> > > > > > +      if (data->flags & ff_genericize)
> > > > > >    	cp_genericize_init_expr (stmt_p);
> > > > > >          break;
> > > > > >          case TARGET_EXPR:
> > > > > > -      if (data->genericize)
> > > > > > +      if (data->flags & ff_genericize)
> > > > > >    	cp_genericize_target_expr (stmt_p);
> > > > > >            /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR;
> > > > > > in
> > > > > > @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > > > *data_)
> > > > > >    void
> > > > > >    cp_fold_function (tree fndecl)
> > > > > >    {
> > > > > > -  cp_fold_data data (/*genericize*/true);
> > > > > > +  cp_fold_data data (ff_genericize | ff_mce_false);
> > > > > 
> > > > > Here would be a good place for a comment about passing mce_false because
> > > > > all
> > > > > manifestly-constant-evaluated expressions will have been
> > > > > constant-evaluated
> > > > > already if possible.
> > > > > 
> > > > > >      cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
> > > > > >    }
> > > > > >    @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > > > >    {
> > > > > >      while (true)
> > > > > >        {
> > > > > > -      x = cp_fold (x);
> > > > > > +      x = cp_fold (x, ff_none);
> > > > > >          if (rval)
> > > > > >    	x = mark_rvalue_use (x);
> > > > > >          if (rval && DECL_P (x)
> > > > > > @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
> > > > > >      if (processing_template_decl)
> > > > > >        return x;
> > > > > >      x = cp_fully_fold (x);
> > > > > > -  cp_fold_data data (/*genericize*/false);
> > > > > > +  cp_fold_data data (ff_mce_false);
> > > > > >      cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > > > >      return x;
> > > > > >    }
> > > > > > @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
> > > > > >        Function returns X or its folded variant.  */
> > > > > >      static tree
> > > > > > -cp_fold (tree x)
> > > > > > +cp_fold (tree x, fold_flags_t flags)
> > > > > >    {
> > > > > >      tree op0, op1, op2, op3;
> > > > > >      tree org_x = x, r = NULL_TREE;
> > > > > > @@ -2506,8 +2519,11 @@ cp_fold (tree x)
> > > > > >      if (fold_cache == NULL)
> > > > > >        fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > > > >    -  if (tree *cached = fold_cache->get (x))
> > > > > > -    return *cached;
> > > > > > +  bool cache_p = (flags == ff_none);
> > > > > > +
> > > > > > +  if (cache_p)
> > > > > > +    if (tree *cached = fold_cache->get (x))
> > > > > > +      return *cached;
> > > > > >        uid_sensitive_constexpr_evaluation_checker c;
> > > > > >    @@ -2542,7 +2558,7 @@ cp_fold (tree x)
> > > > > >    	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > > > the
> > > > > >    	     folding of the operand should be in the caches and if in
> > > > > > cp_fold_r
> > > > > >    	     it will modify it in place.  */
> > > > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > >    	  if (op0 == error_mark_node)
> > > > > >    	    x = error_mark_node;
> > > > > >    	  break;
> > > > > > @@ -2587,7 +2603,7 @@ cp_fold (tree x)
> > > > > >    	{
> > > > > >    	  tree p = maybe_undo_parenthesized_ref (x);
> > > > > >    	  if (p != x)
> > > > > > -	    return cp_fold (p);
> > > > > > +	    return cp_fold (p, flags);
> > > > > >    	}
> > > > > >          goto unary;
> > > > > >    @@ -2779,8 +2795,8 @@ cp_fold (tree x)
> > > > > >        case COND_EXPR:
> > > > > >          loc = EXPR_LOCATION (x);
> > > > > >          op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > >            if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > > > >    	{
> > > > > > @@ -2870,7 +2886,7 @@ cp_fold (tree x)
> > > > > >    	      {
> > > > > >    		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > > > >    		  r = build_nop (TREE_TYPE (x), r);
> > > > > > -		x = cp_fold (r);
> > > > > > +		x = cp_fold (r, flags);
> > > > > >    		break;
> > > > > >    	      }
> > > > > >    	  }
> > > > > > @@ -2890,8 +2906,12 @@ cp_fold (tree x)
> > > > > >    	  {
> > > > > >    	    switch (DECL_FE_FUNCTION_CODE (callee))
> > > > > >    	      {
> > > > > > -		/* Defer folding __builtin_is_constant_evaluated.  */
> > > > > >    	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> > > > > > +		/* Defer folding __builtin_is_constant_evaluated unless
> > > > > > +		   we can assume this isn't a manifestly constant-evaluated
> > > > > 
> > > > > s/can assume/know/
> > > > > 
> > > > > OK with those comment changes.
> > > > 
> > > > Thanks a lot.  Unfortunately I think the patch has a significant problem
> > > > that only just occurred to me -- disabling the cp_fold cache when the
> > > > flag ff_mce_false is set effectively makes cp_fold_function and
> > > > cp_fully_fold_init quadratic in the size of the expression (since
> > > > cp_fold_r calls cp_fold on each subtree, and cp_fold when the cache is
> > > > disabled will end up fully walking each subtree).  Note that the reason
> > > > we must disable the cache is that cp_fold with ff_mce_false might
> > > > give a different folded result than without that flag if the expression
> > > > contains a suitable CALL_EXPR subexpression.
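> > > > 
> > > > (For intuition, here is a minimal standalone sketch of the blow-up --
> > > > hypothetical code, not GCC's, with fold/fold_r standing in for
> > > > cp_fold/cp_fold_r:
> > > > 
> > > >   struct node { node *lhs, *rhs; };
> > > > 
> > > >   // Recursively fold the subtree rooted at N.  Without a memoization
> > > >   // cache, nothing remembers already-folded subtrees.
> > > >   node *fold (node *n) {
> > > >     if (!n) return n;
> > > >     n->lhs = fold (n->lhs);
> > > >     n->rhs = fold (n->rhs);
> > > >     return n;  // pretend some simplification happens here
> > > >   }
> > > > 
> > > >   // Visit every node and fold it, like cp_fold_r calling cp_fold.
> > > >   void fold_r (node *n) {
> > > >     if (!n) return;
> > > >     fold (n);          // a full subtree walk from every visited node
> > > >     fold_r (n->lhs);
> > > >     fold_r (n->rhs);
> > > >   }
> > > > 
> > > > Each node is re-folded once per ancestor, so the total work is the sum
> > > > of all subtree sizes, which is quadratic on a deep tree.)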
> > > 
> > > Good point.
> > > 
> > > > One approach to fix this complexity issue would be to parameterize the
> > > > cache according to the flags that were passed to cp_fold, which would
> > > > allow us to keep the cache enabled when ff_mce_false is set.  A downside
> > > > to this approach is that the size of the cp_fold cache would essentially
> > > > double since for each tree we'd now have two cache entries, one for
> > > > flags=ff_none and another for flags=ff_mce_false.
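> > > > 
> > > > (Concretely, that means keeping one cache per flag setting and picking
> > > > the right one at lookup time, which is what get_fold_cache in the
> > > > revised patch further below ends up doing:
> > > > 
> > > >   static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
> > > > 
> > > >   static hash_map<tree, tree> *&
> > > >   get_fold_cache (fold_flags_t flags)
> > > >   {
> > > >     /* The folded result can differ under ff_mce_false, so that flag
> > > >        selects a separate cache.  */
> > > >     if (flags & ff_mce_false)
> > > >       return fold_caches[1];
> > > >     else
> > > >       return fold_caches[0];
> > > >   }
> > > > 
> > > > so each tree contributes at most one entry to each of the two caches.)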
> > > 
> > > We could also clear the cache before cp_fold_function since the two folds
> > > shouldn't overlap (much).
> > 
> > Makes sense, but IIUC we'd also have to clear it before (and after)
> > cp_fully_fold_init too, which unlike cp_fold_function may get called
> > in the middle of a function body.
> 
> Ah sorry, I think I misunderstood your idea.  Clearing the cache between
> cp_fold_function calls would definitely help with controlling the size of the
> cache, and indeed there shouldn't be much overlap because there isn't
> much sharing of expression trees across function bodies.
> 
> However, I was curious about how big the fold_cache gets in practice,
> and it turns out it doesn't get very big at all since we regularly clear
> the fold_cache via clear_cv_and_fold_caches anyway.  According to my
> experiments it doesn't get larger than about 10k elements.  So a
> doubling of that is pretty much insignificant.
> 
> So ISTM parameterizing the cache is the way to go.  How does the
> following look?
> 
> -- >8 --
> 
> Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
>  [PR108243]
> 
> 	PR c++/108243
> 	PR c++/97553
> 
> gcc/cp/ChangeLog:
> 
> 	* cp-gimplify.cc (enum fold_flags): Define.
> 	(fold_flags_t): Declare.
> 	(cp_fold_data::genericize): Replace this data member with ...
> 	(cp_fold_data::fold_flags): ... this.
> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> 	(cp_fold_function): Likewise.
> 	(cp_fold_maybe_rvalue): Likewise.
> 	(cp_fully_fold_init): Likewise.
> 	(fold_cache): Replace with ...
> 	(fold_caches): ... this 2-element array of caches.
> 	(get_fold_cache): Define.
> 	(clear_fold_cache): Adjust.
> 	(cp_fold): Add flags parameter.  Call get_fold_cache.
> 	<case CALL_EXPR>: If ff_mce_false is set, fold
> 	__builtin_is_constant_evaluated to false and pass mce_false to
> 	maybe_constant_value.
> 
> gcc/testsuite/ChangeLog:
> 
> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> ---
>  gcc/cp/cp-gimplify.cc                         | 103 +++++++++++++-----
>  .../g++.dg/opt/is_constant_evaluated1.C       |  15 +++
>  .../g++.dg/opt/is_constant_evaluated2.C       |  32 ++++++
>  3 files changed, 120 insertions(+), 30 deletions(-)
>  create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>  create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index 9929d29981a..01e624bc9de 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>  #include "omp-general.h"
>  #include "opts.h"
>  
> +/* Flags for cp_fold and cp_fold_r.  */
> +
> +enum fold_flags {
> +  ff_none = 0,
> +  /* Whether we're being called from cp_fold_function.  */
> +  ff_genericize = 1 << 0,
> +  /* Whether we're folding at a point where we know we're
> +     definitely not in a manifestly constant-evaluated
> +     context.  */
> +  ff_mce_false = 1 << 1,
> +};
> +
> +using fold_flags_t = int;
> +
>  /* Forward declarations.  */
>  
>  static tree cp_genericize_r (tree *, int *, void *);
>  static tree cp_fold_r (tree *, int *, void *);
>  static void cp_genericize_tree (tree*, bool);
> -static tree cp_fold (tree);
> +static tree cp_fold (tree, fold_flags_t);
>  
>  /* Genericize a TRY_BLOCK.  */
>  
> @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>  struct cp_fold_data
>  {
>    hash_set<tree> pset;
> -  bool genericize; // called from cp_fold_function?
> -
> -  cp_fold_data (bool g): genericize (g) {}
> +  fold_flags_t flags;
> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>  };
>  
>  static tree
> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>        break;
>      }
>  
> -  *stmt_p = stmt = cp_fold (*stmt_p);
> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>  
>    if (data->pset.add (stmt))
>      {
> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>  	 here rather than in cp_genericize to avoid problems with the invisible
>  	 reference transition.  */
>      case INIT_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>  	cp_genericize_init_expr (stmt_p);
>        break;
>  
>      case TARGET_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>  	cp_genericize_target_expr (stmt_p);
>  
>        /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> @@ -1173,7 +1186,10 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>  void
>  cp_fold_function (tree fndecl)
>  {
> -  cp_fold_data data (/*genericize*/true);
> +  /* By now all manifestly-constant-evaluated expressions will have
> +     been constant-evaluated already if possible, so we can safely
> +     pass ff_mce_false.  */
> +  cp_fold_data data (ff_genericize | ff_mce_false);
>    cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>  }
>  
> @@ -2391,7 +2407,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>  {
>    while (true)
>      {
> -      x = cp_fold (x);
> +      x = cp_fold (x, ff_none);
>        if (rval)
>  	x = mark_rvalue_use (x);
>        if (rval && DECL_P (x)
> @@ -2450,7 +2466,7 @@ cp_fully_fold_init (tree x)
>    if (processing_template_decl)
>      return x;
>    x = cp_fully_fold (x);
> -  cp_fold_data data (/*genericize*/false);
> +  cp_fold_data data (ff_mce_false);
>    cp_walk_tree (&x, cp_fold_r, &data, NULL);
>    return x;
>  }
> @@ -2466,15 +2482,29 @@ c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
>    return cp_fold_maybe_rvalue (x, !lval);
>  }
>  
> -static GTY((deletable)) hash_map<tree, tree> *fold_cache;
> +static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
> +
> +/* Subroutine of cp_fold.  Returns which fold cache to use according
> +   to the given flags.  We need multiple caches since the result of
> +   folding may depend on which flags are used.  */
> +
> +static hash_map<tree, tree> *&
> +get_fold_cache (fold_flags_t flags)
> +{
> +  if (flags & ff_mce_false)
> +    return fold_caches[1];
> +  else
> +    return fold_caches[0];
> +}
>  
>  /* Dispose of the whole FOLD_CACHE.  */
>  
>  void
>  clear_fold_cache (void)
>  {
> -  if (fold_cache != NULL)
> -    fold_cache->empty ();
> +  for (auto& fold_cache : fold_caches)
> +    if (fold_cache != NULL)
> +      fold_cache->empty ();
>  }
>  
>  /*  This function tries to fold an expression X.
> @@ -2485,7 +2515,7 @@ clear_fold_cache (void)
>      Function returns X or its folded variant.  */
>  
>  static tree
> -cp_fold (tree x)
> +cp_fold (tree x, fold_flags_t flags)
>  {
>    tree op0, op1, op2, op3;
>    tree org_x = x, r = NULL_TREE;
> @@ -2503,6 +2533,7 @@ cp_fold (tree x)
>    if (DECL_P (x) || CONSTANT_CLASS_P (x))
>      return x;
>  
> +  auto& fold_cache = get_fold_cache (flags);
>    if (fold_cache == NULL)
>      fold_cache = hash_map<tree, tree>::create_ggc (101);
>  
> @@ -2542,7 +2573,7 @@ cp_fold (tree x)
>  	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>  	     folding of the operand should be in the caches and if in cp_fold_r
>  	     it will modify it in place.  */
> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>  	  if (op0 == error_mark_node)
>  	    x = error_mark_node;
>  	  break;
> @@ -2587,7 +2618,7 @@ cp_fold (tree x)
>  	{
>  	  tree p = maybe_undo_parenthesized_ref (x);
>  	  if (p != x)
> -	    return cp_fold (p);
> +	    return cp_fold (p, flags);
>  	}
>        goto unary;
>  
> @@ -2779,8 +2810,8 @@ cp_fold (tree x)
>      case COND_EXPR:
>        loc = EXPR_LOCATION (x);
>        op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));

Whoops, we should also propagate the flags through calls to
cp_fold_rvalue and cp_fold_maybe_rvalue from cp_fold.  The below
version fixes this by adding static overloads of these functions that
additionally take and propagate a fold_flags parameter.
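
In short, it's the usual worker-plus-wrapper arrangement; condensed from
the patch below:

  /* Internal overloads thread the flags through each fold.  */
  static tree
  cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags);

  static tree
  cp_fold_rvalue (tree x, fold_flags_t flags)
  {
    return cp_fold_maybe_rvalue (x, true, flags);
  }

  /* The public entry points keep their existing signatures and pass
     no flags.  */
  tree
  cp_fold_maybe_rvalue (tree x, bool rval)
  {
    return cp_fold_maybe_rvalue (x, rval, ff_none);
  }

  tree
  cp_fold_rvalue (tree x)
  {
    return cp_fold_rvalue (x, ff_none);
  }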

-- >8 --

Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
 [PR108243]

	PR c++/108243
	PR c++/97553

gcc/cp/ChangeLog:

	* cp-gimplify.cc (enum fold_flags): Define.
	(fold_flags_t): Declare.
	(cp_fold_data::genericize): Replace this data member with ...
	(cp_fold_data::fold_flags): ... this.
	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
	(cp_fold_function): Likewise.
	(cp_fold_maybe_rvalue): Add a static overload that takes
	and propagates a fold_flags_t parameter, and define the existing
	public overload in terms of it.
	(cp_fold_rvalue): Likewise.
	(cp_fully_fold_init): Adjust use of cp_fold_data.
	(fold_cache): Replace with ...
	(fold_caches): ... this 2-element array of caches.
	(get_fold_cache): Define.
	(clear_fold_cache): Adjust.
	(cp_fold): Add fold_flags_t parameter.  Call get_fold_cache.
	Pass flags to cp_fold, cp_fold_rvalue and cp_fold_maybe_rvalue.
	<case CALL_EXPR>: If ff_mce_false is set, fold
	__builtin_is_constant_evaluated to false and pass mce_false to
	maybe_constant_value.

gcc/testsuite/ChangeLog:

	* g++.dg/opt/is_constant_evaluated1.C: New test.
	* g++.dg/opt/is_constant_evaluated2.C: New test.
---
 gcc/cp/cp-gimplify.cc                         | 139 ++++++++++++------
 .../g++.dg/opt/is_constant_evaluated1.C       |  15 ++
 .../g++.dg/opt/is_constant_evaluated2.C       |  32 ++++
 3 files changed, 144 insertions(+), 42 deletions(-)
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C

diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
index 9929d29981a..edece6b7a8a 100644
--- a/gcc/cp/cp-gimplify.cc
+++ b/gcc/cp/cp-gimplify.cc
@@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
 #include "omp-general.h"
 #include "opts.h"
 
+/* Flags for cp_fold and cp_fold_r.  */
+
+enum fold_flags {
+  ff_none = 0,
+  /* Whether we're being called from cp_fold_function.  */
+  ff_genericize = 1 << 0,
+  /* Whether we're folding at a point where we know we're
+     definitely not in a manifestly constant-evaluated
+     context.  */
+  ff_mce_false = 1 << 1,
+};
+
+using fold_flags_t = int;
+
 /* Forward declarations.  */
 
 static tree cp_genericize_r (tree *, int *, void *);
 static tree cp_fold_r (tree *, int *, void *);
 static void cp_genericize_tree (tree*, bool);
-static tree cp_fold (tree);
+static tree cp_fold (tree, fold_flags_t);
 
 /* Genericize a TRY_BLOCK.  */
 
@@ -1012,9 +1026,8 @@ struct cp_genericize_data
 struct cp_fold_data
 {
   hash_set<tree> pset;
-  bool genericize; // called from cp_fold_function?
-
-  cp_fold_data (bool g): genericize (g) {}
+  fold_flags_t flags;
+  cp_fold_data (fold_flags_t flags): flags (flags) {}
 };
 
 static tree
@@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
       break;
     }
 
-  *stmt_p = stmt = cp_fold (*stmt_p);
+  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
 
   if (data->pset.add (stmt))
     {
@@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 	 here rather than in cp_genericize to avoid problems with the invisible
 	 reference transition.  */
     case INIT_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_init_expr (stmt_p);
       break;
 
     case TARGET_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_target_expr (stmt_p);
 
       /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
@@ -1173,7 +1186,10 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 void
 cp_fold_function (tree fndecl)
 {
-  cp_fold_data data (/*genericize*/true);
+  /* By now all manifestly-constant-evaluated expressions will have
+     been constant-evaluated already if possible, so we can safely
+     pass ff_mce_false.  */
+  cp_fold_data data (ff_genericize | ff_mce_false);
   cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
 }
 
@@ -2386,12 +2402,12 @@ cxx_omp_disregard_value_expr (tree decl, bool shared)
 
 /* Fold expression X which is used as an rvalue if RVAL is true.  */
 
-tree
-cp_fold_maybe_rvalue (tree x, bool rval)
+static tree
+cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags)
 {
   while (true)
     {
-      x = cp_fold (x);
+      x = cp_fold (x, flags);
       if (rval)
 	x = mark_rvalue_use (x);
       if (rval && DECL_P (x)
@@ -2409,12 +2425,24 @@ cp_fold_maybe_rvalue (tree x, bool rval)
   return x;
 }
 
+tree
+cp_fold_maybe_rvalue (tree x, bool rval)
+{
+  return cp_fold_maybe_rvalue (x, rval, ff_none);
+}
+
 /* Fold expression X which is used as an rvalue.  */
 
+static tree
+cp_fold_rvalue (tree x, fold_flags_t flags)
+{
+  return cp_fold_maybe_rvalue (x, true, flags);
+}
+
 tree
 cp_fold_rvalue (tree x)
 {
-  return cp_fold_maybe_rvalue (x, true);
+  return cp_fold_rvalue (x, ff_none);
 }
 
 /* Perform folding on expression X.  */
@@ -2450,7 +2478,7 @@ cp_fully_fold_init (tree x)
   if (processing_template_decl)
     return x;
   x = cp_fully_fold (x);
-  cp_fold_data data (/*genericize*/false);
+  cp_fold_data data (ff_mce_false);
   cp_walk_tree (&x, cp_fold_r, &data, NULL);
   return x;
 }
@@ -2466,15 +2494,29 @@ c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
   return cp_fold_maybe_rvalue (x, !lval);
 }
 
-static GTY((deletable)) hash_map<tree, tree> *fold_cache;
+static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
+
+/* Subroutine of cp_fold.  Returns which fold cache to use according
+   to the given flags.  We need multiple caches since the result of
+   folding may depend on which flags are used.  */
+
+static hash_map<tree, tree> *&
+get_fold_cache (fold_flags_t flags)
+{
+  if (flags & ff_mce_false)
+    return fold_caches[1];
+  else
+    return fold_caches[0];
+}
 
 /* Dispose of the whole FOLD_CACHE.  */
 
 void
 clear_fold_cache (void)
 {
-  if (fold_cache != NULL)
-    fold_cache->empty ();
+  for (auto& fold_cache : fold_caches)
+    if (fold_cache != NULL)
+      fold_cache->empty ();
 }
 
 /*  This function tries to fold an expression X.
@@ -2485,7 +2527,7 @@ clear_fold_cache (void)
     Function returns X or its folded variant.  */
 
 static tree
-cp_fold (tree x)
+cp_fold (tree x, fold_flags_t flags)
 {
   tree op0, op1, op2, op3;
   tree org_x = x, r = NULL_TREE;
@@ -2503,6 +2545,7 @@ cp_fold (tree x)
   if (DECL_P (x) || CONSTANT_CLASS_P (x))
     return x;
 
+  auto& fold_cache = get_fold_cache (flags);
   if (fold_cache == NULL)
     fold_cache = hash_map<tree, tree>::create_ggc (101);
 
@@ -2517,7 +2560,7 @@ cp_fold (tree x)
     case CLEANUP_POINT_EXPR:
       /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
 	 effects.  */
-      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
+      r = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
       if (!TREE_SIDE_EFFECTS (r))
 	x = r;
       break;
@@ -2542,14 +2585,14 @@ cp_fold (tree x)
 	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
 	     folding of the operand should be in the caches and if in cp_fold_r
 	     it will modify it in place.  */
-	  op0 = cp_fold (TREE_OPERAND (x, 0));
+	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
 	  if (op0 == error_mark_node)
 	    x = error_mark_node;
 	  break;
 	}
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
+      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
 
       if (code == CONVERT_EXPR
 	  && SCALAR_TYPE_P (TREE_TYPE (x))
@@ -2577,7 +2620,7 @@ cp_fold (tree x)
       break;
 
     case EXCESS_PRECISION_EXPR:
-      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
+      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
       x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
       break;
 
@@ -2587,13 +2630,13 @@ cp_fold (tree x)
 	{
 	  tree p = maybe_undo_parenthesized_ref (x);
 	  if (p != x)
-	    return cp_fold (p);
+	    return cp_fold (p, flags);
 	}
       goto unary;
 
     case ADDR_EXPR:
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
+      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false, flags);
 
       /* Cope with user tricks that amount to offsetof.  */
       if (op0 != error_mark_node
@@ -2630,7 +2673,7 @@ cp_fold (tree x)
     unary:
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
+      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
 
     finish_unary:
       if (op0 != TREE_OPERAND (x, 0))
@@ -2657,7 +2700,7 @@ cp_fold (tree x)
       break;
 
     case UNARY_PLUS_EXPR:
-      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
+      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
       if (op0 == error_mark_node)
 	x = error_mark_node;
       else
@@ -2711,8 +2754,8 @@ cp_fold (tree x)
     case RANGE_EXPR: case COMPLEX_EXPR:
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
-      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
+      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
+      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1), flags);
 
       /* decltype(nullptr) has only one value, so optimize away all comparisons
 	 with that type right away, keeping them in the IL causes troubles for
@@ -2778,9 +2821,9 @@ cp_fold (tree x)
     case VEC_COND_EXPR:
     case COND_EXPR:
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
+      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
 
       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
 	{
@@ -2870,7 +2913,7 @@ cp_fold (tree x)
 	      {
 		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
 		  r = build_nop (TREE_TYPE (x), r);
-		x = cp_fold (r);
+		x = cp_fold (r, flags);
 		break;
 	      }
 	  }
@@ -2890,8 +2933,12 @@ cp_fold (tree x)
 	  {
 	    switch (DECL_FE_FUNCTION_CODE (callee))
 	      {
-		/* Defer folding __builtin_is_constant_evaluated.  */
 	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
+		/* Defer folding __builtin_is_constant_evaluated unless
+		   we know this isn't a manifestly constant-evaluated
+		   context.  */
+		if (flags & ff_mce_false)
+		  x = boolean_false_node;
 		break;
 	      case CP_BUILT_IN_SOURCE_LOCATION:
 		x = fold_builtin_source_location (x);
@@ -2924,7 +2971,7 @@ cp_fold (tree x)
 	int m = call_expr_nargs (x);
 	for (int i = 0; i < m; i++)
 	  {
-	    r = cp_fold (CALL_EXPR_ARG (x, i));
+	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
 	    if (r != CALL_EXPR_ARG (x, i))
 	      {
 		if (r == error_mark_node)
@@ -2947,7 +2994,7 @@ cp_fold (tree x)
 
 	if (TREE_CODE (r) != CALL_EXPR)
 	  {
-	    x = cp_fold (r);
+	    x = cp_fold (r, flags);
 	    break;
 	  }
 
@@ -2960,7 +3007,15 @@ cp_fold (tree x)
 	   constant, but the call followed by an INDIRECT_REF is.  */
 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
 	    && !flag_no_inline)
-	  r = maybe_constant_value (x);
+	  {
+	    mce_value manifestly_const_eval = mce_unknown;
+	    if (flags & ff_mce_false)
+	      /* Allow folding __builtin_is_constant_evaluated to false during
+		 constexpr evaluation of this call.  */
+	      manifestly_const_eval = mce_false;
+	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
+				      manifestly_const_eval);
+	  }
 	optimize = sv;
 
         if (TREE_CODE (r) != CALL_EXPR)
@@ -2987,7 +3042,7 @@ cp_fold (tree x)
 	vec<constructor_elt, va_gc> *nelts = NULL;
 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
 	  {
-	    tree op = cp_fold (p->value);
+	    tree op = cp_fold (p->value, flags);
 	    if (op != p->value)
 	      {
 		if (op == error_mark_node)
@@ -3018,7 +3073,7 @@ cp_fold (tree x)
 
 	for (int i = 0; i < n; i++)
 	  {
-	    tree op = cp_fold (TREE_VEC_ELT (x, i));
+	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
 	    if (op != TREE_VEC_ELT (x, i))
 	      {
 		if (!changed)
@@ -3035,10 +3090,10 @@ cp_fold (tree x)
     case ARRAY_RANGE_REF:
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
-      op3 = cp_fold (TREE_OPERAND (x, 3));
+      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
+      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
 
       if (op0 != TREE_OPERAND (x, 0)
 	  || op1 != TREE_OPERAND (x, 1)
@@ -3066,7 +3121,7 @@ cp_fold (tree x)
       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
 	 folding, evaluates to an invariant.  In that case no need to wrap
 	 this folded tree with a SAVE_EXPR.  */
-      r = cp_fold (TREE_OPERAND (x, 0));
+      r = cp_fold (TREE_OPERAND (x, 0), flags);
       if (tree_invariant_p (r))
 	x = r;
       break;
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
new file mode 100644
index 00000000000..983410b9e83
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
@@ -0,0 +1,15 @@
+// PR c++/108243
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+struct A {
+  constexpr A(int n, int m) : n(n), m(m) { }
+  int n, m;
+};
+
+A* f(int n) {
+  static A a = {n, __builtin_is_constant_evaluated()};
+  return &a;
+}
+
+// { dg-final { scan-tree-dump-not "is_constant_evaluated" "original" } }
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
new file mode 100644
index 00000000000..ed964e20a7a
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
@@ -0,0 +1,32 @@
+// PR c++/97553
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+constexpr int foo() {
+  return __builtin_is_constant_evaluated() + 1;
+}
+
+#if __cpp_if_consteval
+constexpr int bar() {
+  if consteval {
+    return 5;
+  } else {
+    return 4;
+  }
+}
+#endif
+
+int p, q;
+
+int main() {
+  p = foo();
+#if __cpp_if_consteval
+  q = bar();
+#endif
+}
+
+// { dg-final { scan-tree-dump "p = 1" "original" } }
+// { dg-final { scan-tree-dump-not "= foo" "original" } }
+
+// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
+// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
  
Jason Merrill Feb. 14, 2023, 11:02 p.m. UTC | #11
On 2/10/23 08:51, Patrick Palka wrote:
> On Fri, 10 Feb 2023, Patrick Palka wrote:
> 
>> On Thu, 9 Feb 2023, Patrick Palka wrote:
>>
>>> On Thu, 9 Feb 2023, Jason Merrill wrote:
>>>
>>>> On 2/9/23 09:36, Patrick Palka wrote:
>>>>> On Sun, 5 Feb 2023, Jason Merrill wrote:
>>>>>
>>>>>> On 2/3/23 15:51, Patrick Palka wrote:
>>>>>>> On Mon, 30 Jan 2023, Jason Merrill wrote:
>>>>>>>
>>>>>>>> On 1/27/23 17:02, Patrick Palka wrote:
>>>>>>>>> This PR illustrates that __builtin_is_constant_evaluated currently
>>>>>>>>> acts
>>>>>>>>> as an optimization barrier for our speculative constexpr evaluation,
>>>>>>>>> since we don't want to prematurely fold the builtin to false if the
>>>>>>>>> expression in question would be later manifestly constant evaluated
>>>>>>>>> (in
>>>>>>>>> which case it must be folded to true).
>>>>>>>>>
>>>>>>>>> This patch fixes this by permitting __builtin_is_constant_evaluated
>>>>>>>>> to get folded as false during cp_fold_function, since at that point
>>>>>>>>> we're sure we're doing manifestly constant evaluation.  To that end
>>>>>>>>> we add a flags parameter to cp_fold that controls what mce_value the
>>>>>>>>> CALL_EXPR case passes to maybe_constant_value.
>>>>>>>>>
>>>>>>>>> bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK
>>>>>>>>> for
>>>>>>>>> trunk?
>>>>>>>>>
>>>>>>>>> 	PR c++/108243
>>>>>>>>>
>>>>>>>>> gcc/cp/ChangeLog:
>>>>>>>>>
>>>>>>>>> 	* cp-gimplify.cc (enum fold_flags): Define.
>>>>>>>>> 	(cp_fold_data::genericize): Replace this data member with ...
>>>>>>>>> 	(cp_fold_data::fold_flags): ... this.
>>>>>>>>> 	(cp_fold_r): Adjust cp_fold_data use and cp_fold calls.
>>>>>>>>> 	(cp_fold_function): Likewise.
>>>>>>>>> 	(cp_fold_maybe_rvalue): Likewise.
>>>>>>>>> 	(cp_fully_fold_init): Likewise.
>>>>>>>>> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
>>>>>>>>> 	isn't empty.
>>>>>>>>> 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
>>>>>>>>> 	if ff_genericize is set.
>>>>>>>>>
>>>>>>>>> gcc/testsuite/ChangeLog:
>>>>>>>>>
>>>>>>>>> 	* g++.dg/opt/pr108243.C: New test.
>>>>>>>>> ---
>>>>>>>>>      gcc/cp/cp-gimplify.cc               | 76
>>>>>>>>> ++++++++++++++++++-----------
>>>>>>>>>      gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
>>>>>>>>>      2 files changed, 76 insertions(+), 29 deletions(-)
>>>>>>>>>      create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
>>>>>>>>>
>>>>>>>>> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
>>>>>>>>> index a35cedd05cc..d023a63768f 100644
>>>>>>>>> --- a/gcc/cp/cp-gimplify.cc
>>>>>>>>> +++ b/gcc/cp/cp-gimplify.cc
>>>>>>>>> @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not
>>>>>>>>> see
>>>>>>>>>      #include "omp-general.h"
>>>>>>>>>      #include "opts.h"
>>>>>>>>>      +/* Flags for cp_fold and cp_fold_r.  */
>>>>>>>>> +
>>>>>>>>> +enum fold_flags {
>>>>>>>>> +  ff_none = 0,
>>>>>>>>> +  /* Whether we're being called from cp_fold_function.  */
>>>>>>>>> +  ff_genericize = 1 << 0,
>>>>>>>>> +};
>>>>>>>>> +
>>>>>>>>>      /* Forward declarations.  */
>>>>>>>>>        static tree cp_genericize_r (tree *, int *, void *);
>>>>>>>>>      static tree cp_fold_r (tree *, int *, void *);
>>>>>>>>>      static void cp_genericize_tree (tree*, bool);
>>>>>>>>> -static tree cp_fold (tree);
>>>>>>>>> +static tree cp_fold (tree, fold_flags);
>>>>>>>>>        /* Genericize a TRY_BLOCK.  */
>>>>>>>>>      @@ -996,9 +1004,8 @@ struct cp_genericize_data
>>>>>>>>>      struct cp_fold_data
>>>>>>>>>      {
>>>>>>>>>        hash_set<tree> pset;
>>>>>>>>> -  bool genericize; // called from cp_fold_function?
>>>>>>>>> -
>>>>>>>>> -  cp_fold_data (bool g): genericize (g) {}
>>>>>>>>> +  fold_flags flags;
>>>>>>>>> +  cp_fold_data (fold_flags flags): flags (flags) {}
>>>>>>>>>      };
>>>>>>>>>        static tree
>>>>>>>>> @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
>>>>>>>>> void
>>>>>>>>> *data_)
>>>>>>>>>            break;
>>>>>>>>>          }
>>>>>>>>>      -  *stmt_p = stmt = cp_fold (*stmt_p);
>>>>>>>>> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>>>>>>>>>          if (data->pset.add (stmt))
>>>>>>>>>          {
>>>>>>>>> @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
>>>>>>>>> void
>>>>>>>>> *data_)
>>>>>>>>>      	 here rather than in cp_genericize to avoid problems with the
>>>>>>>>> invisible
>>>>>>>>>      	 reference transition.  */
>>>>>>>>>          case INIT_EXPR:
>>>>>>>>> -      if (data->genericize)
>>>>>>>>> +      if (data->flags & ff_genericize)
>>>>>>>>>      	cp_genericize_init_expr (stmt_p);
>>>>>>>>>            break;
>>>>>>>>>            case TARGET_EXPR:
>>>>>>>>> -      if (data->genericize)
>>>>>>>>> +      if (data->flags & ff_genericize)
>>>>>>>>>      	cp_genericize_target_expr (stmt_p);
>>>>>>>>>              /* Folding might replace e.g. a COND_EXPR with a
>>>>>>>>> TARGET_EXPR;
>>>>>>>>> in
>>>>>>>>> @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
>>>>>>>>> void
>>>>>>>>> *data_)
>>>>>>>>>      void
>>>>>>>>>      cp_fold_function (tree fndecl)
>>>>>>>>>      {
>>>>>>>>> -  cp_fold_data data (/*genericize*/true);
>>>>>>>>> +  cp_fold_data data (ff_genericize);
>>>>>>>>>        cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data,
>>>>>>>>> NULL);
>>>>>>>>>      }
>>>>>>>>>      @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>>>>>>>>>      {
>>>>>>>>>        while (true)
>>>>>>>>>          {
>>>>>>>>> -      x = cp_fold (x);
>>>>>>>>> +      x = cp_fold (x, ff_none);
>>>>>>>>>            if (rval)
>>>>>>>>>      	x = mark_rvalue_use (x);
>>>>>>>>>            if (rval && DECL_P (x)
>>>>>>>>> @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
>>>>>>>>>        if (processing_template_decl)
>>>>>>>>>          return x;
>>>>>>>>>        x = cp_fully_fold (x);
>>>>>>>>> -  cp_fold_data data (/*genericize*/false);
>>>>>>>>> +  cp_fold_data data (ff_none);
>>>>>>>>>        cp_walk_tree (&x, cp_fold_r, &data, NULL);
>>>>>>>>>        return x;
>>>>>>>>>      }
>>>>>>>>> @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
>>>>>>>>>          Function returns X or its folded variant.  */
>>>>>>>>>        static tree
>>>>>>>>> -cp_fold (tree x)
>>>>>>>>> +cp_fold (tree x, fold_flags flags)
>>>>>>>>>      {
>>>>>>>>>        tree op0, op1, op2, op3;
>>>>>>>>>        tree org_x = x, r = NULL_TREE;
>>>>>>>>> @@ -2490,8 +2497,11 @@ cp_fold (tree x)
>>>>>>>>>        if (fold_cache == NULL)
>>>>>>>>>          fold_cache = hash_map<tree, tree>::create_ggc (101);
>>>>>>>>>      -  if (tree *cached = fold_cache->get (x))
>>>>>>>>> -    return *cached;
>>>>>>>>> +  bool cache_p = (flags == ff_none);
>>>>>>>>> +
>>>>>>>>> +  if (cache_p)
>>>>>>>>> +    if (tree *cached = fold_cache->get (x))
>>>>>>>>> +      return *cached;
>>>>>>>>>          uid_sensitive_constexpr_evaluation_checker c;
>>>>>>>>>      @@ -2526,7 +2536,7 @@ cp_fold (tree x)
>>>>>>>>>      	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
>>>>>>>>> the
>>>>>>>>>      	     folding of the operand should be in the caches and if in
>>>>>>>>> cp_fold_r
>>>>>>>>>      	     it will modify it in place.  */
>>>>>>>>> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
>>>>>>>>> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>>>>>>      	  if (op0 == error_mark_node)
>>>>>>>>>      	    x = error_mark_node;
>>>>>>>>>      	  break;
>>>>>>>>> @@ -2571,7 +2581,7 @@ cp_fold (tree x)
>>>>>>>>>      	{
>>>>>>>>>      	  tree p = maybe_undo_parenthesized_ref (x);
>>>>>>>>>      	  if (p != x)
>>>>>>>>> -	    return cp_fold (p);
>>>>>>>>> +	    return cp_fold (p, flags);
>>>>>>>>>      	}
>>>>>>>>>            goto unary;
>>>>>>>>>      @@ -2763,8 +2773,8 @@ cp_fold (tree x)
>>>>>>>>>          case COND_EXPR:
>>>>>>>>>            loc = EXPR_LOCATION (x);
>>>>>>>>>            op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
>>>>>>>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>>>>>>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>>>>>>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>>>>>>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>>>>>>>              if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>>>>>>>>>      	{
>>>>>>>>> @@ -2854,7 +2864,7 @@ cp_fold (tree x)
>>>>>>>>>      	      {
>>>>>>>>>      		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>>>>>>>>>      		  r = build_nop (TREE_TYPE (x), r);
>>>>>>>>> -		x = cp_fold (r);
>>>>>>>>> +		x = cp_fold (r, flags);
>>>>>>>>>      		break;
>>>>>>>>>      	      }
>>>>>>>>>      	  }
>>>>>>>>> @@ -2908,7 +2918,7 @@ cp_fold (tree x)
>>>>>>>>>      	int m = call_expr_nargs (x);
>>>>>>>>>      	for (int i = 0; i < m; i++)
>>>>>>>>>      	  {
>>>>>>>>> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
>>>>>>>>> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>>>>>>>>>      	    if (r != CALL_EXPR_ARG (x, i))
>>>>>>>>>      	      {
>>>>>>>>>      		if (r == error_mark_node)
>>>>>>>>> @@ -2931,7 +2941,7 @@ cp_fold (tree x)
>>>>>>>>>        	if (TREE_CODE (r) != CALL_EXPR)
>>>>>>>>>      	  {
>>>>>>>>> -	    x = cp_fold (r);
>>>>>>>>> +	    x = cp_fold (r, flags);
>>>>>>>>>      	    break;
>>>>>>>>>      	  }
>>>>>>>>>      @@ -2944,7 +2954,15 @@ cp_fold (tree x)
>>>>>>>>>      	   constant, but the call followed by an INDIRECT_REF is.  */
>>>>>>>>>      	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>>>>>>>>>      	    && !flag_no_inline)
>>>>>>>>> -	  r = maybe_constant_value (x);
>>>>>>>>> +	  {
>>>>>>>>> +	    mce_value manifestly_const_eval = mce_unknown;
>>>>>>>>> +	    if (flags & ff_genericize)
>>>>>>>>> +	      /* At genericization time it's safe to fold
>>>>>>>>> +		 __builtin_is_constant_evaluated to false.  */
>>>>>>>>> +	      manifestly_const_eval = mce_false;
>>>>>>>>> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
>>>>>>>>> +				      manifestly_const_eval);
>>>>>>>>> +	  }
>>>>>>>>>      	optimize = sv;
>>>>>>>>>                if (TREE_CODE (r) != CALL_EXPR)
>>>>>>>>> @@ -2971,7 +2989,7 @@ cp_fold (tree x)
>>>>>>>>>      	vec<constructor_elt, va_gc> *nelts = NULL;
>>>>>>>>>      	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>>>>>>>>>      	  {
>>>>>>>>> -	    tree op = cp_fold (p->value);
>>>>>>>>> +	    tree op = cp_fold (p->value, flags);
>>>>>>>>>      	    if (op != p->value)
>>>>>>>>>      	      {
>>>>>>>>>      		if (op == error_mark_node)
>>>>>>>>> @@ -3002,7 +3020,7 @@ cp_fold (tree x)
>>>>>>>>>        	for (int i = 0; i < n; i++)
>>>>>>>>>      	  {
>>>>>>>>> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
>>>>>>>>> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>>>>>>>>>      	    if (op != TREE_VEC_ELT (x, i))
>>>>>>>>>      	      {
>>>>>>>>>      		if (!changed)
>>>>>>>>> @@ -3019,10 +3037,10 @@ cp_fold (tree x)
>>>>>>>>>          case ARRAY_RANGE_REF:
>>>>>>>>>              loc = EXPR_LOCATION (x);
>>>>>>>>> -      op0 = cp_fold (TREE_OPERAND (x, 0));
>>>>>>>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>>>>>>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>>>>>>>> -      op3 = cp_fold (TREE_OPERAND (x, 3));
>>>>>>>>> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>>>>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>>>>>>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>>>>>>> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>>>>>>>>>              if (op0 != TREE_OPERAND (x, 0)
>>>>>>>>>      	  || op1 != TREE_OPERAND (x, 1)
>>>>>>>>> @@ -3050,7 +3068,7 @@ cp_fold (tree x)
>>>>>>>>>            /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
>>>>>>>>>      	 folding, evaluates to an invariant.  In that case no need to wrap
>>>>>>>>>      	 this folded tree with a SAVE_EXPR.  */
>>>>>>>>> -      r = cp_fold (TREE_OPERAND (x, 0));
>>>>>>>>> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>>>>>>            if (tree_invariant_p (r))
>>>>>>>>>      	x = r;
>>>>>>>>>            break;
>>>>>>>>> @@ -3069,7 +3087,7 @@ cp_fold (tree x)
>>>>>>>>>            copy_warning (x, org_x);
>>>>>>>>>          }
>>>>>>>>>      -  if (!c.evaluation_restricted_p ())
>>>>>>>>> +  if (cache_p && !c.evaluation_restricted_p ())
>>>>>>>>>          {
>>>>>>>>>            fold_cache->put (org_x, x);
>>>>>>>>>            /* Prevent that we try to fold an already folded result again.  */
>>>>>>>>> diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C b/gcc/testsuite/g++.dg/opt/pr108243.C
>>>>>>>>> new file mode 100644
>>>>>>>>> index 00000000000..4c45dbba13c
>>>>>>>>> --- /dev/null
>>>>>>>>> +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
>>>>>>>>> @@ -0,0 +1,29 @@
>>>>>>>>> +// PR c++/108243
>>>>>>>>> +// { dg-do compile { target c++11 } }
>>>>>>>>> +// { dg-additional-options "-O -fdump-tree-original" }
>>>>>>>>> +
>>>>>>>>> +constexpr int foo() {
>>>>>>>>> +  return __builtin_is_constant_evaluated() + 1;
>>>>>>>>> +}
>>>>>>>>> +
>>>>>>>>> +#if __cpp_if_consteval
>>>>>>>>> +constexpr int bar() {
>>>>>>>>> +  if consteval {
>>>>>>>>> +    return 5;
>>>>>>>>> +  } else {
>>>>>>>>> +    return 4;
>>>>>>>>> +  }
>>>>>>>>> +}
>>>>>>>>> +#endif
>>>>>>>>> +
>>>>>>>>> +int p, q;
>>>>>>>>> +
>>>>>>>>> +int main() {
>>>>>>>>> +  p = foo();
>>>>>>>>> +#if __cpp_if_consteval
>>>>>>>>> +  q = bar();
>>>>>>>>> +#endif
>>>>>>>>> +}
>>>>>>>>> +
>>>>>>>>> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
>>>>>>>>> +// { dg-final { scan-tree-dump-not "= bar" "original" } }
>>>>>>>>
>>>>>>>> Let's also test a static initializer that can't be fully
>>>>>>>> constant-evaluated.
>>>>>>>
>>>>>>> D'oh, doing so revealed that cp_fold_function doesn't reach static
>>>>>>> initializers; that's taken care of by cp_fully_fold_init.  So it seems
>>>>>>> we need to make cp_fold, when called from the latter entry point, also
>>>>>>> assume m_c_e is false.  We can't reuse ff_genericize here because that
>>>>>>> flag has additional effects in cp_fold_r, so it seems we need another
>>>>>>> flag that only affects the manifestly constant-eval stuff; I called
>>>>>>> it ff_mce_false.  How does the following look?
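(For concreteness, the kind of static initializer at issue -- mirrored by
the new is_constant_evaluated1.C test in the patch below -- is a sketch
like:

  struct A {
    constexpr A(int n, int m) : n(n), m(m) { }
    int n, m;
  };

  A* f(int n) {
    // The initializer depends on the runtime parameter n, so it can't be
    // fully constant-evaluated; it is folded via cp_fully_fold_init, where
    // __builtin_is_constant_evaluated must become false rather than stay
    // unfolded.
    static A a = {n, __builtin_is_constant_evaluated()};
    return &a;
  }
)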
>>>>>>>
>>>>>>> -- >8 --
>>>>>>>
>>>>>>> Subject: [PATCH 2/2] c++: speculative constexpr and
>>>>>>> is_constant_evaluated
>>>>>>>     [PR108243]
>>>>>>>
>>>>>>> This PR illustrates that __builtin_is_constant_evaluated currently acts
>>>>>>> as an optimization barrier for our speculative constexpr evaluation,
>>>>>>> since we don't want to prematurely fold the builtin to false if the
>>>>>>> expression in question would be later manifestly constant evaluated (in
>>>>>>> which case it must be folded to true).
>>>>>>>
>>>>>>> This patch fixes this by permitting __builtin_is_constant_evaluated
>>>>>>> to get folded as false during cp_fold_function and cp_fully_fold_init,
>>>>>>> since at these points we're sure we're done with manifestly constant
>>>>>>> evaluation.  To that end we add a flags parameter to cp_fold that
>>>>>>> controls whether we pass mce_false or mce_unknown to
>>>>>>> maybe_constant_value when folding a CALL_EXPR.
>>>>>>>
>>>>>>> 	PR c++/108243
>>>>>>> 	PR c++/97553
>>>>>>>
>>>>>>> gcc/cp/ChangeLog:
>>>>>>>
>>>>>>> 	* cp-gimplify.cc (enum fold_flags): Define.
>>>>>>> 	(cp_fold_data::genericize): Replace this data member with ...
>>>>>>> 	(cp_fold_data::fold_flags): ... this.
>>>>>>> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
>>>>>>> 	(cp_fold_function): Likewise.
>>>>>>> 	(cp_fold_maybe_rvalue): Likewise.
>>>>>>> 	(cp_fully_fold_init): Likewise.
>>>>>>> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
>>>>>>> 	isn't empty.
>>>>>>> 	<case CALL_EXPR>: If ff_genericize is set, fold
>>>>>>> 	__builtin_is_constant_evaluated to false and pass mce_false to
>>>>>>> 	maybe_constant_value.
>>>>>>>
>>>>>>> gcc/testsuite/ChangeLog:
>>>>>>>
>>>>>>> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
>>>>>>> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
>>>>>>> ---
>>>>>>>     gcc/cp/cp-gimplify.cc                         | 88 ++++++++++++-------
>>>>>>>     .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
>>>>>>>     .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
>>>>>>>     3 files changed, 104 insertions(+), 30 deletions(-)
>>>>>>>     create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>>>>>>>     create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
>>>>>>>
>>>>>>> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
>>>>>>> index 9929d29981a..590ed787997 100644
>>>>>>> --- a/gcc/cp/cp-gimplify.cc
>>>>>>> +++ b/gcc/cp/cp-gimplify.cc
>>>>>>> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>>>>>>>     #include "omp-general.h"
>>>>>>>     #include "opts.h"
>>>>>>>     +/* Flags for cp_fold and cp_fold_r.  */
>>>>>>> +
>>>>>>> +enum fold_flags {
>>>>>>> +  ff_none = 0,
>>>>>>> +  /* Whether we're being called from cp_fold_function.  */
>>>>>>> +  ff_genericize = 1 << 0,
>>>>>>> +  /* Whether we're folding late enough that we could assume
>>>>>>> +     we're definitely not in a manifestly constant-evaluated
>>>>>>> +     context.  */
>>>>>>
>>>>>> It's not necessarily a matter of late enough; we could fold sooner and
>>>>>> still know that, as in cp_fully_fold_init.  We could do the same at
>>>>>> other full-expression points, but we don't because we want to delay
>>>>>> folding as much as possible.  So let's say "folding at a point where we
>>>>>> know we're..."
>>>>>>
>>>>>>> +  ff_mce_false = 1 << 1,
>>>>>>> +};
>>>>>>> +
>>>>>>> +using fold_flags_t = int;
>>>>>>> +
>>>>>>>     /* Forward declarations.  */
>>>>>>>       static tree cp_genericize_r (tree *, int *, void *);
>>>>>>>     static tree cp_fold_r (tree *, int *, void *);
>>>>>>>     static void cp_genericize_tree (tree*, bool);
>>>>>>> -static tree cp_fold (tree);
>>>>>>> +static tree cp_fold (tree, fold_flags_t);
>>>>>>>       /* Genericize a TRY_BLOCK.  */
>>>>>>>     @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>>>>>>>     struct cp_fold_data
>>>>>>>     {
>>>>>>>       hash_set<tree> pset;
>>>>>>> -  bool genericize; // called from cp_fold_function?
>>>>>>> -
>>>>>>> -  cp_fold_data (bool g): genericize (g) {}
>>>>>>> +  fold_flags_t flags;
>>>>>>> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>>>>>>>     };
>>>>>>>       static tree
>>>>>>> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>>>>>>>           break;
>>>>>>>         }
>>>>>>>     -  *stmt_p = stmt = cp_fold (*stmt_p);
>>>>>>> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>>>>>>>         if (data->pset.add (stmt))
>>>>>>>         {
>>>>>>> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>>>>>>>     	 here rather than in cp_genericize to avoid problems with the invisible
>>>>>>>     	 reference transition.  */
>>>>>>>         case INIT_EXPR:
>>>>>>> -      if (data->genericize)
>>>>>>> +      if (data->flags & ff_genericize)
>>>>>>>     	cp_genericize_init_expr (stmt_p);
>>>>>>>           break;
>>>>>>>           case TARGET_EXPR:
>>>>>>> -      if (data->genericize)
>>>>>>> +      if (data->flags & ff_genericize)
>>>>>>>     	cp_genericize_target_expr (stmt_p);
>>>>>>>             /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
>>>>>>> @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>>>>>>>     void
>>>>>>>     cp_fold_function (tree fndecl)
>>>>>>>     {
>>>>>>> -  cp_fold_data data (/*genericize*/true);
>>>>>>> +  cp_fold_data data (ff_genericize | ff_mce_false);
>>>>>>
>>>>>> Here would be a good place for a comment about passing mce_false
>>>>>> because all manifestly-constant-evaluated expressions will have been
>>>>>> constant-evaluated already if possible.
>>>>>>
>>>>>>>       cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>>>>>>>     }
>>>>>>>     @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>>>>>>>     {
>>>>>>>       while (true)
>>>>>>>         {
>>>>>>> -      x = cp_fold (x);
>>>>>>> +      x = cp_fold (x, ff_none);
>>>>>>>           if (rval)
>>>>>>>     	x = mark_rvalue_use (x);
>>>>>>>           if (rval && DECL_P (x)
>>>>>>> @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
>>>>>>>       if (processing_template_decl)
>>>>>>>         return x;
>>>>>>>       x = cp_fully_fold (x);
>>>>>>> -  cp_fold_data data (/*genericize*/false);
>>>>>>> +  cp_fold_data data (ff_mce_false);
>>>>>>>       cp_walk_tree (&x, cp_fold_r, &data, NULL);
>>>>>>>       return x;
>>>>>>>     }
>>>>>>> @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
>>>>>>>         Function returns X or its folded variant.  */
>>>>>>>       static tree
>>>>>>> -cp_fold (tree x)
>>>>>>> +cp_fold (tree x, fold_flags_t flags)
>>>>>>>     {
>>>>>>>       tree op0, op1, op2, op3;
>>>>>>>       tree org_x = x, r = NULL_TREE;
>>>>>>> @@ -2506,8 +2519,11 @@ cp_fold (tree x)
>>>>>>>       if (fold_cache == NULL)
>>>>>>>         fold_cache = hash_map<tree, tree>::create_ggc (101);
>>>>>>>     -  if (tree *cached = fold_cache->get (x))
>>>>>>> -    return *cached;
>>>>>>> +  bool cache_p = (flags == ff_none);
>>>>>>> +
>>>>>>> +  if (cache_p)
>>>>>>> +    if (tree *cached = fold_cache->get (x))
>>>>>>> +      return *cached;
>>>>>>>         uid_sensitive_constexpr_evaluation_checker c;
>>>>>>>     @@ -2542,7 +2558,7 @@ cp_fold (tree x)
>>>>>>>     	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>>>>>>>     	     folding of the operand should be in the caches and if in cp_fold_r
>>>>>>>     	     it will modify it in place.  */
>>>>>>> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
>>>>>>> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>>>>     	  if (op0 == error_mark_node)
>>>>>>>     	    x = error_mark_node;
>>>>>>>     	  break;
>>>>>>> @@ -2587,7 +2603,7 @@ cp_fold (tree x)
>>>>>>>     	{
>>>>>>>     	  tree p = maybe_undo_parenthesized_ref (x);
>>>>>>>     	  if (p != x)
>>>>>>> -	    return cp_fold (p);
>>>>>>> +	    return cp_fold (p, flags);
>>>>>>>     	}
>>>>>>>           goto unary;
>>>>>>>     @@ -2779,8 +2795,8 @@ cp_fold (tree x)
>>>>>>>         case COND_EXPR:
>>>>>>>           loc = EXPR_LOCATION (x);
>>>>>>>           op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
>>>>>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>>>>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>>>>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>>>>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>>>>>             if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>>>>>>>     	{
>>>>>>> @@ -2870,7 +2886,7 @@ cp_fold (tree x)
>>>>>>>     	      {
>>>>>>>     		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>>>>>>>     		  r = build_nop (TREE_TYPE (x), r);
>>>>>>> -		x = cp_fold (r);
>>>>>>> +		x = cp_fold (r, flags);
>>>>>>>     		break;
>>>>>>>     	      }
>>>>>>>     	  }
>>>>>>> @@ -2890,8 +2906,12 @@ cp_fold (tree x)
>>>>>>>     	  {
>>>>>>>     	    switch (DECL_FE_FUNCTION_CODE (callee))
>>>>>>>     	      {
>>>>>>> -		/* Defer folding __builtin_is_constant_evaluated.  */
>>>>>>>     	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
>>>>>>> +		/* Defer folding __builtin_is_constant_evaluated unless
>>>>>>> +		   we can assume this isn't a manifestly constant-evaluated
>>>>>>
>>>>>> s/can assume/know/
>>>>>>
>>>>>> OK with those comment changes.
>>>>>
>>>>> Thanks a lot.  Unfortunately I think the patch has a significant problem
>>>>> that only just occurred to me -- disabling the cp_fold cache when the
>>>>> flag ff_mce_false is set effectively makes cp_fold_function and
>>>>> cp_fully_fold_init quadratic in the size of the expression (since
>>>>> cp_fold_r calls cp_fold on each subtree, and cp_fold when the cache is
>>>>> disabled will end up fully walking each subtree).  Note that the reason
>>>>> we must disable the cache is because cp_fold with ff_mce_false might
>>>>> give a different folded result than without that flag if the expression
>>>>> contains a suitable CALL_EXPR subexpression.
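(To illustrate the complexity concern, a hypothetical worst case, not taken
from the thread:

  // cp_fold_r walks every statement and calls cp_fold on each subtree:
  // first on ((((a + b) + c) + d) + e), then on (((a + b) + c) + d), and
  // so on.  With the cache disabled, each cp_fold call re-folds all of its
  // operands from scratch, so an expression with n leaves does O(n^2)
  // subtree visits instead of the O(n) we get when results are cached.
  int f (int a, int b, int c, int d, int e)
  {
    return ((((a + b) + c) + d) + e);
  }
)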
>>>>
>>>> Good point.
>>>>
>>>>> One approach to fix this complexity issue would be to parameterize the
>>>>> cache according to the flags that were passed to cp_fold, which would
>>>>> allow us to keep the cache enabled when ff_mce_false is set.  A downside
>>>>> to this approach is that the size of the cp_fold cache would essentially
>>>>> double since for each tree we'd now have two cache entries, one for
>>>>> flags=ff_none and another for flags=ff_mce_false.
>>>>
>>>> We could also clear the cache before cp_fold_function since the two folds
>>>> shouldn't overlap (much).
>>>
>>> Makes sense, but IIUC we'd also have to clear it before (and after)
>>> cp_fully_fold_init too, which unlike cp_fold_function may get called
>>> in the middle of a function body.
>>
>> Ah sorry, I think I misunderstood your idea.  Clearing the cache between
>> cp_fold_function calls would definitely help with controlling the size of
>> the cache, and indeed there shouldn't be much overlap because there isn't
>> much sharing of expression trees across function bodies.
>>
>> However, I was curious about how big the fold_cache gets in practice,
>> and it turns out it doesn't get very big at all since we regularly clear
>> the fold_cache via clear_cv_and_fold_caches anyway.  According to my
>> experiments it doesn't get larger than about ~10k elements.  So a
>> doubling of that is pretty much insignificant.
>>
>> So ISTM parameterizing the cache is the way to go.  How does the
>> following look?
>>
>> -- >8 --
>>
>> Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
>>   [PR108243]
>>
>> 	PR c++/108243
>> 	PR c++/97553
>>
>> gcc/cp/ChangeLog:
>>
>> 	* cp-gimplify.cc (enum fold_flags): Define.
>> 	(fold_flags_t): Declare.
>> 	(cp_fold_data::genericize): Replace this data member with ...
>> 	(cp_fold_data::fold_flags): ... this.
>> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
>> 	(cp_fold_function): Likewise.
>> 	(cp_fold_maybe_rvalue): Likewise.
>> 	(cp_fully_fold_init): Likewise.
>> 	(fold_cache): Replace with ...
>> 	(fold_caches): ... this 2-element array of caches.
>> 	(get_fold_cache): Define.
>> 	(clear_fold_cache): Adjust.
>> 	(cp_fold): Add flags parameter.  Call get_fold_cache.
>> 	<case CALL_EXPR>: If ff_mce_false is set, fold
>> 	__builtin_is_constant_evaluated to false and pass mce_false to
>> 	maybe_constant_value.
>>
>> gcc/testsuite/ChangeLog:
>>
>> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
>> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
>> ---
>>   gcc/cp/cp-gimplify.cc                         | 103 +++++++++++++-----
>>   .../g++.dg/opt/is_constant_evaluated1.C       |  15 +++
>>   .../g++.dg/opt/is_constant_evaluated2.C       |  32 ++++++
>>   3 files changed, 120 insertions(+), 30 deletions(-)
>>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
>>
>> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
>> index 9929d29981a..01e624bc9de 100644
>> --- a/gcc/cp/cp-gimplify.cc
>> +++ b/gcc/cp/cp-gimplify.cc
>> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>>   #include "omp-general.h"
>>   #include "opts.h"
>>   
>> +/* Flags for cp_fold and cp_fold_r.  */
>> +
>> +enum fold_flags {
>> +  ff_none = 0,
>> +  /* Whether we're being called from cp_fold_function.  */
>> +  ff_genericize = 1 << 0,
>> +  /* Whether we're folding a point where we know we're
>> +     definitely not in a manifestly constant-evaluated
>> +     context.  */
>> +  ff_mce_false = 1 << 1,
>> +};
>> +
>> +using fold_flags_t = int;
>> +
>>   /* Forward declarations.  */
>>   
>>   static tree cp_genericize_r (tree *, int *, void *);
>>   static tree cp_fold_r (tree *, int *, void *);
>>   static void cp_genericize_tree (tree*, bool);
>> -static tree cp_fold (tree);
>> +static tree cp_fold (tree, fold_flags_t);
>>   
>>   /* Genericize a TRY_BLOCK.  */
>>   
>> @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>>   struct cp_fold_data
>>   {
>>     hash_set<tree> pset;
>> -  bool genericize; // called from cp_fold_function?
>> -
>> -  cp_fold_data (bool g): genericize (g) {}
>> +  fold_flags_t flags;
>> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>>   };
>>   
>>   static tree
>> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>>         break;
>>       }
>>   
>> -  *stmt_p = stmt = cp_fold (*stmt_p);
>> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>>   
>>     if (data->pset.add (stmt))
>>       {
>> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>>   	 here rather than in cp_genericize to avoid problems with the invisible
>>   	 reference transition.  */
>>       case INIT_EXPR:
>> -      if (data->genericize)
>> +      if (data->flags & ff_genericize)
>>   	cp_genericize_init_expr (stmt_p);
>>         break;
>>   
>>       case TARGET_EXPR:
>> -      if (data->genericize)
>> +      if (data->flags & ff_genericize)
>>   	cp_genericize_target_expr (stmt_p);
>>   
>>         /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
>> @@ -1173,7 +1186,10 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>>   void
>>   cp_fold_function (tree fndecl)
>>   {
>> -  cp_fold_data data (/*genericize*/true);
>> +  /* By now all manifestly-constant-evaluated expressions will have
>> +     been constant-evaluated already if possible, so we can safely
>> +     pass ff_mce_false.  */
>> +  cp_fold_data data (ff_genericize | ff_mce_false);
>>     cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>>   }
>>   
>> @@ -2391,7 +2407,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>>   {
>>     while (true)
>>       {
>> -      x = cp_fold (x);
>> +      x = cp_fold (x, ff_none);
>>         if (rval)
>>   	x = mark_rvalue_use (x);
>>         if (rval && DECL_P (x)
>> @@ -2450,7 +2466,7 @@ cp_fully_fold_init (tree x)
>>     if (processing_template_decl)
>>       return x;
>>     x = cp_fully_fold (x);
>> -  cp_fold_data data (/*genericize*/false);
>> +  cp_fold_data data (ff_mce_false);
>>     cp_walk_tree (&x, cp_fold_r, &data, NULL);
>>     return x;
>>   }
>> @@ -2466,15 +2482,29 @@ c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
>>     return cp_fold_maybe_rvalue (x, !lval);
>>   }
>>   
>> -static GTY((deletable)) hash_map<tree, tree> *fold_cache;
>> +static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
>> +
>> +/* Subroutine of cp_fold.  Returns which fold cache to use according
>> +   to the given flags.  We need multiple caches since the result of
>> +   folding may depend on which flags are used.  */
>> +
>> +static hash_map<tree, tree> *&
>> +get_fold_cache (fold_flags_t flags)
>> +{
>> +  if (flags & ff_mce_false)
>> +    return fold_caches[1];
>> +  else
>> +    return fold_caches[0];
>> +}
>>   
>>   /* Dispose of the whole FOLD_CACHE.  */
>>   
>>   void
>>   clear_fold_cache (void)
>>   {
>> -  if (fold_cache != NULL)
>> -    fold_cache->empty ();
>> +  for (auto& fold_cache : fold_caches)
>> +    if (fold_cache != NULL)
>> +      fold_cache->empty ();
>>   }
>>   
>>   /*  This function tries to fold an expression X.
>> @@ -2485,7 +2515,7 @@ clear_fold_cache (void)
>>       Function returns X or its folded variant.  */
>>   
>>   static tree
>> -cp_fold (tree x)
>> +cp_fold (tree x, fold_flags_t flags)
>>   {
>>     tree op0, op1, op2, op3;
>>     tree org_x = x, r = NULL_TREE;
>> @@ -2503,6 +2533,7 @@ cp_fold (tree x)
>>     if (DECL_P (x) || CONSTANT_CLASS_P (x))
>>       return x;
>>   
>> +  auto& fold_cache = get_fold_cache (flags);
>>     if (fold_cache == NULL)
>>       fold_cache = hash_map<tree, tree>::create_ggc (101);
>>   
>> @@ -2542,7 +2573,7 @@ cp_fold (tree x)
>>   	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>>   	     folding of the operand should be in the caches and if in cp_fold_r
>>   	     it will modify it in place.  */
>> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
>> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>   	  if (op0 == error_mark_node)
>>   	    x = error_mark_node;
>>   	  break;
>> @@ -2587,7 +2618,7 @@ cp_fold (tree x)
>>   	{
>>   	  tree p = maybe_undo_parenthesized_ref (x);
>>   	  if (p != x)
>> -	    return cp_fold (p);
>> +	    return cp_fold (p, flags);
>>   	}
>>         goto unary;
>>   
>> @@ -2779,8 +2810,8 @@ cp_fold (tree x)
>>       case COND_EXPR:
>>         loc = EXPR_LOCATION (x);
>>         op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> 
> Whoops, we should also propagate the flags through calls to
> cp_fold_rvalue and cp_fold_maybe_rvalue from cp_fold.  The below
> version fixes this by adding static overloads of these functions that
> additionally take and propagate a fold_flags parameter.

OK.

> -- >8 --
> 
> Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
>   [PR108243]
> 
> 	PR c++/108243
> 	PR c++/97553
> 
> gcc/cp/ChangeLog:
> 
> 	* cp-gimplify.cc (enum fold_flags): Define.
> 	(fold_flags_t): Declare.
> 	(cp_fold_data::genericize): Replace this data member with ...
> 	(cp_fold_data::fold_flags): ... this.
> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> 	(cp_fold_function): Likewise.
> 	(cp_fold_maybe_rvalue): Add a static overload that takes
> 	and propagates a fold_flags_t parameter, and define the existing
> 	public overload in terms of it.
> 	(cp_fold_rvalue): Likewise.
> 	(cp_fully_fold_init): Adjust use of cp_fold_data.
> 	(fold_cache): Replace with ...
> 	(fold_caches): ... this 2-element array of caches.
> 	(get_fold_cache): Define.
> 	(clear_fold_cache): Adjust.
> 	(cp_fold): Add fold_flags_t parameter.  Call get_fold_cache.
> 	Pass flags to cp_fold, cp_fold_rvalue and cp_fold_maybe_rvalue.
> 	<case CALL_EXPR>: If ff_mce_false is set, fold
> 	__builtin_is_constant_evaluated to false and pass mce_false to
> 	maybe_constant_value.
> 
> gcc/testsuite/ChangeLog:
> 
> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> ---
>   gcc/cp/cp-gimplify.cc                         | 139 ++++++++++++------
>   .../g++.dg/opt/is_constant_evaluated1.C       |  15 ++
>   .../g++.dg/opt/is_constant_evaluated2.C       |  32 ++++
>   3 files changed, 144 insertions(+), 42 deletions(-)
>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index 9929d29981a..edece6b7a8a 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>   #include "omp-general.h"
>   #include "opts.h"
>   
> +/* Flags for cp_fold and cp_fold_r.  */
> +
> +enum fold_flags {
> +  ff_none = 0,
> +  /* Whether we're being called from cp_fold_function.  */
> +  ff_genericize = 1 << 0,
> +  /* Whether we're folding a point where we know we're
> +     definitely not in a manifestly constant-evaluated
> +     context.  */
> +  ff_mce_false = 1 << 1,
> +};
> +
> +using fold_flags_t = int;
> +
>   /* Forward declarations.  */
>   
>   static tree cp_genericize_r (tree *, int *, void *);
>   static tree cp_fold_r (tree *, int *, void *);
>   static void cp_genericize_tree (tree*, bool);
> -static tree cp_fold (tree);
> +static tree cp_fold (tree, fold_flags_t);
>   
>   /* Genericize a TRY_BLOCK.  */
>   
> @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>   struct cp_fold_data
>   {
>     hash_set<tree> pset;
> -  bool genericize; // called from cp_fold_function?
> -
> -  cp_fold_data (bool g): genericize (g) {}
> +  fold_flags_t flags;
> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>   };
>   
>   static tree
> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>         break;
>       }
>   
> -  *stmt_p = stmt = cp_fold (*stmt_p);
> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>   
>     if (data->pset.add (stmt))
>       {
> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   	 here rather than in cp_genericize to avoid problems with the invisible
>   	 reference transition.  */
>       case INIT_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>   	cp_genericize_init_expr (stmt_p);
>         break;
>   
>       case TARGET_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>   	cp_genericize_target_expr (stmt_p);
>   
>         /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> @@ -1173,7 +1186,10 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   void
>   cp_fold_function (tree fndecl)
>   {
> -  cp_fold_data data (/*genericize*/true);
> +  /* By now all manifestly-constant-evaluated expressions will have
> +     been constant-evaluated already if possible, so we can safely
> +     pass ff_mce_false.  */
> +  cp_fold_data data (ff_genericize | ff_mce_false);
>     cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>   }
>   
> @@ -2386,12 +2402,12 @@ cxx_omp_disregard_value_expr (tree decl, bool shared)
>   
>   /* Fold expression X which is used as an rvalue if RVAL is true.  */
>   
> -tree
> -cp_fold_maybe_rvalue (tree x, bool rval)
> +static tree
> +cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags)
>   {
>     while (true)
>       {
> -      x = cp_fold (x);
> +      x = cp_fold (x, flags);
>         if (rval)
>   	x = mark_rvalue_use (x);
>         if (rval && DECL_P (x)
> @@ -2409,12 +2425,24 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>     return x;
>   }
>   
> +tree
> +cp_fold_maybe_rvalue (tree x, bool rval)
> +{
> +  return cp_fold_maybe_rvalue (x, rval, ff_none);
> +}
> +
>   /* Fold expression X which is used as an rvalue.  */
>   
> +static tree
> +cp_fold_rvalue (tree x, fold_flags_t flags)
> +{
> +  return cp_fold_maybe_rvalue (x, true, flags);
> +}
> +
>   tree
>   cp_fold_rvalue (tree x)
>   {
> -  return cp_fold_maybe_rvalue (x, true);
> +  return cp_fold_rvalue (x, ff_none);
>   }
>   
>   /* Perform folding on expression X.  */
> @@ -2450,7 +2478,7 @@ cp_fully_fold_init (tree x)
>     if (processing_template_decl)
>       return x;
>     x = cp_fully_fold (x);
> -  cp_fold_data data (/*genericize*/false);
> +  cp_fold_data data (ff_mce_false);
>     cp_walk_tree (&x, cp_fold_r, &data, NULL);
>     return x;
>   }
> @@ -2466,15 +2494,29 @@ c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
>     return cp_fold_maybe_rvalue (x, !lval);
>   }
>   
> -static GTY((deletable)) hash_map<tree, tree> *fold_cache;
> +static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
> +
> +/* Subroutine of cp_fold.  Returns which fold cache to use according
> +   to the given flags.  We need multiple caches since the result of
> +   folding may depend on which flags are used.  */
> +
> +static hash_map<tree, tree> *&
> +get_fold_cache (fold_flags_t flags)
> +{
> +  if (flags & ff_mce_false)
> +    return fold_caches[1];
> +  else
> +    return fold_caches[0];
> +}
>   
>   /* Dispose of the whole FOLD_CACHE.  */
>   
>   void
>   clear_fold_cache (void)
>   {
> -  if (fold_cache != NULL)
> -    fold_cache->empty ();
> +  for (auto& fold_cache : fold_caches)
> +    if (fold_cache != NULL)
> +      fold_cache->empty ();
>   }
>   
>   /*  This function tries to fold an expression X.
> @@ -2485,7 +2527,7 @@ clear_fold_cache (void)
>       Function returns X or its folded variant.  */
>   
>   static tree
> -cp_fold (tree x)
> +cp_fold (tree x, fold_flags_t flags)
>   {
>     tree op0, op1, op2, op3;
>     tree org_x = x, r = NULL_TREE;
> @@ -2503,6 +2545,7 @@ cp_fold (tree x)
>     if (DECL_P (x) || CONSTANT_CLASS_P (x))
>       return x;
>   
> +  auto& fold_cache = get_fold_cache (flags);
>     if (fold_cache == NULL)
>       fold_cache = hash_map<tree, tree>::create_ggc (101);
>   
> @@ -2517,7 +2560,7 @@ cp_fold (tree x)
>       case CLEANUP_POINT_EXPR:
>         /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
>   	 effects.  */
> -      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
> +      r = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
>         if (!TREE_SIDE_EFFECTS (r))
>   	x = r;
>         break;
> @@ -2542,14 +2585,14 @@ cp_fold (tree x)
>   	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>   	     folding of the operand should be in the caches and if in cp_fold_r
>   	     it will modify it in place.  */
> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>   	  if (op0 == error_mark_node)
>   	    x = error_mark_node;
>   	  break;
>   	}
>   
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
> +      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
>   
>         if (code == CONVERT_EXPR
>   	  && SCALAR_TYPE_P (TREE_TYPE (x))
> @@ -2577,7 +2620,7 @@ cp_fold (tree x)
>         break;
>   
>       case EXCESS_PRECISION_EXPR:
> -      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
> +      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
>         x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
>         break;
>   
> @@ -2587,13 +2630,13 @@ cp_fold (tree x)
>   	{
>   	  tree p = maybe_undo_parenthesized_ref (x);
>   	  if (p != x)
> -	    return cp_fold (p);
> +	    return cp_fold (p, flags);
>   	}
>         goto unary;
>   
>       case ADDR_EXPR:
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
> +      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false, flags);
>   
>         /* Cope with user tricks that amount to offsetof.  */
>         if (op0 != error_mark_node
> @@ -2630,7 +2673,7 @@ cp_fold (tree x)
>       unary:
>   
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
> +      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
>   
>       finish_unary:
>         if (op0 != TREE_OPERAND (x, 0))
> @@ -2657,7 +2700,7 @@ cp_fold (tree x)
>         break;
>   
>       case UNARY_PLUS_EXPR:
> -      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> +      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
>         if (op0 == error_mark_node)
>   	x = error_mark_node;
>         else
> @@ -2711,8 +2754,8 @@ cp_fold (tree x)
>       case RANGE_EXPR: case COMPLEX_EXPR:
>   
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
> -      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
> +      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
> +      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1), flags);
>   
>         /* decltype(nullptr) has only one value, so optimize away all comparisons
>   	 with that type right away, keeping them in the IL causes troubles for
> @@ -2778,9 +2821,9 @@ cp_fold (tree x)
>       case VEC_COND_EXPR:
>       case COND_EXPR:
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> +      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>   
>         if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>   	{
> @@ -2870,7 +2913,7 @@ cp_fold (tree x)
>   	      {
>   		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>   		  r = build_nop (TREE_TYPE (x), r);
> -		x = cp_fold (r);
> +		x = cp_fold (r, flags);
>   		break;
>   	      }
>   	  }
> @@ -2890,8 +2933,12 @@ cp_fold (tree x)
>   	  {
>   	    switch (DECL_FE_FUNCTION_CODE (callee))
>   	      {
> -		/* Defer folding __builtin_is_constant_evaluated.  */
>   	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> +		/* Defer folding __builtin_is_constant_evaluated unless
> +		   we know this isn't a manifestly constant-evaluated
> +		   context.  */
> +		if (flags & ff_mce_false)
> +		  x = boolean_false_node;
>   		break;
>   	      case CP_BUILT_IN_SOURCE_LOCATION:
>   		x = fold_builtin_source_location (x);
> @@ -2924,7 +2971,7 @@ cp_fold (tree x)
>   	int m = call_expr_nargs (x);
>   	for (int i = 0; i < m; i++)
>   	  {
> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>   	    if (r != CALL_EXPR_ARG (x, i))
>   	      {
>   		if (r == error_mark_node)
> @@ -2947,7 +2994,7 @@ cp_fold (tree x)
>   
>   	if (TREE_CODE (r) != CALL_EXPR)
>   	  {
> -	    x = cp_fold (r);
> +	    x = cp_fold (r, flags);
>   	    break;
>   	  }
>   
> @@ -2960,7 +3007,15 @@ cp_fold (tree x)
>   	   constant, but the call followed by an INDIRECT_REF is.  */
>   	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>   	    && !flag_no_inline)
> -	  r = maybe_constant_value (x);
> +	  {
> +	    mce_value manifestly_const_eval = mce_unknown;
> +	    if (flags & ff_mce_false)
> +	      /* Allow folding __builtin_is_constant_evaluated to false during
> +		 constexpr evaluation of this call.  */
> +	      manifestly_const_eval = mce_false;
> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> +				      manifestly_const_eval);
> +	  }
>   	optimize = sv;
>   
>           if (TREE_CODE (r) != CALL_EXPR)
> @@ -2987,7 +3042,7 @@ cp_fold (tree x)
>   	vec<constructor_elt, va_gc> *nelts = NULL;
>   	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>   	  {
> -	    tree op = cp_fold (p->value);
> +	    tree op = cp_fold (p->value, flags);
>   	    if (op != p->value)
>   	      {
>   		if (op == error_mark_node)
> @@ -3018,7 +3073,7 @@ cp_fold (tree x)
>   
>   	for (int i = 0; i < n; i++)
>   	  {
> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>   	    if (op != TREE_VEC_ELT (x, i))
>   	      {
>   		if (!changed)
> @@ -3035,10 +3090,10 @@ cp_fold (tree x)
>       case ARRAY_RANGE_REF:
>   
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> -      op3 = cp_fold (TREE_OPERAND (x, 3));
> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>   
>         if (op0 != TREE_OPERAND (x, 0)
>   	  || op1 != TREE_OPERAND (x, 1)
> @@ -3066,7 +3121,7 @@ cp_fold (tree x)
>         /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
>   	 folding, evaluates to an invariant.  In that case no need to wrap
>   	 this folded tree with a SAVE_EXPR.  */
> -      r = cp_fold (TREE_OPERAND (x, 0));
> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>         if (tree_invariant_p (r))
>   	x = r;
>         break;
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> new file mode 100644
> index 00000000000..983410b9e83
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> @@ -0,0 +1,15 @@
> +// PR c++/108243
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +struct A {
> +  constexpr A(int n, int m) : n(n), m(m) { }
> +  int n, m;
> +};
> +
> +A* f(int n) {
> +  static A a = {n, __builtin_is_constant_evaluated()};
> +  return &a;
> +}
> +
> +// { dg-final { scan-tree-dump-not "is_constant_evaluated" "original" } }
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> new file mode 100644
> index 00000000000..ed964e20a7a
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> @@ -0,0 +1,32 @@
> +// PR c++/97553
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +constexpr int foo() {
> +  return __builtin_is_constant_evaluated() + 1;
> +}
> +
> +#if __cpp_if_consteval
> +constexpr int bar() {
> +  if consteval {
> +    return 5;
> +  } else {
> +    return 4;
> +  }
> +}
> +#endif
> +
> +int p, q;
> +
> +int main() {
> +  p = foo();
> +#if __cpp_if_consteval
> +  q = bar();
> +#endif
> +}
> +
> +// { dg-final { scan-tree-dump "p = 1" "original" } }
> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> +
> +// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
> +// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
  

Patch

diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
index a35cedd05cc..d023a63768f 100644
--- a/gcc/cp/cp-gimplify.cc
+++ b/gcc/cp/cp-gimplify.cc
@@ -43,12 +43,20 @@  along with GCC; see the file COPYING3.  If not see
 #include "omp-general.h"
 #include "opts.h"
 
+/* Flags for cp_fold and cp_fold_r.  */
+
+enum fold_flags {
+  ff_none = 0,
+  /* Whether we're being called from cp_fold_function.  */
+  ff_genericize = 1 << 0,
+};
+
 /* Forward declarations.  */
 
 static tree cp_genericize_r (tree *, int *, void *);
 static tree cp_fold_r (tree *, int *, void *);
 static void cp_genericize_tree (tree*, bool);
-static tree cp_fold (tree);
+static tree cp_fold (tree, fold_flags);
 
 /* Genericize a TRY_BLOCK.  */
 
@@ -996,9 +1004,8 @@  struct cp_genericize_data
 struct cp_fold_data
 {
   hash_set<tree> pset;
-  bool genericize; // called from cp_fold_function?
-
-  cp_fold_data (bool g): genericize (g) {}
+  fold_flags flags;
+  cp_fold_data (fold_flags flags): flags (flags) {}
 };
 
 static tree
@@ -1039,7 +1046,7 @@  cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
       break;
     }
 
-  *stmt_p = stmt = cp_fold (*stmt_p);
+  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
 
   if (data->pset.add (stmt))
     {
@@ -1119,12 +1126,12 @@  cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 	 here rather than in cp_genericize to avoid problems with the invisible
 	 reference transition.  */
     case INIT_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_init_expr (stmt_p);
       break;
 
     case TARGET_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_target_expr (stmt_p);
 
       /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
@@ -1157,7 +1164,7 @@  cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 void
 cp_fold_function (tree fndecl)
 {
-  cp_fold_data data (/*genericize*/true);
+  cp_fold_data data (ff_genericize);
   cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
 }
 
@@ -2375,7 +2382,7 @@  cp_fold_maybe_rvalue (tree x, bool rval)
 {
   while (true)
     {
-      x = cp_fold (x);
+      x = cp_fold (x, ff_none);
       if (rval)
 	x = mark_rvalue_use (x);
       if (rval && DECL_P (x)
@@ -2434,7 +2441,7 @@  cp_fully_fold_init (tree x)
   if (processing_template_decl)
     return x;
   x = cp_fully_fold (x);
-  cp_fold_data data (/*genericize*/false);
+  cp_fold_data data (ff_none);
   cp_walk_tree (&x, cp_fold_r, &data, NULL);
   return x;
 }
@@ -2469,7 +2476,7 @@  clear_fold_cache (void)
     Function returns X or its folded variant.  */
 
 static tree
-cp_fold (tree x)
+cp_fold (tree x, fold_flags flags)
 {
   tree op0, op1, op2, op3;
   tree org_x = x, r = NULL_TREE;
@@ -2490,8 +2497,11 @@  cp_fold (tree x)
   if (fold_cache == NULL)
     fold_cache = hash_map<tree, tree>::create_ggc (101);
 
-  if (tree *cached = fold_cache->get (x))
-    return *cached;
+  bool cache_p = (flags == ff_none);
+
+  if (cache_p)
+    if (tree *cached = fold_cache->get (x))
+      return *cached;
 
   uid_sensitive_constexpr_evaluation_checker c;
 
@@ -2526,7 +2536,7 @@  cp_fold (tree x)
 	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
 	     folding of the operand should be in the caches and if in cp_fold_r
 	     it will modify it in place.  */
-	  op0 = cp_fold (TREE_OPERAND (x, 0));
+	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
 	  if (op0 == error_mark_node)
 	    x = error_mark_node;
 	  break;
@@ -2571,7 +2581,7 @@  cp_fold (tree x)
 	{
 	  tree p = maybe_undo_parenthesized_ref (x);
 	  if (p != x)
-	    return cp_fold (p);
+	    return cp_fold (p, flags);
 	}
       goto unary;
 
@@ -2763,8 +2773,8 @@  cp_fold (tree x)
     case COND_EXPR:
       loc = EXPR_LOCATION (x);
       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
 
       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
 	{
@@ -2854,7 +2864,7 @@  cp_fold (tree x)
 	      {
 		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
 		  r = build_nop (TREE_TYPE (x), r);
-		x = cp_fold (r);
+		x = cp_fold (r, flags);
 		break;
 	      }
 	  }
@@ -2908,7 +2918,7 @@  cp_fold (tree x)
 	int m = call_expr_nargs (x);
 	for (int i = 0; i < m; i++)
 	  {
-	    r = cp_fold (CALL_EXPR_ARG (x, i));
+	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
 	    if (r != CALL_EXPR_ARG (x, i))
 	      {
 		if (r == error_mark_node)
@@ -2931,7 +2941,7 @@  cp_fold (tree x)
 
 	if (TREE_CODE (r) != CALL_EXPR)
 	  {
-	    x = cp_fold (r);
+	    x = cp_fold (r, flags);
 	    break;
 	  }
 
@@ -2944,7 +2954,15 @@  cp_fold (tree x)
 	   constant, but the call followed by an INDIRECT_REF is.  */
 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
 	    && !flag_no_inline)
-	  r = maybe_constant_value (x);
+	  {
+	    mce_value manifestly_const_eval = mce_unknown;
+	    if (flags & ff_genericize)
+	      /* At genericization time it's safe to fold
+		 __builtin_is_constant_evaluated to false.  */
+	      manifestly_const_eval = mce_false;
+	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
+				      manifestly_const_eval);
+	  }
 	optimize = sv;
 
         if (TREE_CODE (r) != CALL_EXPR)
@@ -2971,7 +2989,7 @@  cp_fold (tree x)
 	vec<constructor_elt, va_gc> *nelts = NULL;
 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
 	  {
-	    tree op = cp_fold (p->value);
+	    tree op = cp_fold (p->value, flags);
 	    if (op != p->value)
 	      {
 		if (op == error_mark_node)
@@ -3002,7 +3020,7 @@  cp_fold (tree x)
 
 	for (int i = 0; i < n; i++)
 	  {
-	    tree op = cp_fold (TREE_VEC_ELT (x, i));
+	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
 	    if (op != TREE_VEC_ELT (x, i))
 	      {
 		if (!changed)
@@ -3019,10 +3037,10 @@  cp_fold (tree x)
     case ARRAY_RANGE_REF:
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
-      op3 = cp_fold (TREE_OPERAND (x, 3));
+      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
+      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
 
       if (op0 != TREE_OPERAND (x, 0)
 	  || op1 != TREE_OPERAND (x, 1)
@@ -3050,7 +3068,7 @@  cp_fold (tree x)
       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
 	 folding, evaluates to an invariant.  In that case no need to wrap
 	 this folded tree with a SAVE_EXPR.  */
-      r = cp_fold (TREE_OPERAND (x, 0));
+      r = cp_fold (TREE_OPERAND (x, 0), flags);
       if (tree_invariant_p (r))
 	x = r;
       break;
@@ -3069,7 +3087,7 @@  cp_fold (tree x)
       copy_warning (x, org_x);
     }
 
-  if (!c.evaluation_restricted_p ())
+  if (cache_p && !c.evaluation_restricted_p ())
     {
       fold_cache->put (org_x, x);
       /* Prevent that we try to fold an already folded result again.  */
diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C b/gcc/testsuite/g++.dg/opt/pr108243.C
new file mode 100644
index 00000000000..4c45dbba13c
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/pr108243.C
@@ -0,0 +1,29 @@ 
+// PR c++/108243
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+constexpr int foo() {
+  return __builtin_is_constant_evaluated() + 1;
+}
+
+#if __cpp_if_consteval
+constexpr int bar() {
+  if consteval {
+    return 5;
+  } else {
+    return 4;
+  }
+}
+#endif
+
+int p, q;
+
+int main() {
+  p = foo();
+#if __cpp_if_consteval
+  q = bar();
+#endif
+}
+
+// { dg-final { scan-tree-dump-not "= foo" "original" } }
+// { dg-final { scan-tree-dump-not "= bar" "original" } }
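
For contrast, a case the patch must not change (a sketch, not part of the
testcase): in a manifestly constant-evaluated context the builtin still
folds to true:

  constexpr int foo() {
    return __builtin_is_constant_evaluated() + 1;
  }

  constexpr int c = foo();  // manifestly constant-evaluated: c == 2

  int g() {
    return foo();           // not manifestly constant-evaluated; with the
                            // patch this may be speculatively folded to 1
                            // at -O, as the "p = 1" scan in
                            // is_constant_evaluated2.C above verifies
  }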