[pushed] aarch64: Commonise some folding code
Checks
Commit Message
Add an aarch64_sve::gimple_folder helper for folding calls
to integer constants. SME will make more use of this.
Tested on aarch64-linux-gnu & pushed.
Richard
gcc/
* config/aarch64/aarch64-sve-builtins.h
(gimple_folder::fold_to_cstu): New member function.
* config/aarch64/aarch64-sve-builtins.cc
(gimple_folder::fold_to_cstu): Define.
* config/aarch64/aarch64-sve-builtins-base.cc
(svcnt_bhwd_impl::fold): Use it.
---
gcc/config/aarch64/aarch64-sve-builtins-base.cc | 9 ++-------
gcc/config/aarch64/aarch64-sve-builtins.cc | 7 +++++++
gcc/config/aarch64/aarch64-sve-builtins.h | 1 +
3 files changed, 10 insertions(+), 7 deletions(-)
@@ -517,9 +517,7 @@ public:
gimple *
fold (gimple_folder &f) const override
{
- tree count = build_int_cstu (TREE_TYPE (f.lhs),
- GET_MODE_NUNITS (m_ref_mode));
- return gimple_build_assign (f.lhs, count);
+ return f.fold_to_cstu (GET_MODE_NUNITS (m_ref_mode));
}
rtx
@@ -553,10 +551,7 @@ public:
unsigned int elements_per_vq = 128 / GET_MODE_UNIT_BITSIZE (m_ref_mode);
HOST_WIDE_INT value = aarch64_fold_sve_cnt_pat (pattern, elements_per_vq);
if (value >= 0)
- {
- tree count = build_int_cstu (TREE_TYPE (f.lhs), value);
- return gimple_build_assign (f.lhs, count);
- }
+ return f.fold_to_cstu (value);
return NULL;
}
@@ -2615,6 +2615,13 @@ gimple_folder::redirect_call (const function_instance &instance)
return call;
}
+/* Fold the call to the integer constant VAL. */
+gimple *
+gimple_folder::fold_to_cstu (poly_uint64 val)
+{
+ return gimple_build_assign (lhs, build_int_cstu (TREE_TYPE (lhs), val));
+}
+
/* Fold the call to a PTRUE, taking the element size from type suffix 0. */
gimple *
gimple_folder::fold_to_ptrue ()
@@ -500,6 +500,7 @@ public:
tree load_store_cookie (tree);
gimple *redirect_call (const function_instance &);
+ gimple *fold_to_cstu (poly_uint64);
gimple *fold_to_pfalse ();
gimple *fold_to_ptrue ();
gimple *fold_to_vl_pred (unsigned int);