commit 7a25d85f91d84e53e707bb36d052f8196e49e147
Author: Stam Markianos-Wright <stam.markianos-wright@arm.com>
Date: Tue Oct 18 17:42:56 2022 +0100
arm: Add define_attr to create a mapping between MVE predicated and unpredicated insns
I'd like to submit two patches that add support for Arm's MVE
Tail Predicated Low Overhead Loop feature.
--- Introduction ---
The M-class Arm-ARM:
https://developer.arm.com/documentation/ddi0553/bu/?lang=en
Section B5.5.1 "Loop tail predication" describes the feature
we are adding support for with this patch (although
we only add codegen for DLSTP/LETP instruction loops).
Previously, commit d2ed233cb94 added support for non-MVE DLS/LE
loops through the loop-doloop pass, which, given a standard MVE
loop like:
```
void __attribute__ ((noinline)) test (int16_t *a, int16_t *b, int16_t *c, int n)
{
while (n > 0)
{
mve_pred16_t p = vctp16q (n);
int16x8_t va = vldrhq_z_s16 (a, p);
int16x8_t vb = vldrhq_z_s16 (b, p);
int16x8_t vc = vaddq_x_s16 (va, vb, p);
vstrhq_p_s16 (c, vc, p);
c+=8;
a+=8;
b+=8;
n-=8;
}
}
```
... would output:
```
<pre-calculate the number of iterations and place it into lr>
dls lr, lr
.L3:
vctp.16 r3
vmrs ip, P0 @ movhi
sxth ip, ip
vmsr P0, ip @ movhi
mov r4, r0
vpst
vldrht.16 q2, [r4]
mov r4, r1
vmov q3, q0
vpst
vldrht.16 q1, [r4]
mov r4, r2
vpst
vaddt.i16 q3, q2, q1
subs r3, r3, #8
vpst
vstrht.16 q3, [r4]
adds r0, r0, #16
adds r1, r1, #16
adds r2, r2, #16
le lr, .L3
```
where the LE instruction will decrement LR by 1, compare and
branch if needed.
(there are also other inefficiencies in the above code, like the
pointless vmrs/sxth/vmsr round-trip on the VPR, the adds not being
merged into the vldrht/vstrht as #16 offsets, and some random movs!
But those are separate problems...)
The MVE version is similar, except that:
* Instead of DLS/LE the instructions are DLSTP/LETP.
* Instead of pre-calculating the number of iterations of the
loop, we place the number of elements to be processed by the
loop into LR.
* Instead of decrementing LR by one, LETP will decrement it
by FPSCR.LTPSIZE, which is the number of elements being
processed in each iteration: 16 for 8-bit elements, 8 for 16-bit
elements, etc.
* On the final iteration, automatic Loop Tail Predication is
performed, as if the instructions within the loop had been VPT
predicated with a VCTP generating the VPR predicate in every
loop iteration.
The dlstp/letp loop now looks like:
```
<place n into r3>
dlstp.16 lr, r3
.L14:
mov r3, r0
vldrh.16 q3, [r3]
mov r3, r1
vldrh.16 q2, [r3]
mov r3, r2
vadd.i16 q3, q3, q2
adds r0, r0, #16
vstrh.16 q3, [r3]
adds r1, r1, #16
adds r2, r2, #16
letp lr, .L14
```
Since the loop tail predication is automatic, we have eliminated
the VCTP that had been specified by the user in the intrinsic
and converted the VPT-predicated instructions into their
unpredicated equivalents (which also saves us from VPST insns).
The LETP instruction here decrements LR by 8 in each iteration.
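To make the counting behaviour concrete, here is a rough scalar sketch
(illustrative only, not generated code) of how LR evolves in the loop
above, given 16-bit elements and therefore 8 lanes per iteration:
```
/* Illustrative sketch of the dlstp.16/letp counting behaviour; this is
   not generated code.  */
for (int lr = n; lr > 0; lr -= 8)   /* letp subtracts FPSCR.LTPSIZE (8) */
  {
    int active = lr < 8 ? lr : 8;   /* final iteration is tail-predicated */
    /* vldrh/vadd/vstrh operate on the first 'active' lanes only.  */
  }
```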
--- This 1/2 patch ---
This first patch lays some groundwork by adding an attribute to
md patterns, and then the second patch contains the functional
changes.
One major difficulty in implementing MVE Tail-Predicated Low
Overhead Loops was the need to transform VPT-predicated insns
in the insn chain into their unpredicated equivalents, like:
`mve_vldrbq_z_<supf><mode> -> mve_vldrbq_<supf><mode>`.
This requires us to have a deterministic link between two
different patterns in mve.md -- this _could_ be done by
re-ordering the entirety of mve.md such that the patterns are
at some constant icode proximity (e.g. having the _z immediately
after the unpredicated version would mean that to map from the
former to the latter you could use icode-1), but that is a very
messy solution that would introduce fragile, hidden dependencies
on the ordering of patterns.
This patch provides an alternative way of doing that: using an insn
attribute to encode the icode of the unpredicated instruction.
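As a rough illustration of how the attribute is meant to be consumed
(the helper name below is made up for this cover letter; the real uses
are in patch 2/2):
```
/* Sketch only: look up the unpredicated twin of a VPT-predicated insn
   through the new attribute.  'arm_mve_unpredicated_icode' is a
   hypothetical helper, not part of this patch.  */
static int
arm_mve_unpredicated_icode (rtx_insn *insn)
{
  /* MVE_VPT_PREDICATED_INSN_P calls recog_memoized, so it may clobber
     recog_data if INSN is not the current insn.  */
  gcc_assert (MVE_VPT_PREDICATED_INSN_P (insn));
  return get_attr_mve_unpredicated_insn (insn);
}
```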
No regressions on arm-none-eabi with an MVE target.
Thank you,
Stam Markianos-Wright
gcc/ChangeLog:
* config/arm/arm.md (mve_unpredicated_insn): New attribute.
* config/arm/arm.h (MVE_VPT_PREDICATED_INSN_P): New define.
(MVE_VPT_UNPREDICATED_INSN_P): Likewise.
(MVE_VPT_PREDICABLE_INSN_P): Likewise.
* config/arm/vec-common.md (mve_vshlq_<supf><mode>): Add attribute.
* config/arm/mve.md (arm_vcx1q<a>_p_v16qi): Add attribute.
(arm_vcx1q<a>v16qi): Likewise.
(arm_vcx1qav16qi): Likewise.
(arm_vcx1qv16qi): Likewise.
(arm_vcx2q<a>_p_v16qi): Likewise.
(arm_vcx2q<a>v16qi): Likewise.
(arm_vcx2qav16qi): Likewise.
(arm_vcx2qv16qi): Likewise.
(arm_vcx3q<a>_p_v16qi): Likewise.
(arm_vcx3q<a>v16qi): Likewise.
(arm_vcx3qav16qi): Likewise.
(arm_vcx3qv16qi): Likewise.
(mve_vabavq_<supf><mode>): Likewise.
(mve_vabavq_p_<supf><mode>): Likewise.
(mve_vabdq_<supf><mode>): Likewise.
(mve_vabdq_f<mode>): Likewise.
(mve_vabdq_m_<supf><mode>): Likewise.
(mve_vabdq_m_f<mode>): Likewise.
(mve_vabsq_f<mode>): Likewise.
(mve_vabsq_m_f<mode>): Likewise.
(mve_vabsq_m_s<mode>): Likewise.
(mve_vabsq_s<mode>): Likewise.
(mve_vadciq_<supf>v4si): Likewise.
(mve_vadciq_m_<supf>v4si): Likewise.
(mve_vadcq_<supf>v4si): Likewise.
(mve_vadcq_m_<supf>v4si): Likewise.
(mve_vaddlvaq_<supf>v4si): Likewise.
(mve_vaddlvaq_p_<supf>v4si): Likewise.
(mve_vaddlvq_<supf>v4si): Likewise.
(mve_vaddlvq_p_<supf>v4si): Likewise.
(mve_vaddq_f<mode>): Likewise.
(mve_vaddq_m_<supf><mode>): Likewise.
(mve_vaddq_m_f<mode>): Likewise.
(mve_vaddq_m_n_<supf><mode>): Likewise.
(mve_vaddq_m_n_f<mode>): Likewise.
(mve_vaddq_n_<supf><mode>): Likewise.
(mve_vaddq_n_f<mode>): Likewise.
(mve_vaddq<mode>): Likewise.
(mve_vaddvaq_<supf><mode>): Likewise.
(mve_vaddvaq_p_<supf><mode>): Likewise.
(mve_vaddvq_<supf><mode>): Likewise.
(mve_vaddvq_p_<supf><mode>): Likewise.
(mve_vandq_<supf><mode>): Likewise.
(mve_vandq_f<mode>): Likewise.
(mve_vandq_m_<supf><mode>): Likewise.
(mve_vandq_m_f<mode>): Likewise.
(mve_vandq_s<mode>): Likewise.
(mve_vandq_u<mode>): Likewise.
(mve_vbicq_<supf><mode>): Likewise.
(mve_vbicq_f<mode>): Likewise.
(mve_vbicq_m_<supf><mode>): Likewise.
(mve_vbicq_m_f<mode>): Likewise.
(mve_vbicq_m_n_<supf><mode>): Likewise.
(mve_vbicq_n_<supf><mode>): Likewise.
(mve_vbicq_s<mode>): Likewise.
(mve_vbicq_u<mode>): Likewise.
(mve_vbrsrq_m_n_<supf><mode>): Likewise.
(mve_vbrsrq_m_n_f<mode>): Likewise.
(mve_vbrsrq_n_<supf><mode>): Likewise.
(mve_vbrsrq_n_f<mode>): Likewise.
(mve_vcaddq_rot270_m_<supf><mode>): Likewise.
(mve_vcaddq_rot270_m_f<mode>): Likewise.
(mve_vcaddq_rot270<mode>): Likewise.
(mve_vcaddq_rot270<mode>): Likewise.
(mve_vcaddq_rot90_m_<supf><mode>): Likewise.
(mve_vcaddq_rot90_m_f<mode>): Likewise.
(mve_vcaddq_rot90<mode>): Likewise.
(mve_vcaddq_rot90<mode>): Likewise.
(mve_vcaddq<mve_rot><mode>): Likewise.
(mve_vcaddq<mve_rot><mode>): Likewise.
(mve_vclsq_m_s<mode>): Likewise.
(mve_vclsq_s<mode>): Likewise.
(mve_vclzq_<supf><mode>): Likewise.
(mve_vclzq_m_<supf><mode>): Likewise.
(mve_vclzq_s<mode>): Likewise.
(mve_vclzq_u<mode>): Likewise.
(mve_vcmlaq_m_f<mode>): Likewise.
(mve_vcmlaq_rot180_m_f<mode>): Likewise.
(mve_vcmlaq_rot180<mode>): Likewise.
(mve_vcmlaq_rot270_m_f<mode>): Likewise.
(mve_vcmlaq_rot270<mode>): Likewise.
(mve_vcmlaq_rot90_m_f<mode>): Likewise.
(mve_vcmlaq_rot90<mode>): Likewise.
(mve_vcmlaq<mode>): Likewise.
(mve_vcmlaq<mve_rot><mode>): Likewise.
(mve_vcmp<mve_cmp_op>q_<mode>): Likewise.
(mve_vcmp<mve_cmp_op>q_f<mode>): Likewise.
(mve_vcmp<mve_cmp_op>q_n_<mode>): Likewise.
(mve_vcmp<mve_cmp_op>q_n_f<mode>): Likewise.
(mve_vcmpcsq_<mode>): Likewise.
(mve_vcmpcsq_m_n_u<mode>): Likewise.
(mve_vcmpcsq_m_u<mode>): Likewise.
(mve_vcmpcsq_n_<mode>): Likewise.
(mve_vcmpeqq_<mode>): Likewise.
(mve_vcmpeqq_f<mode>): Likewise.
(mve_vcmpeqq_m_<supf><mode>): Likewise.
(mve_vcmpeqq_m_f<mode>): Likewise.
(mve_vcmpeqq_m_n_<supf><mode>): Likewise.
(mve_vcmpeqq_m_n_f<mode>): Likewise.
(mve_vcmpeqq_n_<mode>): Likewise.
(mve_vcmpeqq_n_f<mode>): Likewise.
(mve_vcmpgeq_<mode>): Likewise.
(mve_vcmpgeq_f<mode>): Likewise.
(mve_vcmpgeq_m_f<mode>): Likewise.
(mve_vcmpgeq_m_n_f<mode>): Likewise.
(mve_vcmpgeq_m_n_s<mode>): Likewise.
(mve_vcmpgeq_m_s<mode>): Likewise.
(mve_vcmpgeq_n_<mode>): Likewise.
(mve_vcmpgeq_n_f<mode>): Likewise.
(mve_vcmpgtq_<mode>): Likewise.
(mve_vcmpgtq_f<mode>): Likewise.
(mve_vcmpgtq_m_f<mode>): Likewise.
(mve_vcmpgtq_m_n_f<mode>): Likewise.
(mve_vcmpgtq_m_n_s<mode>): Likewise.
(mve_vcmpgtq_m_s<mode>): Likewise.
(mve_vcmpgtq_n_<mode>): Likewise.
(mve_vcmpgtq_n_f<mode>): Likewise.
(mve_vcmphiq_<mode>): Likewise.
(mve_vcmphiq_m_n_u<mode>): Likewise.
(mve_vcmphiq_m_u<mode>): Likewise.
(mve_vcmphiq_n_<mode>): Likewise.
(mve_vcmpleq_<mode>): Likewise.
(mve_vcmpleq_f<mode>): Likewise.
(mve_vcmpleq_m_f<mode>): Likewise.
(mve_vcmpleq_m_n_f<mode>): Likewise.
(mve_vcmpleq_m_n_s<mode>): Likewise.
(mve_vcmpleq_m_s<mode>): Likewise.
(mve_vcmpleq_n_<mode>): Likewise.
(mve_vcmpleq_n_f<mode>): Likewise.
(mve_vcmpltq_<mode>): Likewise.
(mve_vcmpltq_f<mode>): Likewise.
(mve_vcmpltq_m_f<mode>): Likewise.
(mve_vcmpltq_m_n_f<mode>): Likewise.
(mve_vcmpltq_m_n_s<mode>): Likewise.
(mve_vcmpltq_m_s<mode>): Likewise.
(mve_vcmpltq_n_<mode>): Likewise.
(mve_vcmpltq_n_f<mode>): Likewise.
(mve_vcmpneq_<mode>): Likewise.
(mve_vcmpneq_f<mode>): Likewise.
(mve_vcmpneq_m_<supf><mode>): Likewise.
(mve_vcmpneq_m_f<mode>): Likewise.
(mve_vcmpneq_m_n_<supf><mode>): Likewise.
(mve_vcmpneq_m_n_f<mode>): Likewise.
(mve_vcmpneq_n_<mode>): Likewise.
(mve_vcmpneq_n_f<mode>): Likewise.
(mve_vcmulq_m_f<mode>): Likewise.
(mve_vcmulq_rot180_m_f<mode>): Likewise.
(mve_vcmulq_rot180<mode>): Likewise.
(mve_vcmulq_rot270_m_f<mode>): Likewise.
(mve_vcmulq_rot270<mode>): Likewise.
(mve_vcmulq_rot90_m_f<mode>): Likewise.
(mve_vcmulq_rot90<mode>): Likewise.
(mve_vcmulq<mode>): Likewise.
(mve_vcmulq<mve_rot><mode>): Likewise.
(mve_vctp<mode1>q_mhi): Likewise.
(mve_vctp<mode1>qhi): Likewise.
(mve_vcvtaq_<supf><mode>): Likewise.
(mve_vcvtaq_m_<supf><mode>): Likewise.
(mve_vcvtbq_f16_f32v8hf): Likewise.
(mve_vcvtbq_f32_f16v4sf): Likewise.
(mve_vcvtbq_m_f16_f32v8hf): Likewise.
(mve_vcvtbq_m_f32_f16v4sf): Likewise.
(mve_vcvtmq_<supf><mode>): Likewise.
(mve_vcvtmq_m_<supf><mode>): Likewise.
(mve_vcvtnq_<supf><mode>): Likewise.
(mve_vcvtnq_m_<supf><mode>): Likewise.
(mve_vcvtpq_<supf><mode>): Likewise.
(mve_vcvtpq_m_<supf><mode>): Likewise.
(mve_vcvtq_from_f_<supf><mode>): Likewise.
(mve_vcvtq_m_from_f_<supf><mode>): Likewise.
(mve_vcvtq_m_n_from_f_<supf><mode>): Likewise.
(mve_vcvtq_m_n_to_f_<supf><mode>): Likewise.
(mve_vcvtq_m_to_f_<supf><mode>): Likewise.
(mve_vcvtq_n_from_f_<supf><mode>): Likewise.
(mve_vcvtq_n_to_f_<supf><mode>): Likewise.
(mve_vcvtq_to_f_<supf><mode>): Likewise.
(mve_vcvttq_f16_f32v8hf): Likewise.
(mve_vcvttq_f32_f16v4sf): Likewise.
(mve_vcvttq_m_f16_f32v8hf): Likewise.
(mve_vcvttq_m_f32_f16v4sf): Likewise.
(mve_vddupq_m_wb_u<mode>_insn): Likewise.
(mve_vddupq_u<mode>_insn): Likewise.
(mve_vdupq_m_n_<supf><mode>): Likewise.
(mve_vdupq_m_n_f<mode>): Likewise.
(mve_vdupq_n_<supf><mode>): Likewise.
(mve_vdupq_n_f<mode>): Likewise.
(mve_vdwdupq_m_wb_u<mode>_insn): Likewise.
(mve_vdwdupq_wb_u<mode>_insn): Likewise.
(mve_veorq_<supf><mode>): Likewise.
(mve_veorq_f<mode>): Likewise.
(mve_veorq_m_<supf><mode>): Likewise.
(mve_veorq_m_f<mode>): Likewise.
(mve_veorq_s<mode>): Likewise.
(mve_veorq_u<mode>): Likewise.
(mve_vfmaq_f<mode>): Likewise.
(mve_vfmaq_m_f<mode>): Likewise.
(mve_vfmaq_m_n_f<mode>): Likewise.
(mve_vfmaq_n_f<mode>): Likewise.
(mve_vfmasq_m_n_f<mode>): Likewise.
(mve_vfmasq_n_f<mode>): Likewise.
(mve_vfmsq_f<mode>): Likewise.
(mve_vfmsq_m_f<mode>): Likewise.
(mve_vhaddq_<supf><mode>): Likewise.
(mve_vhaddq_m_<supf><mode>): Likewise.
(mve_vhaddq_m_n_<supf><mode>): Likewise.
(mve_vhaddq_n_<supf><mode>): Likewise.
(mve_vhcaddq_rot270_m_s<mode>): Likewise.
(mve_vhcaddq_rot270_s<mode>): Likewise.
(mve_vhcaddq_rot90_m_s<mode>): Likewise.
(mve_vhcaddq_rot90_s<mode>): Likewise.
(mve_vhsubq_<supf><mode>): Likewise.
(mve_vhsubq_m_<supf><mode>): Likewise.
(mve_vhsubq_m_n_<supf><mode>): Likewise.
(mve_vhsubq_n_<supf><mode>): Likewise.
(mve_vidupq_m_wb_u<mode>_insn): Likewise.
(mve_vidupq_u<mode>_insn): Likewise.
(mve_viwdupq_m_wb_u<mode>_insn): Likewise.
(mve_viwdupq_wb_u<mode>_insn): Likewise.
(mve_vldrbq_<supf><mode>): Likewise.
(mve_vldrbq_gather_offset_<supf><mode>): Likewise.
(mve_vldrbq_gather_offset_z_<supf><mode>): Likewise.
(mve_vldrbq_z_<supf><mode>): Likewise.
(mve_vldrdq_gather_base_<supf>v2di): Likewise.
(mve_vldrdq_gather_base_wb_<supf>v2di_insn): Likewise.
(mve_vldrdq_gather_base_wb_z_<supf>v2di_insn): Likewise.
(mve_vldrdq_gather_base_z_<supf>v2di): Likewise.
(mve_vldrdq_gather_offset_<supf>v2di): Likewise.
(mve_vldrdq_gather_offset_z_<supf>v2di): Likewise.
(mve_vldrdq_gather_shifted_offset_<supf>v2di): Likewise.
(mve_vldrdq_gather_shifted_offset_z_<supf>v2di): Likewise.
(mve_vldrhq_<supf><mode>): Likewise.
(mve_vldrhq_fv8hf): Likewise.
(mve_vldrhq_gather_offset_<supf><mode>): Likewise.
(mve_vldrhq_gather_offset_fv8hf): Likewise.
(mve_vldrhq_gather_offset_z_<supf><mode>): Likewise.
(mve_vldrhq_gather_offset_z_fv8hf): Likewise.
(mve_vldrhq_gather_shifted_offset_<supf><mode>): Likewise.
(mve_vldrhq_gather_shifted_offset_fv8hf): Likewise.
(mve_vldrhq_gather_shifted_offset_z_<supf><mode>): Likewise.
(mve_vldrhq_gather_shifted_offset_z_fv8hf): Likewise.
(mve_vldrhq_z_<supf><mode>): Likewise.
(mve_vldrhq_z_fv8hf): Likewise.
(mve_vldrwq_<supf>v4si): Likewise.
(mve_vldrwq_fv4sf): Likewise.
(mve_vldrwq_gather_base_<supf>v4si): Likewise.
(mve_vldrwq_gather_base_fv4sf): Likewise.
(mve_vldrwq_gather_base_wb_<supf>v4si_insn): Likewise.
(mve_vldrwq_gather_base_wb_fv4sf_insn): Likewise.
(mve_vldrwq_gather_base_wb_z_<supf>v4si_insn): Likewise.
(mve_vldrwq_gather_base_wb_z_fv4sf_insn): Likewise.
(mve_vldrwq_gather_base_z_<supf>v4si): Likewise.
(mve_vldrwq_gather_base_z_fv4sf): Likewise.
(mve_vldrwq_gather_offset_<supf>v4si): Likewise.
(mve_vldrwq_gather_offset_fv4sf): Likewise.
(mve_vldrwq_gather_offset_z_<supf>v4si): Likewise.
(mve_vldrwq_gather_offset_z_fv4sf): Likewise.
(mve_vldrwq_gather_shifted_offset_<supf>v4si): Likewise.
(mve_vldrwq_gather_shifted_offset_fv4sf): Likewise.
(mve_vldrwq_gather_shifted_offset_z_<supf>v4si): Likewise.
(mve_vldrwq_gather_shifted_offset_z_fv4sf): Likewise.
(mve_vldrwq_z_<supf>v4si): Likewise.
(mve_vldrwq_z_fv4sf): Likewise.
(mve_vmaxaq_m_s<mode>): Likewise.
(mve_vmaxaq_s<mode>): Likewise.
(mve_vmaxavq_p_s<mode>): Likewise.
(mve_vmaxavq_s<mode>): Likewise.
(mve_vmaxnmaq_f<mode>): Likewise.
(mve_vmaxnmaq_m_f<mode>): Likewise.
(mve_vmaxnmavq_f<mode>): Likewise.
(mve_vmaxnmavq_p_f<mode>): Likewise.
(mve_vmaxnmq_f<mode>): Likewise.
(mve_vmaxnmq_m_f<mode>): Likewise.
(mve_vmaxnmvq_f<mode>): Likewise.
(mve_vmaxnmvq_p_f<mode>): Likewise.
(mve_vmaxq_<supf><mode>): Likewise.
(mve_vmaxq_m_<supf><mode>): Likewise.
(mve_vmaxq_s<mode>): Likewise.
(mve_vmaxq_u<mode>): Likewise.
(mve_vmaxvq_<supf><mode>): Likewise.
(mve_vmaxvq_p_<supf><mode>): Likewise.
(mve_vminaq_m_s<mode>): Likewise.
(mve_vminaq_s<mode>): Likewise.
(mve_vminavq_p_s<mode>): Likewise.
(mve_vminavq_s<mode>): Likewise.
(mve_vminnmaq_f<mode>): Likewise.
(mve_vminnmaq_m_f<mode>): Likewise.
(mve_vminnmavq_f<mode>): Likewise.
(mve_vminnmavq_p_f<mode>): Likewise.
(mve_vminnmq_f<mode>): Likewise.
(mve_vminnmq_m_f<mode>): Likewise.
(mve_vminnmvq_f<mode>): Likewise.
(mve_vminnmvq_p_f<mode>): Likewise.
(mve_vminq_<supf><mode>): Likewise.
(mve_vminq_m_<supf><mode>): Likewise.
(mve_vminq_s<mode>): Likewise.
(mve_vminq_u<mode>): Likewise.
(mve_vminvq_<supf><mode>): Likewise.
(mve_vminvq_p_<supf><mode>): Likewise.
(mve_vmladavaq_<supf><mode>): Likewise.
(mve_vmladavaq_p_<supf><mode>): Likewise.
(mve_vmladavaxq_p_s<mode>): Likewise.
(mve_vmladavaxq_s<mode>): Likewise.
(mve_vmladavq_<supf><mode>): Likewise.
(mve_vmladavq_p_<supf><mode>): Likewise.
(mve_vmladavxq_p_s<mode>): Likewise.
(mve_vmladavxq_s<mode>): Likewise.
(mve_vmlaldavaq_<supf><mode>): Likewise.
(mve_vmlaldavaq_p_<supf><mode>): Likewise.
(mve_vmlaldavaxq_<supf><mode>): Likewise.
(mve_vmlaldavaxq_p_<supf><mode>): Likewise.
(mve_vmlaldavaxq_s<mode>): Likewise.
(mve_vmlaldavq_<supf><mode>): Likewise.
(mve_vmlaldavq_p_<supf><mode>): Likewise.
(mve_vmlaldavxq_p_s<mode>): Likewise.
(mve_vmlaldavxq_s<mode>): Likewise.
(mve_vmlaq_m_n_<supf><mode>): Likewise.
(mve_vmlaq_n_<supf><mode>): Likewise.
(mve_vmlasq_m_n_<supf><mode>): Likewise.
(mve_vmlasq_n_<supf><mode>): Likewise.
(mve_vmlsdavaq_p_s<mode>): Likewise.
(mve_vmlsdavaq_s<mode>): Likewise.
(mve_vmlsdavaxq_p_s<mode>): Likewise.
(mve_vmlsdavaxq_s<mode>): Likewise.
(mve_vmlsdavq_p_s<mode>): Likewise.
(mve_vmlsdavq_s<mode>): Likewise.
(mve_vmlsdavxq_p_s<mode>): Likewise.
(mve_vmlsdavxq_s<mode>): Likewise.
(mve_vmlsldavaq_p_s<mode>): Likewise.
(mve_vmlsldavaq_s<mode>): Likewise.
(mve_vmlsldavaxq_p_s<mode>): Likewise.
(mve_vmlsldavaxq_s<mode>): Likewise.
(mve_vmlsldavq_p_s<mode>): Likewise.
(mve_vmlsldavq_s<mode>): Likewise.
(mve_vmlsldavxq_p_s<mode>): Likewise.
(mve_vmlsldavxq_s<mode>): Likewise.
(mve_vmovlbq_<supf><mode>): Likewise.
(mve_vmovlbq_m_<supf><mode>): Likewise.
(mve_vmovltq_<supf><mode>): Likewise.
(mve_vmovltq_m_<supf><mode>): Likewise.
(mve_vmovnbq_<supf><mode>): Likewise.
(mve_vmovnbq_m_<supf><mode>): Likewise.
(mve_vmovntq_<supf><mode>): Likewise.
(mve_vmovntq_m_<supf><mode>): Likewise.
(mve_vmulhq_<supf><mode>): Likewise.
(mve_vmulhq_m_<supf><mode>): Likewise.
(mve_vmullbq_int_<supf><mode>): Likewise.
(mve_vmullbq_int_m_<supf><mode>): Likewise.
(mve_vmullbq_poly_m_p<mode>): Likewise.
(mve_vmullbq_poly_p<mode>): Likewise.
(mve_vmulltq_int_<supf><mode>): Likewise.
(mve_vmulltq_int_m_<supf><mode>): Likewise.
(mve_vmulltq_poly_m_p<mode>): Likewise.
(mve_vmulltq_poly_p<mode>): Likewise.
(mve_vmulq_<supf><mode>): Likewise.
(mve_vmulq_f<mode>): Likewise.
(mve_vmulq_m_<supf><mode>): Likewise.
(mve_vmulq_m_f<mode>): Likewise.
(mve_vmulq_m_n_<supf><mode>): Likewise.
(mve_vmulq_m_n_f<mode>): Likewise.
(mve_vmulq_n_<supf><mode>): Likewise.
(mve_vmulq_n_f<mode>): Likewise.
(mve_vmvnq_<supf><mode>): Likewise.
(mve_vmvnq_m_<supf><mode>): Likewise.
(mve_vmvnq_m_n_<supf><mode>): Likewise.
(mve_vmvnq_n_<supf><mode>): Likewise.
(mve_vmvnq_s<mode>): Likewise.
(mve_vmvnq_u<mode>): Likewise.
(mve_vnegq_f<mode>): Likewise.
(mve_vnegq_m_f<mode>): Likewise.
(mve_vnegq_m_s<mode>): Likewise.
(mve_vnegq_s<mode>): Likewise.
(mve_vornq_<supf><mode>): Likewise.
(mve_vornq_f<mode>): Likewise.
(mve_vornq_m_<supf><mode>): Likewise.
(mve_vornq_m_f<mode>): Likewise.
(mve_vornq_s<mode>): Likewise.
(mve_vornq_u<mode>): Likewise.
(mve_vorrq_<supf><mode>): Likewise.
(mve_vorrq_f<mode>): Likewise.
(mve_vorrq_m_<supf><mode>): Likewise.
(mve_vorrq_m_f<mode>): Likewise.
(mve_vorrq_m_n_<supf><mode>): Likewise.
(mve_vorrq_n_<supf><mode>): Likewise.
(mve_vorrq_s<mode>): Likewise.
(mve_vorrq_s<mode>): Likewise.
(mve_vqabsq_m_s<mode>): Likewise.
(mve_vqabsq_s<mode>): Likewise.
(mve_vqaddq_<supf><mode>): Likewise.
(mve_vqaddq_m_<supf><mode>): Likewise.
(mve_vqaddq_m_n_<supf><mode>): Likewise.
(mve_vqaddq_n_<supf><mode>): Likewise.
(mve_vqdmladhq_m_s<mode>): Likewise.
(mve_vqdmladhq_s<mode>): Likewise.
(mve_vqdmladhxq_m_s<mode>): Likewise.
(mve_vqdmladhxq_s<mode>): Likewise.
(mve_vqdmlahq_m_n_s<mode>): Likewise.
(mve_vqdmlahq_n_<supf><mode>): Likewise.
(mve_vqdmlahq_n_s<mode>): Likewise.
(mve_vqdmlashq_m_n_s<mode>): Likewise.
(mve_vqdmlashq_n_<supf><mode>): Likewise.
(mve_vqdmlashq_n_s<mode>): Likewise.
(mve_vqdmlsdhq_m_s<mode>): Likewise.
(mve_vqdmlsdhq_s<mode>): Likewise.
(mve_vqdmlsdhxq_m_s<mode>): Likewise.
(mve_vqdmlsdhxq_s<mode>): Likewise.
(mve_vqdmulhq_m_n_s<mode>): Likewise.
(mve_vqdmulhq_m_s<mode>): Likewise.
(mve_vqdmulhq_n_s<mode>): Likewise.
(mve_vqdmulhq_s<mode>): Likewise.
(mve_vqdmullbq_m_n_s<mode>): Likewise.
(mve_vqdmullbq_m_s<mode>): Likewise.
(mve_vqdmullbq_n_s<mode>): Likewise.
(mve_vqdmullbq_s<mode>): Likewise.
(mve_vqdmulltq_m_n_s<mode>): Likewise.
(mve_vqdmulltq_m_s<mode>): Likewise.
(mve_vqdmulltq_n_s<mode>): Likewise.
(mve_vqdmulltq_s<mode>): Likewise.
(mve_vqmovnbq_<supf><mode>): Likewise.
(mve_vqmovnbq_m_<supf><mode>): Likewise.
(mve_vqmovntq_<supf><mode>): Likewise.
(mve_vqmovntq_m_<supf><mode>): Likewise.
(mve_vqmovunbq_m_s<mode>): Likewise.
(mve_vqmovunbq_s<mode>): Likewise.
(mve_vqmovuntq_m_s<mode>): Likewise.
(mve_vqmovuntq_s<mode>): Likewise.
(mve_vqnegq_m_s<mode>): Likewise.
(mve_vqnegq_s<mode>): Likewise.
(mve_vqrdmladhq_m_s<mode>): Likewise.
(mve_vqrdmladhq_s<mode>): Likewise.
(mve_vqrdmladhxq_m_s<mode>): Likewise.
(mve_vqrdmladhxq_s<mode>): Likewise.
(mve_vqrdmlahq_m_n_s<mode>): Likewise.
(mve_vqrdmlahq_n_<supf><mode>): Likewise.
(mve_vqrdmlahq_n_s<mode>): Likewise.
(mve_vqrdmlashq_m_n_s<mode>): Likewise.
(mve_vqrdmlashq_n_<supf><mode>): Likewise.
(mve_vqrdmlashq_n_s<mode>): Likewise.
(mve_vqrdmlsdhq_m_s<mode>): Likewise.
(mve_vqrdmlsdhq_s<mode>): Likewise.
(mve_vqrdmlsdhxq_m_s<mode>): Likewise.
(mve_vqrdmlsdhxq_s<mode>): Likewise.
(mve_vqrdmulhq_m_n_s<mode>): Likewise.
(mve_vqrdmulhq_m_s<mode>): Likewise.
(mve_vqrdmulhq_n_s<mode>): Likewise.
(mve_vqrdmulhq_s<mode>): Likewise.
(mve_vqrshlq_<supf><mode>): Likewise.
(mve_vqrshlq_m_<supf><mode>): Likewise.
(mve_vqrshlq_m_n_<supf><mode>): Likewise.
(mve_vqrshlq_n_<supf><mode>): Likewise.
(mve_vqrshrnbq_m_n_<supf><mode>): Likewise.
(mve_vqrshrnbq_n_<supf><mode>): Likewise.
(mve_vqrshrntq_m_n_<supf><mode>): Likewise.
(mve_vqrshrntq_n_<supf><mode>): Likewise.
(mve_vqrshrunbq_m_n_s<mode>): Likewise.
(mve_vqrshrunbq_n_s<mode>): Likewise.
(mve_vqrshruntq_m_n_s<mode>): Likewise.
(mve_vqrshruntq_n_s<mode>): Likewise.
(mve_vqshlq_<supf><mode>): Likewise.
(mve_vqshlq_m_<supf><mode>): Likewise.
(mve_vqshlq_m_n_<supf><mode>): Likewise.
(mve_vqshlq_m_r_<supf><mode>): Likewise.
(mve_vqshlq_n_<supf><mode>): Likewise.
(mve_vqshlq_r_<supf><mode>): Likewise.
(mve_vqshluq_m_n_s<mode>): Likewise.
(mve_vqshluq_n_s<mode>): Likewise.
(mve_vqshrnbq_m_n_<supf><mode>): Likewise.
(mve_vqshrnbq_n_<supf><mode>): Likewise.
(mve_vqshrntq_m_n_<supf><mode>): Likewise.
(mve_vqshrntq_n_<supf><mode>): Likewise.
(mve_vqshrunbq_m_n_s<mode>): Likewise.
(mve_vqshrunbq_n_s<mode>): Likewise.
(mve_vqshruntq_m_n_s<mode>): Likewise.
(mve_vqshruntq_n_s<mode>): Likewise.
(mve_vqsubq_<supf><mode>): Likewise.
(mve_vqsubq_m_<supf><mode>): Likewise.
(mve_vqsubq_m_n_<supf><mode>): Likewise.
(mve_vqsubq_n_<supf><mode>): Likewise.
(mve_vrev16q_<supf>v16qi): Likewise.
(mve_vrev16q_m_<supf>v16qi): Likewise.
(mve_vrev32q_<supf><mode>): Likewise.
(mve_vrev32q_fv8hf): Likewise.
(mve_vrev32q_m_<supf><mode>): Likewise.
(mve_vrev32q_m_fv8hf): Likewise.
(mve_vrev64q_<supf><mode>): Likewise.
(mve_vrev64q_f<mode>): Likewise.
(mve_vrev64q_m_<supf><mode>): Likewise.
(mve_vrev64q_m_f<mode>): Likewise.
(mve_vrhaddq_<supf><mode>): Likewise.
(mve_vrhaddq_m_<supf><mode>): Likewise.
(mve_vrmlaldavhaq_<supf>v4si): Likewise.
(mve_vrmlaldavhaq_p_sv4si): Likewise.
(mve_vrmlaldavhaq_p_uv4si): Likewise.
(mve_vrmlaldavhaq_sv4si): Likewise.
(mve_vrmlaldavhaq_uv4si): Likewise.
(mve_vrmlaldavhaxq_p_sv4si): Likewise.
(mve_vrmlaldavhaxq_sv4si): Likewise.
(mve_vrmlaldavhq_<supf>v4si): Likewise.
(mve_vrmlaldavhq_p_<supf>v4si): Likewise.
(mve_vrmlaldavhxq_p_sv4si): Likewise.
(mve_vrmlaldavhxq_sv4si): Likewise.
(mve_vrmlsldavhaq_p_sv4si): Likewise.
(mve_vrmlsldavhaq_sv4si): Likewise.
(mve_vrmlsldavhaxq_p_sv4si): Likewise.
(mve_vrmlsldavhaxq_sv4si): Likewise.
(mve_vrmlsldavhq_p_sv4si): Likewise.
(mve_vrmlsldavhq_sv4si): Likewise.
(mve_vrmlsldavhxq_p_sv4si): Likewise.
(mve_vrmlsldavhxq_sv4si): Likewise.
(mve_vrmulhq_<supf><mode>): Likewise.
(mve_vrmulhq_m_<supf><mode>): Likewise.
(mve_vrndaq_f<mode>): Likewise.
(mve_vrndaq_m_f<mode>): Likewise.
(mve_vrndmq_f<mode>): Likewise.
(mve_vrndmq_m_f<mode>): Likewise.
(mve_vrndnq_f<mode>): Likewise.
(mve_vrndnq_m_f<mode>): Likewise.
(mve_vrndpq_f<mode>): Likewise.
(mve_vrndpq_m_f<mode>): Likewise.
(mve_vrndq_f<mode>): Likewise.
(mve_vrndq_m_f<mode>): Likewise.
(mve_vrndxq_f<mode>): Likewise.
(mve_vrndxq_m_f<mode>): Likewise.
(mve_vrshlq_<supf><mode>): Likewise.
(mve_vrshlq_m_<supf><mode>): Likewise.
(mve_vrshlq_m_n_<supf><mode>): Likewise.
(mve_vrshlq_n_<supf><mode>): Likewise.
(mve_vrshrnbq_m_n_<supf><mode>): Likewise.
(mve_vrshrnbq_n_<supf><mode>): Likewise.
(mve_vrshrntq_m_n_<supf><mode>): Likewise.
(mve_vrshrntq_n_<supf><mode>): Likewise.
(mve_vrshrq_m_n_<supf><mode>): Likewise.
(mve_vrshrq_n_<supf><mode>): Likewise.
(mve_vsbciq_<supf>v4si): Likewise.
(mve_vsbciq_m_<supf>v4si): Likewise.
(mve_vsbcq_<supf>v4si): Likewise.
(mve_vsbcq_m_<supf>v4si): Likewise.
(mve_vshlcq_<supf><mode>): Likewise.
(mve_vshlcq_m_<supf><mode>): Likewise.
(mve_vshllbq_m_n_<supf><mode>): Likewise.
(mve_vshllbq_n_<supf><mode>): Likewise.
(mve_vshlltq_m_n_<supf><mode>): Likewise.
(mve_vshlltq_n_<supf><mode>): Likewise.
(mve_vshlq_<supf><mode>): Likewise.
(mve_vshlq_<supf><mode>): Likewise.
(mve_vshlq_m_<supf><mode>): Likewise.
(mve_vshlq_m_n_<supf><mode>): Likewise.
(mve_vshlq_m_r_<supf><mode>): Likewise.
(mve_vshlq_n_<supf><mode>): Likewise.
(mve_vshlq_r_<supf><mode>): Likewise.
(mve_vshrnbq_m_n_<supf><mode>): Likewise.
(mve_vshrnbq_n_<supf><mode>): Likewise.
(mve_vshrntq_m_n_<supf><mode>): Likewise.
(mve_vshrntq_n_<supf><mode>): Likewise.
(mve_vshrq_m_n_<supf><mode>): Likewise.
(mve_vshrq_n_<supf><mode>): Likewise.
(mve_vsliq_m_n_<supf><mode>): Likewise.
(mve_vsliq_n_<supf><mode>): Likewise.
(mve_vsriq_m_n_<supf><mode>): Likewise.
(mve_vsriq_n_<supf><mode>): Likewise.
(mve_vstrbq_<supf><mode>): Likewise.
(mve_vstrbq_p_<supf><mode>): Likewise.
(mve_vstrbq_scatter_offset_<supf><mode>_insn): Likewise.
(mve_vstrbq_scatter_offset_p_<supf><mode>_insn): Likewise.
(mve_vstrdq_scatter_base_<supf>v2di): Likewise.
(mve_vstrdq_scatter_base_p_<supf>v2di): Likewise.
(mve_vstrdq_scatter_base_wb_<supf>v2di): Likewise.
(mve_vstrdq_scatter_base_wb_p_<supf>v2di): Likewise.
(mve_vstrdq_scatter_offset_<supf>v2di_insn): Likewise.
(mve_vstrdq_scatter_offset_p_<supf>v2di_insn): Likewise.
(mve_vstrdq_scatter_shifted_offset_<supf>v2di_insn): Likewise.
(mve_vstrdq_scatter_shifted_offset_p_<supf>v2di_insn): Likewise.
(mve_vstrhq_<supf><mode>): Likewise.
(mve_vstrhq_fv8hf): Likewise.
(mve_vstrhq_p_<supf><mode>): Likewise.
(mve_vstrhq_p_fv8hf): Likewise.
(mve_vstrhq_scatter_offset_<supf><mode>_insn): Likewise.
(mve_vstrhq_scatter_offset_fv8hf_insn): Likewise.
(mve_vstrhq_scatter_offset_p_<supf><mode>_insn): Likewise.
(mve_vstrhq_scatter_offset_p_fv8hf_insn): Likewise.
(mve_vstrhq_scatter_shifted_offset_<supf><mode>_insn): Likewise.
(mve_vstrhq_scatter_shifted_offset_fv8hf_insn): Likewise.
(mve_vstrhq_scatter_shifted_offset_p_<supf><mode>_insn): Likewise.
(mve_vstrhq_scatter_shifted_offset_p_fv8hf_insn): Likewise.
(mve_vstrwq_<supf>v4si): Likewise.
(mve_vstrwq_fv4sf): Likewise.
(mve_vstrwq_p_<supf>v4si): Likewise.
(mve_vstrwq_p_fv4sf): Likewise.
(mve_vstrwq_scatter_base_<supf>v4si): Likewise.
(mve_vstrwq_scatter_base_fv4sf): Likewise.
(mve_vstrwq_scatter_base_p_<supf>v4si): Likewise.
(mve_vstrwq_scatter_base_p_fv4sf): Likewise.
(mve_vstrwq_scatter_base_wb_<supf>v4si): Likewise.
(mve_vstrwq_scatter_base_wb_fv4sf): Likewise.
(mve_vstrwq_scatter_base_wb_p_<supf>v4si): Likewise.
(mve_vstrwq_scatter_base_wb_p_fv4sf): Likewise.
(mve_vstrwq_scatter_offset_<supf>v4si_insn): Likewise.
(mve_vstrwq_scatter_offset_fv4sf_insn): Likewise.
(mve_vstrwq_scatter_offset_p_<supf>v4si_insn): Likewise.
(mve_vstrwq_scatter_offset_p_fv4sf_insn): Likewise.
(mve_vstrwq_scatter_shifted_offset_<supf>v4si_insn): Likewise.
(mve_vstrwq_scatter_shifted_offset_fv4sf_insn): Likewise.
(mve_vstrwq_scatter_shifted_offset_p_<supf>v4si_insn): Likewise.
(mve_vstrwq_scatter_shifted_offset_p_fv4sf_insn): Likewise.
(mve_vsubq_<supf><mode>): Likewise.
(mve_vsubq_f<mode>): Likewise.
(mve_vsubq_m_<supf><mode>): Likewise.
(mve_vsubq_m_f<mode>): Likewise.
(mve_vsubq_m_n_<supf><mode>): Likewise.
(mve_vsubq_m_n_f<mode>): Likewise.
(mve_vsubq_n_<supf><mode>): Likewise.
(mve_vsubq_n_f<mode>): Likewise.
@@ -2358,6 +2358,21 @@ extern int making_const_table;
else if (TARGET_THUMB1) \
thumb1_final_prescan_insn (INSN)
+/* These defines are useful to refer to the value of the mve_unpredicated_insn
+ insn attribute. Note that, because these use the get_attr_* function, these
+ will change recog_data if (INSN) isn't current_insn. */
+#define MVE_VPT_PREDICABLE_INSN_P(INSN) \
+ (recog_memoized (INSN) >= 0 \
+ && get_attr_mve_unpredicated_insn (INSN) != 0) \
+
+#define MVE_VPT_PREDICATED_INSN_P(INSN) \
+ (MVE_VPT_PREDICABLE_INSN_P (INSN) \
+ && recog_memoized (INSN) != get_attr_mve_unpredicated_insn (INSN)) \
+
+#define MVE_VPT_UNPREDICATED_INSN_P(INSN) \
+ (MVE_VPT_PREDICABLE_INSN_P (INSN) \
+ && recog_memoized (INSN) == get_attr_mve_unpredicated_insn (INSN)) \
+
#define ARM_SIGN_EXTEND(x) ((HOST_WIDE_INT) \
(HOST_BITS_PER_WIDE_INT <= 32 ? (unsigned HOST_WIDE_INT) (x) \
: ((((unsigned HOST_WIDE_INT)(x)) & (unsigned HOST_WIDE_INT) 0xffffffff) |\
@@ -124,6 +124,8 @@
; and not all ARM insns do.
(define_attr "predicated" "yes,no" (const_string "no"))
+(define_attr "mve_unpredicated_insn" "" (const_int 0))
+
; LENGTH of an instruction (in bytes)
(define_attr "length" ""
(const_int 4))
@@ -2296,6 +2296,7 @@
(define_int_attr mmla_sfx [(UNSPEC_MATMUL_S "s8") (UNSPEC_MATMUL_U "u8")
(UNSPEC_MATMUL_US "s8")])
+
;;MVE int attribute.
(define_int_attr supf [(VCVTQ_TO_F_S "s") (VCVTQ_TO_F_U "u") (VREV16Q_S "s")
(VREV16Q_U "u") (VMVNQ_N_S "s") (VMVNQ_N_U "u")
@@ -17,7 +17,7 @@
;; along with GCC; see the file COPYING3. If not see
;; <http://www.gnu.org/licenses/>.
-(define_insn "*mve_mov<mode>"
+(define_insn "mve_mov<mode>"
[(set (match_operand:MVE_types 0 "nonimmediate_operand" "=w,w,r,w , w, r,Ux,w")
(match_operand:MVE_types 1 "general_operand" " w,r,w,DnDm,UxUi,r,w, Ul"))]
"TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT"
@@ -81,18 +81,27 @@
return "";
}
}
- [(set_attr "type" "mve_move,mve_move,mve_move,mve_move,mve_load,multiple,mve_store,mve_load")
+ [(set_attr_alternative "mve_unpredicated_insn" [(symbol_ref "CODE_FOR_mve_mov<mode>")
+ (symbol_ref "CODE_FOR_nothing")
+ (symbol_ref "CODE_FOR_nothing")
+ (symbol_ref "CODE_FOR_mve_mov<mode>")
+ (symbol_ref "CODE_FOR_mve_mov<mode>")
+ (symbol_ref "CODE_FOR_nothing")
+ (symbol_ref "CODE_FOR_mve_mov<mode>")
+ (symbol_ref "CODE_FOR_nothing")])
+ (set_attr "type" "mve_move,mve_move,mve_move,mve_move,mve_load,multiple,mve_store,mve_load")
(set_attr "length" "4,8,8,4,4,8,4,8")
(set_attr "thumb2_pool_range" "*,*,*,*,1018,*,*,*")
(set_attr "neg_pool_range" "*,*,*,*,996,*,*,*")])
-(define_insn "*mve_vdup<mode>"
+(define_insn "mve_vdup<mode>"
[(set (match_operand:MVE_vecs 0 "s_register_operand" "=w")
(vec_duplicate:MVE_vecs
(match_operand:<V_elem> 1 "s_register_operand" "r")))]
"TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT"
"vdup.<V_sz_elem>\t%q0, %1"
- [(set_attr "length" "4")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vdup<mode>"))
+ (set_attr "length" "4")
(set_attr "type" "mve_move")])
;;
@@ -145,7 +154,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_mnemo>.f%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -159,7 +169,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_insn>.%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -173,7 +184,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"v<absneg_str>.f%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_v<absneg_str>q_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -187,7 +199,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_insn>.%#<V_sz_elem>\t%q0, %1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -201,7 +214,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_insn>.<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
;; [vcvttq_f32_f16])
@@ -214,7 +228,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvtt.f32.f16\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvttq_f32_f16v4sf"))
+ (set_attr "type" "mve_move")
])
;;
@@ -228,7 +243,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvtb.f32.f16\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtbq_f32_f16v4sf"))
+ (set_attr "type" "mve_move")
])
;;
@@ -242,7 +258,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvt.f%#<V_sz_elem>.<supf>%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtq_to_f_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -256,7 +273,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -270,7 +288,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvt.<supf>%#<V_sz_elem>.f%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtq_from_f_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -284,7 +303,8 @@
]
"TARGET_HAVE_MVE"
"v<absneg_str>.s%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_v<absneg_str>q_s<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -297,7 +317,8 @@
]
"TARGET_HAVE_MVE"
"vmvn\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vmvnq_u<mode>"))
+ (set_attr "type" "mve_move")
])
(define_expand "mve_vmvnq_s<mode>"
[
@@ -318,7 +339,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.%#<V_sz_elem>\t%q0, %1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -331,7 +353,8 @@
]
"TARGET_HAVE_MVE"
"vclz.i%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vclzq_s<mode>"))
+ (set_attr "type" "mve_move")
])
(define_expand "mve_vclzq_u<mode>"
[
@@ -354,7 +377,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -368,7 +392,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -382,7 +407,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -397,7 +423,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -411,7 +438,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvtp.<supf>%#<V_sz_elem>.f%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtpq_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -425,7 +453,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvtn.<supf>%#<V_sz_elem>.f%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtnq_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -439,7 +468,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvtm.<supf>%#<V_sz_elem>.f%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtmq_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -453,7 +483,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvta.<supf>%#<V_sz_elem>.f%#<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtaq_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -467,7 +498,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.i%#<V_sz_elem>\t%q0, %1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -481,7 +513,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<V_sz_elem>\t%q0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -495,7 +528,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>32\t%Q0, %R0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf>v4si"))
+ (set_attr "type" "mve_move")
])
;;
@@ -509,7 +543,8 @@
]
"TARGET_HAVE_MVE"
"vctp.<MVE_vctp>\t%1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vctp<MVE_vctp>q<MVE_vpred>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -523,7 +558,8 @@
]
"TARGET_HAVE_MVE"
"vpnot"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vpnotv16bi"))
+ (set_attr "type" "mve_move")
])
;;
@@ -538,7 +574,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_insn>.<V_sz_elem>\t%q0, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -553,7 +590,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvt.f<V_sz_elem>.<supf><V_sz_elem>\t%q0, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtq_n_to_f_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;; [vcreateq_f])
@@ -599,7 +637,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf><V_sz_elem>\t%q0, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;; Versions that take constant vectors as operand 2 (with all elements
@@ -617,7 +656,8 @@
VALID_NEON_QREG_MODE (<MODE>mode),
true);
}
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vshrq_n_s<mode>_imm"))
+ (set_attr "type" "mve_move")
])
(define_insn "mve_vshrq_n_u<mode>_imm"
[
@@ -632,7 +672,8 @@
VALID_NEON_QREG_MODE (<MODE>mode),
true);
}
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vshrq_n_u<mode>_imm"))
+ (set_attr "type" "mve_move")
])
;;
@@ -647,7 +688,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvt.<supf><V_sz_elem>.f<V_sz_elem>\t%q0, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtq_n_from_f_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -662,8 +704,9 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>32\t%Q0, %R0, %q1"
- [(set_attr "type" "mve_move")
- (set_attr "length""8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf>v4si"))
+ (set_attr "type" "mve_move")
+ (set_attr "length""8")])
;;
;; [vcmpneq_, vcmpcsq_, vcmpeqq_, vcmpgeq_, vcmpgtq_, vcmphiq_, vcmpleq_, vcmpltq_])
@@ -676,7 +719,8 @@
]
"TARGET_HAVE_MVE"
"vcmp.<mve_cmp_type>%#<V_sz_elem>\t<mve_cmp_op>, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcmp<mve_cmp_op>q_<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -691,7 +735,8 @@
]
"TARGET_HAVE_MVE"
"vcmp.<mve_cmp_type>%#<V_sz_elem> <mve_cmp_op>, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcmp<mve_cmp_op>q_n_<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -722,7 +767,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -739,7 +785,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.i%#<V_sz_elem>\t%q0, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -754,7 +801,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -769,7 +817,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%0, %q1"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -789,8 +838,11 @@
"@
vand\t%q0, %q1, %q2
* return neon_output_logic_immediate (\"vand\", &operands[2], <MODE>mode, 1, VALID_NEON_QREG_MODE (<MODE>mode));"
- [(set_attr "type" "mve_move")
+ [(set_attr_alternative "mve_unpredicated_insn" [(symbol_ref "CODE_FOR_mve_vandq_u<mode>")
+ (symbol_ref "CODE_FOR_nothing")])
+ (set_attr "type" "mve_move")
])
+
(define_expand "mve_vandq_s<mode>"
[
(set (match_operand:MVE_2 0 "s_register_operand")
@@ -811,7 +863,8 @@
]
"TARGET_HAVE_MVE"
"vbic\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vbicq_u<mode>"))
+ (set_attr "type" "mve_move")
])
(define_expand "mve_vbicq_s<mode>"
@@ -835,7 +888,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.%#<V_sz_elem>\t%q0, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -853,7 +907,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<isu>%#<V_sz_elem>\t%q0, %q1, %q2, #<rot>"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q<mve_rot>_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;; Auto vectorizer pattern for int vcadd
@@ -876,7 +931,8 @@
]
"TARGET_HAVE_MVE"
"veor\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_veorq_u<mode>"))
+ (set_attr "type" "mve_move")
])
(define_expand "mve_veorq_s<mode>"
[
@@ -904,7 +960,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%q0, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -920,7 +977,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.s%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -935,7 +993,8 @@
]
"TARGET_HAVE_MVE"
"<max_min_su_str>.<max_min_supf>%#<V_sz_elem>\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<max_min_su_str>q_<max_min_supf><mode>"))
+ (set_attr "type" "mve_move")
])
@@ -954,7 +1013,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -972,7 +1032,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -987,7 +1048,8 @@
]
"TARGET_HAVE_MVE"
"vmullb.<supf>%#<V_sz_elem>\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vmullbq_int_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1002,7 +1064,8 @@
]
"TARGET_HAVE_MVE"
"vmullt.<supf>%#<V_sz_elem>\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vmulltq_int_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1018,7 +1081,8 @@
]
"TARGET_HAVE_MVE"
"<mve_addsubmul>.i%#<V_sz_elem>\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_addsubmul>q<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1032,7 +1096,8 @@
]
"TARGET_HAVE_MVE"
"vorn\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vornq_s<mode>"))
+ (set_attr "type" "mve_move")
])
(define_expand "mve_vornq_u<mode>"
@@ -1061,7 +1126,8 @@
"@
vorr\t%q0, %q1, %q2
* return neon_output_logic_immediate (\"vorr\", &operands[2], <MODE>mode, 0, VALID_NEON_QREG_MODE (<MODE>mode));"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vorrq_s<mode>"))
+ (set_attr "type" "mve_move")
])
(define_expand "mve_vorrq_u<mode>"
[
@@ -1085,7 +1151,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%q0, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1101,7 +1168,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%q0, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1117,7 +1185,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%q0, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_r_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1132,7 +1201,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%q0, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1147,7 +1217,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_insn>.f%#<V_sz_elem>\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1162,7 +1233,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>32\t%Q0, %R0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf>v4si"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1179,7 +1251,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_insn>.f%#<V_sz_elem>\t%q0, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1193,7 +1266,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vand\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vandq_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1207,7 +1281,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vbic\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vbicq_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1223,7 +1298,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_insn>.f%#<V_sz_elem>\t%q0, %q1, %q2, #<rot>"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q<mve_rot>_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1237,7 +1313,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> <mve_cmp_op>, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcmp<mve_cmp_op>q_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1252,7 +1329,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> <mve_cmp_op>, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcmp<mve_cmp_op>q_n_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1267,8 +1345,10 @@
]
"TARGET_HAVE_MVE"
"vpst\;vctpt.<MVE_vctp>\t%1"
- [(set_attr "type" "mve_move")
- (set_attr "length""8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vctp<MVE_vctp>q<MVE_vpred>"))
+ (set_attr "type" "mve_move")
+ (set_attr "length""8")
+])
;;
;; [vcvtbq_f16_f32])
@@ -1282,7 +1362,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvtb.f16.f32\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtbq_f16_f32v8hf"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1297,7 +1378,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvtt.f16.f32\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvttq_f16_f32v8hf"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1311,7 +1393,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"veor\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_veorq_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1327,7 +1410,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_insn>.f%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1345,7 +1429,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_insn>.f%#<V_sz_elem>\t%0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1360,7 +1445,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<max_min_f_str>.f%#<V_sz_elem> %q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<max_min_f_str>q_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1378,7 +1464,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%Q0, %R0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1398,7 +1485,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<isu>%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1414,7 +1502,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_addsubmul>.f%#<V_sz_elem>\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_addsubmul>q_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1428,7 +1517,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vorn\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vornq_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1442,7 +1532,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vorr\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vorrq_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1458,7 +1549,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.i%#<V_sz_elem> %q0, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1474,7 +1566,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.s%#<V_sz_elem>\t%q0, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1490,7 +1583,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.s%#<V_sz_elem>\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1508,7 +1602,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>32\t%Q0, %R0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf>v4si"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1524,7 +1619,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%q0, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1539,7 +1635,8 @@
]
"TARGET_HAVE_MVE"
"vmullt.p%#<V_sz_elem>\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vmulltq_poly_p<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1554,7 +1651,8 @@
]
"TARGET_HAVE_MVE"
"vmullb.p%#<V_sz_elem>\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vmullbq_poly_p<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1575,8 +1673,9 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcmpt.f%#<V_sz_elem>\t<mve_cmp_op1>, %q1, %q2"
- [(set_attr "type" "mve_move")
- (set_attr "length""8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcmp<mve_cmp_op1>q_f<mode>"))
+ (set_attr "length""8")])
+
;;
;; [vcvtaq_m_u, vcvtaq_m_s])
;;
@@ -1590,8 +1689,10 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcvtat.<supf>%#<V_sz_elem>.f%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
- (set_attr "length""8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtaq_<supf><mode>"))
+ (set_attr "type" "mve_move")
+ (set_attr "length""8")])
+
;;
;; [vcvtq_m_to_f_s, vcvtq_m_to_f_u])
;;
@@ -1605,8 +1706,9 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcvtt.f%#<V_sz_elem>.<supf>%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
- (set_attr "length""8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtq_to_f_<supf><mode>"))
+ (set_attr "type" "mve_move")
+ (set_attr "length""8")])
;;
;; [vqrshrnbq_n_u, vqrshrnbq_n_s]
@@ -1632,7 +1734,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<isu>%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1651,7 +1754,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>32\t%Q0, %R0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf>v4si"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1667,7 +1771,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1713,7 +1818,10 @@
(match_dup 4)]
VSHLCQ))]
"TARGET_HAVE_MVE"
- "vshlc\t%q0, %1, %4")
+ "vshlc\t%q0, %1, %4"
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vshlcq_<supf><mode>"))
+ (set_attr "type" "mve_move")
+])
;;
;; [vabsq_m_s]
@@ -1733,7 +1841,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<isu>%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -1749,7 +1858,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -1772,7 +1882,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;vcmpt.<isu>%#<V_sz_elem>\t<mve_cmp_op1>, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcmp<mve_cmp_op1>q_n_<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -1795,7 +1906,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;vcmpt.<isu>%#<V_sz_elem>\t<mve_cmp_op1>, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcmp<mve_cmp_op1>q_<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -1811,7 +1923,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.%#<V_sz_elem>\t%q0, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -1828,7 +1941,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.s%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -1847,7 +1961,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -1866,7 +1981,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1885,7 +2001,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -1906,7 +2023,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1922,7 +2040,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -1938,7 +2057,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1961,7 +2081,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.s%#<V_sz_elem>\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -1978,7 +2099,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%q0, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -1995,7 +2117,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%q0, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_r_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2011,7 +2134,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2027,7 +2151,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -2043,7 +2168,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -2066,7 +2192,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;<mve_mnemo>t.f%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2082,7 +2209,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>32\t%Q0, %R0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf>v4si"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
;; [vcmlaq, vcmlaq_rot90, vcmlaq_rot180, vcmlaq_rot270])
@@ -2100,7 +2228,9 @@
"@
vcmul.f%#<V_sz_elem> %q0, %q2, %q3, #<rot>
vcmla.f%#<V_sz_elem> %q0, %q2, %q3, #<rot>"
- [(set_attr "type" "mve_move")
+ [(set_attr_alternative "mve_unpredicated_insn" [(symbol_ref "CODE_FOR_mve_<mve_insn>q<mve_rot>_f<mode>")
+ (symbol_ref "CODE_FOR_mve_<mve_insn>q<mve_rot>_f<mode>")])
+ (set_attr "type" "mve_move")
])
;;
@@ -2121,7 +2251,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcmpt.f%#<V_sz_elem>\t<mve_cmp_op1>, %q1, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcmp<mve_cmp_op1>q_n_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2137,7 +2268,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcvtbt.f16.f32\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtbq_f16_f32v8hf"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2153,7 +2285,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcvtbt.f32.f16\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtbq_f32_f16v4sf"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2169,7 +2302,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcvttt.f16.f32\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvttq_f16_f32v8hf"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2185,8 +2319,9 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcvttt.f32.f16\t%q0, %q2"
- [(set_attr "type" "mve_move")
- (set_attr "length""8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvttq_f32_f16v4sf"))
+ (set_attr "type" "mve_move")
+ (set_attr "length""8")])
;;
;; [vdupq_m_n_f])
@@ -2201,7 +2336,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;<mve_insn>t.%#<V_sz_elem>\t%q0, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2218,7 +2354,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_insn>.f%#<V_sz_elem>\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -2235,7 +2372,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_insn>.f%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -2252,7 +2390,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;<mve_insn>t.f%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2271,7 +2410,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;<mve_insn>t.f%#<V_sz_elem>\t%0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2290,7 +2430,8 @@
]
"TARGET_HAVE_MVE"
"<mve_insn>.<supf>%#<V_sz_elem>\t%Q0, %R0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -2309,7 +2450,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%Q0, %R0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2326,7 +2468,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2347,7 +2490,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<isu>%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2363,7 +2507,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.i%#<V_sz_elem>\t%q0, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2380,7 +2525,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.i%#<V_sz_elem>\t%q0, %2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2396,7 +2542,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"<mve_insn>\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
])
;;
@@ -2412,7 +2559,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;<mve_insn>t.<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2428,7 +2576,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2444,7 +2593,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;<mve_insn>t.%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2463,7 +2613,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>32\t%Q0, %R0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf>v4si"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2479,7 +2630,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcvtmt.<supf>%#<V_sz_elem>.f%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtmq_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2495,7 +2647,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcvtpt.<supf>%#<V_sz_elem>.f%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtpq_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2511,7 +2664,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcvtnt.<supf>%#<V_sz_elem>.f%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtnq_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2528,7 +2682,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcvtt.<supf>%#<V_sz_elem>.f%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtq_n_from_f_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2544,7 +2699,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2560,8 +2716,9 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcvtt.<supf>%#<V_sz_elem>.f%#<V_sz_elem>\t%q0, %q2"
- [(set_attr "type" "mve_move")
- (set_attr "length""8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtq_from_f_<supf><mode>"))
+ (set_attr "type" "mve_move")
+ (set_attr "length""8")])
;;
;; [vabavq_p_s, vabavq_p_u])
@@ -2577,7 +2734,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length" "8")])
;;
@@ -2594,8 +2752,9 @@
]
"TARGET_HAVE_MVE"
"vpst\n\t<mve_insn>t.<supf>%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
- (set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
+ (set_attr "length" "8")])
;;
;; [vsriq_m_n_s, vsriq_m_n_u])
@@ -2611,8 +2770,9 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
- (set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
+ (set_attr "length" "8")])
;;
;; [vcvtq_m_n_to_f_u, vcvtq_m_n_to_f_s])
@@ -2628,7 +2788,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vcvtt.f%#<V_sz_elem>.<supf>%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vcvtq_n_to_f_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2668,7 +2829,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2687,8 +2849,9 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.i%#<V_sz_elem> %q0, %q2, %3"
- [(set_attr "type" "mve_move")
- (set_attr "length""8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
+ (set_attr "length""8")])
;;
;; [vaddq_m_u, vaddq_m_s]
@@ -2706,7 +2869,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.i%#<V_sz_elem>\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2726,7 +2890,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2743,8 +2908,9 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
- (set_attr "length""8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
+ (set_attr "length""8")])
;;
;; [vcaddq_rot90_m_u, vcaddq_rot90_m_s]
@@ -2763,7 +2929,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<isu>%#<V_sz_elem>\t%q0, %q2, %q3, #<rot>"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q<mve_rot>_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2791,7 +2958,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2812,7 +2980,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2829,7 +2998,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;vmullbt.<supf>%#<V_sz_elem> %q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vmullbq_int_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2846,7 +3016,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;vmulltt.<supf>%#<V_sz_elem> %q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vmulltq_int_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2863,7 +3034,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;vornt\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vornq_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2881,7 +3053,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2899,7 +3072,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2916,7 +3090,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2936,7 +3111,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%Q0, %R0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2964,7 +3140,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<isu>%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -2984,7 +3161,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>32\t%Q0, %R0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf>v4si"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -3002,7 +3180,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.<supf>%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -3019,7 +3198,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;vmullbt.p%#<V_sz_elem>\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vmullbq_poly_p<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -3036,7 +3216,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;vmulltt.p%#<V_sz_elem>\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vmulltq_poly_p<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -3054,7 +3235,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.s%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -3072,7 +3254,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;<mve_insn>t.s%#<V_sz_elem>\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -3096,7 +3279,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;<mve_insn>t.f%#<V_sz_elem> %q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -3117,7 +3301,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;<mve_insn>t.f%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -3137,7 +3322,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;<mve_insn>t\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -3154,7 +3340,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;<mve_insn>t.%#<V_sz_elem>\t%q0, %q2, %3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_n_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -3176,7 +3363,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;<mve_insn>t.f%#<V_sz_elem>\t%q0, %q2, %q3, #<rot>"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q<mve_rot>_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -3196,7 +3384,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;<mve_insn>t.f%#<V_sz_elem>\t%q0, %q2, %q3, #<rot>"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q<mve_rot>_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -3213,7 +3402,8 @@
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vornt\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vornq_f<mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -3233,7 +3423,8 @@
output_asm_insn("vstrb.<V_sz_elem>\t%q1, %E0",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrbq_<supf><mode>"))
+ (set_attr "length" "4")])
;;
;; [vstrbq_scatter_offset_s vstrbq_scatter_offset_u]
@@ -3261,7 +3452,8 @@
VSTRBSOQ))]
"TARGET_HAVE_MVE"
"vstrb.<V_sz_elem>\t%q2, [%0, %q1]"
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrbq_scatter_offset_<supf><mode>_insn"))
+ (set_attr "length" "4")])
;;
;; [vstrwq_scatter_base_s vstrwq_scatter_base_u]
@@ -3283,7 +3475,8 @@
output_asm_insn("vstrw.u32\t%q2, [%q0, %1]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_base_<supf>v4si"))
+ (set_attr "length" "4")])
;;
;; [vldrbq_gather_offset_s vldrbq_gather_offset_u]
@@ -3306,7 +3499,8 @@
output_asm_insn ("vldrb.<supf><V_sz_elem>\t%q0, [%m1, %q2]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrbq_gather_offset_<supf><mode>"))
+ (set_attr "length" "4")])
;;
;; [vldrbq_s vldrbq_u]
@@ -3328,7 +3522,8 @@
output_asm_insn ("vldrb.<supf><V_sz_elem>\t%q0, %E1",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrbq_<supf><mode>"))
+ (set_attr "length" "4")])
;;
;; [vldrwq_gather_base_s vldrwq_gather_base_u]
@@ -3348,7 +3543,8 @@
output_asm_insn ("vldrw.u32\t%q0, [%q1, %2]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_base_<supf>v4si"))
+ (set_attr "length" "4")])
;;
;; [vstrbq_scatter_offset_p_s vstrbq_scatter_offset_p_u]
@@ -3380,7 +3576,8 @@
VSTRBSOQ))]
"TARGET_HAVE_MVE"
"vpst\;vstrbt.<V_sz_elem>\t%q2, [%0, %q1]"
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrbq_scatter_offset_<supf><mode>_insn"))
+ (set_attr "length" "8")])
;;
;; [vstrwq_scatter_base_p_s vstrwq_scatter_base_p_u]
@@ -3403,7 +3600,8 @@
output_asm_insn ("vpst\n\tvstrwt.u32\t%q2, [%q0, %1]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_base_<supf>v4si"))
+ (set_attr "length" "8")])
(define_insn "mve_vstrbq_p_<supf><mode>"
[(set (match_operand:<MVE_B_ELEM> 0 "mve_memory_operand" "=Ux")
@@ -3421,7 +3619,8 @@
output_asm_insn ("vpst\;vstrbt.<V_sz_elem>\t%q1, %E0",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrbq_<supf><mode>"))
+ (set_attr "length" "8")])
;;
;; [vldrbq_gather_offset_z_s vldrbq_gather_offset_z_u]
@@ -3446,7 +3645,8 @@
output_asm_insn ("vpst\n\tvldrbt.<supf><V_sz_elem>\t%q0, [%m1, %q2]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrbq_gather_offset_<supf><mode>"))
+ (set_attr "length" "8")])
;;
;; [vldrbq_z_s vldrbq_z_u]
@@ -3469,7 +3669,8 @@
output_asm_insn ("vpst\;vldrbt.<supf><V_sz_elem>\t%q0, %E1",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrbq_<supf><mode>"))
+ (set_attr "length" "8")])
;;
;; [vldrwq_gather_base_z_s vldrwq_gather_base_z_u]
@@ -3490,7 +3691,8 @@
output_asm_insn ("vpst\n\tvldrwt.u32\t%q0, [%q1, %2]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_base_<supf>v4si"))
+ (set_attr "length" "8")])
;;
;; [vldrhq_f]
@@ -3509,7 +3711,8 @@
output_asm_insn ("vldrh.16\t%q0, %E1",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrhq_fv8hf"))
+ (set_attr "length" "4")])
;;
;; [vldrhq_gather_offset_s vldrhq_gather_offset_u]
@@ -3532,7 +3735,8 @@
output_asm_insn ("vldrh.<supf><V_sz_elem>\t%q0, [%m1, %q2]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrhq_gather_offset_<supf><mode>"))
+ (set_attr "length" "4")])
;;
;; [vldrhq_gather_offset_z_s vldrhq_gather_offset_z_u]
@@ -3557,7 +3761,8 @@
output_asm_insn ("vpst\n\tvldrht.<supf><V_sz_elem>\t%q0, [%m1, %q2]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrhq_gather_offset_<supf><mode>"))
+ (set_attr "length" "8")])
;;
;; [vldrhq_gather_shifted_offset_s vldrhq_gather_shifted_offset_u]
@@ -3580,7 +3785,8 @@
output_asm_insn ("vldrh.<supf><V_sz_elem>\t%q0, [%m1, %q2, uxtw #1]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrhq_gather_shifted_offset_<supf><mode>"))
+ (set_attr "length" "4")])
;;
;; [vldrhq_gather_shifted_offset_z_s vldrhq_gather_shifted_offset_z_u]
@@ -3605,7 +3811,8 @@
output_asm_insn ("vpst\n\tvldrht.<supf><V_sz_elem>\t%q0, [%m1, %q2, uxtw #1]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrhq_gather_shifted_offset_<supf><mode>"))
+ (set_attr "length" "8")])
;;
;; [vldrhq_s, vldrhq_u]
@@ -3627,7 +3834,8 @@
output_asm_insn ("vldrh.<supf><V_sz_elem>\t%q0, %E1",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrhq_<supf><mode>"))
+ (set_attr "length" "4")])
;;
;; [vldrhq_z_f]
@@ -3647,7 +3855,8 @@
output_asm_insn ("vpst\;vldrht.16\t%q0, %E1",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrhq_fv8hf"))
+ (set_attr "length" "8")])
;;
;; [vldrhq_z_s vldrhq_z_u]
@@ -3670,7 +3879,8 @@
output_asm_insn ("vpst\;vldrht.<supf><V_sz_elem>\t%q0, %E1",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrhq_<supf><mode>"))
+ (set_attr "length" "8")])
;;
;; [vldrwq_f]
@@ -3689,7 +3899,8 @@
output_asm_insn ("vldrw.32\t%q0, %E1",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_fv4sf"))
+ (set_attr "length" "4")])
;;
;; [vldrwq_s vldrwq_u]
@@ -3708,7 +3919,8 @@
output_asm_insn ("vldrw.32\t%q0, %E1",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_<supf>v4si"))
+ (set_attr "length" "4")])
;;
;; [vldrwq_z_f]
@@ -3728,7 +3940,8 @@
output_asm_insn ("vpst\;vldrwt.32\t%q0, %E1",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_fv4sf"))
+ (set_attr "length" "8")])
;;
;; [vldrwq_z_s vldrwq_z_u]
@@ -3748,7 +3961,8 @@
output_asm_insn ("vpst\;vldrwt.32\t%q0, %E1",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_<supf>v4si"))
+ (set_attr "length" "8")])
(define_expand "mve_vld1q_f<mode>"
[(match_operand:MVE_0 0 "s_register_operand")
@@ -3788,7 +4002,8 @@
output_asm_insn ("vldrd.64\t%q0, [%q1, %2]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrdq_gather_base_<supf>v2di"))
+ (set_attr "length" "4")])
;;
;; [vldrdq_gather_base_z_s vldrdq_gather_base_z_u]
@@ -3809,7 +4024,8 @@
output_asm_insn ("vpst\n\tvldrdt.u64\t%q0, [%q1, %2]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrdq_gather_base_<supf>v2di"))
+ (set_attr "length" "8")])
;;
;; [vldrdq_gather_offset_s vldrdq_gather_offset_u]
@@ -3829,7 +4045,8 @@
output_asm_insn ("vldrd.u64\t%q0, [%m1, %q2]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrdq_gather_offset_<supf>v2di"))
+ (set_attr "length" "4")])
;;
;; [vldrdq_gather_offset_z_s vldrdq_gather_offset_z_u]
@@ -3850,7 +4067,8 @@
output_asm_insn ("vpst\n\tvldrdt.u64\t%q0, [%m1, %q2]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrdq_gather_offset_<supf>v2di"))
+ (set_attr "length" "8")])
;;
;; [vldrdq_gather_shifted_offset_s vldrdq_gather_shifted_offset_u]
@@ -3870,7 +4088,8 @@
output_asm_insn ("vldrd.u64\t%q0, [%m1, %q2, uxtw #3]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrdq_gather_shifted_offset_<supf>v2di"))
+ (set_attr "length" "4")])
;;
;; [vldrdq_gather_shifted_offset_z_s vldrdq_gather_shifted_offset_z_u]
@@ -3891,7 +4110,8 @@
output_asm_insn ("vpst\n\tvldrdt.u64\t%q0, [%m1, %q2, uxtw #3]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrdq_gather_shifted_offset_<supf>v2di"))
+ (set_attr "length" "8")])
;;
;; [vldrhq_gather_offset_f]
@@ -3911,7 +4131,8 @@
output_asm_insn ("vldrh.f16\t%q0, [%m1, %q2]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrhq_gather_offset_fv8hf"))
+ (set_attr "length" "4")])
;;
;; [vldrhq_gather_offset_z_f]
@@ -3933,7 +4154,8 @@
output_asm_insn ("vpst\n\tvldrht.f16\t%q0, [%m1, %q2]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrhq_gather_offset_fv8hf"))
+ (set_attr "length" "8")])
;;
;; [vldrhq_gather_shifted_offset_f]
@@ -3953,7 +4175,8 @@
output_asm_insn ("vldrh.f16\t%q0, [%m1, %q2, uxtw #1]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrhq_gather_shifted_offset_fv8hf"))
+ (set_attr "length" "4")])
;;
;; [vldrhq_gather_shifted_offset_z_f]
@@ -3975,7 +4198,8 @@
output_asm_insn ("vpst\n\tvldrht.f16\t%q0, [%m1, %q2, uxtw #1]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrhq_gather_shifted_offset_fv8hf"))
+ (set_attr "length" "8")])
;;
;; [vldrwq_gather_base_f]
@@ -3995,7 +4219,8 @@
output_asm_insn ("vldrw.u32\t%q0, [%q1, %2]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_base_fv4sf"))
+ (set_attr "length" "4")])
;;
;; [vldrwq_gather_base_z_f]
@@ -4016,7 +4241,8 @@
output_asm_insn ("vpst\n\tvldrwt.u32\t%q0, [%q1, %2]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_base_fv4sf"))
+ (set_attr "length" "8")])
;;
;; [vldrwq_gather_offset_f]
@@ -4036,7 +4262,8 @@
output_asm_insn ("vldrw.u32\t%q0, [%m1, %q2]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_offset_fv4sf"))
+ (set_attr "length" "4")])
;;
;; [vldrwq_gather_offset_s vldrwq_gather_offset_u]
@@ -4056,7 +4283,8 @@
output_asm_insn ("vldrw.u32\t%q0, [%m1, %q2]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_offset_<supf>v4si"))
+ (set_attr "length" "4")])
;;
;; [vldrwq_gather_offset_z_f]
@@ -4078,7 +4306,8 @@
output_asm_insn ("vpst\n\tvldrwt.u32\t%q0, [%m1, %q2]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_offset_fv4sf"))
+ (set_attr "length" "8")])
;;
;; [vldrwq_gather_offset_z_s vldrwq_gather_offset_z_u]
@@ -4100,7 +4329,8 @@
output_asm_insn ("vpst\n\tvldrwt.u32\t%q0, [%m1, %q2]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_offset_<supf>v4si"))
+ (set_attr "length" "8")])
;;
;; [vldrwq_gather_shifted_offset_f]
@@ -4120,7 +4350,8 @@
output_asm_insn ("vldrw.u32\t%q0, [%m1, %q2, uxtw #2]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_shifted_offset_fv4sf"))
+ (set_attr "length" "4")])
;;
;; [vldrwq_gather_shifted_offset_s vldrwq_gather_shifted_offset_u]
@@ -4140,7 +4371,8 @@
output_asm_insn ("vldrw.u32\t%q0, [%m1, %q2, uxtw #2]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_shifted_offset_<supf>v4si"))
+ (set_attr "length" "4")])
;;
;; [vldrwq_gather_shifted_offset_z_f]
@@ -4162,7 +4394,8 @@
output_asm_insn ("vpst\n\tvldrwt.u32\t%q0, [%m1, %q2, uxtw #2]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_shifted_offset_fv4sf"))
+ (set_attr "length" "8")])
;;
;; [vldrwq_gather_shifted_offset_z_s vldrwq_gather_shifted_offset_z_u]
@@ -4184,7 +4417,8 @@
output_asm_insn ("vpst\n\tvldrwt.u32\t%q0, [%m1, %q2, uxtw #2]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_shifted_offset_<supf>v4si"))
+ (set_attr "length" "8")])
;;
;; [vstrhq_f]
@@ -4203,7 +4437,8 @@
output_asm_insn ("vstrh.16\t%q1, %E0",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrhq_fv8hf"))
+ (set_attr "length" "4")])
;;
;; [vstrhq_p_f]
@@ -4224,7 +4459,8 @@
output_asm_insn ("vpst\;vstrht.16\t%q1, %E0",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrhq_fv8hf"))
+ (set_attr "length" "8")])
;;
;; [vstrhq_p_s vstrhq_p_u]
@@ -4246,7 +4482,8 @@
output_asm_insn ("vpst\;vstrht.<V_sz_elem>\t%q1, %E0",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrhq_<supf><mode>"))
+ (set_attr "length" "8")])
;;
;; [vstrhq_scatter_offset_p_s vstrhq_scatter_offset_p_u]
@@ -4278,7 +4515,8 @@
VSTRHSOQ))]
"TARGET_HAVE_MVE"
"vpst\;vstrht.<V_sz_elem>\t%q2, [%0, %q1]"
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrhq_scatter_offset_<supf><mode>_insn"))
+ (set_attr "length" "8")])
;;
;; [vstrhq_scatter_offset_s vstrhq_scatter_offset_u]
@@ -4306,7 +4544,8 @@
VSTRHSOQ))]
"TARGET_HAVE_MVE"
"vstrh.<V_sz_elem>\t%q2, [%0, %q1]"
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrhq_scatter_offset_<supf><mode>_insn"))
+ (set_attr "length" "4")])
;;
;; [vstrhq_scatter_shifted_offset_p_s vstrhq_scatter_shifted_offset_p_u]
@@ -4338,7 +4577,8 @@
VSTRHSSOQ))]
"TARGET_HAVE_MVE"
"vpst\;vstrht.<V_sz_elem>\t%q2, [%0, %q1, uxtw #1]"
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrhq_scatter_shifted_offset_<supf><mode>_insn"))
+ (set_attr "length" "8")])
;;
;; [vstrhq_scatter_shifted_offset_s vstrhq_scatter_shifted_offset_u]
@@ -4367,7 +4607,8 @@
VSTRHSSOQ))]
"TARGET_HAVE_MVE"
"vstrh.<V_sz_elem>\t%q2, [%0, %q1, uxtw #1]"
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrhq_scatter_shifted_offset_<supf><mode>_insn"))
+ (set_attr "length" "4")])
;;
;; [vstrhq_s, vstrhq_u]
@@ -4386,7 +4627,8 @@
output_asm_insn ("vstrh.<V_sz_elem>\t%q1, %E0",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrhq_<supf><mode>"))
+ (set_attr "length" "4")])
;;
;; [vstrwq_f]
@@ -4405,7 +4647,8 @@
output_asm_insn ("vstrw.32\t%q1, %E0",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_fv4sf"))
+ (set_attr "length" "4")])
;;
;; [vstrwq_p_f]
@@ -4426,7 +4669,8 @@
output_asm_insn ("vpst\;vstrwt.32\t%q1, %E0",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_fv4sf"))
+ (set_attr "length" "8")])
;;
;; [vstrwq_p_s vstrwq_p_u]
@@ -4447,7 +4691,8 @@
output_asm_insn ("vpst\;vstrwt.32\t%q1, %E0",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_<supf>v4si"))
+ (set_attr "length" "8")])
;;
;; [vstrwq_s vstrwq_u]
@@ -4466,7 +4711,8 @@
output_asm_insn ("vstrw.32\t%q1, %E0",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_<supf>v4si"))
+ (set_attr "length" "4")])
(define_expand "mve_vst1q_f<mode>"
[(match_operand:<MVE_CNVT> 0 "mve_memory_operand")
@@ -4509,7 +4755,8 @@
output_asm_insn ("vpst\;\tvstrdt.u64\t%q2, [%q0, %1]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrdq_scatter_base_<supf>v2di"))
+ (set_attr "length" "8")])
;;
;; [vstrdq_scatter_base_s vstrdq_scatter_base_u]
@@ -4531,7 +4778,8 @@
output_asm_insn ("vstrd.u64\t%q2, [%q0, %1]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrdq_scatter_base_<supf>v2di"))
+ (set_attr "length" "4")])
;;
;; [vstrdq_scatter_offset_p_s vstrdq_scatter_offset_p_u]
@@ -4562,7 +4810,8 @@
VSTRDSOQ))]
"TARGET_HAVE_MVE"
"vpst\;vstrdt.64\t%q2, [%0, %q1]"
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrdq_scatter_offset_<supf>v2di_insn"))
+ (set_attr "length" "8")])
;;
;; [vstrdq_scatter_offset_s vstrdq_scatter_offset_u]
@@ -4590,7 +4839,8 @@
VSTRDSOQ))]
"TARGET_HAVE_MVE"
"vstrd.64\t%q2, [%0, %q1]"
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrdq_scatter_offset_<supf>v2di_insn"))
+ (set_attr "length" "4")])
;;
;; [vstrdq_scatter_shifted_offset_p_s vstrdq_scatter_shifted_offset_p_u]
@@ -4622,7 +4872,8 @@
VSTRDSSOQ))]
"TARGET_HAVE_MVE"
"vpst\;vstrdt.64\t%q2, [%0, %q1, uxtw #3]"
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrdq_scatter_shifted_offset_<supf>v2di_insn"))
+ (set_attr "length" "8")])
;;
;; [vstrdq_scatter_shifted_offset_s vstrdq_scatter_shifted_offset_u]
@@ -4651,7 +4902,8 @@
VSTRDSSOQ))]
"TARGET_HAVE_MVE"
"vstrd.64\t%q2, [%0, %q1, uxtw #3]"
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrdq_scatter_shifted_offset_<supf>v2di_insn"))
+ (set_attr "length" "4")])
;;
;; [vstrhq_scatter_offset_f]
@@ -4679,7 +4931,8 @@
VSTRHQSO_F))]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vstrh.16\t%q2, [%0, %q1]"
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrhq_scatter_offset_fv8hf_insn"))
+ (set_attr "length" "4")])
;;
;; [vstrhq_scatter_offset_p_f]
@@ -4710,7 +4963,8 @@
VSTRHQSO_F))]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vstrht.16\t%q2, [%0, %q1]"
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrhq_scatter_offset_fv8hf_insn"))
+ (set_attr "length" "8")])
;;
;; [vstrhq_scatter_shifted_offset_f]
@@ -4738,7 +4992,8 @@
VSTRHQSSO_F))]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vstrh.16\t%q2, [%0, %q1, uxtw #1]"
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrhq_scatter_shifted_offset_fv8hf_insn"))
+ (set_attr "length" "4")])
;;
;; [vstrhq_scatter_shifted_offset_p_f]
@@ -4770,7 +5025,8 @@
VSTRHQSSO_F))]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vstrht.16\t%q2, [%0, %q1, uxtw #1]"
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrhq_scatter_shifted_offset_fv8hf_insn"))
+ (set_attr "length" "8")])
;;
;; [vstrwq_scatter_base_f]
@@ -4792,7 +5048,8 @@
output_asm_insn ("vstrw.u32\t%q2, [%q0, %1]",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_base_fv4sf"))
+ (set_attr "length" "4")])
;;
;; [vstrwq_scatter_base_p_f]
@@ -4815,7 +5072,8 @@
output_asm_insn ("vpst\n\tvstrwt.u32\t%q2, [%q0, %1]",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_base_fv4sf"))
+ (set_attr "length" "8")])
;;
;; [vstrwq_scatter_offset_f]
@@ -4843,7 +5101,8 @@
VSTRWQSO_F))]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vstrw.32\t%q2, [%0, %q1]"
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_offset_fv4sf_insn"))
+ (set_attr "length" "4")])
;;
;; [vstrwq_scatter_offset_p_f]
@@ -4874,7 +5133,8 @@
VSTRWQSO_F))]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vstrwt.32\t%q2, [%0, %q1]"
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_offset_fv4sf_insn"))
+ (set_attr "length" "8")])
;;
;; [vstrwq_scatter_offset_s vstrwq_scatter_offset_u]
@@ -4905,7 +5165,8 @@
VSTRWSOQ))]
"TARGET_HAVE_MVE"
"vpst\;vstrwt.32\t%q2, [%0, %q1]"
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_offset_<supf>v4si_insn"))
+ (set_attr "length" "8")])
;;
;; [vstrwq_scatter_offset_s vstrwq_scatter_offset_u]
@@ -4933,7 +5194,8 @@
VSTRWSOQ))]
"TARGET_HAVE_MVE"
"vstrw.32\t%q2, [%0, %q1]"
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_offset_<supf>v4si_insn"))
+ (set_attr "length" "4")])
;;
;; [vstrwq_scatter_shifted_offset_f]
@@ -4961,7 +5223,8 @@
VSTRWQSSO_F))]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vstrw.32\t%q2, [%0, %q1, uxtw #2]"
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_shifted_offset_fv4sf_insn"))
+ (set_attr "length" "8")])
;;
;; [vstrwq_scatter_shifted_offset_p_f]
@@ -4993,7 +5256,8 @@
VSTRWQSSO_F))]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vpst\;vstrwt.32\t%q2, [%0, %q1, uxtw #2]"
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_shifted_offset_fv4sf_insn"))
+ (set_attr "length" "8")])
;;
;; [vstrwq_scatter_shifted_offset_p_s vstrwq_scatter_shifted_offset_p_u]
@@ -5025,7 +5289,8 @@
VSTRWSSOQ))]
"TARGET_HAVE_MVE"
"vpst\;vstrwt.32\t%q2, [%0, %q1, uxtw #2]"
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_shifted_offset_<supf>v4si_insn"))
+ (set_attr "length" "8")])
;;
;; [vstrwq_scatter_shifted_offset_s vstrwq_scatter_shifted_offset_u]
@@ -5054,7 +5319,8 @@
VSTRWSSOQ))]
"TARGET_HAVE_MVE"
"vstrw.32\t%q2, [%0, %q1, uxtw #2]"
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_shifted_offset_<supf>v4si_insn"))
+ (set_attr "length" "4")])
;;
;; [vidupq_n_u])
@@ -5122,7 +5388,8 @@
(match_operand:SI 6 "immediate_operand" "i")))]
"TARGET_HAVE_MVE"
"vpst\;\tvidupt.u%#<V_sz_elem>\t%q0, %2, %4"
- [(set_attr "length""8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vidupq_u<mode>_insn"))
+ (set_attr "length""8")])
;;
;; [vddupq_n_u])
@@ -5190,7 +5457,8 @@
(match_operand:SI 6 "immediate_operand" "i")))]
"TARGET_HAVE_MVE"
"vpst\;vddupt.u%#<V_sz_elem>\t%q0, %2, %4"
- [(set_attr "length""8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vddupq_u<mode>_insn"))
+ (set_attr "length""8")])
;;
;; [vdwdupq_n_u])
@@ -5306,8 +5574,9 @@
]
"TARGET_HAVE_MVE"
"vpst\;vdwdupt.u%#<V_sz_elem>\t%q2, %3, %R4, %5"
- [(set_attr "type" "mve_move")
- (set_attr "length""8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vdwdupq_wb_u<mode>_insn"))
+ (set_attr "type" "mve_move")
+ (set_attr "length""8")])
;;
;; [viwdupq_n_u])
@@ -5423,7 +5692,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;\tviwdupt.u%#<V_sz_elem>\t%q2, %3, %R4, %5"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_viwdupq_wb_u<mode>_insn"))
+ (set_attr "type" "mve_move")
(set_attr "length""8")])
;;
@@ -5449,7 +5719,8 @@
output_asm_insn ("vstrw.u32\t%q2, [%q0, %1]!",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_base_wb_<supf>v4si"))
+ (set_attr "length" "4")])
;;
;; [vstrwq_scatter_base_wb_p_s vstrwq_scatter_base_wb_p_u]
@@ -5475,7 +5746,8 @@
output_asm_insn ("vpst\;\tvstrwt.u32\t%q2, [%q0, %1]!",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_base_wb_<supf>v4si"))
+ (set_attr "length" "8")])
;;
;; [vstrwq_scatter_base_wb_f]
@@ -5500,7 +5772,8 @@
output_asm_insn ("vstrw.u32\t%q2, [%q0, %1]!",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_base_wb_fv4sf"))
+ (set_attr "length" "4")])
;;
;; [vstrwq_scatter_base_wb_p_f]
@@ -5526,7 +5799,8 @@
output_asm_insn ("vpst\;vstrwt.u32\t%q2, [%q0, %1]!",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrwq_scatter_base_wb_fv4sf"))
+ (set_attr "length" "8")])
;;
;; [vstrdq_scatter_base_wb_s vstrdq_scatter_base_wb_u]
@@ -5551,7 +5825,8 @@
output_asm_insn ("vstrd.u64\t%q2, [%q0, %1]!",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrdq_scatter_base_wb_<supf>v2di"))
+ (set_attr "length" "4")])
;;
;; [vstrdq_scatter_base_wb_p_s vstrdq_scatter_base_wb_p_u]
@@ -5577,7 +5852,8 @@
output_asm_insn ("vpst\;vstrdt.u64\t%q2, [%q0, %1]!",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vstrdq_scatter_base_wb_<supf>v2di"))
+ (set_attr "length" "8")])
(define_expand "mve_vldrwq_gather_base_wb_<supf>v4si"
[(match_operand:V4SI 0 "s_register_operand")
@@ -5629,7 +5905,8 @@
output_asm_insn ("vldrw.u32\t%q0, [%q1, %2]!",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_base_wb_<supf>v4si_insn"))
+ (set_attr "length" "4")])
(define_expand "mve_vldrwq_gather_base_wb_z_<supf>v4si"
[(match_operand:V4SI 0 "s_register_operand")
@@ -5685,7 +5962,8 @@
output_asm_insn ("vpst\;vldrwt.u32\t%q0, [%q1, %2]!",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_base_wb_<supf>v4si_insn"))
+ (set_attr "length" "8")])
(define_expand "mve_vldrwq_gather_base_wb_fv4sf"
[(match_operand:V4SI 0 "s_register_operand")
@@ -5737,7 +6015,8 @@
output_asm_insn ("vldrw.u32\t%q0, [%q1, %2]!",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_base_wb_fv4sf_insn"))
+ (set_attr "length" "4")])
(define_expand "mve_vldrwq_gather_base_wb_z_fv4sf"
[(match_operand:V4SI 0 "s_register_operand")
@@ -5794,7 +6073,8 @@
output_asm_insn ("vpst\;vldrwt.u32\t%q0, [%q1, %2]!",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrwq_gather_base_wb_fv4sf_insn"))
+ (set_attr "length" "8")])
(define_expand "mve_vldrdq_gather_base_wb_<supf>v2di"
[(match_operand:V2DI 0 "s_register_operand")
@@ -5847,7 +6127,8 @@
output_asm_insn ("vldrd.64\t%q0, [%q1, %2]!",ops);
return "";
}
- [(set_attr "length" "4")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrdq_gather_base_wb_<supf>v2di_insn"))
+ (set_attr "length" "4")])
(define_expand "mve_vldrdq_gather_base_wb_z_<supf>v2di"
[(match_operand:V2DI 0 "s_register_operand")
@@ -5886,7 +6167,7 @@
(unspec_volatile:SI [(reg:SI VFPCC_REGNUM)] UNSPEC_GET_FPSCR_NZCVQC))]
"TARGET_HAVE_MVE"
"vmrs\\t%0, FPSCR_nzcvqc"
- [(set_attr "type" "mve_move")])
+ [(set_attr "type" "mve_move")])
(define_insn "set_fpscr_nzcvqc"
[(set (reg:SI VFPCC_REGNUM)
@@ -5894,7 +6175,7 @@
VUNSPEC_SET_FPSCR_NZCVQC))]
"TARGET_HAVE_MVE"
"vmsr\\tFPSCR_nzcvqc, %0"
- [(set_attr "type" "mve_move")])
+ [(set_attr "type" "mve_move")])
;;
;; [vldrdq_gather_base_wb_z_s vldrdq_gather_base_wb_z_u]
@@ -5919,7 +6200,8 @@
output_asm_insn ("vpst\;vldrdt.u64\t%q0, [%q1, %2]!",ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vldrdq_gather_base_wb_<supf>v2di_insn"))
+ (set_attr "length" "8")])
;;
;; [vadciq_m_s, vadciq_m_u])
;;
@@ -5936,7 +6218,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;vadcit.i32\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vadciq_<supf>v4si"))
+ (set_attr "type" "mve_move")
(set_attr "length" "8")])
;;
@@ -5953,7 +6236,8 @@
]
"TARGET_HAVE_MVE"
"vadci.i32\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vadciq_<supf>v4si"))
+ (set_attr "type" "mve_move")
(set_attr "length" "4")])
;;
@@ -5972,7 +6256,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;vadct.i32\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vadcq_<supf>v4si"))
+ (set_attr "type" "mve_move")
(set_attr "length" "8")])
;;
@@ -5989,7 +6274,8 @@
]
"TARGET_HAVE_MVE"
"vadc.i32\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vadcq_<supf>v4si"))
+ (set_attr "type" "mve_move")
(set_attr "length" "4")
(set_attr "conds" "set")])
@@ -6009,7 +6295,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;vsbcit.i32\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vsbciq_<supf>v4si"))
+ (set_attr "type" "mve_move")
(set_attr "length" "8")])
;;
@@ -6026,7 +6313,8 @@
]
"TARGET_HAVE_MVE"
"vsbci.i32\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vsbciq_<supf>v4si"))
+ (set_attr "type" "mve_move")
(set_attr "length" "4")])
;;
@@ -6045,7 +6333,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;vsbct.i32\t%q0, %q2, %q3"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vsbcq_<supf>v4si"))
+ (set_attr "type" "mve_move")
(set_attr "length" "8")])
;;
@@ -6062,7 +6351,8 @@
]
"TARGET_HAVE_MVE"
"vsbc.i32\t%q0, %q1, %q2"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vsbcq_<supf>v4si"))
+ (set_attr "type" "mve_move")
(set_attr "length" "4")])
;;
@@ -6091,7 +6381,7 @@
"vst21.<V_sz_elem>\t{%q0, %q1}, %3", ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set_attr "length" "8")])
;;
;; [vld2q])
@@ -6119,7 +6409,7 @@
"vld21.<V_sz_elem>\t{%q0, %q1}, %3", ops);
return "";
}
- [(set_attr "length" "8")])
+ [(set_attr "length" "8")])
;;
;; [vld4q])
@@ -6462,7 +6752,8 @@
]
"TARGET_HAVE_MVE"
"vpst\;vshlct\t%q0, %1, %4"
- [(set_attr "type" "mve_move")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_vshlcq_<supf><mode>"))
+ (set_attr "type" "mve_move")
(set_attr "length" "8")])
;; CDE instructions on MVE registers.
@@ -6474,7 +6765,8 @@
UNSPEC_VCDE))]
"TARGET_CDE && TARGET_HAVE_MVE"
"vcx1\\tp%c1, %q0, #%c2"
- [(set_attr "type" "coproc")]
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_arm_vcx1qv16qi"))
+ (set_attr "type" "coproc")]
)
(define_insn "arm_vcx1qav16qi"
@@ -6485,7 +6777,8 @@
UNSPEC_VCDEA))]
"TARGET_CDE && TARGET_HAVE_MVE"
"vcx1a\\tp%c1, %q0, #%c3"
- [(set_attr "type" "coproc")]
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_arm_vcx1qav16qi"))
+ (set_attr "type" "coproc")]
)
(define_insn "arm_vcx2qv16qi"
@@ -6496,7 +6789,8 @@
UNSPEC_VCDE))]
"TARGET_CDE && TARGET_HAVE_MVE"
"vcx2\\tp%c1, %q0, %q2, #%c3"
- [(set_attr "type" "coproc")]
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_arm_vcx2qv16qi"))
+ (set_attr "type" "coproc")]
)
(define_insn "arm_vcx2qav16qi"
@@ -6508,7 +6802,8 @@
UNSPEC_VCDEA))]
"TARGET_CDE && TARGET_HAVE_MVE"
"vcx2a\\tp%c1, %q0, %q3, #%c4"
- [(set_attr "type" "coproc")]
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_arm_vcx2qav16qi"))
+ (set_attr "type" "coproc")]
)
(define_insn "arm_vcx3qv16qi"
@@ -6520,7 +6815,8 @@
UNSPEC_VCDE))]
"TARGET_CDE && TARGET_HAVE_MVE"
"vcx3\\tp%c1, %q0, %q2, %q3, #%c4"
- [(set_attr "type" "coproc")]
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_arm_vcx3qv16qi"))
+ (set_attr "type" "coproc")]
)
(define_insn "arm_vcx3qav16qi"
@@ -6533,7 +6829,8 @@
UNSPEC_VCDEA))]
"TARGET_CDE && TARGET_HAVE_MVE"
"vcx3a\\tp%c1, %q0, %q3, %q4, #%c5"
- [(set_attr "type" "coproc")]
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_arm_vcx3qav16qi"))
+ (set_attr "type" "coproc")]
)
(define_insn "arm_vcx1q<a>_p_v16qi"
@@ -6545,7 +6842,8 @@
CDE_VCX))]
"TARGET_CDE && TARGET_HAVE_MVE"
"vpst\;vcx1<a>t\\tp%c1, %q0, #%c3"
- [(set_attr "type" "coproc")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_arm_vcx1q<a>v16qi"))
+ (set_attr "type" "coproc")
(set_attr "length" "8")]
)
@@ -6559,7 +6857,8 @@
CDE_VCX))]
"TARGET_CDE && TARGET_HAVE_MVE"
"vpst\;vcx2<a>t\\tp%c1, %q0, %q3, #%c4"
- [(set_attr "type" "coproc")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_arm_vcx2q<a>v16qi"))
+ (set_attr "type" "coproc")
(set_attr "length" "8")]
)
@@ -6574,11 +6873,12 @@
CDE_VCX))]
"TARGET_CDE && TARGET_HAVE_MVE"
"vpst\;vcx3<a>t\\tp%c1, %q0, %q3, %q4, #%c5"
- [(set_attr "type" "coproc")
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_arm_vcx3q<a>v16qi"))
+ (set_attr "type" "coproc")
(set_attr "length" "8")]
)
-(define_insn "*movmisalign<mode>_mve_store"
+(define_insn "movmisalign<mode>_mve_store"
[(set (match_operand:MVE_VLD_ST 0 "mve_memory_operand" "=Ux")
(unspec:MVE_VLD_ST [(match_operand:MVE_VLD_ST 1 "s_register_operand" " w")]
UNSPEC_MISALIGNED_ACCESS))]
@@ -6586,11 +6886,12 @@
|| (TARGET_HAVE_MVE_FLOAT && VALID_MVE_SF_MODE (<MODE>mode)))
&& !BYTES_BIG_ENDIAN && unaligned_access"
"vstr<V_sz_elem1>.<V_sz_elem>\t%q1, %E0"
- [(set_attr "type" "mve_store")]
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_movmisalign<mode>_mve_store"))
+ (set_attr "type" "mve_store")]
)
-(define_insn "*movmisalign<mode>_mve_load"
+(define_insn "movmisalign<mode>_mve_load"
[(set (match_operand:MVE_VLD_ST 0 "s_register_operand" "=w")
(unspec:MVE_VLD_ST [(match_operand:MVE_VLD_ST 1 "mve_memory_operand" " Ux")]
UNSPEC_MISALIGNED_ACCESS))]
@@ -6598,7 +6899,8 @@
|| (TARGET_HAVE_MVE_FLOAT && VALID_MVE_SF_MODE (<MODE>mode)))
&& !BYTES_BIG_ENDIAN && unaligned_access"
"vldr<V_sz_elem1>.<V_sz_elem>\t%q0, %E1"
- [(set_attr "type" "mve_load")]
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_movmisalign<mode>_mve_load"))
+ (set_attr "type" "mve_load")]
)
;; Expander for VxBI moves
@@ -6680,3 +6982,40 @@
}
}
)
+
+;; Originally expanded by 'predicated_doloop_end'.
+;; In the rare situation where the branch is too far, we also need to
+;; restore FPSCR.LTPSIZE to 0x100 after the last iteration.
+(define_insn "*predicated_doloop_end_internal"
+ [(set (pc)
+ (if_then_else
+ (ge (plus:SI (reg:SI LR_REGNUM)
+ (match_operand:SI 0 "const_int_operand" ""))
+ (const_int 0))
+ (label_ref (match_operand 1 "" ""))
+ (pc)))
+ (set (reg:SI LR_REGNUM)
+ (plus:SI (reg:SI LR_REGNUM) (match_dup 0)))
+ (clobber (reg:CC CC_REGNUM))]
+ "TARGET_32BIT && TARGET_HAVE_LOB && TARGET_HAVE_MVE && TARGET_THUMB2"
+ {
+ if (get_attr_length (insn) == 4)
+ return "letp\t%|lr, %l1";
+ else
+ return "subs\t%|lr, #%n0\n\tbgt\t%l1\n\tlctp";
+ }
+ [(set (attr "length")
+ (if_then_else
+ (ltu (minus (pc) (match_dup 1)) (const_int 1024))
+ (const_int 4)
+ (const_int 6)))
+ (set_attr "type" "branch")])
+
+(define_insn "dlstp<mode1>_insn"
+ [
+ (set (reg:SI LR_REGNUM)
+ (unspec:SI [(match_operand:SI 0 "s_register_operand" "r")]
+ DLSTP))
+ ]
+ "TARGET_32BIT && TARGET_HAVE_LOB && TARGET_HAVE_MVE && TARGET_THUMB2"
+ "dlstp.<mode1>\t%|lr, %0")
@@ -366,7 +366,8 @@
"@
<mve_insn>.<supf>%#<V_sz_elem>\t%<V_reg>0, %<V_reg>1, %<V_reg>2
* return neon_output_shift_immediate (\"vshl\", 'i', &operands[2], <MODE>mode, VALID_NEON_QREG_MODE (<MODE>mode), true);"
- [(set_attr "type" "neon_shift_reg<q>, neon_shift_imm<q>")]
+ [(set (attr "mve_unpredicated_insn") (symbol_ref "CODE_FOR_mve_<mve_insn>q_<supf><mode>"))
+ (set_attr "type" "neon_shift_reg<q>, neon_shift_imm<q>")]
)
(define_expand "vashl<mode>3"