i386: Use macro to wrap up share builtin exceptions in builtin isa check
Checks
Commit Message
Hi all,
Currently in i386, we have several ISAs that share builtins with each other,
which is handled in ix86_check_builtin_isa_match with if condition clauses.
The patterns for these clauses are quite similar, so it will be friendlier
for developers if we rewrite them as a macro.
This patch adds that macro. Tested on x86_64-pc-linux-gnu. Ok for trunk?
BRs,
Haochen
gcc/ChangeLog:
* config/i386/i386-expand.cc
(ix86_check_builtin_isa_match): Correct wrong comments.
Add a new macro SHARE_BUILTIN and refactor the current if
clauses to macro.
---
gcc/config/i386/i386-expand.cc | 72 ++++++++++++----------------------
1 file changed, 24 insertions(+), 48 deletions(-)
Comments
On Tue, Apr 18, 2023 at 2:57 PM Haochen Jiang via Gcc-patches
<gcc-patches@gcc.gnu.org> wrote:
>
> Hi all,
>
> Currently in i386, we have several ISAs share builtin between each other
> which is handled in ix86_check_builtin_isa_match with if condition clauses.
>
> The patterns for these clauses are quite similar so it will be more friendly
> for developers if we rewrite them as a macro.
>
> This patch adds that macro. Tested on x86_64-pc-linux-gnu. Ok for trunk?
Ok.
>
> BRs,
> Haochen
>
> gcc/ChangeLog:
>
> * config/i386/i386-expand.cc
> (ix86_check_builtin_isa_match): Correct wrong comments.
> Add a new macro SHARE_BUILTIN and refactor the current if
> clauses to macro.
> ---
> gcc/config/i386/i386-expand.cc | 72 ++++++++++++----------------------
> 1 file changed, 24 insertions(+), 48 deletions(-)
>
> diff --git a/gcc/config/i386/i386-expand.cc b/gcc/config/i386/i386-expand.cc
> index 0d817fc3f3b..54d5dfae677 100644
> --- a/gcc/config/i386/i386-expand.cc
> +++ b/gcc/config/i386/i386-expand.cc
> @@ -12588,6 +12588,7 @@ ix86_check_builtin_isa_match (unsigned int fcode,
> HOST_WIDE_INT isa2 = ix86_isa_flags2;
> HOST_WIDE_INT bisa = ix86_builtins_isa[fcode].isa;
> HOST_WIDE_INT bisa2 = ix86_builtins_isa[fcode].isa2;
> + HOST_WIDE_INT tmp_isa = isa, tmp_isa2 = isa2;
> /* The general case is we require all the ISAs specified in bisa{,2}
> to be enabled.
> The exceptions are:
> @@ -12596,60 +12597,35 @@ ix86_check_builtin_isa_match (unsigned int fcode,
> OPTION_MASK_ISA_FMA | OPTION_MASK_ISA_FMA4
> (OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL) or
> OPTION_MASK_ISA2_AVXVNNI
> - (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512IFMA) or
> + (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL) or
> OPTION_MASK_ISA2_AVXIFMA
> - (OPTION_MASK_ISA_AVXNECONVERT | OPTION_MASK_ISA2_AVX512BF16) or
> + (OPTION_MASK_ISA_AVX512VL | OPTION_MASK_ISA2_AVX512BF16) or
> OPTION_MASK_ISA2_AVXNECONVERT
> where for each such pair it is sufficient if either of the ISAs is
> enabled, plus if it is ored with other options also those others.
> OPTION_MASK_ISA_MMX in bisa is satisfied also if TARGET_MMX_WITH_SSE. */
> - if (((bisa & (OPTION_MASK_ISA_SSE | OPTION_MASK_ISA_3DNOW_A))
> - == (OPTION_MASK_ISA_SSE | OPTION_MASK_ISA_3DNOW_A))
> - && (isa & (OPTION_MASK_ISA_SSE | OPTION_MASK_ISA_3DNOW_A)) != 0)
> - isa |= (OPTION_MASK_ISA_SSE | OPTION_MASK_ISA_3DNOW_A);
>
> - if (((bisa & (OPTION_MASK_ISA_SSE4_2 | OPTION_MASK_ISA_CRC32))
> - == (OPTION_MASK_ISA_SSE4_2 | OPTION_MASK_ISA_CRC32))
> - && (isa & (OPTION_MASK_ISA_SSE4_2 | OPTION_MASK_ISA_CRC32)) != 0)
> - isa |= (OPTION_MASK_ISA_SSE4_2 | OPTION_MASK_ISA_CRC32);
> -
> - if (((bisa & (OPTION_MASK_ISA_FMA | OPTION_MASK_ISA_FMA4))
> - == (OPTION_MASK_ISA_FMA | OPTION_MASK_ISA_FMA4))
> - && (isa & (OPTION_MASK_ISA_FMA | OPTION_MASK_ISA_FMA4)) != 0)
> - isa |= (OPTION_MASK_ISA_FMA | OPTION_MASK_ISA_FMA4);
> -
> - if ((((bisa & (OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL))
> - == (OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL))
> - || (bisa2 & OPTION_MASK_ISA2_AVXVNNI) != 0)
> - && (((isa & (OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL))
> - == (OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL))
> - || (isa2 & OPTION_MASK_ISA2_AVXVNNI) != 0))
> - {
> - isa |= OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL;
> - isa2 |= OPTION_MASK_ISA2_AVXVNNI;
> - }
> -
> - if ((((bisa & (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL))
> - == (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL))
> - || (bisa2 & OPTION_MASK_ISA2_AVXIFMA) != 0)
> - && (((isa & (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL))
> - == (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL))
> - || (isa2 & OPTION_MASK_ISA2_AVXIFMA) != 0))
> - {
> - isa |= OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL;
> - isa2 |= OPTION_MASK_ISA2_AVXIFMA;
> - }
> -
> - if ((((bisa & OPTION_MASK_ISA_AVX512VL) != 0
> - && (bisa2 & OPTION_MASK_ISA2_AVX512BF16) != 0)
> - && (bisa2 & OPTION_MASK_ISA2_AVXNECONVERT) != 0)
> - && (((isa & OPTION_MASK_ISA_AVX512VL) != 0
> - && (isa2 & OPTION_MASK_ISA2_AVX512BF16) != 0)
> - || (isa2 & OPTION_MASK_ISA2_AVXNECONVERT) != 0))
> - {
> - isa |= OPTION_MASK_ISA_AVX512VL;
> - isa2 |= OPTION_MASK_ISA2_AVXNECONVERT | OPTION_MASK_ISA2_AVX512BF16;
> - }
> +#define SHARE_BUILTIN(A1, A2, B1, B2) \
> + if ((((bisa & (A1)) == (A1) && (bisa2 & (A2)) == (A2)) \
> + && ((bisa & (B1)) == (B1) && (bisa2 & (B2)) == (B2))) \
> + && (((isa & (A1)) == (A1) && (isa2 & (A2)) == (A2)) \
> + || ((isa & (B1)) == (B1) && (isa2 & (B2)) == (B2)))) \
> + { \
> + tmp_isa |= (A1) | (B1); \
> + tmp_isa2 |= (A2) | (B2); \
> + }
> +
> + SHARE_BUILTIN (OPTION_MASK_ISA_SSE, 0, OPTION_MASK_ISA_3DNOW_A, 0);
> + SHARE_BUILTIN (OPTION_MASK_ISA_SSE4_2, 0, OPTION_MASK_ISA_CRC32, 0);
> + SHARE_BUILTIN (OPTION_MASK_ISA_FMA, 0, OPTION_MASK_ISA_FMA4, 0);
> + SHARE_BUILTIN (OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL, 0, 0,
> + OPTION_MASK_ISA2_AVXVNNI);
> + SHARE_BUILTIN (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL, 0, 0,
> + OPTION_MASK_ISA2_AVXIFMA);
> + SHARE_BUILTIN (OPTION_MASK_ISA_AVX512VL, OPTION_MASK_ISA2_AVX512BF16, 0,
> + OPTION_MASK_ISA2_AVXNECONVERT);
> + isa = tmp_isa;
> + isa2 = tmp_isa2;
>
> if ((bisa & OPTION_MASK_ISA_MMX) && !TARGET_MMX && TARGET_MMX_WITH_SSE
> /* __builtin_ia32_maskmovq requires MMX registers. */
> --
> 2.31.1
>
@@ -12588,6 +12588,7 @@ ix86_check_builtin_isa_match (unsigned int fcode,
HOST_WIDE_INT isa2 = ix86_isa_flags2;
HOST_WIDE_INT bisa = ix86_builtins_isa[fcode].isa;
HOST_WIDE_INT bisa2 = ix86_builtins_isa[fcode].isa2;
+ HOST_WIDE_INT tmp_isa = isa, tmp_isa2 = isa2;
/* The general case is we require all the ISAs specified in bisa{,2}
to be enabled.
The exceptions are:
@@ -12596,60 +12597,35 @@ ix86_check_builtin_isa_match (unsigned int fcode,
OPTION_MASK_ISA_FMA | OPTION_MASK_ISA_FMA4
(OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL) or
OPTION_MASK_ISA2_AVXVNNI
- (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512IFMA) or
+ (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL) or
OPTION_MASK_ISA2_AVXIFMA
- (OPTION_MASK_ISA_AVXNECONVERT | OPTION_MASK_ISA2_AVX512BF16) or
+ (OPTION_MASK_ISA_AVX512VL | OPTION_MASK_ISA2_AVX512BF16) or
OPTION_MASK_ISA2_AVXNECONVERT
where for each such pair it is sufficient if either of the ISAs is
enabled, plus if it is ored with other options also those others.
OPTION_MASK_ISA_MMX in bisa is satisfied also if TARGET_MMX_WITH_SSE. */
- if (((bisa & (OPTION_MASK_ISA_SSE | OPTION_MASK_ISA_3DNOW_A))
- == (OPTION_MASK_ISA_SSE | OPTION_MASK_ISA_3DNOW_A))
- && (isa & (OPTION_MASK_ISA_SSE | OPTION_MASK_ISA_3DNOW_A)) != 0)
- isa |= (OPTION_MASK_ISA_SSE | OPTION_MASK_ISA_3DNOW_A);
- if (((bisa & (OPTION_MASK_ISA_SSE4_2 | OPTION_MASK_ISA_CRC32))
- == (OPTION_MASK_ISA_SSE4_2 | OPTION_MASK_ISA_CRC32))
- && (isa & (OPTION_MASK_ISA_SSE4_2 | OPTION_MASK_ISA_CRC32)) != 0)
- isa |= (OPTION_MASK_ISA_SSE4_2 | OPTION_MASK_ISA_CRC32);
-
- if (((bisa & (OPTION_MASK_ISA_FMA | OPTION_MASK_ISA_FMA4))
- == (OPTION_MASK_ISA_FMA | OPTION_MASK_ISA_FMA4))
- && (isa & (OPTION_MASK_ISA_FMA | OPTION_MASK_ISA_FMA4)) != 0)
- isa |= (OPTION_MASK_ISA_FMA | OPTION_MASK_ISA_FMA4);
-
- if ((((bisa & (OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL))
- == (OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL))
- || (bisa2 & OPTION_MASK_ISA2_AVXVNNI) != 0)
- && (((isa & (OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL))
- == (OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL))
- || (isa2 & OPTION_MASK_ISA2_AVXVNNI) != 0))
- {
- isa |= OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL;
- isa2 |= OPTION_MASK_ISA2_AVXVNNI;
- }
-
- if ((((bisa & (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL))
- == (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL))
- || (bisa2 & OPTION_MASK_ISA2_AVXIFMA) != 0)
- && (((isa & (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL))
- == (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL))
- || (isa2 & OPTION_MASK_ISA2_AVXIFMA) != 0))
- {
- isa |= OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL;
- isa2 |= OPTION_MASK_ISA2_AVXIFMA;
- }
-
- if ((((bisa & OPTION_MASK_ISA_AVX512VL) != 0
- && (bisa2 & OPTION_MASK_ISA2_AVX512BF16) != 0)
- && (bisa2 & OPTION_MASK_ISA2_AVXNECONVERT) != 0)
- && (((isa & OPTION_MASK_ISA_AVX512VL) != 0
- && (isa2 & OPTION_MASK_ISA2_AVX512BF16) != 0)
- || (isa2 & OPTION_MASK_ISA2_AVXNECONVERT) != 0))
- {
- isa |= OPTION_MASK_ISA_AVX512VL;
- isa2 |= OPTION_MASK_ISA2_AVXNECONVERT | OPTION_MASK_ISA2_AVX512BF16;
- }
+#define SHARE_BUILTIN(A1, A2, B1, B2) \
+ if ((((bisa & (A1)) == (A1) && (bisa2 & (A2)) == (A2)) \
+ && ((bisa & (B1)) == (B1) && (bisa2 & (B2)) == (B2))) \
+ && (((isa & (A1)) == (A1) && (isa2 & (A2)) == (A2)) \
+ || ((isa & (B1)) == (B1) && (isa2 & (B2)) == (B2)))) \
+ { \
+ tmp_isa |= (A1) | (B1); \
+ tmp_isa2 |= (A2) | (B2); \
+ }
+
+ SHARE_BUILTIN (OPTION_MASK_ISA_SSE, 0, OPTION_MASK_ISA_3DNOW_A, 0);
+ SHARE_BUILTIN (OPTION_MASK_ISA_SSE4_2, 0, OPTION_MASK_ISA_CRC32, 0);
+ SHARE_BUILTIN (OPTION_MASK_ISA_FMA, 0, OPTION_MASK_ISA_FMA4, 0);
+ SHARE_BUILTIN (OPTION_MASK_ISA_AVX512VNNI | OPTION_MASK_ISA_AVX512VL, 0, 0,
+ OPTION_MASK_ISA2_AVXVNNI);
+ SHARE_BUILTIN (OPTION_MASK_ISA_AVX512IFMA | OPTION_MASK_ISA_AVX512VL, 0, 0,
+ OPTION_MASK_ISA2_AVXIFMA);
+ SHARE_BUILTIN (OPTION_MASK_ISA_AVX512VL, OPTION_MASK_ISA2_AVX512BF16, 0,
+ OPTION_MASK_ISA2_AVXNECONVERT);
+ isa = tmp_isa;
+ isa2 = tmp_isa2;
if ((bisa & OPTION_MASK_ISA_MMX) && !TARGET_MMX && TARGET_MMX_WITH_SSE
/* __builtin_ia32_maskmovq requires MMX registers. */