[V2,06/14] RISC-V: P6: Add computing reaching definition data flow

Message ID 20231017113500.1160997-7-lehua.ding@rivai.ai
State Unresolved
Headers
Series Refactor and cleanup vsetvl pass |

Checks

Context Check Description
snail/gcc-patch-check warning Git am fail log

Commit Message

Lehua Ding Oct. 17, 2023, 11:34 a.m. UTC
  This sub-patch adds some helper functions for computing reaching definition
data and three computational functions for different objects. These three
functions are used by phases 2 and 3.

gcc/ChangeLog:

	* config/riscv/riscv-vsetvl.cc (bitmap_union_of_preds_with_entry): New.
	(compute_reaching_defintion): New.
	(pre_vsetvl::compute_avl_def_data): New.
	(pre_vsetvl::compute_vsetvl_def_data): New.
	(pre_vsetvl::compute_vsetvl_lcm_data): New.

---
 gcc/config/riscv/riscv-vsetvl.cc | 468 +++++++++++++++++++++++++++++++
 1 file changed, 468 insertions(+)

--
2.36.3
  

Comments

juzhe.zhong@rivai.ai Oct. 18, 2023, 4:12 a.m. UTC | #1
compute_vsetvl_lcm_data -> compute_lcm_local_properties




juzhe.zhong@rivai.ai
 
From: Lehua Ding
Date: 2023-10-17 19:34
To: gcc-patches
CC: juzhe.zhong; kito.cheng; rdapp.gcc; palmer; jeffreyalaw; lehua.ding
Subject: [PATCH V2 06/14] RISC-V: P6: Add computing reaching definition data flow
This sub-patch adds some helper functions for computing reaching definition
data and three computational functions for different objects. These three
functions are used by phases 2 and 3.
 
gcc/ChangeLog:
 
* config/riscv/riscv-vsetvl.cc (bitmap_union_of_preds_with_entry): New.
(compute_reaching_defintion): New.
(pre_vsetvl::compute_avl_def_data): New.
(pre_vsetvl::compute_vsetvl_def_data): New.
(pre_vsetvl::compute_vsetvl_lcm_data): New.
 
---
gcc/config/riscv/riscv-vsetvl.cc | 468 +++++++++++++++++++++++++++++++
1 file changed, 468 insertions(+)
 
diff --git a/gcc/config/riscv/riscv-vsetvl.cc b/gcc/config/riscv/riscv-vsetvl.cc
index 33bdcec04d8..b1269e8cf4f 100644
--- a/gcc/config/riscv/riscv-vsetvl.cc
+++ b/gcc/config/riscv/riscv-vsetvl.cc
@@ -103,6 +103,121 @@ along with GCC; see the file COPYING3.  If not see
using namespace rtl_ssa;
using namespace riscv_vector;
 
+/* Set the bitmap DST to the union of SRC of the predecessors of
+   basic block B.
+   It's a bit different from bitmap_union_of_preds in cfganal.cc. This function
+   takes into account the case where pred is the ENTRY basic block. The main
+   reason for this difference is to make it easier to insert some special value
+   into the ENTRY basic block, e.g. vsetvl_info with a status of UNKNOWN.  */
+static void
+bitmap_union_of_preds_with_entry (sbitmap dst, sbitmap *src, basic_block b)
+{
+  unsigned int set_size = dst->size;
+  edge e;
+  unsigned ix;
+
+  for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
+    {
+      e = EDGE_PRED (b, ix);
+      bitmap_copy (dst, src[e->src->index]);
+      break;
+    }
+
+  if (ix == EDGE_COUNT (b->preds))
+    bitmap_clear (dst);
+  else
+    for (ix++; ix < EDGE_COUNT (b->preds); ix++)
+      {
+ unsigned int i;
+ SBITMAP_ELT_TYPE *p, *r;
+
+ e = EDGE_PRED (b, ix);
+ p = src[e->src->index]->elms;
+ r = dst->elms;
+ for (i = 0; i < set_size; i++)
+   *r++ |= *p++;
+      }
+}
+
+/* Compute the reaching definition in and out based on the GEN and KILL
+   information in each basic block.
+   This function references the compute_available implementation in lcm.cc.  */
+static void
+compute_reaching_defintion (sbitmap *gen, sbitmap *kill, sbitmap *in,
+     sbitmap *out)
+{
+  edge e;
+  basic_block *worklist, *qin, *qout, *qend, bb;
+  unsigned int qlen;
+  edge_iterator ei;
+
+  /* Allocate a worklist array/queue.  Entries are only added to the
+     list if they were not already on the list.  So the size is
+     bounded by the number of basic blocks.  */
+  qin = qout = worklist
+    = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
+
+  /* Put every block on the worklist so that each OUT set is computed
+     at least once.  Use reverse postorder to make the forward dataflow
+     problem require fewer iterations.  */
+  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
+  int n = pre_and_rev_post_order_compute_fn (cfun, NULL, rpo, false);
+  for (int i = 0; i < n; ++i)
+    {
+      bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
+      *qin++ = bb;
+      bb->aux = bb;
+    }
+  free (rpo);
+
+  qin = worklist;
+  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
+  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
+
+  /* Mark blocks which are successors of the entry block so that we
+     can easily identify them below.  */
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
+    e->dest->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+
+  /* Iterate until the worklist is empty.  */
+  while (qlen)
+    {
+      /* Take the first entry off the worklist.  */
+      bb = *qout++;
+      qlen--;
+
+      if (qout >= qend)
+ qout = worklist;
+
+      /* Do not clear the aux field for blocks which are successors of the
+ ENTRY block.  That way we never add them to the worklist again.  */
+      if (bb->aux != ENTRY_BLOCK_PTR_FOR_FN (cfun))
+ bb->aux = NULL;
+
+      bitmap_union_of_preds_with_entry (in[bb->index], out, bb);
+
+      if (bitmap_ior_and_compl (out[bb->index], gen[bb->index], in[bb->index],
+ kill[bb->index]))
+ /* If the out state of this block changed, then we need
+    to add the successors of this block to the worklist
+    if they are not already on the worklist.  */
+ FOR_EACH_EDGE (e, ei, bb->succs)
+   if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
+     {
+       *qin++ = e->dest;
+       e->dest->aux = e;
+       qlen++;
+
+       if (qin >= qend)
+ qin = worklist;
+     }
+    }
+
+  clear_aux_for_edges ();
+  clear_aux_for_blocks ();
+  free (worklist);
+}
+
static CONSTEXPR const unsigned ALL_SEW[] = {8, 16, 32, 64};
static CONSTEXPR const vlmul_type ALL_LMUL[]
   = {LMUL_1, LMUL_2, LMUL_4, LMUL_8, LMUL_F8, LMUL_F4, LMUL_F2};
@@ -2669,6 +2784,359 @@ public:
   }
};
 
+void
+pre_vsetvl::compute_avl_def_data ()
+{
+  if (bitmap_empty_p (avl_regs))
+    return;
+
+  unsigned num_regs = GP_REG_LAST + 1;
+  unsigned num_bbs = last_basic_block_for_fn (cfun);
+
+  sbitmap *avl_def_loc_temp = sbitmap_vector_alloc (num_bbs, num_regs);
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      bitmap_and (avl_def_loc_temp[bb->index ()], avl_regs,
+   reg_def_loc[bb->index ()]);
+
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.has_info ())
+ {
+   vsetvl_info &footer_info = block_info.get_footer_info ();
+   gcc_assert (footer_info.valid_p ());
+   if (footer_info.has_reg_vl ())
+     bitmap_set_bit (avl_def_loc_temp[bb->index ()],
+     REGNO (footer_info.get_vl ()));
+ }
+    }
+
+  if (avl_def_in)
+    sbitmap_vector_free (avl_def_in);
+  if (avl_def_out)
+    sbitmap_vector_free (avl_def_out);
+
+  unsigned num_exprs = num_bbs * num_regs;
+  sbitmap *avl_def_loc = sbitmap_vector_alloc (num_bbs, num_exprs);
+  sbitmap *kill = sbitmap_vector_alloc (num_bbs, num_exprs);
+  avl_def_in = sbitmap_vector_alloc (num_bbs, num_exprs);
+  avl_def_out = sbitmap_vector_alloc (num_bbs, num_exprs);
+
+  bitmap_vector_clear (avl_def_loc, num_bbs);
+  bitmap_vector_clear (kill, num_bbs);
+  bitmap_vector_clear (avl_def_out, num_bbs);
+
+  unsigned regno;
+  sbitmap_iterator sbi;
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    EXECUTE_IF_SET_IN_BITMAP (avl_def_loc_temp[bb->index ()], 0, regno, sbi)
+      {
+ bitmap_set_bit (avl_def_loc[bb->index ()],
+ get_expr_id (bb->index (), regno, num_bbs));
+ bitmap_set_range (kill[bb->index ()], regno * num_bbs, num_bbs);
+      }
+
+  basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+  EXECUTE_IF_SET_IN_BITMAP (avl_regs, 0, regno, sbi)
+    bitmap_set_bit (avl_def_out[entry->index],
+     get_expr_id (entry->index, regno, num_bbs));
+
+  compute_reaching_defintion (avl_def_loc, kill, avl_def_in, avl_def_out);
+
+  if (dump_file && (dump_flags & TDF_DETAILS))
+    {
+      fprintf (dump_file,
+        "  Compute avl reaching defition data (num_bbs %d, num_regs "
+        "%d):\n\n",
+        num_bbs, num_regs);
+      fprintf (dump_file, "    avl_regs: ");
+      dump_bitmap_file (dump_file, avl_regs);
+      fprintf (dump_file, "\n    bitmap data:\n");
+      for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+   unsigned int i = bb->index ();
+   fprintf (dump_file, "      BB %u:\n", i);
+   fprintf (dump_file, "        avl_def_loc:");
+   unsigned expr_id;
+   sbitmap_iterator sbi;
+   EXECUTE_IF_SET_IN_BITMAP (avl_def_loc[i], 0, expr_id, sbi)
+     {
+       fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+        get_bb_index (expr_id, num_bbs));
+     }
+   fprintf (dump_file, "\n        kill:");
+   EXECUTE_IF_SET_IN_BITMAP (kill[i], 0, expr_id, sbi)
+     {
+       fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+        get_bb_index (expr_id, num_bbs));
+     }
+   fprintf (dump_file, "\n        avl_def_in:");
+   EXECUTE_IF_SET_IN_BITMAP (avl_def_in[i], 0, expr_id, sbi)
+     {
+       fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+        get_bb_index (expr_id, num_bbs));
+     }
+   fprintf (dump_file, "\n        avl_def_out:");
+   EXECUTE_IF_SET_IN_BITMAP (avl_def_out[i], 0, expr_id, sbi)
+     {
+       fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+        get_bb_index (expr_id, num_bbs));
+     }
+   fprintf (dump_file, "\n");
+ }
+    }
+
+  sbitmap_vector_free (avl_def_loc);
+  sbitmap_vector_free (kill);
+  sbitmap_vector_free (avl_def_loc_temp);
+
+  dem.set_avl_in_out_data (avl_def_in, avl_def_out);
+}
+
+void
+pre_vsetvl::compute_vsetvl_def_data ()
+{
+  vsetvl_def_exprs.truncate (0);
+  add_expr (vsetvl_def_exprs, unknow_info);
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.empty_p ())
+ continue;
+      vsetvl_info &footer_info = block_info.get_footer_info ();
+      gcc_assert (footer_info.valid_p () || footer_info.unknown_p ());
+      add_expr (vsetvl_def_exprs, footer_info);
+    }
+
+  if (vsetvl_def_in)
+    sbitmap_vector_free (vsetvl_def_in);
+  if (vsetvl_def_out)
+    sbitmap_vector_free (vsetvl_def_out);
+
+  sbitmap *def_loc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+    vsetvl_def_exprs.length ());
+  sbitmap *kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+ vsetvl_def_exprs.length ());
+
+  vsetvl_def_in = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+ vsetvl_def_exprs.length ());
+  vsetvl_def_out = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+ vsetvl_def_exprs.length ());
+
+  bitmap_vector_clear (def_loc, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (kill, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (vsetvl_def_out, last_basic_block_for_fn (cfun));
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.empty_p ())
+ {
+   for (unsigned i = 0; i < vsetvl_def_exprs.length (); i += 1)
+     {
+       const vsetvl_info &info = *vsetvl_def_exprs[i];
+       if (!info.has_reg_avl ())
+ continue;
+       unsigned int regno;
+       sbitmap_iterator sbi;
+       EXECUTE_IF_SET_IN_BITMAP (reg_def_loc[bb->index ()], 0, regno,
+ sbi)
+ if (regno == REGNO (info.get_avl ()))
+   bitmap_set_bit (kill[bb->index ()], i);
+     }
+   continue;
+ }
+
+      vsetvl_info &footer_info = block_info.get_footer_info ();
+      bitmap_ones (kill[bb->index ()]);
+      bitmap_set_bit (def_loc[bb->index ()],
+       get_expr_index (vsetvl_def_exprs, footer_info));
+    }
+
+  /* Set the def_out of the ENTRY basic block to unknow_info expr.  */
+  basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+  bitmap_set_bit (vsetvl_def_out[entry->index],
+   get_expr_index (vsetvl_def_exprs, unknow_info));
+
+  compute_reaching_defintion (def_loc, kill, vsetvl_def_in, vsetvl_def_out);
+
+  if (dump_file && (dump_flags & TDF_DETAILS))
+    {
+      fprintf (dump_file,
+        "\n  Compute vsetvl info reaching defition data:\n\n");
+      fprintf (dump_file, "    Expression List (%d):\n",
+        vsetvl_def_exprs.length ());
+      for (unsigned i = 0; i < vsetvl_def_exprs.length (); i++)
+ {
+   const auto &info = *vsetvl_def_exprs[i];
+   fprintf (dump_file, "      Expr[%u]: ", i);
+   info.dump (dump_file, "        ");
+ }
+      fprintf (dump_file, "\n    bitmap data:\n");
+      for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+   unsigned int i = bb->index ();
+   fprintf (dump_file, "      BB %u:\n", i);
+   fprintf (dump_file, "        def_loc: ");
+   dump_bitmap_file (dump_file, def_loc[i]);
+   fprintf (dump_file, "        kill: ");
+   dump_bitmap_file (dump_file, kill[i]);
+   fprintf (dump_file, "        vsetvl_def_in: ");
+   dump_bitmap_file (dump_file, vsetvl_def_in[i]);
+   fprintf (dump_file, "        vsetvl_def_out: ");
+   dump_bitmap_file (dump_file, vsetvl_def_out[i]);
+ }
+    }
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.empty_p ())
+ continue;
+      vsetvl_info &curr_info = block_info.get_header_info ();
+      if (!curr_info.valid_p ())
+ continue;
+
+      unsigned int expr_index;
+      sbitmap_iterator sbi;
+      bool full_available = true;
+      EXECUTE_IF_SET_IN_BITMAP (vsetvl_def_in[bb->index ()], 0, expr_index, sbi)
+ {
+   vsetvl_info &prev_info = *vsetvl_def_exprs[expr_index];
+   if (!prev_info.valid_p ()
+       || !dem.available_with (prev_info, curr_info))
+     {
+       full_available = false;
+       break;
+     }
+ }
+      block_info.full_available = full_available;
+    }
+
+  sbitmap_vector_free (def_loc);
+  sbitmap_vector_free (kill);
+}
+
+void
+pre_vsetvl::compute_vsetvl_lcm_data ()
+{
+  exprs.truncate (0);
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.empty_p ())
+ continue;
+      vsetvl_info &header_info = block_info.get_header_info ();
+      vsetvl_info &footer_info = block_info.get_footer_info ();
+      gcc_assert (footer_info.valid_p () || footer_info.unknown_p ());
+      add_expr (exprs, header_info);
+      add_expr (exprs, footer_info);
+    }
+
+  int num_exprs = exprs.length ();
+  if (avloc)
+    sbitmap_vector_free (avloc);
+  if (kill)
+    sbitmap_vector_free (kill);
+  if (antloc)
+    sbitmap_vector_free (antloc);
+  if (transp)
+    sbitmap_vector_free (transp);
+  if (avin)
+    sbitmap_vector_free (avin);
+  if (avout)
+    sbitmap_vector_free (avout);
+
+  avloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  antloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+
+  bitmap_vector_clear (avloc, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (antloc, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (transp, last_basic_block_for_fn (cfun));
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      unsigned bb_index = bb->index ();
+      vsetvl_block_info &block_info = get_block_info (bb);
+
+      /* Compute transp */
+      if (block_info.empty_p ())
+ {
+   bitmap_ones (transp[bb_index]);
+   for (int i = 0; i < num_exprs; i += 1)
+     {
+       const vsetvl_info &info = *exprs[i];
+       if (!info.has_reg_avl () && !info.has_reg_vl ())
+ continue;
+
+       unsigned int regno;
+       sbitmap_iterator sbi;
+       EXECUTE_IF_SET_IN_BITMAP (reg_def_loc[bb->index ()], 0, regno,
+ sbi)
+ {
+   if (regno == REGNO (info.get_avl ()))
+     bitmap_clear_bit (transp[bb->index ()], i);
+ }
+
+       for (const insn_info *insn : bb->real_nondebug_insns ())
+ {
+   if ((info.has_reg_avl ()
+        && find_access (insn->defs (), REGNO (info.get_avl ())))
+       || (info.has_reg_vl ()
+   && find_access (insn->uses (),
+   REGNO (info.get_vl ()))))
+     {
+       bitmap_clear_bit (transp[bb_index], i);
+       break;
+     }
+ }
+     }
+
+   continue;
+ }
+
+      vsetvl_info &header_info = block_info.get_header_info ();
+      vsetvl_info &footer_info = block_info.get_footer_info ();
+
+      if (header_info.valid_p ()
+   && (anticpatable_exp_p (header_info) || block_info.full_available))
+ bitmap_set_bit (antloc[bb_index], get_expr_index (exprs, header_info));
+
+      if (footer_info.valid_p ())
+ for (int i = 0; i < num_exprs; i += 1)
+   {
+     const vsetvl_info &info = *exprs[i];
+     if (!info.valid_p ())
+       continue;
+     if (available_exp_p (footer_info, info))
+       bitmap_set_bit (avloc[bb_index], i);
+   }
+    }
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      unsigned bb_index = bb->index ();
+      bitmap_ior (kill[bb_index], transp[bb_index], avloc[bb_index]);
+      bitmap_not (kill[bb_index], kill[bb_index]);
+    }
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      unsigned bb_index = bb->index ();
+      edge e;
+      edge_iterator ei;
+      FOR_EACH_EDGE (e, ei, bb->cfg_bb ()->preds)
+ if (e->flags & EDGE_COMPLEX)
+   {
+     bitmap_clear (antloc[bb_index]);
+     bitmap_clear (transp[bb_index]);
+   }
+    }
+}
+
void
pre_vsetvl::fuse_local_vsetvl_info ()
{
--
2.36.3
  
juzhe.zhong@rivai.ai Oct. 18, 2023, 4:13 a.m. UTC | #2
Copy and paste the original comments:

-/* Compute the local properties of each recorded expression.
-
-   Local properties are those that are defined by the block, irrespective of
-   other blocks.
-
-   An expression is transparent in a block if its operands are not modified
-   in the block.
-
-   An expression is computed (locally available) in a block if it is computed
-   at least once and expression would contain the same value if the
-   computation was moved to the end of the block.
-
-   An expression is locally anticipatable in a block if it is computed at
-   least once and expression would contain the same value if the computation
-   was moved to the beginning of the block.  */
-void
-pass_vsetvl::compute_local_properties (void)
-{
-  /* -  If T is locally available at the end of a block, then T' must be
-	available at the end of the same block. Since some optimization has
-	occurred earlier, T' might not be locally available, however, it must
-	have been previously computed on all paths. As a formula, T at AVLOC(B)
-	implies that T' at AVOUT(B).
-	An "available occurrence" is one that is the last occurrence in the
-	basic block and the operands are not modified by following statements in
-	the basic block [including this insn].
-
-     -  If T is locally anticipated at the beginning of a block, then either
-	T', is locally anticipated or it is already available from previous
-	blocks. As a formula, this means that T at ANTLOC(B) implies that T' at
-	ANTLOC(B) at AVIN(B).
-	An "anticipatable occurrence" is one that is the first occurrence in the
-	basic block, the operands are not modified in the basic block prior
-	to the occurrence and the output is not used between the start of
-	the block and the occurrence.  */



juzhe.zhong@rivai.ai
 
From: Lehua Ding
Date: 2023-10-17 19:34
To: gcc-patches
CC: juzhe.zhong; kito.cheng; rdapp.gcc; palmer; jeffreyalaw; lehua.ding
Subject: [PATCH V2 06/14] RISC-V: P6: Add computing reaching definition data flow
This sub-patch adds some helper functions for computing reaching definition
data and three computational functions for different objects. These three
functions are used by phases 2 and 3.
 
gcc/ChangeLog:
 
* config/riscv/riscv-vsetvl.cc (bitmap_union_of_preds_with_entry): New.
(compute_reaching_defintion): New.
(pre_vsetvl::compute_avl_def_data): New.
(pre_vsetvl::compute_vsetvl_def_data): New.
(pre_vsetvl::compute_vsetvl_lcm_data): New.
 
---
gcc/config/riscv/riscv-vsetvl.cc | 468 +++++++++++++++++++++++++++++++
1 file changed, 468 insertions(+)
 
diff --git a/gcc/config/riscv/riscv-vsetvl.cc b/gcc/config/riscv/riscv-vsetvl.cc
index 33bdcec04d8..b1269e8cf4f 100644
--- a/gcc/config/riscv/riscv-vsetvl.cc
+++ b/gcc/config/riscv/riscv-vsetvl.cc
@@ -103,6 +103,121 @@ along with GCC; see the file COPYING3.  If not see
using namespace rtl_ssa;
using namespace riscv_vector;
 
+/* Set the bitmap DST to the union of SRC of the predecessors of
+   basic block B.
+   It's a bit different from bitmap_union_of_preds in cfganal.cc. This function
+   takes into account the case where pred is the ENTRY basic block. The main
+   reason for this difference is to make it easier to insert some special value
+   into the ENTRY basic block, e.g. vsetvl_info with a status of UNKNOWN.  */
+static void
+bitmap_union_of_preds_with_entry (sbitmap dst, sbitmap *src, basic_block b)
+{
+  unsigned int set_size = dst->size;
+  edge e;
+  unsigned ix;
+
+  for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
+    {
+      e = EDGE_PRED (b, ix);
+      bitmap_copy (dst, src[e->src->index]);
+      break;
+    }
+
+  if (ix == EDGE_COUNT (b->preds))
+    bitmap_clear (dst);
+  else
+    for (ix++; ix < EDGE_COUNT (b->preds); ix++)
+      {
+ unsigned int i;
+ SBITMAP_ELT_TYPE *p, *r;
+
+ e = EDGE_PRED (b, ix);
+ p = src[e->src->index]->elms;
+ r = dst->elms;
+ for (i = 0; i < set_size; i++)
+   *r++ |= *p++;
+      }
+}
+
+/* Compute the reaching definition in and out based on the GEN and KILL
+   information in each basic block.
+   This function references the compute_available implementation in lcm.cc.  */
+static void
+compute_reaching_defintion (sbitmap *gen, sbitmap *kill, sbitmap *in,
+     sbitmap *out)
+{
+  edge e;
+  basic_block *worklist, *qin, *qout, *qend, bb;
+  unsigned int qlen;
+  edge_iterator ei;
+
+  /* Allocate a worklist array/queue.  Entries are only added to the
+     list if they were not already on the list.  So the size is
+     bounded by the number of basic blocks.  */
+  qin = qout = worklist
+    = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
+
+  /* Put every block on the worklist so that each OUT set is computed
+     at least once.  Use reverse postorder to make the forward dataflow
+     problem require fewer iterations.  */
+  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
+  int n = pre_and_rev_post_order_compute_fn (cfun, NULL, rpo, false);
+  for (int i = 0; i < n; ++i)
+    {
+      bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
+      *qin++ = bb;
+      bb->aux = bb;
+    }
+  free (rpo);
+
+  qin = worklist;
+  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
+  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
+
+  /* Mark blocks which are successors of the entry block so that we
+     can easily identify them below.  */
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
+    e->dest->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+
+  /* Iterate until the worklist is empty.  */
+  while (qlen)
+    {
+      /* Take the first entry off the worklist.  */
+      bb = *qout++;
+      qlen--;
+
+      if (qout >= qend)
+ qout = worklist;
+
+      /* Do not clear the aux field for blocks which are successors of the
+ ENTRY block.  That way we never add them to the worklist again.  */
+      if (bb->aux != ENTRY_BLOCK_PTR_FOR_FN (cfun))
+ bb->aux = NULL;
+
+      bitmap_union_of_preds_with_entry (in[bb->index], out, bb);
+
+      if (bitmap_ior_and_compl (out[bb->index], gen[bb->index], in[bb->index],
+ kill[bb->index]))
+ /* If the out state of this block changed, then we need
+    to add the successors of this block to the worklist
+    if they are not already on the worklist.  */
+ FOR_EACH_EDGE (e, ei, bb->succs)
+   if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
+     {
+       *qin++ = e->dest;
+       e->dest->aux = e;
+       qlen++;
+
+       if (qin >= qend)
+ qin = worklist;
+     }
+    }
+
+  clear_aux_for_edges ();
+  clear_aux_for_blocks ();
+  free (worklist);
+}
+
static CONSTEXPR const unsigned ALL_SEW[] = {8, 16, 32, 64};
static CONSTEXPR const vlmul_type ALL_LMUL[]
   = {LMUL_1, LMUL_2, LMUL_4, LMUL_8, LMUL_F8, LMUL_F4, LMUL_F2};
@@ -2669,6 +2784,359 @@ public:
   }
};
 
+void
+pre_vsetvl::compute_avl_def_data ()
+{
+  if (bitmap_empty_p (avl_regs))
+    return;
+
+  unsigned num_regs = GP_REG_LAST + 1;
+  unsigned num_bbs = last_basic_block_for_fn (cfun);
+
+  sbitmap *avl_def_loc_temp = sbitmap_vector_alloc (num_bbs, num_regs);
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      bitmap_and (avl_def_loc_temp[bb->index ()], avl_regs,
+   reg_def_loc[bb->index ()]);
+
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.has_info ())
+ {
+   vsetvl_info &footer_info = block_info.get_footer_info ();
+   gcc_assert (footer_info.valid_p ());
+   if (footer_info.has_reg_vl ())
+     bitmap_set_bit (avl_def_loc_temp[bb->index ()],
+     REGNO (footer_info.get_vl ()));
+ }
+    }
+
+  if (avl_def_in)
+    sbitmap_vector_free (avl_def_in);
+  if (avl_def_out)
+    sbitmap_vector_free (avl_def_out);
+
+  unsigned num_exprs = num_bbs * num_regs;
+  sbitmap *avl_def_loc = sbitmap_vector_alloc (num_bbs, num_exprs);
+  sbitmap *kill = sbitmap_vector_alloc (num_bbs, num_exprs);
+  avl_def_in = sbitmap_vector_alloc (num_bbs, num_exprs);
+  avl_def_out = sbitmap_vector_alloc (num_bbs, num_exprs);
+
+  bitmap_vector_clear (avl_def_loc, num_bbs);
+  bitmap_vector_clear (kill, num_bbs);
+  bitmap_vector_clear (avl_def_out, num_bbs);
+
+  unsigned regno;
+  sbitmap_iterator sbi;
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    EXECUTE_IF_SET_IN_BITMAP (avl_def_loc_temp[bb->index ()], 0, regno, sbi)
+      {
+ bitmap_set_bit (avl_def_loc[bb->index ()],
+ get_expr_id (bb->index (), regno, num_bbs));
+ bitmap_set_range (kill[bb->index ()], regno * num_bbs, num_bbs);
+      }
+
+  basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+  EXECUTE_IF_SET_IN_BITMAP (avl_regs, 0, regno, sbi)
+    bitmap_set_bit (avl_def_out[entry->index],
+     get_expr_id (entry->index, regno, num_bbs));
+
+  compute_reaching_defintion (avl_def_loc, kill, avl_def_in, avl_def_out);
+
+  if (dump_file && (dump_flags & TDF_DETAILS))
+    {
+      fprintf (dump_file,
+        "  Compute avl reaching defition data (num_bbs %d, num_regs "
+        "%d):\n\n",
+        num_bbs, num_regs);
+      fprintf (dump_file, "    avl_regs: ");
+      dump_bitmap_file (dump_file, avl_regs);
+      fprintf (dump_file, "\n    bitmap data:\n");
+      for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+   unsigned int i = bb->index ();
+   fprintf (dump_file, "      BB %u:\n", i);
+   fprintf (dump_file, "        avl_def_loc:");
+   unsigned expr_id;
+   sbitmap_iterator sbi;
+   EXECUTE_IF_SET_IN_BITMAP (avl_def_loc[i], 0, expr_id, sbi)
+     {
+       fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+        get_bb_index (expr_id, num_bbs));
+     }
+   fprintf (dump_file, "\n        kill:");
+   EXECUTE_IF_SET_IN_BITMAP (kill[i], 0, expr_id, sbi)
+     {
+       fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+        get_bb_index (expr_id, num_bbs));
+     }
+   fprintf (dump_file, "\n        avl_def_in:");
+   EXECUTE_IF_SET_IN_BITMAP (avl_def_in[i], 0, expr_id, sbi)
+     {
+       fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+        get_bb_index (expr_id, num_bbs));
+     }
+   fprintf (dump_file, "\n        avl_def_out:");
+   EXECUTE_IF_SET_IN_BITMAP (avl_def_out[i], 0, expr_id, sbi)
+     {
+       fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+        get_bb_index (expr_id, num_bbs));
+     }
+   fprintf (dump_file, "\n");
+ }
+    }
+
+  sbitmap_vector_free (avl_def_loc);
+  sbitmap_vector_free (kill);
+  sbitmap_vector_free (avl_def_loc_temp);
+
+  dem.set_avl_in_out_data (avl_def_in, avl_def_out);
+}
+
+void
+pre_vsetvl::compute_vsetvl_def_data ()
+{
+  vsetvl_def_exprs.truncate (0);
+  add_expr (vsetvl_def_exprs, unknow_info);
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.empty_p ())
+ continue;
+      vsetvl_info &footer_info = block_info.get_footer_info ();
+      gcc_assert (footer_info.valid_p () || footer_info.unknown_p ());
+      add_expr (vsetvl_def_exprs, footer_info);
+    }
+
+  if (vsetvl_def_in)
+    sbitmap_vector_free (vsetvl_def_in);
+  if (vsetvl_def_out)
+    sbitmap_vector_free (vsetvl_def_out);
+
+  sbitmap *def_loc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+    vsetvl_def_exprs.length ());
+  sbitmap *kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+ vsetvl_def_exprs.length ());
+
+  vsetvl_def_in = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+ vsetvl_def_exprs.length ());
+  vsetvl_def_out = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+ vsetvl_def_exprs.length ());
+
+  bitmap_vector_clear (def_loc, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (kill, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (vsetvl_def_out, last_basic_block_for_fn (cfun));
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.empty_p ())
+ {
+   for (unsigned i = 0; i < vsetvl_def_exprs.length (); i += 1)
+     {
+       const vsetvl_info &info = *vsetvl_def_exprs[i];
+       if (!info.has_reg_avl ())
+ continue;
+       unsigned int regno;
+       sbitmap_iterator sbi;
+       EXECUTE_IF_SET_IN_BITMAP (reg_def_loc[bb->index ()], 0, regno,
+ sbi)
+ if (regno == REGNO (info.get_avl ()))
+   bitmap_set_bit (kill[bb->index ()], i);
+     }
+   continue;
+ }
+
+      vsetvl_info &footer_info = block_info.get_footer_info ();
+      bitmap_ones (kill[bb->index ()]);
+      bitmap_set_bit (def_loc[bb->index ()],
+       get_expr_index (vsetvl_def_exprs, footer_info));
+    }
+
+  /* Set the def_out of the ENTRY basic block to unknow_info expr.  */
+  basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+  bitmap_set_bit (vsetvl_def_out[entry->index],
+   get_expr_index (vsetvl_def_exprs, unknow_info));
+
+  compute_reaching_defintion (def_loc, kill, vsetvl_def_in, vsetvl_def_out);
+
+  if (dump_file && (dump_flags & TDF_DETAILS))
+    {
+      fprintf (dump_file,
+        "\n  Compute vsetvl info reaching defition data:\n\n");
+      fprintf (dump_file, "    Expression List (%d):\n",
+        vsetvl_def_exprs.length ());
+      for (unsigned i = 0; i < vsetvl_def_exprs.length (); i++)
+ {
+   const auto &info = *vsetvl_def_exprs[i];
+   fprintf (dump_file, "      Expr[%u]: ", i);
+   info.dump (dump_file, "        ");
+ }
+      fprintf (dump_file, "\n    bitmap data:\n");
+      for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+   unsigned int i = bb->index ();
+   fprintf (dump_file, "      BB %u:\n", i);
+   fprintf (dump_file, "        def_loc: ");
+   dump_bitmap_file (dump_file, def_loc[i]);
+   fprintf (dump_file, "        kill: ");
+   dump_bitmap_file (dump_file, kill[i]);
+   fprintf (dump_file, "        vsetvl_def_in: ");
+   dump_bitmap_file (dump_file, vsetvl_def_in[i]);
+   fprintf (dump_file, "        vsetvl_def_out: ");
+   dump_bitmap_file (dump_file, vsetvl_def_out[i]);
+ }
+    }
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.empty_p ())
+ continue;
+      vsetvl_info &curr_info = block_info.get_header_info ();
+      if (!curr_info.valid_p ())
+ continue;
+
+      unsigned int expr_index;
+      sbitmap_iterator sbi;
+      bool full_available = true;
+      EXECUTE_IF_SET_IN_BITMAP (vsetvl_def_in[bb->index ()], 0, expr_index, sbi)
+ {
+   vsetvl_info &prev_info = *vsetvl_def_exprs[expr_index];
+   if (!prev_info.valid_p ()
+       || !dem.available_with (prev_info, curr_info))
+     {
+       full_available = false;
+       break;
+     }
+ }
+      block_info.full_available = full_available;
+    }
+
+  sbitmap_vector_free (def_loc);
+  sbitmap_vector_free (kill);
+}
+
+void
+pre_vsetvl::compute_vsetvl_lcm_data ()
+{
+  exprs.truncate (0);
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.empty_p ())
+ continue;
+      vsetvl_info &header_info = block_info.get_header_info ();
+      vsetvl_info &footer_info = block_info.get_footer_info ();
+      gcc_assert (footer_info.valid_p () || footer_info.unknown_p ());
+      add_expr (exprs, header_info);
+      add_expr (exprs, footer_info);
+    }
+
+  int num_exprs = exprs.length ();
+  if (avloc)
+    sbitmap_vector_free (avloc);
+  if (kill)
+    sbitmap_vector_free (kill);
+  if (antloc)
+    sbitmap_vector_free (antloc);
+  if (transp)
+    sbitmap_vector_free (transp);
+  if (avin)
+    sbitmap_vector_free (avin);
+  if (avout)
+    sbitmap_vector_free (avout);
+
+  avloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  antloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+
+  bitmap_vector_clear (avloc, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (antloc, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (transp, last_basic_block_for_fn (cfun));
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      unsigned bb_index = bb->index ();
+      vsetvl_block_info &block_info = get_block_info (bb);
+
+      /* Compute transp.  */
+      if (block_info.empty_p ())
+ {
+   bitmap_ones (transp[bb_index]);
+   for (int i = 0; i < num_exprs; i += 1)
+     {
+       const vsetvl_info &info = *exprs[i];
+       if (!info.has_reg_avl () && !info.has_reg_vl ())
+ continue;
+
+       unsigned int regno;
+       sbitmap_iterator sbi;
+       EXECUTE_IF_SET_IN_BITMAP (reg_def_loc[bb->index ()], 0, regno,
+ sbi)
+ {
+   if (regno == REGNO (info.get_avl ()))
+     bitmap_clear_bit (transp[bb->index ()], i);
+ }
+
+       for (const insn_info *insn : bb->real_nondebug_insns ())
+ {
+   if ((info.has_reg_avl ()
+        && find_access (insn->defs (), REGNO (info.get_avl ())))
+       || (info.has_reg_vl ()
+   && find_access (insn->uses (),
+   REGNO (info.get_vl ()))))
+     {
+       bitmap_clear_bit (transp[bb_index], i);
+       break;
+     }
+ }
+     }
+
+   continue;
+ }
+
+      vsetvl_info &header_info = block_info.get_header_info ();
+      vsetvl_info &footer_info = block_info.get_footer_info ();
+
+      if (header_info.valid_p ()
+   && (anticpatable_exp_p (header_info) || block_info.full_available))
+ bitmap_set_bit (antloc[bb_index], get_expr_index (exprs, header_info));
+
+      if (footer_info.valid_p ())
+ for (int i = 0; i < num_exprs; i += 1)
+   {
+     const vsetvl_info &info = *exprs[i];
+     if (!info.valid_p ())
+       continue;
+     if (available_exp_p (footer_info, info))
+       bitmap_set_bit (avloc[bb_index], i);
+   }
+    }
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      unsigned bb_index = bb->index ();
+      bitmap_ior (kill[bb_index], transp[bb_index], avloc[bb_index]);
+      bitmap_not (kill[bb_index], kill[bb_index]);
+    }
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      unsigned bb_index = bb->index ();
+      edge e;
+      edge_iterator ei;
+      FOR_EACH_EDGE (e, ei, bb->cfg_bb ()->preds)
+ if (e->flags & EDGE_COMPLEX)
+   {
+     bitmap_clear (antloc[bb_index]);
+     bitmap_clear (transp[bb_index]);
+   }
+    }
+}
+
void
pre_vsetvl::fuse_local_vsetvl_info ()
{
--
2.36.3
  

Patch

diff --git a/gcc/config/riscv/riscv-vsetvl.cc b/gcc/config/riscv/riscv-vsetvl.cc
index 33bdcec04d8..b1269e8cf4f 100644
--- a/gcc/config/riscv/riscv-vsetvl.cc
+++ b/gcc/config/riscv/riscv-vsetvl.cc
@@ -103,6 +103,121 @@  along with GCC; see the file COPYING3.  If not see
 using namespace rtl_ssa;
 using namespace riscv_vector;

+/* Set the bitmap DST to the union of SRC of predecessors of
+   basic block B.
+   It's a bit different from bitmap_union_of_preds in cfganal.cc.  This function
+   takes into account the case where a pred is the ENTRY basic block.  The main
+   reason for this difference is to make it easier to insert some special value
+   into the ENTRY basic block, e.g. a vsetvl_info with UNKNOWN status.  */
+static void
+bitmap_union_of_preds_with_entry (sbitmap dst, sbitmap *src, basic_block b)
+{
+  unsigned int set_size = dst->size;
+  edge e;
+  unsigned ix;
+
+  for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
+    {
+      e = EDGE_PRED (b, ix);
+      bitmap_copy (dst, src[e->src->index]);
+      break;
+    }
+
+  if (ix == EDGE_COUNT (b->preds))
+    bitmap_clear (dst);
+  else
+    for (ix++; ix < EDGE_COUNT (b->preds); ix++)
+      {
+	unsigned int i;
+	SBITMAP_ELT_TYPE *p, *r;
+
+	e = EDGE_PRED (b, ix);
+	p = src[e->src->index]->elms;
+	r = dst->elms;
+	for (i = 0; i < set_size; i++)
+	  *r++ |= *p++;
+      }
+}
+
+/* Compute the reaching definition in and out based on the GEN and KILL
+   information of each basic block.
+   This function references the compute_available implementation in lcm.cc.  */
+static void
+compute_reaching_defintion (sbitmap *gen, sbitmap *kill, sbitmap *in,
+			    sbitmap *out)
+{
+  edge e;
+  basic_block *worklist, *qin, *qout, *qend, bb;
+  unsigned int qlen;
+  edge_iterator ei;
+
+  /* Allocate a worklist array/queue.  Entries are only added to the
+     list if they were not already on the list.  So the size is
+     bounded by the number of basic blocks.  */
+  qin = qout = worklist
+    = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
+
+  /* Put every block on the worklist; this is necessary because of the
+     optimistic initialization of the out sets.  Use reverse postorder
+     to make the forward dataflow problem require fewer iterations.  */
+  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
+  int n = pre_and_rev_post_order_compute_fn (cfun, NULL, rpo, false);
+  for (int i = 0; i < n; ++i)
+    {
+      bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
+      *qin++ = bb;
+      bb->aux = bb;
+    }
+  free (rpo);
+
+  qin = worklist;
+  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
+  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
+
+  /* Mark blocks which are successors of the entry block so that we
+     can easily identify them below.  */
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
+    e->dest->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+
+  /* Iterate until the worklist is empty.  */
+  while (qlen)
+    {
+      /* Take the first entry off the worklist.  */
+      bb = *qout++;
+      qlen--;
+
+      if (qout >= qend)
+	qout = worklist;
+
+      /* Do not clear the aux field for blocks which are successors of the
+	 ENTRY block.  That way we never add them to the worklist again.  */
+      if (bb->aux != ENTRY_BLOCK_PTR_FOR_FN (cfun))
+	bb->aux = NULL;
+
+      bitmap_union_of_preds_with_entry (in[bb->index], out, bb);
+
+      if (bitmap_ior_and_compl (out[bb->index], gen[bb->index], in[bb->index],
+				kill[bb->index]))
+	/* If the out state of this block changed, then we need
+	   to add the successors of this block to the worklist
+	   if they are not already on the worklist.  */
+	FOR_EACH_EDGE (e, ei, bb->succs)
+	  if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
+	    {
+	      *qin++ = e->dest;
+	      e->dest->aux = e;
+	      qlen++;
+
+	      if (qin >= qend)
+		qin = worklist;
+	    }
+    }
+
+  clear_aux_for_edges ();
+  clear_aux_for_blocks ();
+  free (worklist);
+}
+
 static CONSTEXPR const unsigned ALL_SEW[] = {8, 16, 32, 64};
 static CONSTEXPR const vlmul_type ALL_LMUL[]
   = {LMUL_1, LMUL_2, LMUL_4, LMUL_8, LMUL_F8, LMUL_F4, LMUL_F2};
@@ -2669,6 +2784,359 @@  public:
   }
 };

+void
+pre_vsetvl::compute_avl_def_data ()
+{
+  if (bitmap_empty_p (avl_regs))
+    return;
+
+  unsigned num_regs = GP_REG_LAST + 1;
+  unsigned num_bbs = last_basic_block_for_fn (cfun);
+
+  sbitmap *avl_def_loc_temp = sbitmap_vector_alloc (num_bbs, num_regs);
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      bitmap_and (avl_def_loc_temp[bb->index ()], avl_regs,
+		  reg_def_loc[bb->index ()]);
+
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.has_info ())
+	{
+	  vsetvl_info &footer_info = block_info.get_footer_info ();
+	  gcc_assert (footer_info.valid_p ());
+	  if (footer_info.has_reg_vl ())
+	    bitmap_set_bit (avl_def_loc_temp[bb->index ()],
+			    REGNO (footer_info.get_vl ()));
+	}
+    }
+
+  if (avl_def_in)
+    sbitmap_vector_free (avl_def_in);
+  if (avl_def_out)
+    sbitmap_vector_free (avl_def_out);
+
+  unsigned num_exprs = num_bbs * num_regs;
+  sbitmap *avl_def_loc = sbitmap_vector_alloc (num_bbs, num_exprs);
+  sbitmap *kill = sbitmap_vector_alloc (num_bbs, num_exprs);
+  avl_def_in = sbitmap_vector_alloc (num_bbs, num_exprs);
+  avl_def_out = sbitmap_vector_alloc (num_bbs, num_exprs);
+
+  bitmap_vector_clear (avl_def_loc, num_bbs);
+  bitmap_vector_clear (kill, num_bbs);
+  bitmap_vector_clear (avl_def_out, num_bbs);
+
+  unsigned regno;
+  sbitmap_iterator sbi;
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    EXECUTE_IF_SET_IN_BITMAP (avl_def_loc_temp[bb->index ()], 0, regno, sbi)
+      {
+	bitmap_set_bit (avl_def_loc[bb->index ()],
+			get_expr_id (bb->index (), regno, num_bbs));
+	bitmap_set_range (kill[bb->index ()], regno * num_bbs, num_bbs);
+      }
+
+  basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+  EXECUTE_IF_SET_IN_BITMAP (avl_regs, 0, regno, sbi)
+    bitmap_set_bit (avl_def_out[entry->index],
+		    get_expr_id (entry->index, regno, num_bbs));
+
+  compute_reaching_defintion (avl_def_loc, kill, avl_def_in, avl_def_out);
+
+  if (dump_file && (dump_flags & TDF_DETAILS))
+    {
+      fprintf (dump_file,
+	       "  Compute avl reaching defition data (num_bbs %d, num_regs "
+	       "%d):\n\n",
+	       num_bbs, num_regs);
+      fprintf (dump_file, "    avl_regs: ");
+      dump_bitmap_file (dump_file, avl_regs);
+      fprintf (dump_file, "\n    bitmap data:\n");
+      for (const bb_info *bb : crtl->ssa->bbs ())
+	{
+	  unsigned int i = bb->index ();
+	  fprintf (dump_file, "      BB %u:\n", i);
+	  fprintf (dump_file, "        avl_def_loc:");
+	  unsigned expr_id;
+	  sbitmap_iterator sbi;
+	  EXECUTE_IF_SET_IN_BITMAP (avl_def_loc[i], 0, expr_id, sbi)
+	    {
+	      fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+		       get_bb_index (expr_id, num_bbs));
+	    }
+	  fprintf (dump_file, "\n        kill:");
+	  EXECUTE_IF_SET_IN_BITMAP (kill[i], 0, expr_id, sbi)
+	    {
+	      fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+		       get_bb_index (expr_id, num_bbs));
+	    }
+	  fprintf (dump_file, "\n        avl_def_in:");
+	  EXECUTE_IF_SET_IN_BITMAP (avl_def_in[i], 0, expr_id, sbi)
+	    {
+	      fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+		       get_bb_index (expr_id, num_bbs));
+	    }
+	  fprintf (dump_file, "\n        avl_def_out:");
+	  EXECUTE_IF_SET_IN_BITMAP (avl_def_out[i], 0, expr_id, sbi)
+	    {
+	      fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+		       get_bb_index (expr_id, num_bbs));
+	    }
+	  fprintf (dump_file, "\n");
+	}
+    }
+
+  sbitmap_vector_free (avl_def_loc);
+  sbitmap_vector_free (kill);
+  sbitmap_vector_free (avl_def_loc_temp);
+
+  dem.set_avl_in_out_data (avl_def_in, avl_def_out);
+}
+
+void
+pre_vsetvl::compute_vsetvl_def_data ()
+{
+  vsetvl_def_exprs.truncate (0);
+  add_expr (vsetvl_def_exprs, unknow_info);
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.empty_p ())
+	continue;
+      vsetvl_info &footer_info = block_info.get_footer_info ();
+      gcc_assert (footer_info.valid_p () || footer_info.unknown_p ());
+      add_expr (vsetvl_def_exprs, footer_info);
+    }
+
+  if (vsetvl_def_in)
+    sbitmap_vector_free (vsetvl_def_in);
+  if (vsetvl_def_out)
+    sbitmap_vector_free (vsetvl_def_out);
+
+  sbitmap *def_loc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+					   vsetvl_def_exprs.length ());
+  sbitmap *kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+					vsetvl_def_exprs.length ());
+
+  vsetvl_def_in = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+					vsetvl_def_exprs.length ());
+  vsetvl_def_out = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+					 vsetvl_def_exprs.length ());
+
+  bitmap_vector_clear (def_loc, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (kill, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (vsetvl_def_out, last_basic_block_for_fn (cfun));
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.empty_p ())
+	{
+	  for (unsigned i = 0; i < vsetvl_def_exprs.length (); i += 1)
+	    {
+	      const vsetvl_info &info = *vsetvl_def_exprs[i];
+	      if (!info.has_reg_avl ())
+		continue;
+	      unsigned int regno;
+	      sbitmap_iterator sbi;
+	      EXECUTE_IF_SET_IN_BITMAP (reg_def_loc[bb->index ()], 0, regno,
+					sbi)
+		if (regno == REGNO (info.get_avl ()))
+		  bitmap_set_bit (kill[bb->index ()], i);
+	    }
+	  continue;
+	}
+
+      vsetvl_info &footer_info = block_info.get_footer_info ();
+      bitmap_ones (kill[bb->index ()]);
+      bitmap_set_bit (def_loc[bb->index ()],
+		      get_expr_index (vsetvl_def_exprs, footer_info));
+    }
+
+  /* Set the def_out of the ENTRY basic block to unknow_info expr.  */
+  basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+  bitmap_set_bit (vsetvl_def_out[entry->index],
+		  get_expr_index (vsetvl_def_exprs, unknow_info));
+
+  compute_reaching_defintion (def_loc, kill, vsetvl_def_in, vsetvl_def_out);
+
+  if (dump_file && (dump_flags & TDF_DETAILS))
+    {
+      fprintf (dump_file,
+	       "\n  Compute vsetvl info reaching defition data:\n\n");
+      fprintf (dump_file, "    Expression List (%d):\n",
+	       vsetvl_def_exprs.length ());
+      for (unsigned i = 0; i < vsetvl_def_exprs.length (); i++)
+	{
+	  const auto &info = *vsetvl_def_exprs[i];
+	  fprintf (dump_file, "      Expr[%u]: ", i);
+	  info.dump (dump_file, "        ");
+	}
+      fprintf (dump_file, "\n    bitmap data:\n");
+      for (const bb_info *bb : crtl->ssa->bbs ())
+	{
+	  unsigned int i = bb->index ();
+	  fprintf (dump_file, "      BB %u:\n", i);
+	  fprintf (dump_file, "        def_loc: ");
+	  dump_bitmap_file (dump_file, def_loc[i]);
+	  fprintf (dump_file, "        kill: ");
+	  dump_bitmap_file (dump_file, kill[i]);
+	  fprintf (dump_file, "        vsetvl_def_in: ");
+	  dump_bitmap_file (dump_file, vsetvl_def_in[i]);
+	  fprintf (dump_file, "        vsetvl_def_out: ");
+	  dump_bitmap_file (dump_file, vsetvl_def_out[i]);
+	}
+    }
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.empty_p ())
+	continue;
+      vsetvl_info &curr_info = block_info.get_header_info ();
+      if (!curr_info.valid_p ())
+	continue;
+
+      unsigned int expr_index;
+      sbitmap_iterator sbi;
+      bool full_available = true;
+      EXECUTE_IF_SET_IN_BITMAP (vsetvl_def_in[bb->index ()], 0, expr_index, sbi)
+	{
+	  vsetvl_info &prev_info = *vsetvl_def_exprs[expr_index];
+	  if (!prev_info.valid_p ()
+	      || !dem.available_with (prev_info, curr_info))
+	    {
+	      full_available = false;
+	      break;
+	    }
+	}
+      block_info.full_available = full_available;
+    }
+
+  sbitmap_vector_free (def_loc);
+  sbitmap_vector_free (kill);
+}
+
+void
+pre_vsetvl::compute_vsetvl_lcm_data ()
+{
+  exprs.truncate (0);
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      vsetvl_block_info &block_info = get_block_info (bb);
+      if (block_info.empty_p ())
+	continue;
+      vsetvl_info &header_info = block_info.get_header_info ();
+      vsetvl_info &footer_info = block_info.get_footer_info ();
+      gcc_assert (footer_info.valid_p () || footer_info.unknown_p ());
+      add_expr (exprs, header_info);
+      add_expr (exprs, footer_info);
+    }
+
+  int num_exprs = exprs.length ();
+  if (avloc)
+    sbitmap_vector_free (avloc);
+  if (kill)
+    sbitmap_vector_free (kill);
+  if (antloc)
+    sbitmap_vector_free (antloc);
+  if (transp)
+    sbitmap_vector_free (transp);
+  if (avin)
+    sbitmap_vector_free (avin);
+  if (avout)
+    sbitmap_vector_free (avout);
+
+  avloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  antloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+  avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+
+  bitmap_vector_clear (avloc, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (antloc, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (transp, last_basic_block_for_fn (cfun));
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      unsigned bb_index = bb->index ();
+      vsetvl_block_info &block_info = get_block_info (bb);
+
+      /* Compute transp.  */
+      if (block_info.empty_p ())
+	{
+	  bitmap_ones (transp[bb_index]);
+	  for (int i = 0; i < num_exprs; i += 1)
+	    {
+	      const vsetvl_info &info = *exprs[i];
+	      if (!info.has_reg_avl () && !info.has_reg_vl ())
+		continue;
+
+	      unsigned int regno;
+	      sbitmap_iterator sbi;
+	      EXECUTE_IF_SET_IN_BITMAP (reg_def_loc[bb->index ()], 0, regno,
+					sbi)
+		{
+		  if (regno == REGNO (info.get_avl ()))
+		    bitmap_clear_bit (transp[bb->index ()], i);
+		}
+
+	      for (const insn_info *insn : bb->real_nondebug_insns ())
+		{
+		  if ((info.has_reg_avl ()
+		       && find_access (insn->defs (), REGNO (info.get_avl ())))
+		      || (info.has_reg_vl ()
+			  && find_access (insn->uses (),
+					  REGNO (info.get_vl ()))))
+		    {
+		      bitmap_clear_bit (transp[bb_index], i);
+		      break;
+		    }
+		}
+	    }
+
+	  continue;
+	}
+
+      vsetvl_info &header_info = block_info.get_header_info ();
+      vsetvl_info &footer_info = block_info.get_footer_info ();
+
+      if (header_info.valid_p ()
+	  && (anticpatable_exp_p (header_info) || block_info.full_available))
+	bitmap_set_bit (antloc[bb_index], get_expr_index (exprs, header_info));
+
+      if (footer_info.valid_p ())
+	for (int i = 0; i < num_exprs; i += 1)
+	  {
+	    const vsetvl_info &info = *exprs[i];
+	    if (!info.valid_p ())
+	      continue;
+	    if (available_exp_p (footer_info, info))
+	      bitmap_set_bit (avloc[bb_index], i);
+	  }
+    }
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      unsigned bb_index = bb->index ();
+      bitmap_ior (kill[bb_index], transp[bb_index], avloc[bb_index]);
+      bitmap_not (kill[bb_index], kill[bb_index]);
+    }
+
+  for (const bb_info *bb : crtl->ssa->bbs ())
+    {
+      unsigned bb_index = bb->index ();
+      edge e;
+      edge_iterator ei;
+      FOR_EACH_EDGE (e, ei, bb->cfg_bb ()->preds)
+	if (e->flags & EDGE_COMPLEX)
+	  {
+	    bitmap_clear (antloc[bb_index]);
+	    bitmap_clear (transp[bb_index]);
+	  }
+    }
+}
+
 void
 pre_vsetvl::fuse_local_vsetvl_info ()
 {