This patch set looks good to me (LGTM), thanks!

On Thu, Dec 18, 2025 at 3:02 AM Robin Dapp <[email protected]> wrote:
>
> This patch changes the gather/scatter mode iterators from a ratio
> scheme to a more direct one where the index mode size is
> 1/2, 1/4, 1/8, 2, 4, 8 times the data mode size.  It also adds VLS modes
> to the iterators and removes the now unnecessary
> gather_scatter_valid_offset_p.
>
> gcc/ChangeLog:
>
>         * config/riscv/autovec.md 
> (mask_len_gather_load<RATIO64:mode><RATIO64I:mode>): Change from this...
>         (mask_len_gather_load<mode><vindex>): ...to this.
>         (mask_len_gather_load<RATIO32:mode><RATIO32I:mode>): Ditto.
>         (mask_len_gather_load<mode><vindex_double_trunc>): Ditto.
>         (mask_len_gather_load<RATIO16:mode><RATIO16I:mode>): Ditto.
>         (mask_len_gather_load<mode><vindex_quad_trunc>): Ditto.
>         (mask_len_gather_load<RATIO8:mode><RATIO8I:mode>): Ditto.
>         (mask_len_gather_load<mode><vindex_oct_trunc>): Ditto.
>         (mask_len_gather_load<RATIO4:mode><RATIO4I:mode>): Ditto.
>         (mask_len_gather_load<mode><vindex_double_ext>): Ditto.
>         (mask_len_gather_load<RATIO2:mode><RATIO2I:mode>): Ditto.
>         (mask_len_gather_load<mode><vindex_quad_ext>): Ditto.
>         (mask_len_gather_load<mode><mode>): Ditto.
>         (mask_len_gather_load<mode><vindex_oct_ext>): Ditto.
>         (mask_len_scatter_store<RATIO64:mode><RATIO64I:mode>): Ditto.
>         (mask_len_scatter_store<mode><vindex>): Ditto.
>         (mask_len_scatter_store<RATIO32:mode><RATIO32I:mode>): Ditto.
>         (mask_len_scatter_store<mode><vindex_double_trunc>): Ditto.
>         (mask_len_scatter_store<RATIO16:mode><RATIO16I:mode>): Ditto.
>         (mask_len_scatter_store<mode><vindex_quad_trunc>): Ditto.
>         (mask_len_scatter_store<RATIO8:mode><RATIO8I:mode>): Ditto.
>         (mask_len_scatter_store<mode><vindex_oct_trunc>): Ditto.
>         (mask_len_scatter_store<RATIO4:mode><RATIO4I:mode>): Ditto.
>         (mask_len_scatter_store<mode><vindex_double_ext>): Ditto.
>         (mask_len_scatter_store<RATIO2:mode><RATIO2I:mode>): Ditto.
>         (mask_len_scatter_store<mode><vindex_quad_ext>): Ditto.
>         (mask_len_scatter_store<mode><mode>): Ditto.
>         (mask_len_scatter_store<mode><vindex_oct_ext>): Ditto.
>         * config/riscv/riscv-v.cc (prepare_gather_scatter): Use new
>         scheme.
>         (get_gather_scatter_code): Ditto.
>         (expand_gather_scatter): Ditto.
>         * config/riscv/riscv-vector-builtins-bases.cc: Ditto.
>         * config/riscv/vector-iterators.md: Ditto.
>         * config/riscv/vector.md 
> (@pred_indexed_<order>store<RATIO64:mode><RATIO64I:mode>): Change from this...
>         (@pred_indexed_<order>store<mode>_same_eew): ...to this.
>         (@pred_indexed_<order>store<RATIO32:mode><RATIO32I:mode>):
>         Ditto.
>         (@pred_indexed_<order>store<mode>_x2_greater_eew): Ditto.
>         (@pred_indexed_<order>store<RATIO16:mode><RATIO16I:mode>):
>         Ditto.
>         (@pred_indexed_<order>store<mode>_x4_greater_eew): Ditto.
>         (@pred_indexed_<order>store<RATIO8:mode><RATIO8I:mode>): Ditto.
>         (@pred_indexed_<order>store<mode>_x8_greater_eew): Ditto.
>         (@pred_indexed_<order>store<RATIO4:mode><RATIO4I:mode>): Ditto.
>         (@pred_indexed_<order>store<mode>_x2_smaller_eew): Ditto.
>         (@pred_indexed_<order>store<RATIO2:mode><RATIO2I:mode>): Ditto.
>         (@pred_indexed_<order>store<mode>_x4_smaller_eew): Ditto.
>         (@pred_indexed_<order>store<RATIO1:mode><RATIO1:mode>): Ditto.
>         (@pred_indexed_<order>store<mode>_x8_smaller_eew): Ditto.
> ---
>  gcc/config/riscv/autovec.md                   |  147 +-
>  gcc/config/riscv/riscv-v.cc                   |  114 +-
>  .../riscv/riscv-vector-builtins-bases.cc      |   54 +-
>  gcc/config/riscv/vector-iterators.md          | 1417 ++++++++++++++++-
>  gcc/config/riscv/vector.md                    |   72 +-
>  5 files changed, 1565 insertions(+), 239 deletions(-)
>
> diff --git a/gcc/config/riscv/autovec.md b/gcc/config/riscv/autovec.md
> index 1f3ff16ed67..8ff3f55ffc4 100644
> --- a/gcc/config/riscv/autovec.md
> +++ b/gcc/config/riscv/autovec.md
> @@ -51,110 +51,113 @@ (define_expand "mask_len_store<mode><vm>"
>  ;; == Gather Load
>  ;; =========================================================================
>
> -(define_expand "mask_len_gather_load<RATIO64:mode><RATIO64I:mode>"
> -  [(match_operand:RATIO64 0 "register_operand")
> +;; Same element size for index, extension operand is irrelevant.
> +(define_expand "mask_len_gather_load<mode><vindex>"
> +  [(match_operand:VINDEXED 0 "register_operand")
>     (match_operand 1 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO64I 2 "register_operand")
> +   (match_operand:<VINDEX> 2 "register_operand")
>     (match_operand 3 "const_1_operand")
>     (match_operand 4 "const_1_operand")
> -   (match_operand:<RATIO64:VM> 5 "vector_mask_operand")
> +   (match_operand:<VM> 5 "vector_mask_operand")
>     (match_operand 6 "maskload_else_operand")
>     (match_operand 7 "autovec_length_operand")
>     (match_operand 8 "const_0_operand")]
> -  "TARGET_VECTOR && riscv_vector::gather_scatter_valid_offset_p 
> (<RATIO64I:MODE>mode)"
> +  "TARGET_VECTOR"
>  {
>    riscv_vector::expand_gather_scatter (operands, true);
>    DONE;
>  })
>
> -(define_expand "mask_len_gather_load<RATIO32:mode><RATIO32I:mode>"
> -  [(match_operand:RATIO32 0 "register_operand")
> +;; e.g. DImode, index SImode
> +(define_expand "mask_len_gather_load<mode><vindex_double_trunc>"
> +  [(match_operand:VEEWEXT2 0 "register_operand")
>     (match_operand 1 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO32I 2 "register_operand")
> +   (match_operand:<VINDEX_DOUBLE_TRUNC> 2 "register_operand")
>     (match_operand 3 "const_1_operand")
>     (match_operand 4 "const_1_operand")
> -   (match_operand:<RATIO32:VM> 5 "vector_mask_operand")
> +   (match_operand:<VM> 5 "vector_mask_operand")
>     (match_operand 6 "maskload_else_operand")
>     (match_operand 7 "autovec_length_operand")
>     (match_operand 8 "const_0_operand")]
> -  "TARGET_VECTOR && riscv_vector::gather_scatter_valid_offset_p 
> (<RATIO32I:MODE>mode)"
> +  "TARGET_VECTOR"
>  {
>    riscv_vector::expand_gather_scatter (operands, true);
>    DONE;
>  })
>
> -(define_expand "mask_len_gather_load<RATIO16:mode><RATIO16I:mode>"
> -  [(match_operand:RATIO16 0 "register_operand")
> +;; e.g. DImode, index HImode
> +(define_expand "mask_len_gather_load<mode><vindex_quad_trunc>"
> +  [(match_operand:VEEWEXT4 0 "register_operand")
>     (match_operand 1 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO16I 2 "register_operand")
> +   (match_operand:<VINDEX_QUAD_TRUNC> 2 "register_operand")
>     (match_operand 3 "const_1_operand")
>     (match_operand 4 "const_1_operand")
> -   (match_operand:<RATIO16:VM> 5 "vector_mask_operand")
> +   (match_operand:<VM> 5 "vector_mask_operand")
>     (match_operand 6 "maskload_else_operand")
>     (match_operand 7 "autovec_length_operand")
>     (match_operand 8 "const_0_operand")]
> -  "TARGET_VECTOR && riscv_vector::gather_scatter_valid_offset_p 
> (<RATIO16I:MODE>mode)"
> +  "TARGET_VECTOR"
>  {
>    riscv_vector::expand_gather_scatter (operands, true);
>    DONE;
>  })
>
> -(define_expand "mask_len_gather_load<RATIO8:mode><RATIO8I:mode>"
> -  [(match_operand:RATIO8 0 "register_operand")
> +;; e.g. DImode, index QImode
> +(define_expand "mask_len_gather_load<mode><vindex_oct_trunc>"
> +  [(match_operand:VEEWEXT8 0 "register_operand")
>     (match_operand 1 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO8I 2 "register_operand")
> +   (match_operand:<VINDEX_OCT_TRUNC> 2 "register_operand")
>     (match_operand 3 "const_1_operand")
>     (match_operand 4 "const_1_operand")
> -   (match_operand:<RATIO8:VM> 5 "vector_mask_operand")
> +   (match_operand:<VM> 5 "vector_mask_operand")
>     (match_operand 6 "maskload_else_operand")
>     (match_operand 7 "autovec_length_operand")
>     (match_operand 8 "const_0_operand")]
> -  "TARGET_VECTOR && riscv_vector::gather_scatter_valid_offset_p 
> (<RATIO8I:MODE>mode)"
> +  "TARGET_VECTOR"
>  {
>    riscv_vector::expand_gather_scatter (operands, true);
>    DONE;
>  })
>
> -(define_expand "mask_len_gather_load<RATIO4:mode><RATIO4I:mode>"
> -  [(match_operand:RATIO4 0 "register_operand")
> +;; e.g. SImode, index DImode
> +(define_expand "mask_len_gather_load<mode><vindex_double_ext>"
> +  [(match_operand:VEEWTRUNC2 0 "register_operand")
>     (match_operand 1 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO4I 2 "register_operand")
> +   (match_operand:<VINDEX_DOUBLE_EXT> 2 "register_operand")
>     (match_operand 3 "const_1_operand")
>     (match_operand 4 "const_1_operand")
> -   (match_operand:<RATIO4:VM> 5 "vector_mask_operand")
> +   (match_operand:<VM> 5 "vector_mask_operand")
>     (match_operand 6 "maskload_else_operand")
>     (match_operand 7 "autovec_length_operand")
>     (match_operand 8 "const_0_operand")]
> -  "TARGET_VECTOR && riscv_vector::gather_scatter_valid_offset_p 
> (<RATIO4I:MODE>mode)"
> +  "TARGET_VECTOR"
>  {
>    riscv_vector::expand_gather_scatter (operands, true);
>    DONE;
>  })
>
> -(define_expand "mask_len_gather_load<RATIO2:mode><RATIO2I:mode>"
> -  [(match_operand:RATIO2 0 "register_operand")
> +;; e.g. HImode, index DImode
> +(define_expand "mask_len_gather_load<mode><vindex_quad_ext>"
> +  [(match_operand:VEEWTRUNC4 0 "register_operand")
>     (match_operand 1 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO2I 2 "register_operand")
> +   (match_operand:<VINDEX_QUAD_EXT> 2 "register_operand")
>     (match_operand 3 "const_1_operand")
>     (match_operand 4 "const_1_operand")
> -   (match_operand:<RATIO2:VM> 5 "vector_mask_operand")
> +   (match_operand:<VM> 5 "vector_mask_operand")
>     (match_operand 6 "maskload_else_operand")
>     (match_operand 7 "autovec_length_operand")
>     (match_operand 8 "const_0_operand")]
> -  "TARGET_VECTOR && riscv_vector::gather_scatter_valid_offset_p 
> (<RATIO2I:MODE>mode)"
> +  "TARGET_VECTOR"
>  {
>    riscv_vector::expand_gather_scatter (operands, true);
>    DONE;
>  })
>
> -;; When SEW = 8 and LMUL = 8, we can't find any index mode with
> -;; larger SEW. Since RVV indexed load/store support zero extend
> -;; implicitly and not support scaling, we should only allow
> -;; operands[3] and operands[4] to be const_1_operand.
> -(define_expand "mask_len_gather_load<mode><mode>"
> -  [(match_operand:RATIO1 0 "register_operand")
> +;; e.g. QImode, index DImode
> +(define_expand "mask_len_gather_load<mode><vindex_oct_ext>"
> +  [(match_operand:VEEWTRUNC8 0 "register_operand")
>     (match_operand 1 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO1 2 "register_operand")
> +   (match_operand:<VINDEX_OCT_EXT> 2 "register_operand")
>     (match_operand 3 "const_1_operand")
>     (match_operand 4 "const_1_operand")
>     (match_operand:<VM> 5 "vector_mask_operand")
> @@ -171,106 +174,102 @@ (define_expand "mask_len_gather_load<mode><mode>"
>  ;; == Scatter Store
>  ;; =========================================================================
>
> -(define_expand "mask_len_scatter_store<RATIO64:mode><RATIO64I:mode>"
> +(define_expand "mask_len_scatter_store<mode><vindex>"
>    [(match_operand 0 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO64I 1 "register_operand")
> +   (match_operand:<VINDEX> 1 "register_operand")
>     (match_operand 2 "const_1_operand")
>     (match_operand 3 "const_1_operand")
> -   (match_operand:RATIO64 4 "register_operand")
> -   (match_operand:<RATIO64:VM> 5 "vector_mask_operand")
> +   (match_operand:VINDEXED 4 "register_operand")
> +   (match_operand:<VM> 5 "vector_mask_operand")
>     (match_operand 6 "autovec_length_operand")
>     (match_operand 7 "const_0_operand")]
> -  "TARGET_VECTOR && riscv_vector::gather_scatter_valid_offset_p 
> (<RATIO64I:MODE>mode)"
> +  "TARGET_VECTOR"
>  {
>    riscv_vector::expand_gather_scatter (operands, false);
>    DONE;
>  })
>
> -(define_expand "mask_len_scatter_store<RATIO32:mode><RATIO32I:mode>"
> +(define_expand "mask_len_scatter_store<mode><vindex_double_trunc>"
>    [(match_operand 0 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO32I 1 "register_operand")
> +   (match_operand:<VINDEX_DOUBLE_TRUNC> 1 "register_operand")
>     (match_operand 2 "const_1_operand")
>     (match_operand 3 "const_1_operand")
> -   (match_operand:RATIO32 4 "register_operand")
> -   (match_operand:<RATIO32:VM> 5 "vector_mask_operand")
> +   (match_operand:VEEWEXT2 4 "register_operand")
> +   (match_operand:<VM> 5 "vector_mask_operand")
>     (match_operand 6 "autovec_length_operand")
>     (match_operand 7 "const_0_operand")]
> -  "TARGET_VECTOR && riscv_vector::gather_scatter_valid_offset_p 
> (<RATIO32I:MODE>mode)"
> +  "TARGET_VECTOR"
>  {
>    riscv_vector::expand_gather_scatter (operands, false);
>    DONE;
>  })
>
> -(define_expand "mask_len_scatter_store<RATIO16:mode><RATIO16I:mode>"
> +(define_expand "mask_len_scatter_store<mode><vindex_quad_trunc>"
>    [(match_operand 0 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO16I 1 "register_operand")
> +   (match_operand:<VINDEX_QUAD_TRUNC> 1 "register_operand")
>     (match_operand 2 "const_1_operand")
>     (match_operand 3 "const_1_operand")
> -   (match_operand:RATIO16 4 "register_operand")
> -   (match_operand:<RATIO16:VM> 5 "vector_mask_operand")
> +   (match_operand:VEEWEXT4 4 "register_operand")
> +   (match_operand:<VM> 5 "vector_mask_operand")
>     (match_operand 6 "autovec_length_operand")
>     (match_operand 7 "const_0_operand")]
> -  "TARGET_VECTOR && riscv_vector::gather_scatter_valid_offset_p 
> (<RATIO16I:MODE>mode)"
> +  "TARGET_VECTOR"
>  {
>    riscv_vector::expand_gather_scatter (operands, false);
>    DONE;
>  })
>
> -(define_expand "mask_len_scatter_store<RATIO8:mode><RATIO8I:mode>"
> +(define_expand "mask_len_scatter_store<mode><vindex_oct_trunc>"
>    [(match_operand 0 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO8I 1 "register_operand")
> +   (match_operand:<VINDEX_OCT_TRUNC> 1 "register_operand")
>     (match_operand 2 "const_1_operand")
>     (match_operand 3 "const_1_operand")
> -   (match_operand:RATIO8 4 "register_operand")
> -   (match_operand:<RATIO8:VM> 5 "vector_mask_operand")
> +   (match_operand:VEEWEXT8 4 "register_operand")
> +   (match_operand:<VM> 5 "vector_mask_operand")
>     (match_operand 6 "autovec_length_operand")
>     (match_operand 7 "const_0_operand")]
> -  "TARGET_VECTOR && riscv_vector::gather_scatter_valid_offset_p 
> (<RATIO8I:MODE>mode)"
> +  "TARGET_VECTOR"
>  {
>    riscv_vector::expand_gather_scatter (operands, false);
>    DONE;
>  })
>
> -(define_expand "mask_len_scatter_store<RATIO4:mode><RATIO4I:mode>"
> +(define_expand "mask_len_scatter_store<mode><vindex_double_ext>"
>    [(match_operand 0 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO4I 1 "register_operand")
> +   (match_operand:<VINDEX_DOUBLE_EXT> 1 "register_operand")
>     (match_operand 2 "const_1_operand")
>     (match_operand 3 "const_1_operand")
> -   (match_operand:RATIO4 4 "register_operand")
> -   (match_operand:<RATIO4:VM> 5 "vector_mask_operand")
> +   (match_operand:VEEWTRUNC2 4 "register_operand")
> +   (match_operand:<VM> 5 "vector_mask_operand")
>     (match_operand 6 "autovec_length_operand")
>     (match_operand 7 "const_0_operand")]
> -  "TARGET_VECTOR && riscv_vector::gather_scatter_valid_offset_p 
> (<RATIO4I:MODE>mode)"
> +  "TARGET_VECTOR"
>  {
>    riscv_vector::expand_gather_scatter (operands, false);
>    DONE;
>  })
>
> -(define_expand "mask_len_scatter_store<RATIO2:mode><RATIO2I:mode>"
> +(define_expand "mask_len_scatter_store<mode><vindex_quad_ext>"
>    [(match_operand 0 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO2I 1 "register_operand")
> +   (match_operand:<VINDEX_QUAD_EXT> 1 "register_operand")
>     (match_operand 2 "const_1_operand")
>     (match_operand 3 "const_1_operand")
> -   (match_operand:RATIO2 4 "register_operand")
> -   (match_operand:<RATIO2:VM> 5 "vector_mask_operand")
> +   (match_operand:VEEWTRUNC4 4 "register_operand")
> +   (match_operand:<VM> 5 "vector_mask_operand")
>     (match_operand 6 "autovec_length_operand")
>     (match_operand 7 "const_0_operand")]
> -  "TARGET_VECTOR && riscv_vector::gather_scatter_valid_offset_p 
> (<RATIO2I:MODE>mode)"
> +  "TARGET_VECTOR"
>  {
>    riscv_vector::expand_gather_scatter (operands, false);
>    DONE;
>  })
>
> -;; When SEW = 8 and LMUL = 8, we can't find any index mode with
> -;; larger SEW. Since RVV indexed load/store support zero extend
> -;; implicitly and not support scaling, we should only allow
> -;; operands[3] and operands[4] to be const_1_operand.
> -(define_expand "mask_len_scatter_store<mode><mode>"
> +(define_expand "mask_len_scatter_store<mode><vindex_oct_ext>"
>    [(match_operand 0 "pmode_reg_or_0_operand")
> -   (match_operand:RATIO1 1 "register_operand")
> +   (match_operand:<VINDEX_OCT_EXT> 1 "register_operand")
>     (match_operand 2 "const_1_operand")
>     (match_operand 3 "const_1_operand")
> -   (match_operand:RATIO1 4 "register_operand")
> +   (match_operand:VEEWTRUNC8 4 "register_operand")
>     (match_operand:<VM> 5 "vector_mask_operand")
>     (match_operand 6 "autovec_length_operand")
>     (match_operand 7 "const_0_operand")]
> diff --git a/gcc/config/riscv/riscv-v.cc b/gcc/config/riscv/riscv-v.cc
> index f3c44313967..ae8db718b80 100644
> --- a/gcc/config/riscv/riscv-v.cc
> +++ b/gcc/config/riscv/riscv-v.cc
> @@ -4692,53 +4692,81 @@ expand_cond_binop (unsigned icode, rtx *ops)
>
>  /* Prepare insn_code for gather_load/scatter_store according to
>     the vector mode and index mode.  */
> -static insn_code
> -prepare_gather_scatter (machine_mode vec_mode, machine_mode idx_mode,
> -                       bool is_load)
> +insn_code
> +get_gather_scatter_code (machine_mode vec_mode, machine_mode idx_mode,
> +                        bool is_load)
>  {
> -  if (!is_load)
> -    return code_for_pred_indexed_store (UNSPEC_UNORDERED, vec_mode, 
> idx_mode);
> -  else
> +  unsigned src_eew_bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (idx_mode));
> +  unsigned dst_eew_bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
> +  if (dst_eew_bitsize == src_eew_bitsize)
>      {
> -      unsigned src_eew_bitsize = GET_MODE_BITSIZE (GET_MODE_INNER 
> (idx_mode));
> -      unsigned dst_eew_bitsize = GET_MODE_BITSIZE (GET_MODE_INNER 
> (vec_mode));
> -      if (dst_eew_bitsize == src_eew_bitsize)
> -       return code_for_pred_indexed_load_same_eew (UNSPEC_UNORDERED, 
> vec_mode);
> -      else if (dst_eew_bitsize > src_eew_bitsize)
> +      if (is_load)
> +       return code_for_pred_indexed_load_same_eew
> +         (UNSPEC_UNORDERED, vec_mode);
> +      else
> +       return code_for_pred_indexed_store_same_eew
> +         (UNSPEC_UNORDERED, vec_mode);
> +    }
> +  else if (dst_eew_bitsize > src_eew_bitsize)
> +    {
> +      unsigned factor = dst_eew_bitsize / src_eew_bitsize;
> +      switch (factor)
>         {
> -         unsigned factor = dst_eew_bitsize / src_eew_bitsize;
> -         switch (factor)
> -           {
> -           case 2:
> -             return code_for_pred_indexed_load_x2_greater_eew (
> -               UNSPEC_UNORDERED, vec_mode);
> -           case 4:
> -             return code_for_pred_indexed_load_x4_greater_eew (
> -               UNSPEC_UNORDERED, vec_mode);
> -           case 8:
> -             return code_for_pred_indexed_load_x8_greater_eew (
> -               UNSPEC_UNORDERED, vec_mode);
> -           default:
> -             gcc_unreachable ();
> -           }
> +       case 2:
> +         if (is_load)
> +           return
> +             code_for_pred_indexed_load_x2_greater_eew
> +               (UNSPEC_UNORDERED, vec_mode);
> +         else
> +           return
> +             code_for_pred_indexed_store_x2_greater_eew
> +               (UNSPEC_UNORDERED, vec_mode);
> +       case 4:
> +         if (is_load)
> +           return code_for_pred_indexed_load_x4_greater_eew
> +               (UNSPEC_UNORDERED, vec_mode);
> +         else
> +           return code_for_pred_indexed_store_x4_greater_eew
> +               (UNSPEC_UNORDERED, vec_mode);
> +       case 8:
> +         if (is_load)
> +           return code_for_pred_indexed_load_x8_greater_eew
> +             (UNSPEC_UNORDERED, vec_mode);
> +         else
> +           return code_for_pred_indexed_store_x8_greater_eew
> +             (UNSPEC_UNORDERED, vec_mode);
> +       default:
> +         gcc_unreachable ();
>         }
> -      else
> +    }
> +  else
> +    {
> +      unsigned factor = src_eew_bitsize / dst_eew_bitsize;
> +      switch (factor)
>         {
> -         unsigned factor = src_eew_bitsize / dst_eew_bitsize;
> -         switch (factor)
> -           {
> -           case 2:
> -             return code_for_pred_indexed_load_x2_smaller_eew (
> -               UNSPEC_UNORDERED, vec_mode);
> -           case 4:
> -             return code_for_pred_indexed_load_x4_smaller_eew (
> -               UNSPEC_UNORDERED, vec_mode);
> -           case 8:
> -             return code_for_pred_indexed_load_x8_smaller_eew (
> -               UNSPEC_UNORDERED, vec_mode);
> -           default:
> -             gcc_unreachable ();
> -           }
> +       case 2:
> +         if (is_load)
> +           return code_for_pred_indexed_load_x2_smaller_eew
> +             (UNSPEC_UNORDERED, vec_mode);
> +         else
> +           return code_for_pred_indexed_store_x2_smaller_eew
> +             (UNSPEC_UNORDERED, vec_mode);
> +       case 4:
> +         if (is_load)
> +           return code_for_pred_indexed_load_x4_smaller_eew
> +             (UNSPEC_UNORDERED, vec_mode);
> +         else
> +           return code_for_pred_indexed_store_x4_smaller_eew
> +             (UNSPEC_UNORDERED, vec_mode);
> +       case 8:
> +         if (is_load)
> +           return code_for_pred_indexed_load_x8_smaller_eew
> +             (UNSPEC_UNORDERED, vec_mode);
> +         else
> +           return code_for_pred_indexed_store_x8_smaller_eew
> +             (UNSPEC_UNORDERED, vec_mode);
> +       default:
> +         gcc_unreachable ();
>         }
>      }
>  }
> @@ -4769,7 +4797,7 @@ expand_gather_scatter (rtx *ops, bool is_load)
>    machine_mode idx_mode = GET_MODE (vec_offset);
>    bool is_vlmax = is_vlmax_len_p (vec_mode, len);
>
> -  insn_code icode = prepare_gather_scatter (vec_mode, idx_mode, is_load);
> +  insn_code icode = get_gather_scatter_code (vec_mode, idx_mode, is_load);
>    if (is_vlmax)
>      {
>        if (is_load)
> diff --git a/gcc/config/riscv/riscv-vector-builtins-bases.cc 
> b/gcc/config/riscv/riscv-vector-builtins-bases.cc
> index d00403a1fc5..15866d18342 100644
> --- a/gcc/config/riscv/riscv-vector-builtins-bases.cc
> +++ b/gcc/config/riscv/riscv-vector-builtins-bases.cc
> @@ -199,9 +199,57 @@ public:
>        {
>         int unspec = ORDERED_P ? UNSPEC_ORDERED : UNSPEC_UNORDERED;
>         if (STORE_P)
> -         return e.use_exact_insn (
> -           code_for_pred_indexed_store (unspec, e.vector_mode (),
> -                                        e.index_mode ()));
> +         {
> +           unsigned src_eew_bitsize
> +             = GET_MODE_BITSIZE (GET_MODE_INNER (e.index_mode ()));
> +           unsigned dst_eew_bitsize
> +             = GET_MODE_BITSIZE (GET_MODE_INNER (e.vector_mode ()));
> +           if (dst_eew_bitsize == src_eew_bitsize)
> +             return e.use_exact_insn (
> +               code_for_pred_indexed_store_same_eew (unspec, e.vector_mode 
> ()));
> +           else if (dst_eew_bitsize > src_eew_bitsize)
> +             {
> +               unsigned factor = dst_eew_bitsize / src_eew_bitsize;
> +               switch (factor)
> +                 {
> +                 case 2:
> +                   return e.use_exact_insn (
> +                     code_for_pred_indexed_store_x2_greater_eew (
> +                       unspec, e.vector_mode ()));
> +                 case 4:
> +                   return e.use_exact_insn (
> +                     code_for_pred_indexed_store_x4_greater_eew (
> +                       unspec, e.vector_mode ()));
> +                 case 8:
> +                   return e.use_exact_insn (
> +                     code_for_pred_indexed_store_x8_greater_eew (
> +                       unspec, e.vector_mode ()));
> +                 default:
> +                   gcc_unreachable ();
> +                 }
> +             }
> +           else
> +             {
> +               unsigned factor = src_eew_bitsize / dst_eew_bitsize;
> +               switch (factor)
> +                 {
> +                 case 2:
> +                   return e.use_exact_insn (
> +                     code_for_pred_indexed_store_x2_smaller_eew (
> +                       unspec, e.vector_mode ()));
> +                 case 4:
> +                   return e.use_exact_insn (
> +                     code_for_pred_indexed_store_x4_smaller_eew (
> +                       unspec, e.vector_mode ()));
> +                 case 8:
> +                   return e.use_exact_insn (
> +                     code_for_pred_indexed_store_x8_smaller_eew (
> +                       unspec, e.vector_mode ()));
> +                 default:
> +                   gcc_unreachable ();
> +                 }
> +             }
> +         }
>         else
>           {
>             unsigned src_eew_bitsize
> diff --git a/gcc/config/riscv/vector-iterators.md 
> b/gcc/config/riscv/vector-iterators.md
> index e4f3c449637..b6282607ceb 100644
> --- a/gcc/config/riscv/vector-iterators.md
> +++ b/gcc/config/riscv/vector-iterators.md
> @@ -345,6 +345,85 @@ (define_mode_iterator VEEWEXT2 [
>
>    (RVVM8DF "TARGET_VECTOR_ELEN_FP_64") (RVVM4DF "TARGET_VECTOR_ELEN_FP_64")
>    (RVVM2DF "TARGET_VECTOR_ELEN_FP_64") (RVVM1DF "TARGET_VECTOR_ELEN_FP_64")
> +
> +  (V1HI "riscv_vector::vls_mode_valid_p (V1HImode)")
> +  (V2HI "riscv_vector::vls_mode_valid_p (V2HImode)")
> +  (V4HI "riscv_vector::vls_mode_valid_p (V4HImode)")
> +  (V8HI "riscv_vector::vls_mode_valid_p (V8HImode)")
> +  (V16HI "riscv_vector::vls_mode_valid_p (V16HImode)")
> +  (V32HI "riscv_vector::vls_mode_valid_p (V32HImode) && TARGET_MIN_VLEN >= 
> 64")
> +  (V64HI "riscv_vector::vls_mode_valid_p (V64HImode) && TARGET_MIN_VLEN >= 
> 128")
> +  (V128HI "riscv_vector::vls_mode_valid_p (V128HImode) && TARGET_MIN_VLEN >= 
> 256")
> +  (V256HI "riscv_vector::vls_mode_valid_p (V256HImode) && TARGET_MIN_VLEN >= 
> 512")
> +  (V512HI "riscv_vector::vls_mode_valid_p (V512HImode) && TARGET_MIN_VLEN >= 
> 1024")
> +  (V1024HI "riscv_vector::vls_mode_valid_p (V1024HImode) && TARGET_MIN_VLEN 
> >= 2048")
> +  (V1SI "riscv_vector::vls_mode_valid_p (V1SImode)")
> +  (V2SI "riscv_vector::vls_mode_valid_p (V2SImode)")
> +  (V4SI "riscv_vector::vls_mode_valid_p (V4SImode)")
> +  (V8SI "riscv_vector::vls_mode_valid_p (V8SImode)")
> +  (V16SI "riscv_vector::vls_mode_valid_p (V16SImode) && TARGET_MIN_VLEN >= 
> 64")
> +  (V32SI "riscv_vector::vls_mode_valid_p (V32SImode) && TARGET_MIN_VLEN >= 
> 128")
> +  (V64SI "riscv_vector::vls_mode_valid_p (V64SImode) && TARGET_MIN_VLEN >= 
> 256")
> +  (V128SI "riscv_vector::vls_mode_valid_p (V128SImode) && TARGET_MIN_VLEN >= 
> 512")
> +  (V256SI "riscv_vector::vls_mode_valid_p (V256SImode) && TARGET_MIN_VLEN >= 
> 1024")
> +  (V512SI "riscv_vector::vls_mode_valid_p (V512SImode) && TARGET_MIN_VLEN >= 
> 2048")
> +  (V1024SI "riscv_vector::vls_mode_valid_p (V1024SImode) && TARGET_MIN_VLEN 
> >= 4096")
> +  (V1DI "riscv_vector::vls_mode_valid_p (V1DImode) && TARGET_VECTOR_ELEN_64")
> +  (V2DI "riscv_vector::vls_mode_valid_p (V2DImode) && TARGET_VECTOR_ELEN_64")
> +  (V4DI "riscv_vector::vls_mode_valid_p (V4DImode) && TARGET_VECTOR_ELEN_64")
> +  (V8DI "riscv_vector::vls_mode_valid_p (V8DImode) && TARGET_VECTOR_ELEN_64 
> && TARGET_MIN_VLEN >= 64")
> +  (V16DI "riscv_vector::vls_mode_valid_p (V16DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 128")
> +  (V32DI "riscv_vector::vls_mode_valid_p (V32DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 256")
> +  (V64DI "riscv_vector::vls_mode_valid_p (V64DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 512")
> +  (V128DI "riscv_vector::vls_mode_valid_p (V128DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 1024")
> +  (V256DI "riscv_vector::vls_mode_valid_p (V256DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 2048")
> +  (V512DI "riscv_vector::vls_mode_valid_p (V512DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 4096")
> +
> +  (V1HF "riscv_vector::vls_mode_valid_p (V1HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V2HF "riscv_vector::vls_mode_valid_p (V2HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V4HF "riscv_vector::vls_mode_valid_p (V4HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V8HF "riscv_vector::vls_mode_valid_p (V8HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V16HF "riscv_vector::vls_mode_valid_p (V16HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V32HF "riscv_vector::vls_mode_valid_p (V32HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 64")
> +  (V64HF "riscv_vector::vls_mode_valid_p (V64HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 128")
> +  (V128HF "riscv_vector::vls_mode_valid_p (V128HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 256")
> +  (V256HF "riscv_vector::vls_mode_valid_p (V256HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 512")
> +  (V512HF "riscv_vector::vls_mode_valid_p (V512HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 1024")
> +  (V1024HF "riscv_vector::vls_mode_valid_p (V1024HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 2048")
> +  (V2048HF "riscv_vector::vls_mode_valid_p (V2048HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 4096")
> +  (V1BF "riscv_vector::vls_mode_valid_p (V1BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V2BF "riscv_vector::vls_mode_valid_p (V2BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V4BF "riscv_vector::vls_mode_valid_p (V4BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V8BF "riscv_vector::vls_mode_valid_p (V8BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V16BF "riscv_vector::vls_mode_valid_p (V16BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V32BF "riscv_vector::vls_mode_valid_p (V32BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 64")
> +  (V64BF "riscv_vector::vls_mode_valid_p (V64BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 128")
> +  (V128BF "riscv_vector::vls_mode_valid_p (V128BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 256")
> +  (V256BF "riscv_vector::vls_mode_valid_p (V256BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 512")
> +  (V512BF "riscv_vector::vls_mode_valid_p (V512BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 1024")
> +  (V1024BF "riscv_vector::vls_mode_valid_p (V1024BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 2048")
> +  (V2048BF "riscv_vector::vls_mode_valid_p (V2048BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 4096")
> +  (V1SF "riscv_vector::vls_mode_valid_p (V1SFmode) && 
> TARGET_VECTOR_ELEN_FP_32")
> +  (V2SF "riscv_vector::vls_mode_valid_p (V2SFmode) && 
> TARGET_VECTOR_ELEN_FP_32")
> +  (V4SF "riscv_vector::vls_mode_valid_p (V4SFmode) && 
> TARGET_VECTOR_ELEN_FP_32")
> +  (V8SF "riscv_vector::vls_mode_valid_p (V8SFmode) && 
> TARGET_VECTOR_ELEN_FP_32")
> +  (V16SF "riscv_vector::vls_mode_valid_p (V16SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 64")
> +  (V32SF "riscv_vector::vls_mode_valid_p (V32SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 128")
> +  (V64SF "riscv_vector::vls_mode_valid_p (V64SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 256")
> +  (V128SF "riscv_vector::vls_mode_valid_p (V128SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 512")
> +  (V256SF "riscv_vector::vls_mode_valid_p (V256SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 1024")
> +  (V512SF "riscv_vector::vls_mode_valid_p (V512SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 2048")
> +  (V1024SF "riscv_vector::vls_mode_valid_p (V1024SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 4096")
> +  (V1DF "riscv_vector::vls_mode_valid_p (V1DFmode) && 
> TARGET_VECTOR_ELEN_FP_64")
> +  (V2DF "riscv_vector::vls_mode_valid_p (V2DFmode) && 
> TARGET_VECTOR_ELEN_FP_64")
> +  (V4DF "riscv_vector::vls_mode_valid_p (V4DFmode) && 
> TARGET_VECTOR_ELEN_FP_64")
> +  (V8DF "riscv_vector::vls_mode_valid_p (V8DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 64")
> +  (V16DF "riscv_vector::vls_mode_valid_p (V16DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 128")
> +  (V32DF "riscv_vector::vls_mode_valid_p (V32DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 256")
> +  (V64DF "riscv_vector::vls_mode_valid_p (V64DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 512")
> +  (V128DF "riscv_vector::vls_mode_valid_p (V128DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 1024")
> +  (V256DF "riscv_vector::vls_mode_valid_p (V256DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 2048")
> +  (V512DF "riscv_vector::vls_mode_valid_p (V512DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 4096")
>  ])
>
>  (define_mode_iterator VEEWEXT4 [
> @@ -358,6 +437,50 @@ (define_mode_iterator VEEWEXT4 [
>
>    (RVVM8DF "TARGET_VECTOR_ELEN_FP_64") (RVVM4DF "TARGET_VECTOR_ELEN_FP_64")
>    (RVVM2DF "TARGET_VECTOR_ELEN_FP_64") (RVVM1DF "TARGET_VECTOR_ELEN_FP_64")
> +
> +  (V1SI "riscv_vector::vls_mode_valid_p (V1SImode)")
> +  (V2SI "riscv_vector::vls_mode_valid_p (V2SImode)")
> +  (V4SI "riscv_vector::vls_mode_valid_p (V4SImode)")
> +  (V8SI "riscv_vector::vls_mode_valid_p (V8SImode)")
> +  (V16SI "riscv_vector::vls_mode_valid_p (V16SImode) && TARGET_MIN_VLEN >= 
> 64")
> +  (V32SI "riscv_vector::vls_mode_valid_p (V32SImode) && TARGET_MIN_VLEN >= 
> 128")
> +  (V64SI "riscv_vector::vls_mode_valid_p (V64SImode) && TARGET_MIN_VLEN >= 
> 256")
> +  (V128SI "riscv_vector::vls_mode_valid_p (V128SImode) && TARGET_MIN_VLEN >= 
> 512")
> +  (V256SI "riscv_vector::vls_mode_valid_p (V256SImode) && TARGET_MIN_VLEN >= 
> 1024")
> +  (V512SI "riscv_vector::vls_mode_valid_p (V512SImode) && TARGET_MIN_VLEN >= 
> 2048")
> +  (V1024SI "riscv_vector::vls_mode_valid_p (V1024SImode) && TARGET_MIN_VLEN 
> >= 4096")
> +  (V1DI "riscv_vector::vls_mode_valid_p (V1DImode) && TARGET_VECTOR_ELEN_64")
> +  (V2DI "riscv_vector::vls_mode_valid_p (V2DImode) && TARGET_VECTOR_ELEN_64")
> +  (V4DI "riscv_vector::vls_mode_valid_p (V4DImode) && TARGET_VECTOR_ELEN_64")
> +  (V8DI "riscv_vector::vls_mode_valid_p (V8DImode) && TARGET_VECTOR_ELEN_64 
> && TARGET_MIN_VLEN >= 64")
> +  (V16DI "riscv_vector::vls_mode_valid_p (V16DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 128")
> +  (V32DI "riscv_vector::vls_mode_valid_p (V32DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 256")
> +  (V64DI "riscv_vector::vls_mode_valid_p (V64DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 512")
> +  (V128DI "riscv_vector::vls_mode_valid_p (V128DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 1024")
> +  (V256DI "riscv_vector::vls_mode_valid_p (V256DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 2048")
> +  (V512DI "riscv_vector::vls_mode_valid_p (V512DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 4096")
> +
> +  (V1SF "riscv_vector::vls_mode_valid_p (V1SFmode) && 
> TARGET_VECTOR_ELEN_FP_32")
> +  (V2SF "riscv_vector::vls_mode_valid_p (V2SFmode) && 
> TARGET_VECTOR_ELEN_FP_32")
> +  (V4SF "riscv_vector::vls_mode_valid_p (V4SFmode) && 
> TARGET_VECTOR_ELEN_FP_32")
> +  (V8SF "riscv_vector::vls_mode_valid_p (V8SFmode) && 
> TARGET_VECTOR_ELEN_FP_32")
> +  (V16SF "riscv_vector::vls_mode_valid_p (V16SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 64")
> +  (V32SF "riscv_vector::vls_mode_valid_p (V32SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 128")
> +  (V64SF "riscv_vector::vls_mode_valid_p (V64SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 256")
> +  (V128SF "riscv_vector::vls_mode_valid_p (V128SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 512")
> +  (V256SF "riscv_vector::vls_mode_valid_p (V256SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 1024")
> +  (V512SF "riscv_vector::vls_mode_valid_p (V512SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 2048")
> +  (V1024SF "riscv_vector::vls_mode_valid_p (V1024SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 4096")
> +  (V1DF "riscv_vector::vls_mode_valid_p (V1DFmode) && 
> TARGET_VECTOR_ELEN_FP_64")
> +  (V2DF "riscv_vector::vls_mode_valid_p (V2DFmode) && 
> TARGET_VECTOR_ELEN_FP_64")
> +  (V4DF "riscv_vector::vls_mode_valid_p (V4DFmode) && 
> TARGET_VECTOR_ELEN_FP_64")
> +  (V8DF "riscv_vector::vls_mode_valid_p (V8DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 64")
> +  (V16DF "riscv_vector::vls_mode_valid_p (V16DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 128")
> +  (V32DF "riscv_vector::vls_mode_valid_p (V32DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 256")
> +  (V64DF "riscv_vector::vls_mode_valid_p (V64DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 512")
> +  (V128DF "riscv_vector::vls_mode_valid_p (V128DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 1024")
> +  (V256DF "riscv_vector::vls_mode_valid_p (V256DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 2048")
> +  (V512DF "riscv_vector::vls_mode_valid_p (V512DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 4096")
>  ])
>
>  (define_mode_iterator VEEWEXT8 [
> @@ -366,6 +489,28 @@ (define_mode_iterator VEEWEXT8 [
>
>    (RVVM8DF "TARGET_VECTOR_ELEN_FP_64") (RVVM4DF "TARGET_VECTOR_ELEN_FP_64")
>    (RVVM2DF "TARGET_VECTOR_ELEN_FP_64") (RVVM1DF "TARGET_VECTOR_ELEN_FP_64")
> +
> +  (V1DI "riscv_vector::vls_mode_valid_p (V1DImode) && TARGET_VECTOR_ELEN_64")
> +  (V2DI "riscv_vector::vls_mode_valid_p (V2DImode) && TARGET_VECTOR_ELEN_64")
> +  (V4DI "riscv_vector::vls_mode_valid_p (V4DImode) && TARGET_VECTOR_ELEN_64")
> +  (V8DI "riscv_vector::vls_mode_valid_p (V8DImode) && TARGET_VECTOR_ELEN_64 
> && TARGET_MIN_VLEN >= 64")
> +  (V16DI "riscv_vector::vls_mode_valid_p (V16DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 128")
> +  (V32DI "riscv_vector::vls_mode_valid_p (V32DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 256")
> +  (V64DI "riscv_vector::vls_mode_valid_p (V64DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 512")
> +  (V128DI "riscv_vector::vls_mode_valid_p (V128DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 1024")
> +  (V256DI "riscv_vector::vls_mode_valid_p (V256DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 2048")
> +  (V512DI "riscv_vector::vls_mode_valid_p (V512DImode) && 
> TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 4096")
> +
> +  (V1DF "riscv_vector::vls_mode_valid_p (V1DFmode) && 
> TARGET_VECTOR_ELEN_FP_64")
> +  (V2DF "riscv_vector::vls_mode_valid_p (V2DFmode) && 
> TARGET_VECTOR_ELEN_FP_64")
> +  (V4DF "riscv_vector::vls_mode_valid_p (V4DFmode) && 
> TARGET_VECTOR_ELEN_FP_64")
> +  (V8DF "riscv_vector::vls_mode_valid_p (V8DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 64")
> +  (V16DF "riscv_vector::vls_mode_valid_p (V16DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 128")
> +  (V32DF "riscv_vector::vls_mode_valid_p (V32DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 256")
> +  (V64DF "riscv_vector::vls_mode_valid_p (V64DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 512")
> +  (V128DF "riscv_vector::vls_mode_valid_p (V128DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 1024")
> +  (V256DF "riscv_vector::vls_mode_valid_p (V256DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 2048")
> +  (V512DF "riscv_vector::vls_mode_valid_p (V512DFmode) && 
> TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 4096")
>  ])
>
>  (define_mode_iterator VEEWTRUNC2 [
> @@ -390,6 +535,73 @@ (define_mode_iterator VEEWTRUNC2 [
>    (RVVM2SF "TARGET_VECTOR_ELEN_FP_32 && TARGET_64BIT")
>    (RVVM1SF "TARGET_VECTOR_ELEN_FP_32 && TARGET_64BIT")
>    (RVVMF2SF "TARGET_VECTOR_ELEN_FP_32 && TARGET_VECTOR_ELEN_64 && 
> TARGET_64BIT")
> +
> +  (V1QI "riscv_vector::vls_mode_valid_p (V1QImode)")
> +  (V2QI "riscv_vector::vls_mode_valid_p (V2QImode)")
> +  (V4QI "riscv_vector::vls_mode_valid_p (V4QImode)")
> +  (V8QI "riscv_vector::vls_mode_valid_p (V8QImode)")
> +  (V16QI "riscv_vector::vls_mode_valid_p (V16QImode)")
> +  (V32QI "riscv_vector::vls_mode_valid_p (V32QImode) && TARGET_MIN_VLEN >= 
> 64")
> +  (V64QI "riscv_vector::vls_mode_valid_p (V64QImode) && TARGET_MIN_VLEN >= 
> 128")
> +  (V128QI "riscv_vector::vls_mode_valid_p (V128QImode) && TARGET_MIN_VLEN >= 
> 256")
> +  (V256QI "riscv_vector::vls_mode_valid_p (V256QImode) && TARGET_MIN_VLEN >= 
> 512")
> +  (V512QI "riscv_vector::vls_mode_valid_p (V512QImode) && TARGET_MIN_VLEN >= 
> 1024")
> +  (V1024QI "riscv_vector::vls_mode_valid_p (V1024QImode) && TARGET_MIN_VLEN 
> >= 2048")
> +  (V2048QI "riscv_vector::vls_mode_valid_p (V2048QImode) && TARGET_MIN_VLEN 
> >= 4096")
> +  (V1HI "riscv_vector::vls_mode_valid_p (V1HImode)")
> +  (V2HI "riscv_vector::vls_mode_valid_p (V2HImode)")
> +  (V4HI "riscv_vector::vls_mode_valid_p (V4HImode)")
> +  (V8HI "riscv_vector::vls_mode_valid_p (V8HImode)")
> +  (V16HI "riscv_vector::vls_mode_valid_p (V16HImode)")
> +  (V32HI "riscv_vector::vls_mode_valid_p (V32HImode) && TARGET_MIN_VLEN >= 
> 64")
> +  (V64HI "riscv_vector::vls_mode_valid_p (V64HImode) && TARGET_MIN_VLEN >= 
> 128")
> +  (V128HI "riscv_vector::vls_mode_valid_p (V128HImode) && TARGET_MIN_VLEN >= 
> 256")
> +  (V256HI "riscv_vector::vls_mode_valid_p (V256HImode) && TARGET_MIN_VLEN >= 
> 512")
> +  (V512HI "riscv_vector::vls_mode_valid_p (V512HImode) && TARGET_MIN_VLEN >= 
> 1024")
> +  (V1024HI "riscv_vector::vls_mode_valid_p (V1024HImode) && TARGET_MIN_VLEN 
> >= 2048")
> +  (V1SI "riscv_vector::vls_mode_valid_p (V1SImode)")
> +  (V2SI "riscv_vector::vls_mode_valid_p (V2SImode)")
> +  (V4SI "riscv_vector::vls_mode_valid_p (V4SImode)")
> +  (V8SI "riscv_vector::vls_mode_valid_p (V8SImode)")
> +  (V16SI "riscv_vector::vls_mode_valid_p (V16SImode) && TARGET_MIN_VLEN >= 
> 64")
> +  (V32SI "riscv_vector::vls_mode_valid_p (V32SImode) && TARGET_MIN_VLEN >= 
> 128")
> +  (V64SI "riscv_vector::vls_mode_valid_p (V64SImode) && TARGET_MIN_VLEN >= 
> 256")
> +  (V128SI "riscv_vector::vls_mode_valid_p (V128SImode) && TARGET_MIN_VLEN >= 
> 512")
> +  (V256SI "riscv_vector::vls_mode_valid_p (V256SImode) && TARGET_MIN_VLEN >= 
> 1024")
> +  (V512SI "riscv_vector::vls_mode_valid_p (V512SImode) && TARGET_MIN_VLEN >= 
> 2048")
> +
> +  (V1HF "riscv_vector::vls_mode_valid_p (V1HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V2HF "riscv_vector::vls_mode_valid_p (V2HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V4HF "riscv_vector::vls_mode_valid_p (V4HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V8HF "riscv_vector::vls_mode_valid_p (V8HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V16HF "riscv_vector::vls_mode_valid_p (V16HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V32HF "riscv_vector::vls_mode_valid_p (V32HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 64")
> +  (V64HF "riscv_vector::vls_mode_valid_p (V64HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 128")
> +  (V128HF "riscv_vector::vls_mode_valid_p (V128HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 256")
> +  (V256HF "riscv_vector::vls_mode_valid_p (V256HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 512")
> +  (V512HF "riscv_vector::vls_mode_valid_p (V512HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 1024")
> +  (V1024HF "riscv_vector::vls_mode_valid_p (V1024HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 2048")
> +  (V1BF "riscv_vector::vls_mode_valid_p (V1BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V2BF "riscv_vector::vls_mode_valid_p (V2BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V4BF "riscv_vector::vls_mode_valid_p (V4BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V8BF "riscv_vector::vls_mode_valid_p (V8BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V16BF "riscv_vector::vls_mode_valid_p (V16BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V32BF "riscv_vector::vls_mode_valid_p (V32BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 64")
> +  (V64BF "riscv_vector::vls_mode_valid_p (V64BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 128")
> +  (V128BF "riscv_vector::vls_mode_valid_p (V128BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 256")
> +  (V256BF "riscv_vector::vls_mode_valid_p (V256BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 512")
> +  (V512BF "riscv_vector::vls_mode_valid_p (V512BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 1024")
> +  (V1024BF "riscv_vector::vls_mode_valid_p (V1024BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 2048")
> +  (V1SF "riscv_vector::vls_mode_valid_p (V1SFmode) && 
> TARGET_VECTOR_ELEN_FP_32")
> +  (V2SF "riscv_vector::vls_mode_valid_p (V2SFmode) && 
> TARGET_VECTOR_ELEN_FP_32")
> +  (V4SF "riscv_vector::vls_mode_valid_p (V4SFmode) && 
> TARGET_VECTOR_ELEN_FP_32")
> +  (V8SF "riscv_vector::vls_mode_valid_p (V8SFmode) && 
> TARGET_VECTOR_ELEN_FP_32")
> +  (V16SF "riscv_vector::vls_mode_valid_p (V16SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 64")
> +  (V32SF "riscv_vector::vls_mode_valid_p (V32SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 128")
> +  (V64SF "riscv_vector::vls_mode_valid_p (V64SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 256")
> +  (V128SF "riscv_vector::vls_mode_valid_p (V128SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 512")
> +  (V256SF "riscv_vector::vls_mode_valid_p (V256SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 1024")
> +  (V512SF "riscv_vector::vls_mode_valid_p (V512SFmode) && 
> TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 2048")
>  ])
>
>  (define_mode_iterator VEEWTRUNC4 [
> @@ -409,6 +621,49 @@ (define_mode_iterator VEEWTRUNC4 [
>    (RVVM1HF "TARGET_VECTOR_ELEN_FP_16 && TARGET_64BIT")
>    (RVVMF2HF "TARGET_VECTOR_ELEN_FP_16 && TARGET_64BIT")
>    (RVVMF4HF "TARGET_VECTOR_ELEN_FP_16 && TARGET_VECTOR_ELEN_64 && 
> TARGET_64BIT")
> +
> +  (V1QI "riscv_vector::vls_mode_valid_p (V1QImode)")
> +  (V2QI "riscv_vector::vls_mode_valid_p (V2QImode)")
> +  (V4QI "riscv_vector::vls_mode_valid_p (V4QImode)")
> +  (V8QI "riscv_vector::vls_mode_valid_p (V8QImode)")
> +  (V16QI "riscv_vector::vls_mode_valid_p (V16QImode)")
> +  (V32QI "riscv_vector::vls_mode_valid_p (V32QImode) && TARGET_MIN_VLEN >= 
> 64")
> +  (V64QI "riscv_vector::vls_mode_valid_p (V64QImode) && TARGET_MIN_VLEN >= 
> 128")
> +  (V128QI "riscv_vector::vls_mode_valid_p (V128QImode) && TARGET_MIN_VLEN >= 
> 256")
> +  (V256QI "riscv_vector::vls_mode_valid_p (V256QImode) && TARGET_MIN_VLEN >= 
> 512")
> +  (V512QI "riscv_vector::vls_mode_valid_p (V512QImode) && TARGET_MIN_VLEN >= 
> 1024")
> +  (V1024QI "riscv_vector::vls_mode_valid_p (V1024QImode) && TARGET_MIN_VLEN 
> >= 2048")
> +  (V1HI "riscv_vector::vls_mode_valid_p (V1HImode)")
> +  (V2HI "riscv_vector::vls_mode_valid_p (V2HImode)")
> +  (V4HI "riscv_vector::vls_mode_valid_p (V4HImode)")
> +  (V8HI "riscv_vector::vls_mode_valid_p (V8HImode)")
> +  (V16HI "riscv_vector::vls_mode_valid_p (V16HImode)")
> +  (V32HI "riscv_vector::vls_mode_valid_p (V32HImode) && TARGET_MIN_VLEN >= 
> 64")
> +  (V64HI "riscv_vector::vls_mode_valid_p (V64HImode) && TARGET_MIN_VLEN >= 
> 128")
> +  (V128HI "riscv_vector::vls_mode_valid_p (V128HImode) && TARGET_MIN_VLEN >= 
> 256")
> +  (V256HI "riscv_vector::vls_mode_valid_p (V256HImode) && TARGET_MIN_VLEN >= 
> 512")
> +  (V512HI "riscv_vector::vls_mode_valid_p (V512HImode) && TARGET_MIN_VLEN >= 
> 1024")
> +
> +  (V1HF "riscv_vector::vls_mode_valid_p (V1HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V2HF "riscv_vector::vls_mode_valid_p (V2HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V4HF "riscv_vector::vls_mode_valid_p (V4HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V8HF "riscv_vector::vls_mode_valid_p (V8HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V16HF "riscv_vector::vls_mode_valid_p (V16HFmode) && 
> TARGET_VECTOR_ELEN_FP_16")
> +  (V32HF "riscv_vector::vls_mode_valid_p (V32HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 64")
> +  (V64HF "riscv_vector::vls_mode_valid_p (V64HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 128")
> +  (V128HF "riscv_vector::vls_mode_valid_p (V128HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 256")
> +  (V256HF "riscv_vector::vls_mode_valid_p (V256HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 512")
> +  (V512HF "riscv_vector::vls_mode_valid_p (V512HFmode) && 
> TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 1024")
> +  (V1BF "riscv_vector::vls_mode_valid_p (V1BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V2BF "riscv_vector::vls_mode_valid_p (V2BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V4BF "riscv_vector::vls_mode_valid_p (V4BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V8BF "riscv_vector::vls_mode_valid_p (V8BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V16BF "riscv_vector::vls_mode_valid_p (V16BFmode) && 
> TARGET_VECTOR_ELEN_BF_16")
> +  (V32BF "riscv_vector::vls_mode_valid_p (V32BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 64")
> +  (V64BF "riscv_vector::vls_mode_valid_p (V64BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 128")
> +  (V128BF "riscv_vector::vls_mode_valid_p (V128BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 256")
> +  (V256BF "riscv_vector::vls_mode_valid_p (V256BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 512")
> +  (V512BF "riscv_vector::vls_mode_valid_p (V512BFmode) && 
> TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN >= 1024")
>  ])
>
>  (define_mode_iterator VEEWTRUNC8 [
> @@ -416,6 +671,17 @@ (define_mode_iterator VEEWTRUNC8 [
>    (RVVMF2QI "TARGET_64BIT")
>    (RVVMF4QI "TARGET_64BIT")
>    (RVVMF8QI "TARGET_VECTOR_ELEN_64 && TARGET_64BIT")
> +
> +  (V1QI "riscv_vector::vls_mode_valid_p (V1QImode)")
> +  (V2QI "riscv_vector::vls_mode_valid_p (V2QImode)")
> +  (V4QI "riscv_vector::vls_mode_valid_p (V4QImode)")
> +  (V8QI "riscv_vector::vls_mode_valid_p (V8QImode)")
> +  (V16QI "riscv_vector::vls_mode_valid_p (V16QImode)")
> +  (V32QI "riscv_vector::vls_mode_valid_p (V32QImode) && TARGET_MIN_VLEN >= 
> 64")
> +  (V64QI "riscv_vector::vls_mode_valid_p (V64QImode) && TARGET_MIN_VLEN >= 
> 128")
> +  (V128QI "riscv_vector::vls_mode_valid_p (V128QImode) && TARGET_MIN_VLEN >= 
> 256")
> +  (V256QI "riscv_vector::vls_mode_valid_p (V256QImode) && TARGET_MIN_VLEN >= 
> 512")
> +  (V512QI "riscv_vector::vls_mode_valid_p (V512QImode) && TARGET_MIN_VLEN >= 
> 1024")
>  ])
>
>  (define_mode_iterator VEI16 [
> @@ -1924,6 +2190,117 @@ (define_mode_attr VINDEX [
>    (V512DF "V512DI")
>  ])
>
> +(define_mode_attr vindex [
> +  (RVVM8QI "rvvm8qi") (RVVM4QI "rvvm4qi") (RVVM2QI "rvvm2qi") (RVVM1QI 
> "rvvm1qi")
> +  (RVVMF2QI "rvvmf2qi") (RVVMF4QI "rvvmf4qi") (RVVMF8QI "rvvmf8qi")
> +
> +  (RVVM8HI "rvvm8hi") (RVVM4HI "rvvm4hi") (RVVM2HI "rvvm2hi") (RVVM1HI 
> "rvvm1hi") (RVVMF2HI "rvvmf2hi") (RVVMF4HI "rvvmf4hi")
> +
> +  (RVVM8BF "rvvm8hi") (RVVM4BF "rvvm4hi") (RVVM2BF "rvvm2hi") (RVVM1BF 
> "rvvm1hi") (RVVMF2BF "rvvmf2hi") (RVVMF4BF "rvvmf4hi")
> +
> +  (RVVM8HF "rvvm8hi") (RVVM4HF "rvvm4hi") (RVVM2HF "rvvm2hi") (RVVM1HF 
> "rvvm1hi") (RVVMF2HF "rvvmf2hi") (RVVMF4HF "rvvmf4hi")
> +
> +  (RVVM8SI "rvvm8si") (RVVM4SI "rvvm4si") (RVVM2SI "rvvm2si") (RVVM1SI 
> "rvvm1si") (RVVMF2SI "rvvmf2si")
> +
> +  (RVVM8SF "rvvm8si") (RVVM4SF "rvvm4si") (RVVM2SF "rvvm2si") (RVVM1SF 
> "rvvm1si") (RVVMF2SF "rvvmf2si")
> +
> +  (RVVM8DI "rvvm8di") (RVVM4DI "rvvm4di") (RVVM2DI "rvvm2di") (RVVM1DI 
> "rvvm1di")
> +
> +  (RVVM8DF "rvvm8di") (RVVM4DF "rvvm4di") (RVVM2DF "rvvm2di") (RVVM1DF 
> "rvvm1di")
> +
> +  (V1QI "v1qi")
> +  (V2QI "v2qi")
> +  (V4QI "v4qi")
> +  (V8QI "v8qi")
> +  (V16QI "v16qi")
> +  (V32QI "v32qi")
> +  (V64QI "v64qi")
> +  (V128QI "v128qi")
> +  (V256QI "v256qi")
> +  (V512QI "v512qi")
> +  (V1024QI "v1024qi")
> +  (V2048QI "v2048qi")
> +  (V4096QI "v4096qi")
> +  (V1HI "v1hi")
> +  (V2HI "v2hi")
> +  (V4HI "v4hi")
> +  (V8HI "v8hi")
> +  (V16HI "v16hi")
> +  (V32HI "v32hi")
> +  (V64HI "v64hi")
> +  (V128HI "v128hi")
> +  (V256HI "v256hi")
> +  (V512HI "v512hi")
> +  (V1024HI "v1024hi")
> +  (V2048HI "v2048hi")
> +  (V1SI "v1si")
> +  (V2SI "v2si")
> +  (V4SI "v4si")
> +  (V8SI "v8si")
> +  (V16SI "v16si")
> +  (V32SI "v32si")
> +  (V64SI "v64si")
> +  (V128SI "v128si")
> +  (V256SI "v256si")
> +  (V512SI "v512si")
> +  (V1024SI "v1024si")
> +  (V1DI "v1di")
> +  (V2DI "v2di")
> +  (V4DI "v4di")
> +  (V8DI "v8di")
> +  (V16DI "v16di")
> +  (V32DI "v32di")
> +  (V64DI "v64di")
> +  (V128DI "v128di")
> +  (V256DI "v256di")
> +  (V512DI "v512di")
> +  (V1HF "v1hi")
> +  (V2HF "v2hi")
> +  (V4HF "v4hi")
> +  (V8HF "v8hi")
> +  (V16HF "v16hi")
> +  (V32HF "v32hi")
> +  (V64HF "v64hi")
> +  (V128HF "v128hi")
> +  (V256HF "v256hi")
> +  (V512HF "v512hi")
> +  (V1024HF "v1024hi")
> +  (V2048HF "v2048hi")
> +  (V1BF "v1hi")
> +  (V2BF "v2hi")
> +  (V4BF "v4hi")
> +  (V8BF "v8hi")
> +  (V16BF "v16hi")
> +  (V32BF "v32hi")
> +  (V64BF "v64hi")
> +  (V128BF "v128hi")
> +  (V256BF "v256hi")
> +  (V512BF "v512hi")
> +  (V1024BF "v1024hi")
> +  (V2048BF "v2048hi")
> +  (V1SF "v1si")
> +  (V2SF "v2si")
> +  (V4SF "v4si")
> +  (V8SF "v8si")
> +  (V16SF "v16si")
> +  (V32SF "v32si")
> +  (V64SF "v64si")
> +  (V128SF "v128si")
> +  (V256SF "v256si")
> +  (V512SF "v512si")
> +  (V1024SF "v1024si")
> +  (V1DF "v1di")
> +  (V2DF "v2di")
> +  (V4DF "v4di")
> +  (V8DF "v8di")
> +  (V16DF "v16di")
> +  (V32DF "v32di")
> +  (V64DF "v64di")
> +  (V128DF "v128di")
> +  (V256DF "v256di")
> +  (V512DF "v512di")
> +])
> +
>  (define_mode_attr VINDEXEI16 [
>    (RVVM4QI "RVVM8HI") (RVVM2QI "RVVM4HI") (RVVM1QI "RVVM2HI") (RVVMF2QI 
> "RVVM1HI") (RVVMF4QI "RVVMF2HI") (RVVMF8QI "RVVMF4HI")
>
> @@ -2779,6 +3156,85 @@ (define_mode_attr double_trunc_sew [
>    (RVVM8DI "32") (RVVM4DI "32") (RVVM2DI "32") (RVVM1DI "32")
>
>    (RVVM8DF "32") (RVVM4DF "32") (RVVM2DF "32") (RVVM1DF "32")
> +
> +  (V1HI "8")
> +  (V2HI "8")
> +  (V4HI "8")
> +  (V8HI "8")
> +  (V16HI "8")
> +  (V32HI "8")
> +  (V64HI "8")
> +  (V128HI "8")
> +  (V256HI "8")
> +  (V512HI "8")
> +  (V1024HI "8")
> +  (V2048HI "8")
> +  (V1SI "16")
> +  (V2SI "16")
> +  (V4SI "16")
> +  (V8SI "16")
> +  (V16SI "16")
> +  (V32SI "16")
> +  (V64SI "16")
> +  (V128SI "16")
> +  (V256SI "16")
> +  (V512SI "16")
> +  (V1024SI "16")
> +  (V1DI "32")
> +  (V2DI "32")
> +  (V4DI "32")
> +  (V8DI "32")
> +  (V16DI "32")
> +  (V32DI "32")
> +  (V64DI "32")
> +  (V128DI "32")
> +  (V256DI "32")
> +  (V512DI "32")
> +  (V1HF "8")
> +  (V2HF "8")
> +  (V4HF "8")
> +  (V8HF "8")
> +  (V16HF "8")
> +  (V32HF "8")
> +  (V64HF "8")
> +  (V128HF "8")
> +  (V256HF "8")
> +  (V512HF "8")
> +  (V1024HF "8")
> +  (V2048HF "8")
> +  (V1BF "8")
> +  (V2BF "8")
> +  (V4BF "8")
> +  (V8BF "8")
> +  (V16BF "8")
> +  (V32BF "8")
> +  (V64BF "8")
> +  (V128BF "8")
> +  (V256BF "8")
> +  (V512BF "8")
> +  (V1024BF "8")
> +  (V2048BF "8")
> +  (V1SF "16")
> +  (V2SF "16")
> +  (V4SF "16")
> +  (V8SF "16")
> +  (V16SF "16")
> +  (V32SF "16")
> +  (V64SF "16")
> +  (V128SF "16")
> +  (V256SF "16")
> +  (V512SF "16")
> +  (V1024SF "16")
> +  (V1DF "32")
> +  (V2DF "32")
> +  (V4DF "32")
> +  (V8DF "32")
> +  (V16DF "32")
> +  (V32DF "32")
> +  (V64DF "32")
> +  (V128DF "32")
> +  (V256DF "32")
> +  (V512DF "32")
>  ])
>
>  (define_mode_attr quad_trunc_sew [
> @@ -2789,12 +3245,76 @@ (define_mode_attr quad_trunc_sew [
>    (RVVM8DI "16") (RVVM4DI "16") (RVVM2DI "16") (RVVM1DI "16")
>
>    (RVVM8DF "16") (RVVM4DF "16") (RVVM2DF "16") (RVVM1DF "16")
> +
> +  (V1SI "8")
> +  (V2SI "8")
> +  (V4SI "8")
> +  (V8SI "8")
> +  (V16SI "8")
> +  (V32SI "8")
> +  (V64SI "8")
> +  (V128SI "8")
> +  (V256SI "8")
> +  (V512SI "8")
> +  (V1024SI "8")
> +  (V1DI "16")
> +  (V2DI "16")
> +  (V4DI "16")
> +  (V8DI "16")
> +  (V16DI "16")
> +  (V32DI "16")
> +  (V64DI "16")
> +  (V128DI "16")
> +  (V256DI "16")
> +  (V512DI "16")
> +  (V1SF "8")
> +  (V2SF "8")
> +  (V4SF "8")
> +  (V8SF "8")
> +  (V16SF "8")
> +  (V32SF "8")
> +  (V64SF "8")
> +  (V128SF "8")
> +  (V256SF "8")
> +  (V512SF "8")
> +  (V1024SF "8")
> +  (V1DF "16")
> +  (V2DF "16")
> +  (V4DF "16")
> +  (V8DF "16")
> +  (V16DF "16")
> +  (V32DF "16")
> +  (V64DF "16")
> +  (V128DF "16")
> +  (V256DF "16")
> +  (V512DF "16")
>  ])
>
>  (define_mode_attr oct_trunc_sew [
>    (RVVM8DI "8") (RVVM4DI "8") (RVVM2DI "8") (RVVM1DI "8")
>
>    (RVVM8DF "8") (RVVM4DF "8") (RVVM2DF "8") (RVVM1DF "8")
> +
> +  (V1DI "8")
> +  (V2DI "8")
> +  (V4DI "8")
> +  (V8DI "8")
> +  (V16DI "8")
> +  (V32DI "8")
> +  (V64DI "8")
> +  (V128DI "8")
> +  (V256DI "8")
> +  (V512DI "8")
> +  (V1DF "8")
> +  (V2DF "8")
> +  (V4DF "8")
> +  (V8DF "8")
> +  (V16DF "8")
> +  (V32DF "8")
> +  (V64DF "8")
> +  (V128DF "8")
> +  (V256DF "8")
> +  (V512DF "8")
>  ])
>
>  (define_mode_attr double_ext_sew [
> @@ -2809,6 +3329,72 @@ (define_mode_attr double_ext_sew [
>    (RVVM4SI "64") (RVVM2SI "64") (RVVM1SI "64") (RVVMF2SI "64")
>
>    (RVVM4SF "64") (RVVM2SF "64") (RVVM1SF "64") (RVVMF2SF "64")
> +
> +  (V1QI "16")
> +  (V2QI "16")
> +  (V4QI "16")
> +  (V8QI "16")
> +  (V16QI "16")
> +  (V32QI "16")
> +  (V64QI "16")
> +  (V128QI "16")
> +  (V256QI "16")
> +  (V512QI "16")
> +  (V1024QI "16")
> +  (V2048QI "16")
> +  (V1HI "32")
> +  (V2HI "32")
> +  (V4HI "32")
> +  (V8HI "32")
> +  (V16HI "32")
> +  (V32HI "32")
> +  (V64HI "32")
> +  (V128HI "32")
> +  (V256HI "32")
> +  (V512HI "32")
> +  (V1024HI "32")
> +  (V1SI "64")
> +  (V2SI "64")
> +  (V4SI "64")
> +  (V8SI "64")
> +  (V16SI "64")
> +  (V32SI "64")
> +  (V64SI "64")
> +  (V128SI "64")
> +  (V256SI "64")
> +  (V512SI "64")
> +  (V1HF "32")
> +  (V2HF "32")
> +  (V4HF "32")
> +  (V8HF "32")
> +  (V16HF "32")
> +  (V32HF "32")
> +  (V64HF "32")
> +  (V128HF "32")
> +  (V256HF "32")
> +  (V512HF "32")
> +  (V1024HF "32")
> +  (V1BF "32")
> +  (V2BF "32")
> +  (V4BF "32")
> +  (V8BF "32")
> +  (V16BF "32")
> +  (V32BF "32")
> +  (V64BF "32")
> +  (V128BF "32")
> +  (V256BF "32")
> +  (V512BF "32")
> +  (V1024BF "32")
> +  (V1SF "64")
> +  (V2SF "64")
> +  (V4SF "64")
> +  (V8SF "64")
> +  (V16SF "64")
> +  (V32SF "64")
> +  (V64SF "64")
> +  (V128SF "64")
> +  (V256SF "64")
> +  (V512SF "64")
>  ])
>
>  (define_mode_attr quad_ext_sew [
> @@ -2817,10 +3403,63 @@ (define_mode_attr quad_ext_sew [
>    (RVVM2HI "64") (RVVM1HI "64") (RVVMF2HI "64") (RVVMF4HI "64")
>
>    (RVVM2HF "64") (RVVM1HF "64") (RVVMF2HF "64") (RVVMF4HF "64")
> +
> +  (V1QI "32")
> +  (V2QI "32")
> +  (V4QI "32")
> +  (V8QI "32")
> +  (V16QI "32")
> +  (V32QI "32")
> +  (V64QI "32")
> +  (V128QI "32")
> +  (V256QI "32")
> +  (V512QI "32")
> +  (V1024QI "32")
> +  (V1HI "64")
> +  (V2HI "64")
> +  (V4HI "64")
> +  (V8HI "64")
> +  (V16HI "64")
> +  (V32HI "64")
> +  (V64HI "64")
> +  (V128HI "64")
> +  (V256HI "64")
> +  (V512HI "64")
> +  (V1HF "64")
> +  (V2HF "64")
> +  (V4HF "64")
> +  (V8HF "64")
> +  (V16HF "64")
> +  (V32HF "64")
> +  (V64HF "64")
> +  (V128HF "64")
> +  (V256HF "64")
> +  (V512HF "64")
> +  (V1BF "64")
> +  (V2BF "64")
> +  (V4BF "64")
> +  (V8BF "64")
> +  (V16BF "64")
> +  (V32BF "64")
> +  (V64BF "64")
> +  (V128BF "64")
> +  (V256BF "64")
> +  (V512BF "64")
>  ])
>
>  (define_mode_attr oct_ext_sew [
>    (RVVM1QI "64") (RVVMF2QI "64") (RVVMF4QI "64") (RVVMF8QI "64")
> +
> +  (V1QI "64")
> +  (V2QI "64")
> +  (V4QI "64")
> +  (V8QI "64")
> +  (V16QI "64")
> +  (V32QI "64")
> +  (V64QI "64")
> +  (V128QI "64")
> +  (V256QI "64")
> +  (V512QI "64")
>  ])
>
>  (define_mode_attr V_DOUBLE_EXTEND [
> @@ -2957,6 +3596,34 @@ (define_mode_attr V_DOUBLE_TRUNC [
>    (V512DF "V512SF")
>  ])
>
> +(define_mode_attr VF_DOUBLE_TRUNC_INDEX [
> +  (RVVM8SF "RVVM4HI") (RVVM4SF "RVVM2HI") (RVVM2SF "RVVM1HI") (RVVM1SF 
> "RVVMF2HI") (RVVMF2SF "RVVMF4HI")
> +
> +  (RVVM8DF "RVVM4SI") (RVVM4DF "RVVM2SI") (RVVM2DF "RVVM1SI") (RVVM1DF 
> "RVVMF2SI")
> +
> +  (V1SF "V1HI")
> +  (V2SF "V2HI")
> +  (V4SF "V4HI")
> +  (V8SF "V8HI")
> +  (V16SF "V16HI")
> +  (V32SF "V32HI")
> +  (V64SF "V64HI")
> +  (V128SF "V128HI")
> +  (V256SF "V256HI")
> +  (V512SF "V512HI")
> +  (V1024SF "V1024HI")
> +  (V1DF "V1SI")
> +  (V2DF "V2SI")
> +  (V4DF "V4SI")
> +  (V8DF "V8SI")
> +  (V16DF "V16SI")
> +  (V32DF "V32SI")
> +  (V64DF "V64SI")
> +  (V128DF "V128SI")
> +  (V256DF "V256SI")
> +  (V512DF "V512SI")
> +])
> +
>  (define_mode_attr V_QUAD_TRUNC [
>    (RVVM8SI "RVVM2QI") (RVVM4SI "RVVM1QI") (RVVM2SI "RVVMF2QI") (RVVM1SI 
> "RVVMF4QI") (RVVMF2SI "RVVMF8QI")
>
> @@ -2997,32 +3664,253 @@ (define_mode_attr V_QUAD_TRUNC [
>    (V512DF "V512HF")
>  ])
>
> -(define_mode_attr V_OCT_TRUNC [
> -  (RVVM8DI "RVVM1QI") (RVVM4DI "RVVMF2QI") (RVVM2DI "RVVMF4QI") (RVVM1DI 
> "RVVMF8QI")
> +(define_mode_attr V_OCT_TRUNC [
> +  (RVVM8DI "RVVM1QI") (RVVM4DI "RVVMF2QI") (RVVM2DI "RVVMF4QI") (RVVM1DI 
> "RVVMF8QI")
> +
> +  (V1DI "V1QI")
> +  (V2DI "V2QI")
> +  (V4DI "V4QI")
> +  (V8DI "V8QI")
> +  (V16DI "V16QI")
> +  (V32DI "V32QI")
> +  (V64DI "V64QI")
> +  (V128DI "V128QI")
> +  (V256DI "V256QI")
> +  (V512DI "V512QI")
> +])
> +
> +; Again in lower case.
> +(define_mode_attr v_double_trunc [
> +  (RVVM8HI "rvvm4qi") (RVVM4HI "rvvm2qi") (RVVM2HI "rvvm1qi") (RVVM1HI 
> "rvvmf2qi") (RVVMF2HI "rvvmf4qi") (RVVMF4HI "rvvmf8qi")
> +
> +  (RVVM8SI "rvvm4hi") (RVVM4SI "rvvm2hi") (RVVM2SI "rvvm1hi") (RVVM1SI 
> "rvvmf2hi") (RVVMF2SI "rvvmf4hi")
> +
> +  (RVVM8SF "rvvm4hf") (RVVM4SF "rvvm2hf") (RVVM2SF "rvvm1hf") (RVVM1SF 
> "rvvmf2hf") (RVVMF2SF "rvvmf4hf")
> +
> +  (RVVM8DI "rvvm4si") (RVVM4DI "rvvm2si") (RVVM2DI "rvvm1si") (RVVM1DI 
> "rvvmf2si")
> +
> +  (RVVM8DF "rvvm4sf") (RVVM4DF "rvvm2sf") (RVVM2DF "rvvm1sf") (RVVM1DF 
> "rvvmf2sf")
> +
> +  (V1HI "v1qi")
> +  (V2HI "v2qi")
> +  (V4HI "v4qi")
> +  (V8HI "v8qi")
> +  (V16HI "v16qi")
> +  (V32HI "v32qi")
> +  (V64HI "v64qi")
> +  (V128HI "v128qi")
> +  (V256HI "v256qi")
> +  (V512HI "v512qi")
> +  (V1024HI "v1024qi")
> +  (V2048HI "v2048qi")
> +  (V1SI "v1hi")
> +  (V2SI "v2hi")
> +  (V4SI "v4hi")
> +  (V8SI "v8hi")
> +  (V16SI "v16hi")
> +  (V32SI "v32hi")
> +  (V64SI "v64hi")
> +  (V128SI "v128hi")
> +  (V256SI "v256hi")
> +  (V512SI "v512hi")
> +  (V1024SI "v1024hi")
> +  (V1DI "v1si")
> +  (V2DI "v2si")
> +  (V4DI "v4si")
> +  (V8DI "v8si")
> +  (V16DI "v16si")
> +  (V32DI "v32si")
> +  (V64DI "v64si")
> +  (V128DI "v128si")
> +  (V256DI "v256si")
> +  (V512DI "v512si")
> +  (V1SF "v1hf")
> +  (V2SF "v2hf")
> +  (V4SF "v4hf")
> +  (V8SF "v8hf")
> +  (V16SF "v16hf")
> +  (V32SF "v32hf")
> +  (V64SF "v64hf")
> +  (V128SF "v128hf")
> +  (V256SF "v256hf")
> +  (V512SF "v512hf")
> +  (V1024SF "v1024hf")
> +  (V1DF "v1sf")
> +  (V2DF "v2sf")
> +  (V4DF "v4sf")
> +  (V8DF "v8sf")
> +  (V16DF "v16sf")
> +  (V32DF "v32sf")
> +  (V64DF "v64sf")
> +  (V128DF "v128sf")
> +  (V256DF "v256sf")
> +  (V512DF "v512sf")
> +])
> +
> +(define_mode_attr v_quad_trunc [
> +  (RVVM8SI "rvvm2qi") (RVVM4SI "rvvm1qi") (RVVM2SI "rvvmf2qi") (RVVM1SI 
> "rvvmf4qi") (RVVMF2SI "rvvmf8qi")
> +
> +  (RVVM8DI "rvvm2hi") (RVVM4DI "rvvm1hi") (RVVM2DI "rvvmf2hi") (RVVM1DI 
> "rvvmf4hi")
> +
> +  (RVVM8DF "rvvm2hf") (RVVM4DF "rvvm1hf") (RVVM2DF "rvvmf2hf") (RVVM1DF 
> "rvvmf4hf")
> +
> +  (V1SI "v1qi")
> +  (V2SI "v2qi")
> +  (V4SI "v4qi")
> +  (V8SI "v8qi")
> +  (V16SI "v16qi")
> +  (V32SI "v32qi")
> +  (V64SI "v64qi")
> +  (V128SI "v128qi")
> +  (V256SI "v256qi")
> +  (V512SI "v512qi")
> +  (V1024SI "v1024qi")
> +  (V1DI "v1hi")
> +  (V2DI "v2hi")
> +  (V4DI "v4hi")
> +  (V8DI "v8hi")
> +  (V16DI "v16hi")
> +  (V32DI "v32hi")
> +  (V64DI "v64hi")
> +  (V128DI "v128hi")
> +  (V256DI "v256hi")
> +  (V512DI "v512hi")
> +  (V1DF "v1hf")
> +  (V2DF "v2hf")
> +  (V4DF "v4hf")
> +  (V8DF "v8hf")
> +  (V16DF "v16hf")
> +  (V32DF "v32hf")
> +  (V64DF "v64hf")
> +  (V128DF "v128hf")
> +  (V256DF "v256hf")
> +  (V512DF "v512hf")
> +])
> +
> +(define_mode_attr v_oct_trunc [
> +  (RVVM8DI "rvvm1qi") (RVVM4DI "rvvmf2qi") (RVVM2DI "rvvmf4qi") (RVVM1DI 
> "rvvmf8qi")
> +
> +  (V1DI "v1qi")
> +  (V2DI "v2qi")
> +  (V4DI "v4qi")
> +  (V8DI "v8qi")
> +  (V16DI "v16qi")
> +  (V32DI "v32qi")
> +  (V64DI "v64qi")
> +  (V128DI "v128qi")
> +  (V256DI "v256qi")
> +  (V512DI "v512qi")
> +])
> +
> +(define_mode_attr VINDEX_DOUBLE_TRUNC [
> +  (RVVM8HI "RVVM4QI") (RVVM4HI "RVVM2QI") (RVVM2HI "RVVM1QI") (RVVM1HI 
> "RVVMF2QI") (RVVMF2HI "RVVMF4QI") (RVVMF4HI "RVVMF8QI")
> +
> +  (RVVM8BF "RVVM4QI") (RVVM4BF "RVVM2QI") (RVVM2BF "RVVM1QI") (RVVM1BF 
> "RVVMF2QI") (RVVMF2BF "RVVMF4QI") (RVVMF4BF "RVVMF8QI")
> +
> +  (RVVM8HF "RVVM4QI") (RVVM4HF "RVVM2QI") (RVVM2HF "RVVM1QI") (RVVM1HF 
> "RVVMF2QI") (RVVMF2HF "RVVMF4QI") (RVVMF4HF "RVVMF8QI")
> +
> +  (RVVM8SI "RVVM4HI") (RVVM4SI "RVVM2HI") (RVVM2SI "RVVM1HI") (RVVM1SI 
> "RVVMF2HI") (RVVMF2SI "RVVMF4HI")
> +
> +  (RVVM8SF "RVVM4HI") (RVVM4SF "RVVM2HI") (RVVM2SF "RVVM1HI") (RVVM1SF 
> "RVVMF2HI") (RVVMF2SF "RVVMF4HI")
> +
> +  (RVVM8DI "RVVM4SI") (RVVM4DI "RVVM2SI") (RVVM2DI "RVVM1SI") (RVVM1DI 
> "RVVMF2SI")
> +
> +  (RVVM8DF "RVVM4SI") (RVVM4DF "RVVM2SI") (RVVM2DF "RVVM1SI") (RVVM1DF 
> "RVVMF2SI")
> +
> +  (V1HI "V1QI")
> +  (V2HI "V2QI")
> +  (V4HI "V4QI")
> +  (V8HI "V8QI")
> +  (V16HI "V16QI")
> +  (V32HI "V32QI")
> +  (V64HI "V64QI")
> +  (V128HI "V128QI")
> +  (V256HI "V256QI")
> +  (V512HI "V512QI")
> +  (V1024HI "V1024QI")
> +  (V2048HI "V2048QI")
> +  (V1SI "V1HI")
> +  (V2SI "V2HI")
> +  (V4SI "V4HI")
> +  (V8SI "V8HI")
> +  (V16SI "V16HI")
> +  (V32SI "V32HI")
> +  (V64SI "V64HI")
> +  (V128SI "V128HI")
> +  (V256SI "V256HI")
> +  (V512SI "V512HI")
> +  (V1024SI "V1024HI")
> +  (V1DI "V1SI")
> +  (V2DI "V2SI")
> +  (V4DI "V4SI")
> +  (V8DI "V8SI")
> +  (V16DI "V16SI")
> +  (V32DI "V32SI")
> +  (V64DI "V64SI")
> +  (V128DI "V128SI")
> +  (V256DI "V256SI")
> +  (V512DI "V512SI")
> +  (V1HF "V1QI")
> +  (V2HF "V2QI")
> +  (V4HF "V4QI")
> +  (V8HF "V8QI")
> +  (V16HF "V16QI")
> +  (V32HF "V32QI")
> +  (V64HF "V64QI")
> +  (V128HF "V128QI")
> +  (V256HF "V256QI")
> +  (V512HF "V512QI")
> +  (V1024HF "V1024QI")
> +  (V2048HF "V2048QI")
> +  (V1BF "V1QI")
> +  (V2BF "V2QI")
> +  (V4BF "V4QI")
> +  (V8BF "V8QI")
> +  (V16BF "V16QI")
> +  (V32BF "V32QI")
> +  (V64BF "V64QI")
> +  (V128BF "V128QI")
> +  (V256BF "V256QI")
> +  (V512BF "V512QI")
> +  (V1024BF "V1024QI")
> +  (V2048BF "V2048QI")
> +  (V1SF "V1HI")
> +  (V2SF "V2HI")
> +  (V4SF "V4HI")
> +  (V8SF "V8HI")
> +  (V16SF "V16HI")
> +  (V32SF "V32HI")
> +  (V64SF "V64HI")
> +  (V128SF "V128HI")
> +  (V256SF "V256HI")
> +  (V512SF "V512HI")
> +  (V1024SF "V1024HI")
> +  (V1DF "V1SI")
> +  (V2DF "V2SI")
> +  (V4DF "V4SI")
> +  (V8DF "V8SI")
> +  (V16DF "V16SI")
> +  (V32DF "V32SI")
> +  (V64DF "V64SI")
> +  (V128DF "V128SI")
> +  (V256DF "V256SI")
> +  (V512DF "V512SI")
> +])
> +
> +(define_mode_attr vindex_double_trunc [
> +  (RVVM8HI "rvvm4qi") (RVVM4HI "rvvm2qi") (RVVM2HI "rvvm1qi") (RVVM1HI 
> "rvvmf2qi") (RVVMF2HI "rvvmf4qi") (RVVMF4HI "rvvmf8qi")
>
> -  (V1DI "V1QI")
> -  (V2DI "V2QI")
> -  (V4DI "V4QI")
> -  (V8DI "V8QI")
> -  (V16DI "V16QI")
> -  (V32DI "V32QI")
> -  (V64DI "V64QI")
> -  (V128DI "V128QI")
> -  (V256DI "V256QI")
> -  (V512DI "V512QI")
> -])
> +  (RVVM8BF "rvvm4qi") (RVVM4BF "rvvm2qi") (RVVM2BF "rvvm1qi") (RVVM1BF 
> "rvvmf2qi") (RVVMF2BF "rvvmf4qi") (RVVMF4BF "rvvmf8qi")
>
> -; Again in lower case.
> -(define_mode_attr v_double_trunc [
> -  (RVVM8HI "rvvm4qi") (RVVM4HI "rvvm2qi") (RVVM2HI "rvvm1qi") (RVVM1HI 
> "rvvmf2qi") (RVVMF2HI "rvvmf4qi") (RVVMF4HI "rvvmf8qi")
> +  (RVVM8HF "rvvm4qi") (RVVM4HF "rvvm2qi") (RVVM2HF "rvvm1qi") (RVVM1HF 
> "rvvmf2qi") (RVVMF2HF "rvvmf4qi") (RVVMF4HF "rvvmf8qi")
>
>    (RVVM8SI "rvvm4hi") (RVVM4SI "rvvm2hi") (RVVM2SI "rvvm1hi") (RVVM1SI 
> "rvvmf2hi") (RVVMF2SI "rvvmf4hi")
>
> -  (RVVM8SF "rvvm4hf") (RVVM4SF "rvvm2hf") (RVVM2SF "rvvm1hf") (RVVM1SF 
> "rvvmf2hf") (RVVMF2SF "rvvmf4hf")
> +  (RVVM8SF "rvvm4hi") (RVVM4SF "rvvm2hi") (RVVM2SF "rvvm1hi") (RVVM1SF 
> "rvvmf2hi") (RVVMF2SF "rvvmf4hi")
>
>    (RVVM8DI "rvvm4si") (RVVM4DI "rvvm2si") (RVVM2DI "rvvm1si") (RVVM1DI 
> "rvvmf2si")
>
> -  (RVVM8DF "rvvm4sf") (RVVM4DF "rvvm2sf") (RVVM2DF "rvvm1sf") (RVVM1DF 
> "rvvmf2sf")
> +  (RVVM8DF "rvvm4si") (RVVM4DF "rvvm2si") (RVVM2DF "rvvm1si") (RVVM1DF 
> "rvvmf2si")
>
>    (V1HI "v1qi")
>    (V2HI "v2qi")
> @@ -3057,35 +3945,114 @@ (define_mode_attr v_double_trunc [
>    (V128DI "v128si")
>    (V256DI "v256si")
>    (V512DI "v512si")
> -  (V1SF "v1hf")
> -  (V2SF "v2hf")
> -  (V4SF "v4hf")
> -  (V8SF "v8hf")
> -  (V16SF "v16hf")
> -  (V32SF "v32hf")
> -  (V64SF "v64hf")
> -  (V128SF "v128hf")
> -  (V256SF "v256hf")
> -  (V512SF "v512hf")
> -  (V1024SF "v1024hf")
> -  (V1DF "v1sf")
> -  (V2DF "v2sf")
> -  (V4DF "v4sf")
> -  (V8DF "v8sf")
> -  (V16DF "v16sf")
> -  (V32DF "v32sf")
> -  (V64DF "v64sf")
> -  (V128DF "v128sf")
> -  (V256DF "v256sf")
> -  (V512DF "v512sf")
> +  (V1HF "v1qi")
> +  (V2HF "v2qi")
> +  (V4HF "v4qi")
> +  (V8HF "v8qi")
> +  (V16HF "v16qi")
> +  (V32HF "v32qi")
> +  (V64HF "v64qi")
> +  (V128HF "v128qi")
> +  (V256HF "v256qi")
> +  (V512HF "v512qi")
> +  (V1024HF "v1024qi")
> +  (V2048HF "v2048qi")
> +  (V1BF "v1qi")
> +  (V2BF "v2qi")
> +  (V4BF "v4qi")
> +  (V8BF "v8qi")
> +  (V16BF "v16qi")
> +  (V32BF "v32qi")
> +  (V64BF "v64qi")
> +  (V128BF "v128qi")
> +  (V256BF "v256qi")
> +  (V512BF "v512qi")
> +  (V1024BF "v1024qi")
> +  (V2048BF "v2048qi")
> +  (V1SF "v1hi")
> +  (V2SF "v2hi")
> +  (V4SF "v4hi")
> +  (V8SF "v8hi")
> +  (V16SF "v16hi")
> +  (V32SF "v32hi")
> +  (V64SF "v64hi")
> +  (V128SF "v128hi")
> +  (V256SF "v256hi")
> +  (V512SF "v512hi")
> +  (V1024SF "v1024hi")
> +  (V1DF "v1si")
> +  (V2DF "v2si")
> +  (V4DF "v4si")
> +  (V8DF "v8si")
> +  (V16DF "v16si")
> +  (V32DF "v32si")
> +  (V64DF "v64si")
> +  (V128DF "v128si")
> +  (V256DF "v256si")
> +  (V512DF "v512si")
>  ])
>
> -(define_mode_attr v_quad_trunc [
> +(define_mode_attr VINDEX_QUAD_TRUNC [
> +  (RVVM8SI "RVVM2QI") (RVVM4SI "RVVM1QI") (RVVM2SI "RVVMF2QI") (RVVM1SI 
> "RVVMF4QI") (RVVMF2SI "RVVMF8QI")
> +
> +  (RVVM8SF "RVVM2QI") (RVVM4SF "RVVM1QI") (RVVM2SF "RVVMF2QI") (RVVM1SF 
> "RVVMF4QI") (RVVMF2SF "RVVMF8QI")
> +
> +  (RVVM8DI "RVVM2HI") (RVVM4DI "RVVM1HI") (RVVM2DI "RVVMF2HI") (RVVM1DI 
> "RVVMF4HI")
> +
> +  (RVVM8DF "RVVM2HI") (RVVM4DF "RVVM1HI") (RVVM2DF "RVVMF2HI") (RVVM1DF 
> "RVVMF4HI")
> +
> +  (V1SI "V1QI")
> +  (V2SI "V2QI")
> +  (V4SI "V4QI")
> +  (V8SI "V8QI")
> +  (V16SI "V16QI")
> +  (V32SI "V32QI")
> +  (V64SI "V64QI")
> +  (V128SI "V128QI")
> +  (V256SI "V256QI")
> +  (V512SI "V512QI")
> +  (V1024SI "V1024QI")
> +  (V1DI "V1HI")
> +  (V2DI "V2HI")
> +  (V4DI "V4HI")
> +  (V8DI "V8HI")
> +  (V16DI "V16HI")
> +  (V32DI "V32HI")
> +  (V64DI "V64HI")
> +  (V128DI "V128HI")
> +  (V256DI "V256HI")
> +  (V512DI "V512HI")
> +  (V1SF "V1QI")
> +  (V2SF "V2QI")
> +  (V4SF "V4QI")
> +  (V8SF "V8QI")
> +  (V16SF "V16QI")
> +  (V32SF "V32QI")
> +  (V64SF "V64QI")
> +  (V128SF "V128QI")
> +  (V256SF "V256QI")
> +  (V512SF "V512QI")
> +  (V1024SF "V1024QI")
> +  (V1DF "V1HI")
> +  (V2DF "V2HI")
> +  (V4DF "V4HI")
> +  (V8DF "V8HI")
> +  (V16DF "V16HI")
> +  (V32DF "V32HI")
> +  (V64DF "V64HI")
> +  (V128DF "V128HI")
> +  (V256DF "V256HI")
> +  (V512DF "V512HI")
> +])
> +
> +(define_mode_attr vindex_quad_trunc [
>    (RVVM8SI "rvvm2qi") (RVVM4SI "rvvm1qi") (RVVM2SI "rvvmf2qi") (RVVM1SI 
> "rvvmf4qi") (RVVMF2SI "rvvmf8qi")
>
> +  (RVVM8SF "rvvm2qi") (RVVM4SF "rvvm1qi") (RVVM2SF "rvvmf2qi") (RVVM1SF 
> "rvvmf4qi") (RVVMF2SF "rvvmf8qi")
> +
>    (RVVM8DI "rvvm2hi") (RVVM4DI "rvvm1hi") (RVVM2DI "rvvmf2hi") (RVVM1DI 
> "rvvmf4hi")
>
> -  (RVVM8DF "rvvm2hf") (RVVM4DF "rvvm1hf") (RVVM2DF "rvvmf2hf") (RVVM1DF 
> "rvvmf4hf")
> +  (RVVM8DF "rvvm2hi") (RVVM4DF "rvvm1hi") (RVVM2DF "rvvmf2hi") (RVVM1DF 
> "rvvmf4hi")
>
>    (V1SI "v1qi")
>    (V2SI "v2qi")
> @@ -3108,21 +4075,61 @@ (define_mode_attr v_quad_trunc [
>    (V128DI "v128hi")
>    (V256DI "v256hi")
>    (V512DI "v512hi")
> -  (V1DF "v1hf")
> -  (V2DF "v2hf")
> -  (V4DF "v4hf")
> -  (V8DF "v8hf")
> -  (V16DF "v16hf")
> -  (V32DF "v32hf")
> -  (V64DF "v64hf")
> -  (V128DF "v128hf")
> -  (V256DF "v256hf")
> -  (V512DF "v512hf")
> +  (V1SF "v1qi")
> +  (V2SF "v2qi")
> +  (V4SF "v4qi")
> +  (V8SF "v8qi")
> +  (V16SF "v16qi")
> +  (V32SF "v32qi")
> +  (V64SF "v64qi")
> +  (V128SF "v128qi")
> +  (V256SF "v256qi")
> +  (V512SF "v512qi")
> +  (V1024SF "v1024qi")
> +  (V1DF "v1hi")
> +  (V2DF "v2hi")
> +  (V4DF "v4hi")
> +  (V8DF "v8hi")
> +  (V16DF "v16hi")
> +  (V32DF "v32hi")
> +  (V64DF "v64hi")
> +  (V128DF "v128hi")
> +  (V256DF "v256hi")
> +  (V512DF "v512hi")
>  ])
>
> -(define_mode_attr v_oct_trunc [
> +(define_mode_attr VINDEX_OCT_TRUNC [
> +  (RVVM8DI "RVVM1QI") (RVVM4DI "RVVMF2QI") (RVVM2DI "RVVMF4QI") (RVVM1DI 
> "RVVMF8QI")
> +
> +  (RVVM8DF "RVVM1QI") (RVVM4DF "RVVMF2QI") (RVVM2DF "RVVMF4QI") (RVVM1DF 
> "RVVMF8QI")
> +
> +  (V1DI "V1QI")
> +  (V2DI "V2QI")
> +  (V4DI "V4QI")
> +  (V8DI "V8QI")
> +  (V16DI "V16QI")
> +  (V32DI "V32QI")
> +  (V64DI "V64QI")
> +  (V128DI "V128QI")
> +  (V256DI "V256QI")
> +  (V512DI "V512QI")
> +  (V1DF "V1QI")
> +  (V2DF "V2QI")
> +  (V4DF "V4QI")
> +  (V8DF "V8QI")
> +  (V16DF "V16QI")
> +  (V32DF "V32QI")
> +  (V64DF "V64QI")
> +  (V128DF "V128QI")
> +  (V256DF "V256QI")
> +  (V512DF "V512QI")
> +])
> +
> +(define_mode_attr vindex_oct_trunc [
>    (RVVM8DI "rvvm1qi") (RVVM4DI "rvvmf2qi") (RVVM2DI "rvvmf4qi") (RVVM1DI 
> "rvvmf8qi")
>
> +  (RVVM8DF "rvvm1qi") (RVVM4DF "rvvmf2qi") (RVVM2DF "rvvmf4qi") (RVVM1DF 
> "rvvmf8qi")
> +
>    (V1DI "v1qi")
>    (V2DI "v2qi")
>    (V4DI "v4qi")
> @@ -3133,52 +4140,176 @@ (define_mode_attr v_oct_trunc [
>    (V128DI "v128qi")
>    (V256DI "v256qi")
>    (V512DI "v512qi")
> +  (V1DF "v1qi")
> +  (V2DF "v2qi")
> +  (V4DF "v4qi")
> +  (V8DF "v8qi")
> +  (V16DF "v16qi")
> +  (V32DF "v32qi")
> +  (V64DF "v64qi")
> +  (V128DF "v128qi")
> +  (V256DF "v256qi")
> +  (V512DF "v512qi")
>  ])
>
> -(define_mode_attr VINDEX_DOUBLE_TRUNC [
> -  (RVVM8HI "RVVM4QI") (RVVM4HI "RVVM2QI") (RVVM2HI "RVVM1QI") (RVVM1HI 
> "RVVMF2QI") (RVVMF2HI "RVVMF4QI") (RVVMF4HI "RVVMF8QI")
> -
> -  (RVVM8BF "RVVM4QI") (RVVM4BF "RVVM2QI") (RVVM2BF "RVVM1QI") (RVVM1BF 
> "RVVMF2QI") (RVVMF2BF "RVVMF4QI") (RVVMF4BF "RVVMF8QI")
> -
> -  (RVVM8HF "RVVM4QI") (RVVM4HF "RVVM2QI") (RVVM2HF "RVVM1QI") (RVVM1HF 
> "RVVMF2QI") (RVVMF2HF "RVVMF4QI") (RVVMF4HF "RVVMF8QI")
> -
> -  (RVVM8SI "RVVM4HI") (RVVM4SI "RVVM2HI") (RVVM2SI "RVVM1HI") (RVVM1SI 
> "RVVMF2HI") (RVVMF2SI "RVVMF4HI")
> -
> -  (RVVM8SF "RVVM4HI") (RVVM4SF "RVVM2HI") (RVVM2SF "RVVM1HI") (RVVM1SF 
> "RVVMF2HI") (RVVMF2SF "RVVMF4HI")
> -
> -  (RVVM8DI "RVVM4SI") (RVVM4DI "RVVM2SI") (RVVM2DI "RVVM1SI") (RVVM1DI 
> "RVVMF2SI")
> -
> -  (RVVM8DF "RVVM4SI") (RVVM4DF "RVVM2SI") (RVVM2DF "RVVM1SI") (RVVM1DF 
> "RVVMF2SI")
> -])
> +(define_mode_attr VINDEX_DOUBLE_EXT [
> +  (RVVM4QI "RVVM8HI") (RVVM2QI "RVVM4HI") (RVVM1QI "RVVM2HI") (RVVMF2QI 
> "RVVM1HI") (RVVMF4QI "RVVMF2HI") (RVVMF8QI "RVVMF4HI")
>
> -(define_mode_attr VINDEX_QUAD_TRUNC [
> -  (RVVM8SI "RVVM2QI") (RVVM4SI "RVVM1QI") (RVVM2SI "RVVMF2QI") (RVVM1SI 
> "RVVMF4QI") (RVVMF2SI "RVVMF8QI")
> +  (RVVM4HI "RVVM8SI") (RVVM2HI "RVVM4SI") (RVVM1HI "RVVM2SI") (RVVMF2HI 
> "RVVM1SI") (RVVMF4HI "RVVMF2SI")
>
> -  (RVVM8SF "RVVM2QI") (RVVM4SF "RVVM1QI") (RVVM2SF "RVVMF2QI") (RVVM1SF 
> "RVVMF4QI") (RVVMF2SF "RVVMF8QI")
> +  (RVVM4BF "RVVM8SI") (RVVM2BF "RVVM4SI") (RVVM1BF "RVVM2SI") (RVVMF2BF 
> "RVVM1SI") (RVVMF4BF "RVVMF2SI")
>
> -  (RVVM8DI "RVVM2HI") (RVVM4DI "RVVM1HI") (RVVM2DI "RVVMF2HI") (RVVM1DI 
> "RVVMF4HI")
> +  (RVVM4HF "RVVM8SI") (RVVM2HF "RVVM4SI") (RVVM1HF "RVVM2SI") (RVVMF2HF 
> "RVVM1SI") (RVVMF4HF "RVVMF2SI")
>
> -  (RVVM8DF "RVVM2HI") (RVVM4DF "RVVM1HI") (RVVM2DF "RVVMF2HI") (RVVM1DF 
> "RVVMF4HI")
> -])
> +  (RVVM4SI "RVVM8DI") (RVVM2SI "RVVM4DI") (RVVM1SI "RVVM2DI") (RVVMF2SI 
> "RVVM1DI")
>
> -(define_mode_attr VINDEX_OCT_TRUNC [
> -  (RVVM8DI "RVVM1QI") (RVVM4DI "RVVMF2QI") (RVVM2DI "RVVMF4QI") (RVVM1DI 
> "RVVMF8QI")
> +  (RVVM4SF "RVVM8DI") (RVVM2SF "RVVM4DI") (RVVM1SF "RVVM2DI") (RVVMF2SF 
> "RVVM1DI")
>
> -  (RVVM8DF "RVVM1QI") (RVVM4DF "RVVMF2QI") (RVVM2DF "RVVMF4QI") (RVVM1DF 
> "RVVMF8QI")
> +  (V1QI "V1HI")
> +  (V2QI "V2HI")
> +  (V4QI "V4HI")
> +  (V8QI "V8HI")
> +  (V16QI "V16HI")
> +  (V32QI "V32HI")
> +  (V64QI "V64HI")
> +  (V128QI "V128HI")
> +  (V256QI "V256HI")
> +  (V512QI "V512HI")
> +  (V1024QI "V1024HI")
> +  (V2048QI "V2048HI")
> +  (V1HI "V1SI")
> +  (V2HI "V2SI")
> +  (V4HI "V4SI")
> +  (V8HI "V8SI")
> +  (V16HI "V16SI")
> +  (V32HI "V32SI")
> +  (V64HI "V64SI")
> +  (V128HI "V128SI")
> +  (V256HI "V256SI")
> +  (V512HI "V512SI")
> +  (V1024HI "V1024SI")
> +  (V1SI "V1DI")
> +  (V2SI "V2DI")
> +  (V4SI "V4DI")
> +  (V8SI "V8DI")
> +  (V16SI "V16DI")
> +  (V32SI "V32DI")
> +  (V64SI "V64DI")
> +  (V128SI "V128DI")
> +  (V256SI "V256DI")
> +  (V512SI "V512DI")
> +  (V1HF "V1SI")
> +  (V2HF "V2SI")
> +  (V4HF "V4SI")
> +  (V8HF "V8SI")
> +  (V16HF "V16SI")
> +  (V32HF "V32SI")
> +  (V64HF "V64SI")
> +  (V128HF "V128SI")
> +  (V256HF "V256SI")
> +  (V512HF "V512SI")
> +  (V1024HF "V1024SI")
> +  (V1BF "V1SI")
> +  (V2BF "V2SI")
> +  (V4BF "V4SI")
> +  (V8BF "V8SI")
> +  (V16BF "V16SI")
> +  (V32BF "V32SI")
> +  (V64BF "V64SI")
> +  (V128BF "V128SI")
> +  (V256BF "V256SI")
> +  (V512BF "V512SI")
> +  (V1024BF "V1024SI")
> +  (V1SF "V1DI")
> +  (V2SF "V2DI")
> +  (V4SF "V4DI")
> +  (V8SF "V8DI")
> +  (V16SF "V16DI")
> +  (V32SF "V32DI")
> +  (V64SF "V64DI")
> +  (V128SF "V128DI")
> +  (V256SF "V256DI")
> +  (V512SF "V512DI")
>  ])
>
> -(define_mode_attr VINDEX_DOUBLE_EXT [
> -  (RVVM4QI "RVVM8HI") (RVVM2QI "RVVM4HI") (RVVM1QI "RVVM2HI") (RVVMF2QI 
> "RVVM1HI") (RVVMF4QI "RVVMF2HI") (RVVMF8QI "RVVMF4HI")
> +(define_mode_attr vindex_double_ext [
> +  (RVVM4QI "rvvm8hi") (RVVM2QI "rvvm4hi") (RVVM1QI "rvvm2hi") (RVVMF2QI 
> "rvvm1hi") (RVVMF4QI "rvvmf2hi") (RVVMF8QI "rvvmf4hi")
>
> -  (RVVM4HI "RVVM8SI") (RVVM2HI "RVVM4SI") (RVVM1HI "RVVM2SI") (RVVMF2HI 
> "RVVM1SI") (RVVMF4HI "RVVMF2SI")
> +  (RVVM4HI "rvvm8si") (RVVM2HI "rvvm4si") (RVVM1HI "rvvm2si") (RVVMF2HI 
> "rvvm1si") (RVVMF4HI "rvvmf2si")
>
> -  (RVVM4BF "RVVM8SI") (RVVM2BF "RVVM4SI") (RVVM1BF "RVVM2SI") (RVVMF2BF 
> "RVVM1SI") (RVVMF4BF "RVVMF2SI")
> +  (RVVM4BF "rvvm8si") (RVVM2BF "rvvm4si") (RVVM1BF "rvvm2si") (RVVMF2BF 
> "rvvm1si") (RVVMF4BF "rvvmf2si")
>
> -  (RVVM4HF "RVVM8SI") (RVVM2HF "RVVM4SI") (RVVM1HF "RVVM2SI") (RVVMF2HF 
> "RVVM1SI") (RVVMF4HF "RVVMF2SI")
> +  (RVVM4HF "rvvm8si") (RVVM2HF "rvvm4si") (RVVM1HF "rvvm2si") (RVVMF2HF 
> "rvvm1si") (RVVMF4HF "rvvmf2si")
>
> -  (RVVM4SI "RVVM8DI") (RVVM2SI "RVVM4DI") (RVVM1SI "RVVM2DI") (RVVMF2SI 
> "RVVM1DI")
> +  (RVVM4SI "rvvm8di") (RVVM2SI "rvvm4di") (RVVM1SI "rvvm2di") (RVVMF2SI 
> "rvvm1di")
>
> -  (RVVM4SF "RVVM8DI") (RVVM2SF "RVVM4DI") (RVVM1SF "RVVM2DI") (RVVMF2SF 
> "RVVM1DI")
> +  (RVVM4SF "rvvm8di") (RVVM2SF "rvvm4di") (RVVM1SF "rvvm2di") (RVVMF2SF 
> "rvvm1di")
> +
> +  (V1QI "v1hi")
> +  (V2QI "v2hi")
> +  (V4QI "v4hi")
> +  (V8QI "v8hi")
> +  (V16QI "v16hi")
> +  (V32QI "v32hi")
> +  (V64QI "v64hi")
> +  (V128QI "v128hi")
> +  (V256QI "v256hi")
> +  (V512QI "v512hi")
> +  (V1024QI "v1024hi")
> +  (V2048QI "v2048hi")
> +  (V1HI "v1si")
> +  (V2HI "v2si")
> +  (V4HI "v4si")
> +  (V8HI "v8si")
> +  (V16HI "v16si")
> +  (V32HI "v32si")
> +  (V64HI "v64si")
> +  (V128HI "v128si")
> +  (V256HI "v256si")
> +  (V512HI "v512si")
> +  (V1024HI "v1024si")
> +  (V1SI "v1di")
> +  (V2SI "v2di")
> +  (V4SI "v4di")
> +  (V8SI "v8di")
> +  (V16SI "v16di")
> +  (V32SI "v32di")
> +  (V64SI "v64di")
> +  (V128SI "v128di")
> +  (V256SI "v256di")
> +  (V512SI "v512di")
> +  (V1HF "v1si")
> +  (V2HF "v2si")
> +  (V4HF "v4si")
> +  (V8HF "v8si")
> +  (V16HF "v16si")
> +  (V32HF "v32si")
> +  (V64HF "v64si")
> +  (V128HF "v128si")
> +  (V256HF "v256si")
> +  (V512HF "v512si")
> +  (V1024HF "v1024si")
> +  (V1BF "v1si")
> +  (V2BF "v2si")
> +  (V4BF "v4si")
> +  (V8BF "v8si")
> +  (V16BF "v16si")
> +  (V32BF "v32si")
> +  (V64BF "v64si")
> +  (V128BF "v128si")
> +  (V256BF "v256si")
> +  (V512BF "v512si")
> +  (V1024BF "v1024si")
> +  (V1SF "v1di")
> +  (V2SF "v2di")
> +  (V4SF "v4di")
> +  (V8SF "v8di")
> +  (V16SF "v16di")
> +  (V32SF "v32di")
> +  (V64SF "v64di")
> +  (V128SF "v128di")
> +  (V256SF "v256di")
> +  (V512SF "v512di")
>  ])
>
>  (define_mode_attr VINDEX_QUAD_EXT [
> @@ -3189,10 +4320,130 @@ (define_mode_attr VINDEX_QUAD_EXT [
>    (RVVM2BF "RVVM8DI") (RVVM1BF "RVVM4DI") (RVVMF2BF "RVVM2DI") (RVVMF4BF 
> "RVVM1DI")
>
>    (RVVM2HF "RVVM8DI") (RVVM1HF "RVVM4DI") (RVVMF2HF "RVVM2DI") (RVVMF4HF 
> "RVVM1DI")
> +
> +  (V1QI "V1SI")
> +  (V2QI "V2SI")
> +  (V4QI "V4SI")
> +  (V8QI "V8SI")
> +  (V16QI "V16SI")
> +  (V32QI "V32SI")
> +  (V64QI "V64SI")
> +  (V128QI "V128SI")
> +  (V256QI "V256SI")
> +  (V512QI "V512SI")
> +  (V1024QI "V1024SI")
> +  (V1HI "V1DI")
> +  (V2HI "V2DI")
> +  (V4HI "V4DI")
> +  (V8HI "V8DI")
> +  (V16HI "V16DI")
> +  (V32HI "V32DI")
> +  (V64HI "V64DI")
> +  (V128HI "V128DI")
> +  (V256HI "V256DI")
> +  (V512HI "V512DI")
> +  (V1HF "V1DI")
> +  (V2HF "V2DI")
> +  (V4HF "V4DI")
> +  (V8HF "V8DI")
> +  (V16HF "V16DI")
> +  (V32HF "V32DI")
> +  (V64HF "V64DI")
> +  (V128HF "V128DI")
> +  (V256HF "V256DI")
> +  (V512HF "V512DI")
> +  (V1BF "V1DI")
> +  (V2BF "V2DI")
> +  (V4BF "V4DI")
> +  (V8BF "V8DI")
> +  (V16BF "V16DI")
> +  (V32BF "V32DI")
> +  (V64BF "V64DI")
> +  (V128BF "V128DI")
> +  (V256BF "V256DI")
> +  (V512BF "V512DI")
> +])
> +
> +(define_mode_attr vindex_quad_ext [
> +  (RVVM2QI "rvvm8si") (RVVM1QI "rvvm4si") (RVVMF2QI "rvvm2si") (RVVMF4QI 
> "rvvm1si") (RVVMF8QI "rvvmf2si")
> +
> +  (RVVM2HI "rvvm8di") (RVVM1HI "rvvm4di") (RVVMF2HI "rvvm2di") (RVVMF4HI 
> "rvvm1di")
> +
> +  (RVVM2BF "rvvm8di") (RVVM1BF "rvvm4di") (RVVMF2BF "rvvm2di") (RVVMF4BF 
> "rvvm1di")
> +
> +  (RVVM2HF "rvvm8di") (RVVM1HF "rvvm4di") (RVVMF2HF "rvvm2di") (RVVMF4HF 
> "rvvm1di")
> +
> +  (V1QI "v1si")
> +  (V2QI "v2si")
> +  (V4QI "v4si")
> +  (V8QI "v8si")
> +  (V16QI "v16si")
> +  (V32QI "v32si")
> +  (V64QI "v64si")
> +  (V128QI "v128si")
> +  (V256QI "v256si")
> +  (V512QI "v512si")
> +  (V1024QI "v1024si")
> +  (V1HI "v1di")
> +  (V2HI "v2di")
> +  (V4HI "v4di")
> +  (V8HI "v8di")
> +  (V16HI "v16di")
> +  (V32HI "v32di")
> +  (V64HI "v64di")
> +  (V128HI "v128di")
> +  (V256HI "v256di")
> +  (V512HI "v512di")
> +  (V1HF "v1di")
> +  (V2HF "v2di")
> +  (V4HF "v4di")
> +  (V8HF "v8di")
> +  (V16HF "v16di")
> +  (V32HF "v32di")
> +  (V64HF "v64di")
> +  (V128HF "v128di")
> +  (V256HF "v256di")
> +  (V512HF "v512di")
> +  (V1BF "v1di")
> +  (V2BF "v2di")
> +  (V4BF "v4di")
> +  (V8BF "v8di")
> +  (V16BF "v16di")
> +  (V32BF "v32di")
> +  (V64BF "v64di")
> +  (V128BF "v128di")
> +  (V256BF "v256di")
> +  (V512BF "v512di")
>  ])
>
>  (define_mode_attr VINDEX_OCT_EXT [
>    (RVVM1QI "RVVM8DI") (RVVMF2QI "RVVM4DI") (RVVMF4QI "RVVM2DI") (RVVMF8QI 
> "RVVM1DI")
> +
> +  (V1QI "V1DI")
> +  (V2QI "V2DI")
> +  (V4QI "V4DI")
> +  (V8QI "V8DI")
> +  (V16QI "V16DI")
> +  (V32QI "V32DI")
> +  (V64QI "V64DI")
> +  (V128QI "V128DI")
> +  (V256QI "V256DI")
> +  (V512QI "V512DI")
> +])
> +
> +(define_mode_attr vindex_oct_ext [
> +  (RVVM1QI "rvvm8di") (RVVMF2QI "rvvm4di") (RVVMF4QI "rvvm2di") (RVVMF8QI 
> "rvvm1di")
> +
> +  (V1QI "v1di")
> +  (V2QI "v2di")
> +  (V4QI "v4di")
> +  (V8QI "v8di")
> +  (V16QI "v16di")
> +  (V32QI "v32di")
> +  (V64QI "v64di")
> +  (V128QI "v128di")
> +  (V256QI "v256di")
> +  (V512QI "v512di")
>  ])
>
>  (define_mode_attr VCONVERT [
> diff --git a/gcc/config/riscv/vector.md b/gcc/config/riscv/vector.md
> index ba4a43b185c..fb23f49c603 100644
> --- a/gcc/config/riscv/vector.md
> +++ b/gcc/config/riscv/vector.md
> @@ -2680,7 +2680,7 @@ (define_insn 
> "@pred_indexed_<order>load<mode>_x8_smaller_eew"
>    [(set_attr "type" "vld<order>x")
>     (set_attr "mode" "<MODE>")])
>
> -(define_insn "@pred_indexed_<order>store<RATIO64:mode><RATIO64I:mode>"
> +(define_insn "@pred_indexed_<order>store<mode>_same_eew"
>    [(set (mem:BLK (scratch))
>         (unspec:BLK
>           [(unspec:<VM>
> @@ -2690,14 +2690,14 @@ (define_insn 
> "@pred_indexed_<order>store<RATIO64:mode><RATIO64I:mode>"
>              (reg:SI VL_REGNUM)
>              (reg:SI VTYPE_REGNUM)] UNSPEC_VPREDICATE)
>            (match_operand 1 "pmode_reg_or_0_operand"      "  rJ")
> -          (match_operand:RATIO64I 2 "register_operand" "  vr")
> -          (match_operand:RATIO64 3 "register_operand"  "  vr")] ORDER))]
> +          (match_operand:<VINDEX> 2 "register_operand" "  vr")
> +          (match_operand:VINDEXED 3 "register_operand"  "  vr")] ORDER))]
>    "TARGET_VECTOR"
> -  "vs<order>xei<RATIO64I:sew>.v\t%3,(%z1),%2%p0"
> +  "vs<order>xei<sew>.v\t%3,(%z1),%2%p0"
>    [(set_attr "type" "vst<order>x")
> -   (set_attr "mode" "<RATIO64:MODE>")])
> +   (set_attr "mode" "<MODE>")])
>
> -(define_insn "@pred_indexed_<order>store<RATIO32:mode><RATIO32I:mode>"
> +(define_insn "@pred_indexed_<order>store<mode>_x2_greater_eew"
>    [(set (mem:BLK (scratch))
>         (unspec:BLK
>           [(unspec:<VM>
> @@ -2707,14 +2707,14 @@ (define_insn 
> "@pred_indexed_<order>store<RATIO32:mode><RATIO32I:mode>"
>              (reg:SI VL_REGNUM)
>              (reg:SI VTYPE_REGNUM)] UNSPEC_VPREDICATE)
>            (match_operand 1 "pmode_reg_or_0_operand"      "  rJ")
> -          (match_operand:RATIO32I 2 "register_operand" "  vr")
> -          (match_operand:RATIO32 3 "register_operand"  "  vr")] ORDER))]
> +          (match_operand:<VINDEX_DOUBLE_TRUNC> 2 "register_operand" "  vr")
> +          (match_operand:VEEWEXT2 3 "register_operand"  "  vr")] ORDER))]
>    "TARGET_VECTOR"
> -  "vs<order>xei<RATIO32I:sew>.v\t%3,(%z1),%2%p0"
> +  "vs<order>xei<double_trunc_sew>.v\t%3,(%z1),%2%p0"
>    [(set_attr "type" "vst<order>x")
> -   (set_attr "mode" "<RATIO32:MODE>")])
> +   (set_attr "mode" "<MODE>")])
>
> -(define_insn "@pred_indexed_<order>store<RATIO16:mode><RATIO16I:mode>"
> +(define_insn "@pred_indexed_<order>store<mode>_x4_greater_eew"
>    [(set (mem:BLK (scratch))
>         (unspec:BLK
>           [(unspec:<VM>
> @@ -2724,14 +2724,14 @@ (define_insn 
> "@pred_indexed_<order>store<RATIO16:mode><RATIO16I:mode>"
>              (reg:SI VL_REGNUM)
>              (reg:SI VTYPE_REGNUM)] UNSPEC_VPREDICATE)
>            (match_operand 1 "pmode_reg_or_0_operand"      "  rJ")
> -          (match_operand:RATIO16I 2 "register_operand" "  vr")
> -          (match_operand:RATIO16 3 "register_operand"  "  vr")] ORDER))]
> +          (match_operand:<VINDEX_QUAD_TRUNC> 2 "register_operand" "  vr")
> +          (match_operand:VEEWEXT4 3 "register_operand"  "  vr")] ORDER))]
>    "TARGET_VECTOR"
> -  "vs<order>xei<RATIO16I:sew>.v\t%3,(%z1),%2%p0"
> +  "vs<order>xei<quad_trunc_sew>.v\t%3,(%z1),%2%p0"
>    [(set_attr "type" "vst<order>x")
> -   (set_attr "mode" "<RATIO16:MODE>")])
> +   (set_attr "mode" "<MODE>")])
>
> -(define_insn "@pred_indexed_<order>store<RATIO8:mode><RATIO8I:mode>"
> +(define_insn "@pred_indexed_<order>store<mode>_x8_greater_eew"
>    [(set (mem:BLK (scratch))
>         (unspec:BLK
>           [(unspec:<VM>
> @@ -2741,14 +2741,14 @@ (define_insn 
> "@pred_indexed_<order>store<RATIO8:mode><RATIO8I:mode>"
>              (reg:SI VL_REGNUM)
>              (reg:SI VTYPE_REGNUM)] UNSPEC_VPREDICATE)
>            (match_operand 1 "pmode_reg_or_0_operand"      "  rJ")
> -          (match_operand:RATIO8I 2 "register_operand" "  vr")
> -          (match_operand:RATIO8 3 "register_operand"  "  vr")] ORDER))]
> +          (match_operand:<VINDEX_OCT_TRUNC> 2 "register_operand" "  vr")
> +          (match_operand:VEEWEXT8 3 "register_operand"  "  vr")] ORDER))]
>    "TARGET_VECTOR"
> -  "vs<order>xei<RATIO8I:sew>.v\t%3,(%z1),%2%p0"
> +  "vs<order>xei<oct_trunc_sew>.v\t%3,(%z1),%2%p0"
>    [(set_attr "type" "vst<order>x")
> -   (set_attr "mode" "<RATIO8:MODE>")])
> +   (set_attr "mode" "<MODE>")])
>
> -(define_insn "@pred_indexed_<order>store<RATIO4:mode><RATIO4I:mode>"
> +(define_insn "@pred_indexed_<order>store<mode>_x2_smaller_eew"
>    [(set (mem:BLK (scratch))
>         (unspec:BLK
>           [(unspec:<VM>
> @@ -2758,14 +2758,14 @@ (define_insn 
> "@pred_indexed_<order>store<RATIO4:mode><RATIO4I:mode>"
>              (reg:SI VL_REGNUM)
>              (reg:SI VTYPE_REGNUM)] UNSPEC_VPREDICATE)
>            (match_operand 1 "pmode_reg_or_0_operand"      "  rJ")
> -          (match_operand:RATIO4I 2 "register_operand" "  vr")
> -          (match_operand:RATIO4 3 "register_operand"  "  vr")] ORDER))]
> +          (match_operand:<VINDEX_DOUBLE_EXT> 2 "register_operand" "  vr")
> +          (match_operand:VEEWTRUNC2 3 "register_operand"  "  vr")] ORDER))]
>    "TARGET_VECTOR"
> -  "vs<order>xei<RATIO4I:sew>.v\t%3,(%z1),%2%p0"
> +  "vs<order>xei<double_ext_sew>.v\t%3,(%z1),%2%p0"
>    [(set_attr "type" "vst<order>x")
> -   (set_attr "mode" "<RATIO4:MODE>")])
> +   (set_attr "mode" "<MODE>")])
>
> -(define_insn "@pred_indexed_<order>store<RATIO2:mode><RATIO2I:mode>"
> +(define_insn "@pred_indexed_<order>store<mode>_x4_smaller_eew"
>    [(set (mem:BLK (scratch))
>         (unspec:BLK
>           [(unspec:<VM>
> @@ -2774,15 +2774,15 @@ (define_insn 
> "@pred_indexed_<order>store<RATIO2:mode><RATIO2I:mode>"
>              (match_operand 5 "const_int_operand"        "    i")
>              (reg:SI VL_REGNUM)
>              (reg:SI VTYPE_REGNUM)] UNSPEC_VPREDICATE)
> -          (match_operand 1 "pmode_reg_or_0_operand"       "  rJ")
> -          (match_operand:RATIO2I 2 "register_operand"  "  vr")
> -          (match_operand:RATIO2 3 "register_operand"   "  vr")] ORDER))]
> +          (match_operand 1 "pmode_reg_or_0_operand"      "  rJ")
> +          (match_operand:<VINDEX_QUAD_EXT> 2 "register_operand" "  vr")
> +          (match_operand:VEEWTRUNC4 3 "register_operand"  "  vr")] ORDER))]
>    "TARGET_VECTOR"
> -  "vs<order>xei<RATIO2I:sew>.v\t%3,(%z1),%2%p0"
> +  "vs<order>xei<quad_ext_sew>.v\t%3,(%z1),%2%p0"
>    [(set_attr "type" "vst<order>x")
> -   (set_attr "mode" "<RATIO2:MODE>")])
> +   (set_attr "mode" "<MODE>")])
>
> -(define_insn "@pred_indexed_<order>store<RATIO1:mode><RATIO1:mode>"
> +(define_insn "@pred_indexed_<order>store<mode>_x8_smaller_eew"
>    [(set (mem:BLK (scratch))
>         (unspec:BLK
>           [(unspec:<VM>
> @@ -2792,12 +2792,12 @@ (define_insn 
> "@pred_indexed_<order>store<RATIO1:mode><RATIO1:mode>"
>              (reg:SI VL_REGNUM)
>              (reg:SI VTYPE_REGNUM)] UNSPEC_VPREDICATE)
>            (match_operand 1 "pmode_reg_or_0_operand"       "  rJ")
> -          (match_operand:RATIO1 2 "register_operand"   "  vr")
> -          (match_operand:RATIO1 3 "register_operand"    "  vr")] ORDER))]
> +          (match_operand:<VINDEX_OCT_EXT> 2 "register_operand"  "  vr")
> +          (match_operand:VEEWTRUNC8 3 "register_operand"   "  vr")] ORDER))]
>    "TARGET_VECTOR"
> -  "vs<order>xei<RATIO1:sew>.v\t%3,(%z1),%2%p0"
> +  "vs<order>xei<oct_ext_sew>.v\t%3,(%z1),%2%p0"
>    [(set_attr "type" "vst<order>x")
> -   (set_attr "mode" "<RATIO1:MODE>")])
> +   (set_attr "mode" "<MODE>")])
>
>  ;; 
> -------------------------------------------------------------------------------
>  ;; ---- Predicated integer binary operations
> --
> 2.51.1
>

Reply via email to