 vssegtux,vssegtox,vlsegdff,vandn,vbrev,vbrev8,vrev8,vcpop,vclz,vctz,vrol,\
 vror,vwsll,vclmul,vclmulh,vghsh,vgmul,vaesef,vaesem,vaesdf,vaesdm,\
 vaeskf1,vaeskf2,vaesz,vsha2ms,vsha2ch,vsha2cl,vsm4k,vsm4r,vsm3me,vsm3c,\
-vfncvtbf16,vfwcvtbf16,vfwmaccbf16")
+vfncvtbf16,vfwcvtbf16,vfwmaccbf16,\
+sf_vqmacc,sf_vfnrclip")
 (const_string "true")]
 (const_string "false")))
|
|
 vfredo,vfwredu,vfwredo,vslideup,vslidedown,vislide1up,\
 vislide1down,vfslide1up,vfslide1down,vgather,viwmuladd,vfwmuladd,\
 vlsegds,vlsegdux,vlsegdox,vandn,vrol,vror,vwsll,vclmul,vclmulh,\
-vfwmaccbf16")
+vfwmaccbf16,sf_vqmacc,sf_vfnrclip")
 (symbol_ref "riscv_vector::get_ta(operands[6])")

 (eq_attr "type" "vimuladd,vfmuladd")
|
 vfwalu,vfwmul,vfsgnj,vfcmp,vslideup,vslidedown,\
 vislide1up,vislide1down,vfslide1up,vfslide1down,vgather,\
 viwmuladd,vfwmuladd,vlsegds,vlsegdux,vlsegdox,vandn,vrol,\
-vror,vwsll,vclmul,vclmulh,vfwmaccbf16")
+vror,vwsll,vclmul,vclmulh,vfwmaccbf16,sf_vqmacc,sf_vfnrclip")
 (symbol_ref "riscv_vector::get_ma(operands[7])")

 (eq_attr "type" "vimuladd,vfmuladd")
|
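The three hunks above all make the same kind of change: they append the SiFive vendor instruction types sf_vqmacc and sf_vfnrclip to existing eq_attr "type" lists inside define_attr expressions (this file appears to be GCC's gcc/config/riscv/vector.md), so instructions of those types receive the same vtype/vl handling and the same tail-agnostic (get_ta) and mask-agnostic (get_ma) policy attributes as the standard RVV types. A minimal sketch of the construct being edited follows; the attribute name "has_vtype_op" and the shortened type list are illustrative assumptions, not copied from the patch:

;; Sketch only, not part of the patch: the kind of define_attr whose
;; type list the hunks extend.  The attribute name and the abbreviated
;; type list are placeholders for illustration.
(define_attr "has_vtype_op" "false,true"
  (cond [(eq_attr "type" "vlde,vste,vimuladd,vfmuladd,\
                          sf_vqmacc,sf_vfnrclip")
         (const_string "true")]
        (const_string "false")))

Adding a type name to these comma-separated lists is all that is needed for the corresponding attribute value to apply to the new patterns; the cond arms themselves are unchanged.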