@@ -1091,8 +1091,6 @@ srli rd, rd, XLEN - 32
 |vmsgeu.vi vd, va, i, vm | vmsgtu.vi vd, va, i-1, vm | Vector >= immediate, unsigned|
 |vmsge.vv vd, va, vb, vm | vmsle.vv vd, vb, va, vm | Vector >= Vector|
 |vmsgeu.vv vd, va, vb, vm | vmsleu.vv vd, vb, va, vm | Vector >= Vector, unsigned |
-|vmsge.vx vd, va, x, vm | vmsle.vx vd, x, va, vm | Vector >= scalar|
-|vmsgeu.vx vd, va, x, vm | vmsleu.vx vd, x, va, vm | Vector >= scalar, unsigned|
 |vmsgt.vv vd, va, vb, vm | vmslt.vv vd, vb, va, vm | Vector > Vector|
 |vmsgtu.vv vd, va, vb, vm | vmsltu.vv vd, vb, va, vm | Vector > Vector, unsigned|
 |vmslt.vi vd, va, i, vm | vmsle.vi vd, va, i-1, vm | Vector < immediate|
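The two `.vx` rows deleted above never encoded: the vector-scalar compares fix the vector operand as `vs2` and the scalar as `rs1`, so a `vmsle{u}.vx` with the operands swapped does not exist. Vector >= scalar instead needs the multi-instruction sequences added in the next hunk. Both the surviving immediate rows and the first of those sequences rely on the identity a >= b <=> a > b-1, which holds exactly when b-1 does not wrap; that is the "When x > minimum" condition below. A minimal C check (illustrative, not part of the manual; SEW=8 keeps it exhaustive):

```c
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Sketch, not part of the manual: exhaustively checks the rewrite
 * a >= b  <=>  a > b-1 at SEW=8, and shows why it fails when b-1
 * wraps (the "When x > minimum" condition). */
int main(void) {
    /* signed: holds for every b above the minimum */
    for (int a = INT8_MIN; a <= INT8_MAX; a++)
        for (int b = INT8_MIN + 1; b <= INT8_MAX; b++)
            assert((a >= b) == (a > b - 1));

    /* unsigned: holds for every b above 0 */
    for (unsigned a = 0; a <= UINT8_MAX; a++)
        for (unsigned b = 1; b <= UINT8_MAX; b++)
            assert((a >= b) == (a > b - 1));

    /* at b == minimum, b-1 wraps to the maximum: a >= minimum is
     * always true, but a > maximum never is */
    assert((0 >= INT8_MIN) && !(0 > INT8_MAX));
    assert((0u >= 0u) && !(0u > (uint8_t)(0u - 1u)));

    puts("a >= b  <=>  a > b-1 whenever b-1 does not wrap");
    return 0;
}
```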
@@ -1107,8 +1105,37 @@ srli rd, rd, XLEN - 32
 |vl4r.v v4,x0 | vl4re8.v v4, x0 | Equal to vl4re8.v |
 |vl8r.v v8,x0 | vl8re8.v v8, x0 | Equal to vl8re8.v |
 
-
-
+|vmsge{u}.vx vd, va, x, vm
+|addi t0, x, -1 +
+vmsgt{u}.vx vd, va, t0, vm
+| Vector >= scalar
+| When x > minimum
+
+|vmsge{u}.vx vd, va, x
+|vmslt{u}.vx vd, va, x +
+vmnand.mm vd, vd, vd
+| Vector >= scalar, unmasked
+| For any x
+
+|vmsge{u}.vx vd, va, x, v0.t
+|vmslt{u}.vx vd, va, x, v0.t +
+vmxor.mm vd, vd, v0
+| Vector >= scalar, masked
+| When vd≠v0
+
+|vmsge{u}.vx vd, va, x, v0.t, vt
+|vmslt{u}.vx vt, va, x +
+vmandn.mm vd, vd, vt
+| Vector >= scalar, masked
+| When vd=v0
+
+|vmsge{u}.vx vd, va, x, v0.t, vt
+|vmslt{u}.vx vt, va, x +
+vmandn.mm vt, v0, vt +
+vmandn.mm vd, vd, v0 +
+vmor.mm vd, vt, vd
+| Vector >= scalar, masked
+| For any vd
 
 
 |call offset
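Apart from the addi/vmsgt rewrite, the added sequences compute >= by complementing vmslt{u} under various mask constraints. A bit-level sketch (illustrative, not part of the manual) models one element's mask bit and checks that each expansion writes va >= x for active elements and the expected value elsewhere; the masked cases assume the masked vmslt{u} leaves inactive elements undisturbed:

```c
#include <assert.h>
#include <stdio.h>

/* Sketch, not part of the manual: lt is the vmslt{u} result for one
 * element, v0 the mask bit, old the prior vd bit.  vmandn.mm vd, a, b
 * computes a & ~b. */
int main(void) {
    for (int lt = 0; lt <= 1; lt++)
    for (int v0 = 0; v0 <= 1; v0++)
    for (int old = 0; old <= 1; old++) {
        int ge = !lt;

        /* unmasked: vmslt; vmnand.mm vd, vd, vd  =>  vd = ~lt */
        assert((~(lt & lt) & 1) == ge);

        /* masked, vd != v0: vmslt ..., v0.t; vmxor.mm vd, vd, v0
         * xor with the mask flips active bits lt -> ge and leaves
         * inactive bits at their old value */
        int vd = (v0 ? lt : old) ^ v0;
        assert(vd == (v0 ? ge : old));

        /* masked, vd == v0: vmslt vt (unmasked); vmandn.mm vd, vd, vt
         * vd starts out equal to the mask, so vd = v0 & ~lt */
        vd = v0 & ~lt & 1;
        assert(vd == (v0 ? ge : v0));

        /* masked, any vd: vmslt vt (unmasked);
         * vmandn.mm vt, v0, vt; vmandn.mm vd, vd, v0; vmor.mm vd, vt, vd */
        int vt = v0 & ~lt & 1;      /* active elements that are >= */
        vd = (old & ~v0 & 1) | vt;  /* inactive keep old, active get ge */
        assert(vd == (v0 ? ge : old));
    }
    puts("all four vmsge{u}.vx expansions compute va >= x");
    return 0;
}
```

Only the last sequence is fully general; the earlier ones save an instruction or the temporary register when the destination and masking situation are known, which is why the table lists them separately.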