@@ -230,3 +230,95 @@ body: |
     $vgpr0 = COPY %3
     SI_RETURN implicit $vgpr0
 ...
+
+---
+# The V_CNDMASK_B32_e64 below is shrunk to V_CNDMASK_B32_e32 but is not
+# itself rewritten into an SDWA instruction, while the surrounding
+# and/shift/or chain is (see the generated CHECK lines).
+name: cndmask-not-converted
+tracksRegLiveness: true
+body: |
+  ; CHECK-LABEL: name: cndmask-not-converted
+  ; CHECK: bb.0:
+  ; CHECK-NEXT: successors: %bb.1(0x40000000), %bb.2(0x40000000)
+  ; CHECK-NEXT: liveins: $vgpr0, $sgpr8_sgpr9
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: [[COPY:%[0-9]+]]:sgpr_64 = COPY $sgpr8_sgpr9
+  ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32(s32) = COPY $vgpr0
+  ; CHECK-NEXT: [[S_LOAD_DWORDX2_IMM:%[0-9]+]]:sreg_64_xexec = S_LOAD_DWORDX2_IMM [[COPY]], 0, 0
+  ; CHECK-NEXT: S_BITCMP1_B32 [[S_LOAD_DWORDX2_IMM]].sub1, 0, implicit-def $scc
+  ; CHECK-NEXT: [[S_CSELECT_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc
+  ; CHECK-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
+  ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32 = S_AND_B32 $exec_lo, [[S_CSELECT_B32_]], implicit-def dead $scc
+  ; CHECK-NEXT: $vcc_lo = COPY [[S_AND_B32_]]
+  ; CHECK-NEXT: S_CBRANCH_VCCNZ %bb.2, implicit $vcc_lo
+  ; CHECK-NEXT: S_BRANCH %bb.1
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: bb.1:
+  ; CHECK-NEXT: successors: %bb.2(0x80000000)
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: [[COPY2:%[0-9]+]]:sreg_64 = COPY [[S_LOAD_DWORDX2_IMM]]
+  ; CHECK-NEXT: [[V_MUL_U32_U24_e64_:%[0-9]+]]:vgpr_32 = V_MUL_U32_U24_e64 [[COPY1]](s32), 5, 0, implicit $exec
+  ; CHECK-NEXT: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
+  ; CHECK-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_64 = REG_SEQUENCE [[V_MUL_U32_U24_e64_]], %subreg.sub0, killed [[V_MOV_B32_e32_1]], %subreg.sub1
+  ; CHECK-NEXT: [[GLOBAL_LOAD_USHORT:%[0-9]+]]:vgpr_32 = GLOBAL_LOAD_USHORT [[REG_SEQUENCE]], 3, 0, implicit $exec
+  ; CHECK-NEXT: [[V_AND_B32_e64_:%[0-9]+]]:vgpr_32 = V_AND_B32_e64 [[GLOBAL_LOAD_USHORT]], 255, implicit $exec
+  ; CHECK-NEXT: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 65535, implicit $exec
+  ; CHECK-NEXT: [[V_AND_B32_sdwa:%[0-9]+]]:vgpr_32 = V_AND_B32_sdwa 0, [[V_MOV_B32_e32_2]], 0, [[GLOBAL_LOAD_USHORT]], 0, 6, 0, 6, 0, implicit $exec
+  ; CHECK-NEXT: S_CMP_EQ_U32 [[COPY2]].sub0, 0, implicit-def $scc
+  ; CHECK-NEXT: [[S_CSELECT_B32_1:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc
+  ; CHECK-NEXT: $vcc_lo = COPY [[S_CSELECT_B32_1]]
+  ; CHECK-NEXT: [[V_CNDMASK_B32_e32_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e32 0, killed [[V_AND_B32_sdwa]], implicit $vcc_lo, implicit $exec
+  ; CHECK-NEXT: [[V_MOV_B32_e32_3:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 24, implicit $exec
+  ; CHECK-NEXT: [[V_LSHRREV_B32_sdwa:%[0-9]+]]:vgpr_32 = V_LSHRREV_B32_sdwa 0, [[V_MOV_B32_e32_3]], 0, [[V_CNDMASK_B32_e32_]], 0, 1, 0, 6, 6, implicit $exec
+  ; CHECK-NEXT: [[V_LSHRREV_B32_e64_:%[0-9]+]]:vgpr_32 = V_LSHRREV_B32_e64 16, [[V_CNDMASK_B32_e32_]], implicit $exec
+  ; CHECK-NEXT: [[V_MOV_B32_e32_4:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 255, implicit $exec
+  ; CHECK-NEXT: [[V_AND_B32_sdwa1:%[0-9]+]]:vgpr_32 = V_AND_B32_sdwa 0, [[V_CNDMASK_B32_e32_]], 0, [[V_MOV_B32_e32_4]], 0, 6, 0, 5, 6, implicit $exec
+  ; CHECK-NEXT: [[V_OR_B32_sdwa:%[0-9]+]]:vgpr_32 = V_OR_B32_sdwa 0, [[V_AND_B32_sdwa1]], 0, [[V_LSHRREV_B32_sdwa]], 0, 5, 0, 6, 6, implicit $exec
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: bb.2:
+  ; CHECK-NEXT: [[PHI:%[0-9]+]]:vgpr_32 = PHI [[V_MOV_B32_e32_]], %bb.0, [[V_OR_B32_sdwa]], %bb.1
+  ; CHECK-NEXT: [[V_MOV_B:%[0-9]+]]:vreg_64 = V_MOV_B64_PSEUDO 0, implicit $exec
+  ; CHECK-NEXT: GLOBAL_STORE_BYTE killed [[V_MOV_B]], [[PHI]], 0, 0, implicit $exec
+  ; CHECK-NEXT: S_ENDPGM 0
+  bb.0:
+    successors: %bb.1(0x40000000), %bb.2(0x40000000)
+    liveins: $vgpr0, $sgpr8_sgpr9
+
+    %0:sgpr_64 = COPY $sgpr8_sgpr9
+    %1:vgpr_32 = COPY $vgpr0
+    %2:sreg_64_xexec = S_LOAD_DWORDX2_IMM %0, 0, 0
+    S_BITCMP1_B32 %2.sub1, 0, implicit-def $scc
+    %3:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc
+    %4:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
+    %5:sreg_32 = S_AND_B32 $exec_lo, %3, implicit-def dead $scc
+    $vcc_lo = COPY %5
+    S_CBRANCH_VCCNZ %bb.2, implicit $vcc_lo
+    S_BRANCH %bb.1
+
+  bb.1:
+    successors: %bb.2(0x80000000)
+
+    %6:sreg_64 = COPY %2
+    %7:vgpr_32 = V_MUL_U32_U24_e64 %1(s32), 5, 0, implicit $exec
+    %8:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
+    %9:vreg_64 = REG_SEQUENCE %7, %subreg.sub0, killed %8, %subreg.sub1
+    %10:vgpr_32 = GLOBAL_LOAD_USHORT %9, 3, 0, implicit $exec
+    %11:vgpr_32 = V_AND_B32_e64 %10, 255, implicit $exec
+    %12:vgpr_32 = V_AND_B32_e64 65535, killed %11, implicit $exec
+    S_CMP_EQ_U32 %6.sub0, 0, implicit-def $scc
+    %13:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc
+    %14:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, killed %12, %13, implicit $exec
+    %15:vgpr_32 = V_LSHRREV_B32_e64 24, %14, implicit $exec
+    %16:vgpr_32 = V_LSHLREV_B16_e64 8, %15, implicit $exec
+    %17:vgpr_32 = V_LSHRREV_B32_e64 16, %14, implicit $exec
+    %18:vgpr_32 = V_AND_B32_e64 %17, 255, implicit $exec
+    %19:vgpr_32 = V_OR_B32_e64 killed %18, killed %16, implicit $exec
+    %20:vgpr_32 = V_LSHLREV_B32_e64 16, killed %19, implicit $exec
+
+  bb.2:
+    %21:vgpr_32 = PHI %4, %bb.0, %20, %bb.1
+    %22:vreg_64 = V_MOV_B64_PSEUDO 0, implicit $exec
+    GLOBAL_STORE_BYTE killed %22, %21, 0, 0, implicit $exec
+    S_ENDPGM 0
+...