# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -march=amdgcn -mcpu=gfx900 -verify-machineinstrs -run-pass=si-i1-copies %s -o - | FileCheck -check-prefix=GCN %s
# Exercises SILowerI1Copies on an i1 (vreg_1) phi (%7 in bb.5) whose incoming
# value %21 is produced in bb.2, reached from the self-looping bb.6 — i.e. an
# i1 copy/phi chain fed through a loop exit block.
---

name: kernel_i1_copy_phi_with_phi_incoming_value
tracksRegLiveness: true
body: |
  ; GCN-LABEL: name: kernel_i1_copy_phi_with_phi_incoming_value
  ; GCN: bb.0:
  ; GCN:   successors: %bb.1(0x40000000), %bb.5(0x40000000)
  ; GCN:   liveins: $vgpr0, $sgpr4_sgpr5
  ; GCN:   [[COPY:%[0-9]+]]:sgpr_64(p4) = COPY $sgpr4_sgpr5
  ; GCN:   [[COPY1:%[0-9]+]]:vgpr_32(s32) = COPY $vgpr0
  ; GCN:   [[S_LOAD_DWORD_IMM:%[0-9]+]]:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM [[COPY]](p4), 0, 0 :: (dereferenceable invariant load (s32), align 16, addrspace 4)
  ; GCN:   [[COPY2:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORD_IMM]]
  ; GCN:   [[COPY3:%[0-9]+]]:vgpr_32 = COPY [[COPY1]](s32)
  ; GCN:   [[V_CMP_LT_I32_e64_:%[0-9]+]]:sreg_64 = V_CMP_LT_I32_e64 [[COPY1]](s32), [[S_LOAD_DWORD_IMM]], implicit $exec
  ; GCN:   [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 0
  ; GCN:   [[SI_IF:%[0-9]+]]:sreg_64 = SI_IF killed [[V_CMP_LT_I32_e64_]], %bb.5, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
  ; GCN:   S_BRANCH %bb.1
  ; GCN: bb.1:
  ; GCN:   successors: %bb.6(0x80000000)
  ; GCN:   [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 16
  ; GCN:   [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 [[COPY3]], killed [[S_MOV_B32_]], 0, implicit $exec
  ; GCN:   [[V_CMP_GE_I32_e64_:%[0-9]+]]:sreg_64 = V_CMP_GE_I32_e64 [[V_ADD_U32_e64_]], [[COPY2]], implicit $exec
  ; GCN:   [[S_MOV_B64_1:%[0-9]+]]:sreg_64 = S_MOV_B64 0
  ; GCN:   [[COPY4:%[0-9]+]]:sreg_64 = COPY [[V_CMP_GE_I32_e64_]]
  ; GCN:   S_BRANCH %bb.6
  ; GCN: bb.2:
  ; GCN:   successors: %bb.5(0x80000000)
  ; GCN:   [[PHI:%[0-9]+]]:sreg_64 = PHI %15, %bb.6
  ; GCN:   SI_END_CF [[PHI]], implicit-def dead $exec, implicit-def dead $scc, implicit $exec
  ; GCN:   [[S_MOV_B64_2:%[0-9]+]]:sreg_64 = S_MOV_B64 -1
  ; GCN:   [[COPY5:%[0-9]+]]:sreg_64 = COPY $exec
  ; GCN:   S_BRANCH %bb.5
  ; GCN: bb.3:
  ; GCN:   successors: %bb.4(0x40000000), %bb.7(0x40000000)
  ; GCN:   ATOMIC_FENCE 5, 2
  ; GCN:   S_BARRIER
  ; GCN:   ATOMIC_FENCE 4, 2
  ; GCN:   [[COPY6:%[0-9]+]]:sreg_64 = COPY %18
  ; GCN:   [[SI_IF1:%[0-9]+]]:sreg_64 = SI_IF [[COPY6]], %bb.7, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
  ; GCN:   S_BRANCH %bb.4
  ; GCN: bb.4:
  ; GCN:   successors: %bb.7(0x80000000)
  ; GCN:   S_BRANCH %bb.7
  ; GCN: bb.5:
  ; GCN:   successors: %bb.3(0x80000000)
  ; GCN:   [[PHI1:%[0-9]+]]:sreg_64 = PHI [[S_MOV_B64_]], %bb.0, [[COPY5]], %bb.2
  ; GCN:   SI_END_CF [[SI_IF]], implicit-def dead $exec, implicit-def dead $scc, implicit $exec
  ; GCN:   S_BRANCH %bb.3
  ; GCN: bb.6:
  ; GCN:   successors: %bb.2(0x40000000), %bb.6(0x40000000)
  ; GCN:   [[PHI2:%[0-9]+]]:sreg_64 = PHI [[S_MOV_B64_1]], %bb.1, %15, %bb.6
  ; GCN:   [[COPY7:%[0-9]+]]:sreg_64 = COPY [[COPY4]]
  ; GCN:   [[SI_IF_BREAK:%[0-9]+]]:sreg_64 = SI_IF_BREAK [[COPY7]], [[PHI2]], implicit-def dead $scc
  ; GCN:   SI_LOOP [[SI_IF_BREAK]], %bb.6, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
  ; GCN:   S_BRANCH %bb.2
  ; GCN: bb.7:
  ; GCN:   SI_END_CF [[SI_IF1]], implicit-def dead $exec, implicit-def dead $scc, implicit $exec
  ; GCN:   S_ENDPGM 0
  bb.0:
    successors: %bb.1, %bb.5
    liveins: $vgpr0, $sgpr4_sgpr5

    %1:sgpr_64(p4) = COPY $sgpr4_sgpr5
    %2:vgpr_32(s32) = COPY $vgpr0
    %3:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %1:sgpr_64(p4), 0, 0 :: (dereferenceable invariant load (s32), align 16, addrspace 4)
    %8:sreg_32 = COPY %3:sreg_32_xm0_xexec
    %14:vgpr_32 = COPY %2:vgpr_32(s32)
    %9:sreg_64 = V_CMP_LT_I32_e64 %2:vgpr_32(s32), %3:sreg_32_xm0_xexec, implicit $exec
    %4:sreg_64 = S_MOV_B64 0
    %17:vreg_1 = COPY %4:sreg_64, implicit $exec
    %16:sreg_64 = SI_IF killed %9:sreg_64, %bb.5, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
    S_BRANCH %bb.1

  bb.1:
    ; predecessors: %bb.0
    successors: %bb.6

    %10:sreg_32 = S_MOV_B32 16
    %18:vgpr_32 = V_ADD_U32_e64 %14:vgpr_32, killed %10:sreg_32, 0, implicit $exec
    %11:sreg_64 = V_CMP_GE_I32_e64 %18:vgpr_32, %8:sreg_32, implicit $exec
    %12:sreg_64 = S_MOV_B64 0
    %19:vreg_1 = COPY %11:sreg_64
    S_BRANCH %bb.6

  bb.2:
    ; predecessors: %bb.6
    successors: %bb.5

    %20:sreg_64 = PHI %6:sreg_64, %bb.6
    SI_END_CF %20:sreg_64, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
    %15:sreg_64 = S_MOV_B64 -1
    %21:vreg_1 = COPY %15:sreg_64, implicit $exec
    S_BRANCH %bb.5

  bb.3:
    ; predecessors: %bb.5
    successors: %bb.4, %bb.7

    %22:vreg_1 = PHI %7:vreg_1, %bb.5
    ATOMIC_FENCE 5, 2
    S_BARRIER
    ATOMIC_FENCE 4, 2
    %23:sreg_64 = COPY %22:vreg_1
    %24:sreg_64 = SI_IF %23:sreg_64, %bb.7, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
    S_BRANCH %bb.4

  bb.4:
    ; predecessors: %bb.3
    successors: %bb.7

    S_BRANCH %bb.7

  bb.5:
    ; predecessors: %bb.0, %bb.2
    successors: %bb.3

    %7:vreg_1 = PHI %17:vreg_1, %bb.0, %21:vreg_1, %bb.2
    SI_END_CF %16:sreg_64, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
    S_BRANCH %bb.3

  bb.6:
    ; predecessors: %bb.1, %bb.6
    successors: %bb.2, %bb.6

    %5:sreg_64 = PHI %12:sreg_64, %bb.1, %6:sreg_64, %bb.6
    %13:sreg_64 = COPY %19:vreg_1
    %6:sreg_64 = SI_IF_BREAK %13:sreg_64, %5:sreg_64, implicit-def dead $scc
    SI_LOOP %6:sreg_64, %bb.6, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
    S_BRANCH %bb.2

  bb.7:
    ; predecessors: %bb.3, %bb.4

    SI_END_CF %24:sreg_64, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
    S_ENDPGM 0

...