; RUN: llc -march=amdgcn -verify-machineinstrs < %s | FileCheck -check-prefix=GCN %s

; Check that the condition computed once by v_cmp_eq_u32 is reused for the negated
; branch through s_andn2_b64 with exec, instead of being re-materialized with
; v_cndmask_b32/v_cmp inside the loop.
; GCN-LABEL: {{^}}negated_cond:
; GCN: .LBB0_1:
; GCN: v_cmp_eq_u32_e64 [[CC:[^,]+]],
; GCN: .LBB0_3:
; GCN-NOT: v_cndmask_b32
; GCN-NOT: v_cmp
; GCN: s_andn2_b64 vcc, exec, [[CC]]
; GCN: s_cbranch_vccnz .LBB0_2
define amdgpu_kernel void @negated_cond(i32 addrspace(1)* %arg1) {
bb:
  br label %bb1

bb1:
  %tmp1 = load i32, i32 addrspace(1)* %arg1
  %tmp2 = icmp eq i32 %tmp1, 0
  br label %bb2

bb2:
  %tmp3 = phi i32 [ 0, %bb1 ], [ %tmp6, %bb4 ]
  %tmp4 = shl i32 %tmp3, 5
  br i1 %tmp2, label %bb3, label %bb4

bb3:
  %tmp5 = add i32 %tmp4, 1
  br label %bb4

bb4:
  %tmp6 = phi i32 [ %tmp5, %bb3 ], [ %tmp4, %bb2 ]
  %gep = getelementptr inbounds i32, i32 addrspace(1)* %arg1, i32 %tmp6
  store i32 0, i32 addrspace(1)* %gep
  %tmp7 = icmp eq i32 %tmp6, 32
  br i1 %tmp7, label %bb1, label %bb2
}

; Same check when the conditional blocks are dominated by the block that computes
; the condition: the s_cselect_b64 mask is moved into vcc and branched on directly,
; with no v_cndmask_b32/v_cmp in between.
; GCN-LABEL: {{^}}negated_cond_dominated_blocks:
; GCN: s_cmp_lg_u32
; GCN: s_cselect_b64 [[CC1:[^,]+]], -1, 0
; GCN: s_branch [[BB1:.LBB[0-9]+_[0-9]+]]
; GCN: [[BB0:.LBB[0-9]+_[0-9]+]]
; GCN-NOT: v_cndmask_b32
; GCN-NOT: v_cmp
; GCN: [[BB1]]:
; GCN: s_mov_b64 vcc, [[CC1]]
; GCN: s_cbranch_vccz [[BB2:.LBB[0-9]+_[0-9]+]]
; GCN: s_mov_b64 vcc, exec
; GCN: s_cbranch_execnz [[BB0]]
; GCN: [[BB2]]:
define amdgpu_kernel void @negated_cond_dominated_blocks(i32 addrspace(1)* %arg1) {
bb:
  br label %bb2

bb2:
  %tmp1 = load i32, i32 addrspace(1)* %arg1
  %tmp2 = icmp eq i32 %tmp1, 0
  br label %bb4

bb3:
  ret void

bb4:
  %tmp3 = phi i32 [ 0, %bb2 ], [ %tmp7, %bb7 ]
  %tmp4 = shl i32 %tmp3, 5
  br i1 %tmp2, label %bb5, label %bb6

bb5:
  %tmp5 = add i32 %tmp4, 1
  br label %bb7

bb6:
  %tmp6 = add i32 %tmp3, 1
  br label %bb7

bb7:
  %tmp7 = phi i32 [ %tmp5, %bb5 ], [ %tmp6, %bb6 ]
  %gep = getelementptr inbounds i32, i32 addrspace(1)* %arg1, i32 %tmp7
  store i32 0, i32 addrspace(1)* %gep
  %tmp8 = icmp eq i32 %tmp7, 32
  br i1 %tmp8, label %bb3, label %bb4
}