// RUN: %clang_cc1 -verify -fopenmp -fnoopenmp-use-tls -x c++ -std=c++11 -triple x86_64-unknown-unknown -emit-llvm %s -fexceptions -fcxx-exceptions -o - | FileCheck %s
// RUN: %clang_cc1 -fopenmp -fnoopenmp-use-tls -x c++ -std=c++11 -triple x86_64-unknown-unknown -fexceptions -fcxx-exceptions -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp -fnoopenmp-use-tls -x c++ -triple x86_64-unknown-unknown -fexceptions -fcxx-exceptions -std=c++11 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s
// RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -std=c++11 -fopenmp -fnoopenmp-use-tls -fexceptions -fcxx-exceptions -debug-info-kind=line-tables-only -x c++ -emit-llvm %s -o - | FileCheck %s --check-prefix=TERM_DEBUG
// RUN: %clang_cc1 -verify -fopenmp -fnoopenmp-use-tls -x c++ -std=c++11 -DARRAY -triple x86_64-apple-darwin10 -emit-llvm %s -o - | FileCheck -check-prefix=ARRAY %s
// expected-no-diagnostics
//
// Codegen test for '#pragma omp single' (plain, 'nowait', and 'copyprivate'
// forms): checks the emitted __kmpc_single/__kmpc_end_single bracketing,
// implicit barriers, the copyprivate buffer + copy helper, debug locations
// (TERM_DEBUG prefix) and array/VLA copyprivate lowering (ARRAY prefix).
#ifndef ARRAY
#ifndef HEADER
#define HEADER

class TestClass {
public:
  int a;
  TestClass() : a(0) {}
  TestClass(const TestClass &C) : a(C.a) {}
  TestClass &operator=(const TestClass &) { return *this;}
  ~TestClass(){};
};

// CHECK-DAG: [[TEST_CLASS_TY:%.+]] = type { i{{[0-9]+}} }
// CHECK-DAG: [[SST_TY:%.+]] = type { double }
// CHECK-DAG: [[SS_TY:%.+]] = type { i32, i8, i32* }
// CHECK-DAG: [[IDENT_T_TY:%.+]] = type { i32, i32, i32, i32, i8* }
// CHECK: [[IMPLICIT_BARRIER_SINGLE_LOC:@.+]] = private unnamed_addr constant %{{.+}} { i32 0, i32 322, i32 0, i32 0, i8*

// CHECK: define void [[FOO:@.+]]()

TestClass tc;
TestClass tc2[2];
#pragma omp threadprivate(tc, tc2)

void foo() {}

struct SS {
  int a;
  int b : 4;
  int &c;
  SS(int &d) : a(0), b(0), c(d) {
#pragma omp parallel firstprivate(a, b, c)
#pragma omp single copyprivate(a, this->b, (this)->c)
    [&]() {
      ++this->a, --b, (this)->c /= 1;
#pragma omp parallel firstprivate(a, b, c)
#pragma omp single copyprivate(a, this->b, (this)->c)
      ++(this)->a, --b, this->c /= 1;
    }();
  }
};

template<typename T>
struct SST {
  T a;
  SST() : a(T()) {
#pragma omp parallel firstprivate(a)
#pragma omp single copyprivate(this->a)
    [&]() {
      [&]() {
        ++this->a;
#pragma omp parallel firstprivate(a)
#pragma omp single copyprivate((this)->a)
        ++(this)->a;
      }();
    }();
  }
};

// CHECK-LABEL: @main
// TERM_DEBUG-LABEL: @main
int main() {
  // CHECK-DAG: [[A_ADDR:%.+]] = alloca i8
  // CHECK-DAG: [[A2_ADDR:%.+]] = alloca [2 x i8]
  // CHECK-DAG: [[C_ADDR:%.+]] = alloca [[TEST_CLASS_TY]]
  char a;
  char a2[2];
  TestClass &c = tc;
  SST<double> sst;
  SS ss(c.a);

  // CHECK: [[GTID:%.+]] = call i32 @__kmpc_global_thread_num([[IDENT_T_TY]]* [[DEFAULT_LOC:@.+]])
  // CHECK-DAG: [[DID_IT:%.+]] = alloca i32,
  // CHECK-DAG: [[COPY_LIST:%.+]] = alloca [5 x i8*],

  // CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: [[IS_SINGLE:%.+]] = icmp ne i32 [[RES]], 0
  // CHECK-NEXT: br i1 [[IS_SINGLE]], label {{%?}}[[THEN:.+]], label {{%?}}[[EXIT:.+]]
  // CHECK: [[THEN]]
  // CHECK-NEXT: store i8 2, i8* [[A_ADDR]]
  // CHECK-NEXT: call void @__kmpc_end_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: br label {{%?}}[[EXIT]]
  // CHECK: [[EXIT]]
  // CHECK-NOT: call {{.+}} @__kmpc_cancel_barrier
#pragma omp single nowait
  a = 2;
  // CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: [[IS_SINGLE:%.+]] = icmp ne i32 [[RES]], 0
  // CHECK-NEXT: br i1 [[IS_SINGLE]], label {{%?}}[[THEN:.+]], label {{%?}}[[EXIT:.+]]
  // CHECK: [[THEN]]
  // CHECK-NEXT: store i8 2, i8* [[A_ADDR]]
  // CHECK-NEXT: call void @__kmpc_end_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: br label {{%?}}[[EXIT]]
  // CHECK: [[EXIT]]
  // CHECK: call{{.*}} @__kmpc_barrier([[IDENT_T_TY]]* [[IMPLICIT_BARRIER_SINGLE_LOC]], i32 [[GTID]])
#pragma omp single
  a = 2;
  // CHECK: store i32 0, i32* [[DID_IT]]
  // CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: [[IS_SINGLE:%.+]] = icmp ne i32 [[RES]], 0
  // CHECK-NEXT: br i1 [[IS_SINGLE]], label {{%?}}[[THEN:.+]], label {{%?}}[[EXIT:.+]]
  // CHECK: [[THEN]]
  // CHECK-NEXT: invoke void [[FOO]]()
  // CHECK: to label {{%?}}[[CONT:.+]] unwind
  // CHECK: [[CONT]]
  // CHECK: call void @__kmpc_end_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK: store i32 1, i32* [[DID_IT]]
  // CHECK-NEXT: br label {{%?}}[[EXIT]]
  // CHECK: [[EXIT]]
  // CHECK: [[A_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 0
  // CHECK: store i8* [[A_ADDR]], i8** [[A_PTR_REF]],
  // CHECK: [[C_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 1
  // CHECK: store i8* {{.+}}, i8** [[C_PTR_REF]],
  // CHECK: [[TC_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 2
  // CHECK: [[TC_THREADPRIVATE_ADDR_VOID_PTR:%.+]] = call{{.*}} i8* @__kmpc_threadprivate_cached
  // CHECK: [[TC_THREADPRIVATE_ADDR:%.+]] = bitcast i8* [[TC_THREADPRIVATE_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
  // CHECK: [[TC_PTR_REF_VOID_PTR:%.+]] = bitcast [[TEST_CLASS_TY]]* [[TC_THREADPRIVATE_ADDR]] to i8*
  // CHECK: store i8* [[TC_PTR_REF_VOID_PTR]], i8** [[TC_PTR_REF]],
  // CHECK: [[A2_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 3
  // CHECK: [[BITCAST:%.+]] = bitcast [2 x i8]* [[A2_ADDR]] to i8*
  // CHECK: store i8* [[BITCAST]], i8** [[A2_PTR_REF]],
  // CHECK: [[TC2_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 4
  // CHECK: [[TC2_THREADPRIVATE_ADDR_VOID_PTR:%.+]] = call{{.*}} i8* @__kmpc_threadprivate_cached
  // CHECK: [[TC2_THREADPRIVATE_ADDR:%.+]] = bitcast i8* [[TC2_THREADPRIVATE_ADDR_VOID_PTR]] to [2 x [[TEST_CLASS_TY]]]*
  // CHECK: [[TC2_PTR_REF_VOID_PTR:%.+]] = bitcast [2 x [[TEST_CLASS_TY]]]* [[TC2_THREADPRIVATE_ADDR]] to i8*
  // CHECK: store i8* [[TC2_PTR_REF_VOID_PTR]], i8** [[TC2_PTR_REF]],
  // CHECK: [[COPY_LIST_VOID_PTR:%.+]] = bitcast [5 x i8*]* [[COPY_LIST]] to i8*
  // CHECK: [[DID_IT_VAL:%.+]] = load i32, i32* [[DID_IT]],
  // CHECK: call void @__kmpc_copyprivate([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]], i64 40, i8* [[COPY_LIST_VOID_PTR]], void (i8*, i8*)* [[COPY_FUNC:@.+]], i32 [[DID_IT_VAL]])
  // CHECK-NOT: call {{.+}} @__kmpc_cancel_barrier
#pragma omp single copyprivate(a, c, tc, a2, tc2)
  foo();
  // CHECK-NOT: call i32 @__kmpc_single
  // CHECK-NOT: call void @__kmpc_end_single
  return a;
}

// CHECK: void [[COPY_FUNC]](i8*, i8*)
// CHECK: store i8* %0, i8** [[DST_ADDR_REF:%.+]],
// CHECK: store i8* %1, i8** [[SRC_ADDR_REF:%.+]],
// CHECK: [[DST_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[DST_ADDR_REF]],
// CHECK: [[DST_ADDR:%.+]] = bitcast i8* [[DST_ADDR_VOID_PTR]] to [5 x i8*]*
// CHECK: [[SRC_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[SRC_ADDR_REF]],
// CHECK: [[SRC_ADDR:%.+]] = bitcast i8* [[SRC_ADDR_VOID_PTR]] to [5 x i8*]*
// CHECK: [[DST_A_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 0
// CHECK: [[DST_A_ADDR:%.+]] = load i8*, i8** [[DST_A_ADDR_REF]],
// CHECK: [[SRC_A_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 0
// CHECK: [[SRC_A_ADDR:%.+]] = load i8*, i8** [[SRC_A_ADDR_REF]],
// CHECK: [[SRC_A_VAL:%.+]] = load i8, i8* [[SRC_A_ADDR]],
// CHECK: store i8 [[SRC_A_VAL]], i8* [[DST_A_ADDR]],
// CHECK: [[DST_C_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 1
// CHECK: [[DST_C_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[DST_C_ADDR_REF]],
// CHECK: [[DST_C_ADDR:%.+]] = bitcast i8* [[DST_C_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: [[SRC_C_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 1
// CHECK: [[SRC_C_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[SRC_C_ADDR_REF]],
// CHECK: [[SRC_C_ADDR:%.+]] = bitcast i8* [[SRC_C_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: call{{.*}} [[TEST_CLASS_TY_ASSIGN:@.+]]([[TEST_CLASS_TY]]* [[DST_C_ADDR]], [[TEST_CLASS_TY]]* {{.*}}[[SRC_C_ADDR]])
// CHECK: [[DST_TC_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 2
// CHECK: [[DST_TC_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[DST_TC_ADDR_REF]],
// CHECK: [[DST_TC_ADDR:%.+]] = bitcast i8* [[DST_TC_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: [[SRC_TC_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 2
// CHECK: [[SRC_TC_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[SRC_TC_ADDR_REF]],
// CHECK: [[SRC_TC_ADDR:%.+]] = bitcast i8* [[SRC_TC_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: call{{.*}} [[TEST_CLASS_TY_ASSIGN]]([[TEST_CLASS_TY]]* [[DST_TC_ADDR]], [[TEST_CLASS_TY]]* {{.*}}[[SRC_TC_ADDR]])
// CHECK: [[DST_A2_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 3
// CHECK: [[DST_A2_ADDR:%.+]] = load i8*, i8** [[DST_A2_ADDR_REF]],
// CHECK: [[SRC_A2_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 3
// CHECK: [[SRC_A2_ADDR:%.+]] = load i8*, i8** [[SRC_A2_ADDR_REF]],
// CHECK: call void @llvm.memcpy.{{.+}}(i8* [[DST_A2_ADDR]], i8* [[SRC_A2_ADDR]], i{{[0-9]+}} 2, i{{[0-9]+}} 1, i1 false)
// CHECK: [[DST_TC2_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 4
// CHECK: [[DST_TC2_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[DST_TC2_ADDR_REF]],
// CHECK: [[DST_TC2_ADDR:%.+]] = bitcast i8* [[DST_TC2_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: [[SRC_TC2_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 4
// CHECK: [[SRC_TC2_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[SRC_TC2_ADDR_REF]],
// CHECK: [[SRC_TC2_ADDR:%.+]] = bitcast i8* [[SRC_TC2_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: br i1
// CHECK: call{{.*}} [[TEST_CLASS_TY_ASSIGN]]([[TEST_CLASS_TY]]* %{{.+}}, [[TEST_CLASS_TY]]* {{.*}})
// CHECK: br i1
// CHECK: ret void

// CHECK-LABEL: parallel_single
// TERM_DEBUG-LABEL: parallel_single
void parallel_single() {
#pragma omp parallel
#pragma omp single
  // TERM_DEBUG-NOT: __kmpc_global_thread_num
  // TERM_DEBUG: call i32 @__kmpc_single({{.+}}), !dbg [[DBG_LOC_START:![0-9]+]]
  // TERM_DEBUG: invoke void {{.*}}foo{{.*}}()
  // TERM_DEBUG: unwind label %[[TERM_LPAD:.+]],
  // TERM_DEBUG-NOT: __kmpc_global_thread_num
  // TERM_DEBUG: call void @__kmpc_end_single({{.+}}), !dbg [[DBG_LOC_END:![0-9]+]]
  // TERM_DEBUG: [[TERM_LPAD]]
  // TERM_DEBUG: call void @__clang_call_terminate
  // TERM_DEBUG: unreachable
  foo();
}
// TERM_DEBUG-DAG: [[DBG_LOC_START]] = !DILocation(line: [[@LINE-12]],
// TERM_DEBUG-DAG: [[DBG_LOC_END]] = !DILocation(line: [[@LINE-3]],
#endif
#else
// ARRAY-LABEL: array_func
struct St {
  int a, b;
  St() : a(0), b(0) {}
  St &operator=(const St &) { return *this; };
  ~St() {}
};

void array_func(int n, int a[n], St s[2]) {
  // ARRAY: call void @__kmpc_copyprivate(%ident_t* @{{.+}}, i32 %{{.+}}, i64 16, i8* %{{.+}}, void (i8*, i8*)* [[CPY:@.+]], i32 %{{.+}})
#pragma omp single copyprivate(a, s)
  ;
}
// ARRAY: define internal void [[CPY]]
// ARRAY: store i32* %{{.+}}, i32** %{{.+}},
// ARRAY: store %struct.St* %{{.+}}, %struct.St** %{{.+}},
#endif

// CHECK-LABEL:@_ZN2SSC2ERi(
// CHECK: call void ([[IDENT_T_TY]]*, i32, void (i32*, i32*, ...)*, ...) @__kmpc_fork_call([[IDENT_T_TY]]* @{{.+}}, i32 4, void (i32*, i32*, ...)* bitcast (void (i32*, i32*, [[SS_TY]]*, i64, i64, i64)* [[SS_MICROTASK:@.+]] to void
// CHECK-NEXT: ret void

// CHECK: define internal void [[SS_MICROTASK]](i32* {{[^,]+}}, i32* {{[^,]+}}, [[SS_TY]]* {{.+}}, i64 {{.+}}, i64 {{.+}}, i64 {{.+}})
// Private a
// CHECK: alloca i64,
// Private b
// CHECK: alloca i64,
// Private c
// CHECK: alloca i64,
// CHECK: alloca i32*,
// CHECK: alloca i32*,
// CHECK: alloca i32*,
// CHECK: alloca i32*,
// CHECK: [[DID_IT:%.+]] = alloca i32,
// CHECK: bitcast i64* %{{.+}} to i32*
// CHECK: bitcast i64* %{{.+}} to i32*
// CHECK: bitcast i64* %{{.+}} to i32*
// CHECK: store i32 0, i32* [[DID_IT]],
// CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: icmp ne i32 [[RES]], 0
// CHECK-NEXT: br i1

// CHECK: getelementptr inbounds [[CAP_TY:%.+]], [[CAP_TY]]* [[CAP:%.+]], i32 0, i32 0
// CHECK: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 1
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: store i32* %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 2
// CHECK-NEXT: store i32* %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 3
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: store i32* %
// CHECK-LABEL: invoke void @_ZZN2SSC1ERiENKUlvE_clEv(
// CHECK-SAME: [[CAP_TY]]* [[CAP]])

// CHECK: call void @__kmpc_end_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK: store i32 1, i32* [[DID_IT]],
// CHECK: br label

// CHECK: call void @__kmpc_end_single(%{{.+}}* @{{.+}}, i32 %{{.+}})
// CHECK: br label

// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST:%.+]], i64 0, i64 0
// CHECK: load i32*, i32** %
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST]], i64 0, i64 1
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST]], i64 0, i64 2
// CHECK: load i32*, i32** %
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK-NEXT: bitcast [3 x i8*]* [[LIST]] to i8*
// CHECK-NEXT: load i32, i32* [[DID_IT]],
// CHECK-NEXT: call void @__kmpc_copyprivate([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}}, i64 24, i8* %{{.+}}, void (i8*, i8*)* [[COPY_FUNC:@[^,]+]], i32 %{{.+}})
// CHECK-NEXT: ret void

// CHECK-LABEL: @_ZZN2SSC1ERiENKUlvE_clEv(
// CHECK: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP:%.+]], i32 0, i32 1
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: add nsw i32 %{{.+}}, 1
// CHECK-NEXT: store i32 %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 2
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: add nsw i32 %{{.+}}, -1
// CHECK-NEXT: store i32 %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 3
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: sdiv i32 %{{.+}}, 1
// CHECK-NEXT: store i32 %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 1
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: bitcast i64* %
// CHECK-NEXT: store i32 %{{.+}}, i32* %
// CHECK-NEXT: load i64, i64* %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 2
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: bitcast i64* %
// CHECK-NEXT: store i32 %{{.+}}, i32* %
// CHECK-NEXT: load i64, i64* %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 3
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: bitcast i64* %
// CHECK-NEXT: store i32 %{{.+}}, i32* %
// CHECK-NEXT: load i64, i64* %
// CHECK-NEXT: call void ([[IDENT_T_TY]]*, i32, void (i32*, i32*, ...)*, ...) @__kmpc_fork_call([[IDENT_T_TY]]* @{{.+}}, i32 4, void (i32*, i32*, ...)* bitcast (void (i32*, i32*, [[SS_TY]]*, i64, i64, i64)* [[SS_MICROTASK1:@.+]] to void
// CHECK-NEXT: ret void

// CHECK: define internal void [[COPY_FUNC]](i8*, i8*)
// CHECK: ret void

// CHECK: define internal void [[SS_MICROTASK1]](i32* {{[^,]+}}, i32* {{[^,]+}}, [[SS_TY]]* {{.+}}, i64 {{.+}}, i64 {{.+}}, i64 {{.+}})
// Private a
// CHECK: alloca i64,
// Private b
// CHECK: alloca i64,
// Private c
// CHECK: alloca i64,
// CHECK: alloca i32*,
// CHECK: alloca i32*,
// CHECK: alloca i32*,
// CHECK: alloca i32*,
// CHECK: [[DID_IT:%.+]] = alloca i32,
// CHECK: bitcast i64* %{{.+}} to i32*
// CHECK: bitcast i64* %{{.+}} to i32*
// CHECK: bitcast i64* %{{.+}} to i32*
// CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: icmp ne i32 [[RES]], 0
// CHECK-NEXT: br i1

// CHECK-NOT: getelementptr inbounds
// CHECK: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: add nsw i32 %{{.+}}, 1
// CHECK-NEXT: store i32 %
// CHECK-NOT: getelementptr inbounds
// CHECK: load i32, i32* %
// CHECK-NEXT: add nsw i32 %{{.+}}, -1
// CHECK-NEXT: store i32 %
// CHECK-NOT: getelementptr inbounds
// CHECK: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: sdiv i32 %{{.+}}, 1
// CHECK-NEXT: store i32 %
// CHECK-NEXT: call void @__kmpc_end_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: store i32 1, i32* [[DID_IT]],
// CHECK-NEXT: br label

// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST:%.+]], i64 0, i64 0
// CHECK: load i32*, i32** %
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST]], i64 0, i64 1
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST]], i64 0, i64 2
// CHECK: load i32*, i32** %
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK-NEXT: bitcast [3 x i8*]* [[LIST]] to i8*
// CHECK-NEXT: load i32, i32* [[DID_IT]],
// CHECK-NEXT: call void @__kmpc_copyprivate([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}}, i64 24, i8* %{{.+}}, void (i8*, i8*)* [[COPY_FUNC:@[^,]+]], i32 %{{.+}})
// CHECK-NEXT: ret void

// CHECK: define internal void [[COPY_FUNC]](i8*, i8*)
// CHECK: ret void

// CHECK-LABEL: @_ZN3SSTIdEC2Ev
// CHECK: getelementptr inbounds [[SST_TY]], [[SST_TY]]* %{{.+}}, i32 0, i32 0
// CHECK-NEXT: store double 0.000000e+00, double* %
// CHECK-NEXT: getelementptr inbounds [[SST_TY]], [[SST_TY]]* %{{.+}}, i32 0, i32 0
// CHECK-NEXT: store double* %{{.+}}, double** %
// CHECK-NEXT: load double*, double** %
// CHECK-NEXT: load double, double* %
// CHECK-NEXT: bitcast i64* %{{.+}} to double*
// CHECK-NEXT: store double %{{.+}}, double* %
// CHECK-NEXT: load i64, i64* %
// CHECK-NEXT: call void ([[IDENT_T_TY]]*, i32, void (i32*, i32*, ...)*, ...) @__kmpc_fork_call([[IDENT_T_TY]]* @{{.+}}, i32 2, void (i32*, i32*, ...)* bitcast (void (i32*, i32*, [[SST_TY]]*, i64)* [[SST_MICROTASK:@.+]] to void
// CHECK-NEXT: ret void

// CHECK: define internal void [[SST_MICROTASK]](i32* {{[^,]+}}, i32* {{[^,]+}}, [[SST_TY]]* {{.+}}, i64 {{.+}})
// CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: icmp ne i32 [[RES]], 0
// CHECK-NEXT: br i1

// CHECK: getelementptr inbounds %{{.+}}, %{{.+}}* %{{.+}}, i32 0, i32 1
// CHECK-NEXT: load double*, double** %
// CHECK-NEXT: store double* %
// CHECK-LABEL: invoke void @_ZZN3SSTIdEC1EvENKUlvE_clEv(

// CHECK: call void @__kmpc_end_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: store i32 1, i32* [[DID_IT]],
// CHECK-NEXT: br label

// CHECK: call void @__kmpc_end_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: br label

// CHECK: getelementptr inbounds [1 x i8*], [1 x i8*]* [[LIST:%.+]], i64 0, i64 0
// CHECK: load double*, double** %
// CHECK-NEXT: bitcast double* %
// CHECK-NEXT: store i8* %
// CHECK-NEXT: bitcast [1 x i8*]* [[LIST]] to i8*
// CHECK-NEXT: load i32, i32* [[DID_IT]],
// CHECK-NEXT: call void @__kmpc_copyprivate([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}}, i64 8, i8* %{{.+}}, void (i8*, i8*)* [[COPY_FUNC:@[^,]+]], i32 %{{.+}})
// CHECK-NEXT: ret void

// CHECK-LABEL: @_ZZN3SSTIdEC1EvENKUlvE_clEv(
// CHECK: getelementptr inbounds %{{.+}}, %{{.+}}* %{{.+}}, i32 0, i32 1
// CHECK-NEXT: getelementptr inbounds %{{.+}}, %{{.+}}* %{{.+}}, i32 0, i32 1
// CHECK-NEXT: load double*, double** %
// CHECK-NEXT: store double* %
// CHECK-LABEL: call void @_ZZZN3SSTIdEC1EvENKUlvE_clEvENKUlvE_clEv(
// CHECK-NEXT: ret void

// CHECK: define internal void [[COPY_FUNC]](i8*, i8*)
// CHECK: ret void

// CHECK-LABEL: @_ZZZN3SSTIdEC1EvENKUlvE_clEvENKUlvE_clEv(