// RUN: %clang_cc1 -verify -fopenmp -fnoopenmp-use-tls -x c++ -std=c++11 -triple x86_64-unknown-unknown -emit-llvm %s -fexceptions -fcxx-exceptions -o - | FileCheck %s
// RUN: %clang_cc1 -fopenmp -fnoopenmp-use-tls -x c++ -std=c++11 -triple x86_64-unknown-unknown -fexceptions -fcxx-exceptions -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp -fnoopenmp-use-tls -x c++ -triple x86_64-unknown-unknown -fexceptions -fcxx-exceptions -std=c++11 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s
// RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -std=c++11 -fopenmp -fnoopenmp-use-tls -fexceptions -fcxx-exceptions -debug-info-kind=line-tables-only -x c++ -emit-llvm %s -o - | FileCheck %s --check-prefix=TERM_DEBUG
// RUN: %clang_cc1 -verify -fopenmp -fnoopenmp-use-tls -x c++ -std=c++11 -DARRAY -triple x86_64-apple-darwin10 -emit-llvm %s -o - | FileCheck -check-prefix=ARRAY %s
// expected-no-diagnostics
// REQUIRES: x86-registered-target
#ifndef ARRAY
#ifndef HEADER
#define HEADER

class TestClass {
public:
  int a;
  TestClass() : a(0) {}
  TestClass(const TestClass &C) : a(C.a) {}
  TestClass &operator=(const TestClass &) { return *this; }
  ~TestClass() {}
};

// CHECK-DAG: [[TEST_CLASS_TY:%.+]] = type { i{{[0-9]+}} }
// CHECK-DAG: [[SST_TY:%.+]] = type { double }
// CHECK-DAG: [[SS_TY:%.+]] = type { i32, i8, i32* }
// CHECK-DAG: [[IDENT_T_TY:%.+]] = type { i32, i32, i32, i32, i8* }
// CHECK: [[IMPLICIT_BARRIER_SINGLE_LOC:@.+]] = private unnamed_addr constant %{{.+}} { i32 0, i32 322, i32 0, i32 0, i8*

// CHECK: define void [[FOO:@.+]]()

TestClass tc;
TestClass tc2[2];
#pragma omp threadprivate(tc, tc2)

void foo() {}

struct SS {
  int a;
  int b : 4;
  int &c;
  SS(int &d) : a(0), b(0), c(d) {
#pragma omp parallel firstprivate(a, b, c)
#pragma omp single copyprivate(a, this->b, (this)->c)
    [&]() {
      ++this->a, --b, (this)->c /= 1;
#pragma omp parallel firstprivate(a, b, c)
#pragma omp single copyprivate(a, this->b, (this)->c)
      ++(this)->a, --b, this->c /= 1;
    }();
  }
};

template <typename T>
struct SST {
  T a;
  SST() : a(T()) {
#pragma omp parallel firstprivate(a)
#pragma omp single copyprivate(this->a)
    [&]() {
      [&]() {
        ++this->a;
#pragma omp parallel firstprivate(a)
#pragma omp single copyprivate((this)->a)
        ++(this)->a;
      }();
    }();
  }
};

// CHECK-LABEL: @main
// TERM_DEBUG-LABEL: @main
int main() {
  // CHECK-DAG: [[A_ADDR:%.+]] = alloca i8
  // CHECK-DAG: [[A2_ADDR:%.+]] = alloca [2 x i8]
  // CHECK-DAG: [[C_ADDR:%.+]] = alloca [[TEST_CLASS_TY]]
  char a;
  char a2[2];
  TestClass &c = tc;
  SST<double> sst;
  SS ss(c.a);

  // CHECK: [[GTID:%.+]] = call i32 @__kmpc_global_thread_num([[IDENT_T_TY]]* [[DEFAULT_LOC:@.+]])
  // CHECK-DAG: [[DID_IT:%.+]] = alloca i32,
  // CHECK-DAG: [[COPY_LIST:%.+]] = alloca [5 x i8*],

  // CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: [[IS_SINGLE:%.+]] = icmp ne i32 [[RES]], 0
  // CHECK-NEXT: br i1 [[IS_SINGLE]], label {{%?}}[[THEN:.+]], label {{%?}}[[EXIT:.+]]
  // CHECK: [[THEN]]
  // CHECK-NEXT: store i8 2, i8* [[A_ADDR]]
  // CHECK-NEXT: call void @__kmpc_end_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: br label {{%?}}[[EXIT]]
  // CHECK: [[EXIT]]
  // CHECK-NOT: call {{.+}} @__kmpc_cancel_barrier
#pragma omp single nowait
  a = 2;
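  // A 'single' without 'nowait' emits the same __kmpc_single/__kmpc_end_single
  // pair as above, but is followed by the implicit barrier call that uses
  // [[IMPLICIT_BARRIER_SINGLE_LOC]].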
  // CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: [[IS_SINGLE:%.+]] = icmp ne i32 [[RES]], 0
  // CHECK-NEXT: br i1 [[IS_SINGLE]], label {{%?}}[[THEN:.+]], label {{%?}}[[EXIT:.+]]
  // CHECK: [[THEN]]
  // CHECK-NEXT: store i8 2, i8* [[A_ADDR]]
  // CHECK-NEXT: call void @__kmpc_end_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: br label {{%?}}[[EXIT]]
  // CHECK: [[EXIT]]
  // CHECK: call{{.*}} @__kmpc_barrier([[IDENT_T_TY]]* [[IMPLICIT_BARRIER_SINGLE_LOC]], i32 [[GTID]])
#pragma omp single
  a = 2;
  // CHECK: store i32 0, i32* [[DID_IT]]
  // CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK-NEXT: [[IS_SINGLE:%.+]] = icmp ne i32 [[RES]], 0
  // CHECK-NEXT: br i1 [[IS_SINGLE]], label {{%?}}[[THEN:.+]], label {{%?}}[[EXIT:.+]]
  // CHECK: [[THEN]]
  // CHECK-NEXT: invoke void [[FOO]]()
  // CHECK: to label {{%?}}[[CONT:.+]] unwind
  // CHECK: [[CONT]]
  // CHECK: call void @__kmpc_end_single([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]])
  // CHECK: store i32 1, i32* [[DID_IT]]
  // CHECK-NEXT: br label {{%?}}[[EXIT]]
  // CHECK: [[EXIT]]
  // CHECK: [[A_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 0
  // CHECK: store i8* [[A_ADDR]], i8** [[A_PTR_REF]],
  // CHECK: [[C_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 1
  // CHECK: store i8* {{.+}}, i8** [[C_PTR_REF]],
  // CHECK: [[TC_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 2
  // CHECK: [[TC_THREADPRIVATE_ADDR_VOID_PTR:%.+]] = call{{.*}} i8* @__kmpc_threadprivate_cached
  // CHECK: [[TC_THREADPRIVATE_ADDR:%.+]] = bitcast i8* [[TC_THREADPRIVATE_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
  // CHECK: [[TC_PTR_REF_VOID_PTR:%.+]] = bitcast [[TEST_CLASS_TY]]* [[TC_THREADPRIVATE_ADDR]] to i8*
  // CHECK: store i8* [[TC_PTR_REF_VOID_PTR]], i8** [[TC_PTR_REF]],
  // CHECK: [[A2_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 3
  // CHECK: [[BITCAST:%.+]] = bitcast [2 x i8]* [[A2_ADDR]] to i8*
  // CHECK: store i8* [[BITCAST]], i8** [[A2_PTR_REF]],
  // CHECK: [[TC2_PTR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[COPY_LIST]], i{{[0-9]+}} 0, i{{[0-9]+}} 4
  // CHECK: [[TC2_THREADPRIVATE_ADDR_VOID_PTR:%.+]] = call{{.*}} i8* @__kmpc_threadprivate_cached
  // CHECK: [[TC2_THREADPRIVATE_ADDR:%.+]] = bitcast i8* [[TC2_THREADPRIVATE_ADDR_VOID_PTR]] to [2 x [[TEST_CLASS_TY]]]*
  // CHECK: [[TC2_PTR_REF_VOID_PTR:%.+]] = bitcast [2 x [[TEST_CLASS_TY]]]* [[TC2_THREADPRIVATE_ADDR]] to i8*
  // CHECK: store i8* [[TC2_PTR_REF_VOID_PTR]], i8** [[TC2_PTR_REF]],
  // CHECK: [[COPY_LIST_VOID_PTR:%.+]] = bitcast [5 x i8*]* [[COPY_LIST]] to i8*
  // CHECK: [[DID_IT_VAL:%.+]] = load i32, i32* [[DID_IT]],
  // CHECK: call void @__kmpc_copyprivate([[IDENT_T_TY]]* [[DEFAULT_LOC]], i32 [[GTID]], i64 40, i8* [[COPY_LIST_VOID_PTR]], void (i8*, i8*)* [[COPY_FUNC:@.+]], i32 [[DID_IT_VAL]])
  // CHECK-NOT: call {{.+}} @__kmpc_cancel_barrier
#pragma omp single copyprivate(a, c, tc, a2, tc2)
  foo();
  // CHECK-NOT: call i32 @__kmpc_single
  // CHECK-NOT: call void @__kmpc_end_single
  return a;
}

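// [[COPY_FUNC]] is the helper that __kmpc_copyprivate invokes: it receives the
// destination and source [5 x i8*] lists and copies every copyprivate item,
// using a plain store for 'a', the copy assignment operator for 'c' and 'tc',
// a memcpy for the array 'a2', and an element-wise assignment loop for 'tc2'.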
// CHECK: void [[COPY_FUNC]](i8*, i8*)
// CHECK: store i8* %0, i8** [[DST_ADDR_REF:%.+]],
// CHECK: store i8* %1, i8** [[SRC_ADDR_REF:%.+]],
// CHECK: [[DST_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[DST_ADDR_REF]],
// CHECK: [[DST_ADDR:%.+]] = bitcast i8* [[DST_ADDR_VOID_PTR]] to [5 x i8*]*
// CHECK: [[SRC_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[SRC_ADDR_REF]],
// CHECK: [[SRC_ADDR:%.+]] = bitcast i8* [[SRC_ADDR_VOID_PTR]] to [5 x i8*]*
// CHECK: [[DST_A_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 0
// CHECK: [[DST_A_ADDR:%.+]] = load i8*, i8** [[DST_A_ADDR_REF]],
// CHECK: [[SRC_A_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 0
// CHECK: [[SRC_A_ADDR:%.+]] = load i8*, i8** [[SRC_A_ADDR_REF]],
// CHECK: [[SRC_A_VAL:%.+]] = load i8, i8* [[SRC_A_ADDR]],
// CHECK: store i8 [[SRC_A_VAL]], i8* [[DST_A_ADDR]],
// CHECK: [[DST_C_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 1
// CHECK: [[DST_C_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[DST_C_ADDR_REF]],
// CHECK: [[DST_C_ADDR:%.+]] = bitcast i8* [[DST_C_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: [[SRC_C_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 1
// CHECK: [[SRC_C_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[SRC_C_ADDR_REF]],
// CHECK: [[SRC_C_ADDR:%.+]] = bitcast i8* [[SRC_C_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: call{{.*}} [[TEST_CLASS_TY_ASSIGN:@.+]]([[TEST_CLASS_TY]]* [[DST_C_ADDR]], [[TEST_CLASS_TY]]* {{.*}}[[SRC_C_ADDR]])
// CHECK: [[DST_TC_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 2
// CHECK: [[DST_TC_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[DST_TC_ADDR_REF]],
// CHECK: [[DST_TC_ADDR:%.+]] = bitcast i8* [[DST_TC_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: [[SRC_TC_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 2
// CHECK: [[SRC_TC_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[SRC_TC_ADDR_REF]],
// CHECK: [[SRC_TC_ADDR:%.+]] = bitcast i8* [[SRC_TC_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: call{{.*}} [[TEST_CLASS_TY_ASSIGN]]([[TEST_CLASS_TY]]* [[DST_TC_ADDR]], [[TEST_CLASS_TY]]* {{.*}}[[SRC_TC_ADDR]])
// CHECK: [[DST_A2_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 3
// CHECK: [[DST_A2_ADDR:%.+]] = load i8*, i8** [[DST_A2_ADDR_REF]],
// CHECK: [[SRC_A2_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 3
// CHECK: [[SRC_A2_ADDR:%.+]] = load i8*, i8** [[SRC_A2_ADDR_REF]],
// CHECK: call void @llvm.memcpy.{{.+}}(i8* [[DST_A2_ADDR]], i8* [[SRC_A2_ADDR]], i{{[0-9]+}} 2, i{{[0-9]+}} 1, i1 false)
// CHECK: [[DST_TC2_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[DST_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 4
// CHECK: [[DST_TC2_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[DST_TC2_ADDR_REF]],
// CHECK: [[DST_TC2_ADDR:%.+]] = bitcast i8* [[DST_TC2_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: [[SRC_TC2_ADDR_REF:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[SRC_ADDR]], i{{[0-9]+}} 0, i{{[0-9]+}} 4
// CHECK: [[SRC_TC2_ADDR_VOID_PTR:%.+]] = load i8*, i8** [[SRC_TC2_ADDR_REF]],
// CHECK: [[SRC_TC2_ADDR:%.+]] = bitcast i8* [[SRC_TC2_ADDR_VOID_PTR]] to [[TEST_CLASS_TY]]*
// CHECK: br i1
// CHECK: call{{.*}} [[TEST_CLASS_TY_ASSIGN]]([[TEST_CLASS_TY]]* %{{.+}}, [[TEST_CLASS_TY]]* {{.*}})
// CHECK: br i1
// CHECK: ret void

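// parallel_single() below is checked with the TERM_DEBUG prefix: the
// __kmpc_single/__kmpc_end_single calls must carry !dbg locations, and the
// invoke of foo() must unwind to a landing pad that calls
// __clang_call_terminate.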
// CHECK-LABEL: parallel_single
// TERM_DEBUG-LABEL: parallel_single
void parallel_single() {
#pragma omp parallel
#pragma omp single
  // TERM_DEBUG-NOT: __kmpc_global_thread_num
  // TERM_DEBUG: call i32 @__kmpc_single({{.+}}), !dbg [[DBG_LOC_START:![0-9]+]]
  // TERM_DEBUG: invoke void {{.*}}foo{{.*}}()
  // TERM_DEBUG: unwind label %[[TERM_LPAD:.+]],
  // TERM_DEBUG-NOT: __kmpc_global_thread_num
  // TERM_DEBUG: call void @__kmpc_end_single({{.+}}), !dbg [[DBG_LOC_END:![0-9]+]]
  // TERM_DEBUG: [[TERM_LPAD]]
  // TERM_DEBUG: call void @__clang_call_terminate
  // TERM_DEBUG: unreachable
  foo();
}
// TERM_DEBUG-DAG: [[DBG_LOC_START]] = !DILocation(line: [[@LINE-12]],
// TERM_DEBUG-DAG: [[DBG_LOC_END]] = !DILocation(line: [[@LINE-3]],
#endif
#else
// ARRAY-LABEL: array_func
struct St {
  int a, b;
  St() : a(0), b(0) {}
  St &operator=(const St &) { return *this; }
  ~St() {}
};

void array_func(int n, int a[n], St s[2]) {
// ARRAY: call void @__kmpc_copyprivate(%ident_t* @{{.+}}, i32 %{{.+}}, i64 16, i8* %{{.+}}, void (i8*, i8*)* [[CPY:@.+]], i32 %{{.+}})
#pragma omp single copyprivate(a, s)
  ;
}
// ARRAY: define internal void [[CPY]]
// ARRAY: store i32* %{{.+}}, i32** %{{.+}},
// ARRAY: store %struct.St* %{{.+}}, %struct.St** %{{.+}},
#endif

// CHECK-LABEL: @_ZN2SSC2ERi(
// CHECK: call void ([[IDENT_T_TY]]*, i32, void (i32*, i32*, ...)*, ...) @__kmpc_fork_call([[IDENT_T_TY]]* @{{.+}}, i32 4, void (i32*, i32*, ...)* bitcast (void (i32*, i32*, [[SS_TY]]*, i32*, i32*, i32*)* [[SS_MICROTASK:@.+]] to void
// CHECK-NEXT: ret void

// CHECK: define internal void [[SS_MICROTASK]](i32* {{[^,]+}}, i32* {{[^,]+}}, [[SS_TY]]* {{.+}}, i32* {{.+}}, i32* {{.+}}, i32* {{.+}})
// Private a
// CHECK: alloca i32,
// CHECK: alloca i32*,
// Private b
// CHECK: alloca i32,
// Private c
// CHECK: alloca i32,
// CHECK: alloca i32*,
// CHECK: [[DID_IT:%.+]] = alloca i32,
// CHECK: store i32 0, i32* [[DID_IT]],
// CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: icmp ne i32 [[RES]], 0
// CHECK-NEXT: br i1

// CHECK: getelementptr inbounds [[CAP_TY:%.+]], [[CAP_TY]]* [[CAP:%.+]], i32 0, i32 0
// CHECK: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 1
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: store i32* %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 2
// CHECK-NEXT: store i32* %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 3
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: store i32* %
// CHECK-LABEL: invoke void @_ZZN2SSC1ERiENKUlvE_clEv(
// CHECK-SAME: [[CAP_TY]]* [[CAP]])

// CHECK: call void @__kmpc_end_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK: store i32 1, i32* [[DID_IT]],
// CHECK: br label

// CHECK: call void @__kmpc_end_single(%{{.+}}* @{{.+}}, i32 %{{.+}})
// CHECK: br label

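// The addresses of the three copyprivate variables are packed into a
// [3 x i8*] list that is then passed to __kmpc_copyprivate (i64 24 is the
// size of that list in bytes).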
// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST:%.+]], i64 0, i64 0
// CHECK: load i32*, i32** %
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST]], i64 0, i64 1
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST]], i64 0, i64 2
// CHECK: load i32*, i32** %
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK-NEXT: bitcast [3 x i8*]* [[LIST]] to i8*
// CHECK-NEXT: load i32, i32* [[DID_IT]],
// CHECK-NEXT: call void @__kmpc_copyprivate([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}}, i64 24, i8* %{{.+}}, void (i8*, i8*)* [[COPY_FUNC:@[^,]+]], i32 %{{.+}})
// CHECK-NEXT: ret void

// CHECK-LABEL: @_ZZN2SSC1ERiENKUlvE_clEv(
// CHECK: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP:%.+]], i32 0, i32 1
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: add nsw i32 %{{.+}}, 1
// CHECK-NEXT: store i32 %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 2
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: add nsw i32 %{{.+}}, -1
// CHECK-NEXT: store i32 %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 3
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: sdiv i32 %{{.+}}, 1
// CHECK-NEXT: store i32 %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 1
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 2
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: getelementptr inbounds [[CAP_TY]], [[CAP_TY]]* [[CAP]], i32 0, i32 3
// CHECK-NEXT: load i32*, i32** %
// CHECK-NEXT: call void ([[IDENT_T_TY]]*, i32, void (i32*, i32*, ...)*, ...) @__kmpc_fork_call([[IDENT_T_TY]]* @{{.+}}, i32 4, void (i32*, i32*, ...)* bitcast (void (i32*, i32*, [[SS_TY]]*, i32*, i32*, i32*)* [[SS_MICROTASK1:@.+]] to void
// CHECK-NEXT: ret void

// CHECK: define internal void [[COPY_FUNC]](i8*, i8*)
// CHECK: ret void

// CHECK: define internal void [[SS_MICROTASK1]](i32* {{[^,]+}}, i32* {{[^,]+}}, [[SS_TY]]* {{.+}}, i32* {{.+}}, i32* {{.+}}, i32* {{.+}})
// Private a
// CHECK: alloca i32,
// CHECK: alloca i32*,
// Private b
// CHECK: alloca i32,
// Private c
// CHECK: alloca i32,
// CHECK: alloca i32*,
// CHECK: [[DID_IT:%.+]] = alloca i32,
// CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: icmp ne i32 [[RES]], 0
// CHECK-NEXT: br i1

// CHECK-NOT: getelementptr inbounds
// CHECK: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: add nsw i32 %{{.+}}, 1
// CHECK-NEXT: store i32 %
// CHECK-NOT: getelementptr inbounds
// CHECK: load i32, i32* %
// CHECK-NEXT: add nsw i32 %{{.+}}, -1
// CHECK-NEXT: store i32 %
// CHECK-NOT: getelementptr inbounds
// CHECK: load i32*, i32** %
// CHECK-NEXT: load i32, i32* %
// CHECK-NEXT: sdiv i32 %{{.+}}, 1
// CHECK-NEXT: store i32 %
// CHECK-NEXT: call void @__kmpc_end_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: store i32 1, i32* [[DID_IT]],
// CHECK-NEXT: br label

// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST:%.+]], i64 0, i64 0
// CHECK: load i32*, i32** %
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST]], i64 0, i64 1
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK: getelementptr inbounds [3 x i8*], [3 x i8*]* [[LIST]], i64 0, i64 2
// CHECK: load i32*, i32** %
// CHECK-NEXT: bitcast i32* %
// CHECK-NEXT: store i8* %
// CHECK-NEXT: bitcast [3 x i8*]* [[LIST]] to i8*
// CHECK-NEXT: load i32, i32* [[DID_IT]],
// CHECK-NEXT: call void @__kmpc_copyprivate([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}}, i64 24, i8* %{{.+}}, void (i8*, i8*)* [[COPY_FUNC:@[^,]+]], i32 %{{.+}})
// CHECK-NEXT: ret void

// CHECK: define internal void [[COPY_FUNC]](i8*, i8*)
// CHECK: ret void

// CHECK-LABEL: @_ZN3SSTIdEC2Ev
// CHECK: getelementptr inbounds [[SST_TY]], [[SST_TY]]* %{{.+}}, i32 0, i32 0
// CHECK-NEXT: store double 0.000000e+00, double* %
// CHECK-NEXT: getelementptr inbounds [[SST_TY]], [[SST_TY]]* %{{.+}}, i32 0, i32 0
// CHECK-NEXT: store double* %
// CHECK-NEXT: load double*, double** %
// CHECK-NEXT: call void ([[IDENT_T_TY]]*, i32, void (i32*, i32*, ...)*, ...) @__kmpc_fork_call([[IDENT_T_TY]]* @{{.+}}, i32 2, void (i32*, i32*, ...)* bitcast (void (i32*, i32*, [[SST_TY]]*, double*)* [[SST_MICROTASK:@.+]] to void
// CHECK-NEXT: ret void

// CHECK: define internal void [[SST_MICROTASK]](i32* {{[^,]+}}, i32* {{[^,]+}}, [[SST_TY]]* {{.+}}, double* {{.+}})
// CHECK: [[RES:%.+]] = call i32 @__kmpc_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: icmp ne i32 [[RES]], 0
// CHECK-NEXT: br i1

// CHECK: getelementptr inbounds %{{.+}}, %{{.+}}* %{{.+}}, i32 0, i32 1
// CHECK-NEXT: load double*, double** %
// CHECK-NEXT: store double* %
// CHECK-LABEL: invoke void @_ZZN3SSTIdEC1EvENKUlvE_clEv(

// CHECK: call void @__kmpc_end_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: store i32 1, i32* [[DID_IT]],
// CHECK-NEXT: br label

// CHECK: call void @__kmpc_end_single([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}})
// CHECK-NEXT: br label

// CHECK: getelementptr inbounds [1 x i8*], [1 x i8*]* [[LIST:%.+]], i64 0, i64 0
// CHECK: load double*, double** %
// CHECK-NEXT: bitcast double* %
// CHECK-NEXT: store i8* %
// CHECK-NEXT: bitcast [1 x i8*]* [[LIST]] to i8*
// CHECK-NEXT: load i32, i32* [[DID_IT]],
// CHECK-NEXT: call void @__kmpc_copyprivate([[IDENT_T_TY]]* @{{.+}}, i32 %{{.+}}, i64 8, i8* %{{.+}}, void (i8*, i8*)* [[COPY_FUNC:@[^,]+]], i32 %{{.+}})
// CHECK-NEXT: ret void

// CHECK-LABEL: @_ZZN3SSTIdEC1EvENKUlvE_clEv(
// CHECK: getelementptr inbounds %{{.+}}, %{{.+}}* %{{.+}}, i32 0, i32 1
// CHECK-NEXT: getelementptr inbounds %{{.+}}, %{{.+}}* %{{.+}}, i32 0, i32 1
// CHECK-NEXT: load double*, double** %
// CHECK-NEXT: store double* %
// CHECK-LABEL: call void @_ZZZN3SSTIdEC1EvENKUlvE_clEvENKUlvE_clEv(
// CHECK-NEXT: ret void

// CHECK: define internal void [[COPY_FUNC]](i8*, i8*)
// CHECK: ret void

// CHECK-LABEL: @_ZZZN3SSTIdEC1EvENKUlvE_clEvENKUlvE_clEv(