1 // RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp -x c++ -emit-llvm %s -o - | FileCheck %s
2 // RUN: %clang_cc1 -fopenmp -x c++ -triple x86_64-apple-darwin10 -emit-pch -o %t %s
3 // RUN: %clang_cc1 -fopenmp -x c++ -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s
4 
5 // RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp-simd -x c++ -emit-llvm %s -o - | FileCheck --check-prefix SIMD-ONLY0 %s
6 // RUN: %clang_cc1 -fopenmp-simd -x c++ -triple x86_64-apple-darwin10 -emit-pch -o %t %s
7 // RUN: %clang_cc1 -fopenmp-simd -x c++ -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck --check-prefix SIMD-ONLY0 %s
8 // SIMD-ONLY0-NOT: {{__kmpc|__tgt}}
9 // expected-no-diagnostics
10 #ifndef HEADER
11 #define HEADER
12 
13 // CHECK-DAG: [[IDENT_T:%.+]] = type { i32, i32, i32, i32, i8* }
14 // CHECK-DAG: [[STRUCT_SHAREDS:%.+]] = type { i8*, [2 x [[STRUCT_S:%.+]]]* }
15 // CHECK-DAG: [[STRUCT_SHAREDS1:%.+]] = type { [2 x [[STRUCT_S:%.+]]]* }
16 // CHECK-DAG: [[KMP_TASK_T:%.+]] = type { i8*, i32 (i32, i8*)*, i32, %union{{.+}}, %union{{.+}} }
17 // CHECK-DAG: [[KMP_DEPEND_INFO:%.+]] = type { i64, i64, i8 }
// Element type with user-defined default ctor, copy ctor, and dtor so that
// the shared array 's' in main() is a non-trivially-copyable aggregate; the
// test verifies tasks still capture it by reference (pointer in the shareds
// struct) rather than by value.
struct S {
  int a;
  S() : a(0) {}
  S(const S &s) : a(s.a) {}
  ~S() {}
};
// Global 'a' doubles as a shared variable in the tasks and as the runtime
// bound of the VLA 'arr' declared in main().
int a;
25 // CHECK-LABEL: @main
int main() {
// CHECK: [[B:%.+]] = alloca i8
// CHECK: [[S:%.+]] = alloca [2 x [[STRUCT_S]]]
  // Locals captured by the tasks below; 'arr' is a VLA whose second
  // dimension is the global 'a', which forces runtime size computations in
  // the depend-clause lowering.
  char b;
  S s[2];
  int arr[10][a];
// CHECK: [[GTID:%.+]] = call i32 @__kmpc_global_thread_num([[IDENT_T]]* @{{.+}})
// CHECK: [[B_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS]], [[STRUCT_SHAREDS]]* [[CAPTURES:%.+]], i32 0, i32 0
// CHECK: store i8* [[B]], i8** [[B_REF]]
// CHECK: [[S_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS]], [[STRUCT_SHAREDS]]* [[CAPTURES]], i32 0, i32 1
// CHECK: store [2 x [[STRUCT_S]]]* [[S]], [2 x [[STRUCT_S]]]** [[S_REF]]
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 33, i64 40, i64 16, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY1:@.+]] to i32 (i32, i8*)*))
// CHECK: [[SHAREDS_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR:%.+]], i32 0, i32 0
// CHECK: [[SHAREDS_REF:%.+]] = load i8*, i8** [[SHAREDS_REF_PTR]]
// CHECK: [[BITCAST:%.+]] = bitcast [[STRUCT_SHAREDS]]* [[CAPTURES]] to i8*
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i64(i8* align 8 [[SHAREDS_REF]], i8* align 8 [[BITCAST]], i64 16, i1 false)
// CHECK: [[PRIORITY_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR]], i32 0, i32 4
// CHECK: [[PRIORITY:%.+]] = bitcast %union{{.+}}* [[PRIORITY_REF_PTR]] to i32*
// CHECK: store i32 {{.+}}, i32* [[PRIORITY]]
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
// Tied task with a priority clause: flag 33 = tied (1) | priority (32);
// shareds struct holds pointers to 'b' and 's'.
#pragma omp task shared(a, b, s) priority(b)
  {
    a = 15;
    b = a;
    s[0].a = 10;
  }
// CHECK: [[S_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS1]], [[STRUCT_SHAREDS1]]* [[CAPTURES:%.+]], i32 0, i32 0
// CHECK: store [2 x [[STRUCT_S]]]* [[S]], [2 x [[STRUCT_S]]]** [[S_REF]]
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{[^,]+}}, i32 [[GTID]], i32 1, i64 40, i64 8,
// CHECK: [[SHAREDS_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR:%.+]], i32 0, i32 0
// CHECK: [[SHAREDS_REF:%.+]] = load i8*, i8** [[SHAREDS_REF_PTR]]
// CHECK: [[BITCAST:%.+]] = bitcast [[STRUCT_SHAREDS1]]* [[CAPTURES]] to i8*
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i64(i8* align 8 [[SHAREDS_REF]], i8* align 8 [[BITCAST]], i64 8, i1 false)
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES:%.*]], i64 0, i64 0
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: store i64 ptrtoint (i32* @{{.+}} to i64), i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 4, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES]], i64 0, i64 1
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: ptrtoint i8* [[B]] to i64
// CHECK: store i64 %{{[^,]+}}, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 1, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES]], i64 0, i64 2
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: ptrtoint [2 x [[STRUCT_S]]]* [[S]] to i64
// CHECK: store i64 %{{[^,]+}}, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 8, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 0, [[A_VAL:%.+]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 9, [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr i32, i32* [[END]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END1]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 3
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: [[T1:%.*]] = ptrtoint i32* [[START]] to i64
// CHECK: store i64 [[T1]], i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEPS:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES]], i32 0, i32 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* [[DEPS]] to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 4, i8* %{{[^,]+}}, i32 0, i8* null)
// Task with four 'in' dependences (flag byte 1): scalar global 'a', char
// 'b', aggregate 's', and a full array section over the VLA, whose size
// must be computed from the start/end element addresses at run time.
#pragma omp task shared(a, s) depend(in : a, b, s, arr[:])
  {
    a = 15;
    s[1].a = 10;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY2:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
// Untied task (alloc flag 0) containing a critical region.
#pragma omp task untied
  {
#pragma omp critical
    a = 1;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1,
// CHECK: getelementptr inbounds [2 x [[STRUCT_S]]], [2 x [[STRUCT_S]]]* [[S]], i64 0, i64 0
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 0
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint [[STRUCT_S]]* %{{.+}} to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 4, [[A_VAL]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[START1:%.+]] = getelementptr inbounds i32, i32* [[START]], i64 [[IDX2]]
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 9, [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr inbounds i32, i32* [[END]], i64 [[IDX2]]
// CHECK: [[END2:%.+]] = getelementptr i32, i32* [[END1]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START1]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END2]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 1
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint i32* [[START1]] to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* %{{.+}} to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 2, i8* %{{[^,]+}}, i32 0, i8* null)
// Untied task with two 'out' dependences (flag byte 3): one array element
// of 's' and a VLA section 'arr[4:][b]' whose lower bound and length both
// involve run-time values.
#pragma omp task untied depend(out : s[0], arr[4:][b])
  {
    a = 1;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 3, i64 40, i64 1,
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 0
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: store i64 ptrtoint (i32* @{{.+}} to i64), i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: getelementptr inbounds [2 x [[STRUCT_S]]], [2 x [[STRUCT_S]]]* [[S]], i64 0, i64 1
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 1
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint [[STRUCT_S]]* %{{.+}} to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: [[IDX1:%.+]] = mul nsw i64 0, [[A_VAL]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[START1:%.+]] = getelementptr inbounds i32, i32* [[START]], i64 3
// CHECK: [[NEW_A_VAL:%.+]] = load i32, i32* @{{.+}},
// CHECK: [[NEW_A_VAL_I64:%.+]] = sext i32 [[NEW_A_VAL]] to i64
// CHECK: [[IDX2:%.+]] = sub nsw i64 [[NEW_A_VAL_I64]], 1
// CHECK: [[NEW_A_VAL:%.+]] = load i32, i32* @{{.+}},
// CHECK: [[NEW_A_VAL_I64:%.+]] = sext i32 [[NEW_A_VAL]] to i64
// CHECK: [[SUB:%.+]] = add nsw i64 -1, [[NEW_A_VAL_I64]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 [[SUB]], [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr inbounds i32, i32* [[END]], i64 [[IDX2]]
// CHECK: [[END2:%.+]] = getelementptr i32, i32* [[END1]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START1]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END2]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 2
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint i32* [[START1]] to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* %{{.+}} to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 3, i8* %{{[^,]+}}, i32 0, i8* null)
// Task with a constant-true 'final' clause (alloc flag 3 = tied | final)
// and three 'inout' dependences, including sections with implicit bounds.
#pragma omp task final(true) depend(inout: a, s[1], arr[:a][3:])
  {
    a = 2;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 3, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY3:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
// 'final(true)' is folded to a constant flag at the alloc call.
#pragma omp task final(true)
  {
    a = 2;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 1, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY4:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
  // Constant-false 'final' folds to a plain tied task (flag 1).
  const bool flag = false;
#pragma omp task final(flag)
  {
    a = 3;
  }
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[CMP:%.+]] = icmp ne i8 [[B_VAL]], 0
// CHECK: [[FINAL:%.+]] = select i1 [[CMP]], i32 2, i32 0
// CHECK: [[FLAGS:%.+]] = or i32 [[FINAL]], 1
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 [[FLAGS]], i64 40, i64 8, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY5:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
  // Over-aligned local; the task entry must honor the 128-byte alignment.
  int c __attribute__((aligned(128)));
// Non-constant 'final(b)' is lowered to a runtime select OR-ed into flags.
#pragma omp task final(b) shared(c)
  {
    a = 4;
    c = 5;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY6:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
// Untied task with a nested task, taskyield, and taskwait; the entry point
// is lowered as a switch over resume points (verified after main).
#pragma omp task untied
  {
    S s1;
#pragma omp task
    a = 4;
#pragma omp taskyield
    s1 = S();
#pragma omp taskwait
  }
  return a;
}
238 // CHECK: define internal i32 [[TASK_ENTRY1]](i32, [[KMP_TASK_T]]{{.*}}* noalias)
239 // CHECK: store i32 15, i32* [[A_PTR:@.+]]
240 // CHECK: [[A_VAL:%.+]] = load i32, i32* [[A_PTR]]
241 // CHECK: [[A_VAL_I8:%.+]] = trunc i32 [[A_VAL]] to i8
242 // CHECK: store i8 [[A_VAL_I8]], i8* %{{.+}}
243 // CHECK: store i32 10, i32* %{{.+}}
244 
245 // CHECK: define internal i32 [[TASK_ENTRY2]](i32, [[KMP_TASK_T]]{{.*}}* noalias)
246 // CHECK: store i32 1, i32* [[A_PTR]]
247 
248 // CHECK: define internal i32 [[TASK_ENTRY3]](i32, [[KMP_TASK_T]]{{.*}}* noalias)
249 // CHECK: store i32 2, i32* [[A_PTR]]
250 
251 // CHECK: define internal i32 [[TASK_ENTRY4]](i32, [[KMP_TASK_T]]{{.*}}* noalias)
252 // CHECK: store i32 3, i32* [[A_PTR]]
253 
254 // CHECK: define internal i32 [[TASK_ENTRY5]](i32, [[KMP_TASK_T]]{{.*}}* noalias)
255 // CHECK: store i32 4, i32* [[A_PTR]]
256 // CHECK: store i32 5, i32* [[C_PTR:%.+]], align 128
257 
258 // CHECK: define internal i32
259 // CHECK: store i32 4, i32* [[A_PTR]]
260 
261 // CHECK: define internal i32 [[TASK_ENTRY6]](i32, [[KMP_TASK_T]]{{.*}}* noalias)
262 // CHECK: switch i32 %{{.+}}, label
263 // CHECK: load i32*, i32** %
264 // CHECK: store i32 1, i32* %
265 // CHECK: call i32 @__kmpc_omp_task(%
266 
267 // CHECK: call i8* @__kmpc_omp_task_alloc(
268 // CHECK: call i32 @__kmpc_omp_task(%
269 // CHECK: load i32*, i32** %
270 // CHECK: store i32 2, i32* %
271 // CHECK: call i32 @__kmpc_omp_task(%
272 
273 // CHECK: call i32 @__kmpc_omp_taskyield(%
274 // CHECK: load i32*, i32** %
275 // CHECK: store i32 3, i32* %
276 // CHECK: call i32 @__kmpc_omp_task(%
277 
278 // CHECK: call i32 @__kmpc_omp_taskwait(%
279 // CHECK: load i32*, i32** %
280 // CHECK: store i32 4, i32* %
281 // CHECK: call i32 @__kmpc_omp_task(%
282 
// Regression case: a task region inside a member function that is invoked
// from the constructor of a global object, i.e. during static initialization.
struct S1 {
  int a;
  // Runs at global-init time via the global 's1' defined below.
  S1() { taskinit(); }
  void taskinit() {
// Task captures 'this' so it can write the member 'a'.
#pragma omp task
    a = 0;
  }
} s1;
291 
292 // CHECK-LABEL: taskinit
293 // CHECK: call i8* @__kmpc_omp_task_alloc(
294 
295 #endif
296 
297