1 // RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp -x c++ -emit-llvm %s -o - | FileCheck %s --check-prefix CHECK --check-prefix UNTIEDRT
2 // RUN: %clang_cc1 -fopenmp -x c++ -triple x86_64-apple-darwin10 -emit-pch -o %t %s
3 // RUN: %clang_cc1 -fopenmp -x c++ -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s
4 //
5 // RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp -fopenmp-enable-irbuilder -x c++ -emit-llvm %s -o - | FileCheck %s
6 // RUN: %clang_cc1 -fopenmp -fopenmp-enable-irbuilder -x c++ -triple x86_64-apple-darwin10 -emit-pch -o %t %s
7 // RUN: %clang_cc1 -fopenmp -fopenmp-enable-irbuilder -x c++ -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s
8 
9 // RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp-simd -x c++ -emit-llvm %s -o - | FileCheck --check-prefix SIMD-ONLY0 %s
10 // RUN: %clang_cc1 -fopenmp-simd -x c++ -triple x86_64-apple-darwin10 -emit-pch -o %t %s
11 // RUN: %clang_cc1 -fopenmp-simd -x c++ -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck --check-prefix SIMD-ONLY0 %s
12 // SIMD-ONLY0-NOT: {{__kmpc|__tgt}}
13 // expected-no-diagnostics
14 #ifndef HEADER
15 #define HEADER
16 
17 // CHECK-DAG: [[IDENT_T:%.+]] = type { i32, i32, i32, i32, i8* }
18 // CHECK-DAG: [[STRUCT_SHAREDS:%.+]] = type { i8*, [2 x [[STRUCT_S:%.+]]]* }
19 // CHECK-DAG: [[STRUCT_SHAREDS1:%.+]] = type { [2 x [[STRUCT_S:%.+]]]* }
20 // CHECK-DAG: [[KMP_TASK_T:%.+]] = type { i8*, i32 (i32, i8*)*, i32, %union{{.+}}, %union{{.+}} }
21 // CHECK-DAG: [[KMP_DEPEND_INFO:%.+]] = type { i64, i64, i8 }
// Helper type with a non-trivial default ctor, copy ctor, and dtor so that
// tasks capturing S objects exercise the runtime's construct/copy/destroy
// paths (matched later via the [[CONSTR]] / [[DESTR]] FileCheck captures).
// Do not restyle: the outlined-task IR patterns depend on this exact shape.
struct S {
  int a; // observable state written inside the task regions in main()
  S() : a(0) {}
  S(const S &s) : a(s.a) {}
  ~S() {}
};
28 int a;
29 // CHECK-LABEL: @main
int main() {
// CHECK: [[B:%.+]] = alloca i8
// CHECK: [[S:%.+]] = alloca [2 x [[STRUCT_S]]]
  char b;
  S s[2];
  // VLA: second dimension sized by the global 'a'; its runtime extent feeds
  // the array-section depend clauses below.
  int arr[10][a];
// CHECK: [[B_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS]], [[STRUCT_SHAREDS]]* [[CAPTURES:%.+]], i32 0, i32 0
// CHECK: store i8* [[B]], i8** [[B_REF]]
// CHECK: [[S_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS]], [[STRUCT_SHAREDS]]* [[CAPTURES]], i32 0, i32 1
// CHECK: store [2 x [[STRUCT_S]]]* [[S]], [2 x [[STRUCT_S]]]** [[S_REF]]
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i32 33, i64 40, i64 16, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY1:@.+]] to i32 (i32, i8*)*))
// CHECK: [[SHAREDS_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR:%.+]], i32 0, i32 0
// CHECK: [[SHAREDS_REF:%.+]] = load i8*, i8** [[SHAREDS_REF_PTR]]
// CHECK: [[BITCAST:%.+]] = bitcast [[STRUCT_SHAREDS]]* [[CAPTURES]] to i8*
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i64(i8* align 8 [[SHAREDS_REF]], i8* align 8 [[BITCAST]], i64 16, i1 false)
// CHECK: [[PRIORITY_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR]], i32 0, i32 4
// CHECK: [[PRIORITY:%.+]] = bitcast %union{{.+}}* [[PRIORITY_REF_PTR]] to i32*
// CHECK: store i32 {{.+}}, i32* [[PRIORITY]]
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i8* [[ORIG_TASK_PTR]])
// Tied task with shared captures and a priority clause; the alloc flags above
// are 33 (tied | priority bit, per the checked __kmpc_omp_task_alloc call).
#pragma omp task shared(a, b, s) priority(b)
  {
    a = 15;
    b = a;
    s[0].a = 10;
  }
// CHECK: [[S_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS1]], [[STRUCT_SHAREDS1]]* [[CAPTURES:%.+]], i32 0, i32 0
// CHECK: store [2 x [[STRUCT_S]]]* [[S]], [2 x [[STRUCT_S]]]** [[S_REF]]
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{[^,]+}}, i32 {{%.*}}, i32 1, i64 40, i64 8,
// CHECK: [[SHAREDS_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR:%.+]], i32 0, i32 0
// CHECK: [[SHAREDS_REF:%.+]] = load i8*, i8** [[SHAREDS_REF_PTR]]
// CHECK: [[BITCAST:%.+]] = bitcast [[STRUCT_SHAREDS1]]* [[CAPTURES]] to i8*
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i64(i8* align 8 [[SHAREDS_REF]], i8* align 8 [[BITCAST]], i64 8, i1 false)
// CHECK: [[DEP_BASE:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES:%.*]], i64 0, i64 0
// CHECK: [[DEP:%.+]] = getelementptr [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP_BASE]], i64 0
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: store i64 ptrtoint (i32* @{{.+}} to i64), i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 4, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEP:%.*]] = getelementptr [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP_BASE]], i64 1
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: ptrtoint i8* [[B]] to i64
// CHECK: store i64 %{{[^,]+}}, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 1, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEP:%.*]] = getelementptr [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP_BASE]], i64 2
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: ptrtoint [2 x [[STRUCT_S]]]* [[S]] to i64
// CHECK: store i64 %{{[^,]+}}, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 8, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 0, [[A_VAL:%.+]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 9, [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr i32, i32* [[END]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END1]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: [[DEP:%.*]] = getelementptr [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP_BASE]], i64 3
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: [[T1:%.*]] = ptrtoint i32* [[START]] to i64
// CHECK: store i64 [[T1]], i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: bitcast [[KMP_DEPEND_INFO]]* [[DEP_BASE]] to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i8* [[ORIG_TASK_PTR]], i32 4, i8* %{{[^,]+}}, i32 0, i8* null)
// Task with four 'in' dependences (dep type byte 1 per the stores above),
// including the whole-array section arr[:] whose byte size is computed from
// the VLA extent at runtime.
#pragma omp task shared(a, s) depend(in : a, b, s, arr[:])
  {
    a = 15;
    s[1].a = 10;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i32 0, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY2:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i8* [[ORIG_TASK_PTR]])
// Untied task (alloc flags 0 above) containing a critical region.
#pragma omp task untied
  {
#pragma omp critical
    a = 1;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i32 0, i64 40, i64 1,
// CHECK: getelementptr inbounds [2 x [[STRUCT_S]]], [2 x [[STRUCT_S]]]* [[S]], i64 0, i64 0
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint [[STRUCT_S]]* %{{.+}} to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 4, [[A_VAL]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[START1:%.+]] = getelementptr inbounds i32, i32* [[START]], i64 [[IDX2]]
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 9, [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr inbounds i32, i32* [[END]], i64 [[IDX2]]
// CHECK: [[END2:%.+]] = getelementptr i32, i32* [[END1]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START1]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END2]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: getelementptr [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i64 1
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint i32* [[START1]] to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: bitcast [[KMP_DEPEND_INFO]]* %{{.+}} to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i8* [[ORIG_TASK_PTR]], i32 2, i8* %{{[^,]+}}, i32 0, i8* null)
// Untied task with 'out' dependences (dep type byte 3 per the stores above)
// on an element and a strided section with a runtime lower bound.
#pragma omp task untied depend(out : s[0], arr[4:][b])
  {
    a = 1;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i32 0, i64 40, i64 1,
// CHECK: getelementptr inbounds [2 x [[STRUCT_S]]], [2 x [[STRUCT_S]]]* [[S]], i64 0, i64 0
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint [[STRUCT_S]]* %{{.+}} to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 4, i8*
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 4, [[A_VAL]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[START1:%.+]] = getelementptr inbounds i32, i32* [[START]], i64 [[IDX2]]
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 9, [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr inbounds i32, i32* [[END]], i64 [[IDX2]]
// CHECK: [[END2:%.+]] = getelementptr i32, i32* [[END1]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START1]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END2]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: getelementptr [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i64 1
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint i32* [[START1]] to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 4, i8*
// CHECK: bitcast [[KMP_DEPEND_INFO]]* %{{.+}} to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i8* [[ORIG_TASK_PTR]], i32 2, i8* %{{[^,]+}}, i32 0, i8* null)
// Same dependence list as the previous task but with 'mutexinoutset'
// (dep type byte 4 per the stores above).
#pragma omp task untied depend(mutexinoutset: s[0], arr[4:][b])
  {
    a = 1;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i32 3, i64 40, i64 1,
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: store i64 ptrtoint (i32* @{{.+}} to i64), i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: getelementptr inbounds [2 x [[STRUCT_S]]], [2 x [[STRUCT_S]]]* [[S]], i64 0, i64 1
// CHECK: getelementptr [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i64 1
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint [[STRUCT_S]]* %{{.+}} to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: [[IDX1:%.+]] = mul nsw i64 0, [[A_VAL]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[START1:%.+]] = getelementptr inbounds i32, i32* [[START]], i64 3
// CHECK: [[NEW_A_VAL:%.+]] = load i32, i32* @{{.+}},
// CHECK: [[NEW_A_VAL_I64:%.+]] = sext i32 [[NEW_A_VAL]] to i64
// CHECK: [[IDX2:%.+]] = sub nsw i64 [[NEW_A_VAL_I64]], 1
// CHECK: [[NEW_A_VAL:%.+]] = load i32, i32* @{{.+}},
// CHECK: [[NEW_A_VAL_I64:%.+]] = sext i32 [[NEW_A_VAL]] to i64
// CHECK: [[SUB:%.+]] = add nsw i64 -1, [[NEW_A_VAL_I64]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 [[SUB]], [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr inbounds i32, i32* [[END]], i64 [[IDX2]]
// CHECK: [[END2:%.+]] = getelementptr i32, i32* [[END1]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START1]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END2]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: getelementptr [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i64 2
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint i32* [[START1]] to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: bitcast [[KMP_DEPEND_INFO]]* %{{.+}} to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i8* [[ORIG_TASK_PTR]], i32 3, i8* %{{[^,]+}}, i32 0, i8* null)
// final(true) task (alloc flags 3 above) with 'inout' dependences, mixing a
// scalar, an element, and a section with runtime bounds on both dimensions.
#pragma omp task final(true) depend(inout: a, s[1], arr[:a][3:])
  {
    a = 2;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i32 3, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY3:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i8* [[ORIG_TASK_PTR]])
// final(true) without dependences.
#pragma omp task final(true)
  {
    a = 2;
  }
  // CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i32 1, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY4:@.+]] to i32 (i32, i8*)*))
  // CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i8* [[ORIG_TASK_PTR]])
  // final(<compile-time false>) folds to a plain tied task (alloc flags 1).
  const bool flag = false;
#pragma omp task final(flag)
  {
    a = 3;
  }
  // CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
  // CHECK: [[CMP:%.+]] = icmp ne i8 [[B_VAL]], 0
  // CHECK: [[FINAL:%.+]] = select i1 [[CMP]], i32 2, i32 0
  // CHECK: [[FLAGS:%.+]] = or i32 [[FINAL]], 1
  // CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i32 [[FLAGS]], i64 40, i64 8, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY5:@.+]] to i32 (i32, i8*)*))
  // CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i8* [[ORIG_TASK_PTR]])
  // Over-aligned shared local: forces a larger kmp_task_t allocation so the
  // shareds block can honor the 128-byte alignment.
  int c __attribute__((aligned(128)));
// final(<runtime expr>): the flags word is computed with the select/or
// sequence matched above.
#pragma omp task final(b) shared(c)
  {
    a = 4;
    c = 5;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i32 0, i64 48, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY6:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 {{%.*}}, i8* [[ORIG_TASK_PTR]])
// Untied task whose body has multiple task-scheduling points (nested task,
// taskyield, taskwait); its outlined function is verified as TASK_ENTRY6,
// including the untied-runtime switch/restore of the local 's1'.
#pragma omp task untied
  {
    S s1;
#pragma omp task
    a = 4;
#pragma omp taskyield
    s1 = S();
#pragma omp taskwait
  }
  return a;
}
274 // CHECK: define internal i32 [[TASK_ENTRY1]](i32 %0, [[KMP_TASK_T]]{{.*}}* noalias %1)
275 // CHECK: store i32 15, i32* [[A_PTR:@.+]]
276 // CHECK: [[A_VAL:%.+]] = load i32, i32* [[A_PTR]]
277 // CHECK: [[A_VAL_I8:%.+]] = trunc i32 [[A_VAL]] to i8
278 // CHECK: store i8 [[A_VAL_I8]], i8* %{{.+}}
279 // CHECK: store i32 10, i32* %{{.+}}
280 
281 // CHECK: define internal i32 [[TASK_ENTRY2]](i32 %0, [[KMP_TASK_T]]{{.*}}* noalias %1)
282 // CHECK: store i32 1, i32* [[A_PTR]]
283 
284 // CHECK: define internal i32 [[TASK_ENTRY3]](i32 %0, [[KMP_TASK_T]]{{.*}}* noalias %1)
285 // CHECK: store i32 2, i32* [[A_PTR]]
286 
287 // CHECK: define internal i32 [[TASK_ENTRY4]](i32 %0, [[KMP_TASK_T]]{{.*}}* noalias %1)
288 // CHECK: store i32 3, i32* [[A_PTR]]
289 
290 // CHECK: define internal i32 [[TASK_ENTRY5]](i32 %0, [[KMP_TASK_T]]{{.*}}* noalias %1)
291 // CHECK: store i32 4, i32* [[A_PTR]]
292 // CHECK: store i32 5, i32* [[C_PTR:%.+]], align 128
293 
294 // CHECK: define internal i32
295 // CHECK: store i32 4, i32* [[A_PTR]]
296 
297 // CHECK: define internal i32 [[TASK_ENTRY6]](i32 %0, [[KMP_TASK_T]]{{.*}}* noalias %1)
298 // UNTIEDRT: [[S1_ADDR_PTR:%.+]] = alloca %struct.S*,
299 // UNTIEDRT: call void (i8*, ...) %{{.+}}(i8* %{{.+}}, %struct.S** [[S1_ADDR_PTR]])
300 // UNTIEDRT: [[S1_ADDR:%.+]] = load %struct.S*, %struct.S** [[S1_ADDR_PTR]],
301 // CHECK: switch i32 %{{.+}}, label %[[DONE:.+]] [
302 
303 // CHECK: [[DONE]]:
304 // CHECK: br label %[[CLEANUP:[^,]+]]
305 
306 // CHECK: load i32*, i32** %
307 // CHECK: store i32 1, i32* %
308 // CHECK: call i32 @__kmpc_omp_task(%
309 // UNTIEDRT: br label %[[EXIT:[^,]+]]
310 
311 // UNTIEDRT: call void [[CONSTR:@.+]](%struct.S* [[S1_ADDR]])
312 // CHECK: call i8* @__kmpc_omp_task_alloc(
313 // CHECK: call i32 @__kmpc_omp_task(%
314 // CHECK: load i32*, i32** %
315 // CHECK: store i32 2, i32* %
316 // CHECK: call i32 @__kmpc_omp_task(%
317 // UNTIEDRT: br label %[[EXIT]]
318 
319 // CHECK: call i32 @__kmpc_omp_taskyield(%
320 // CHECK: load i32*, i32** %
321 // CHECK: store i32 3, i32* %
322 // CHECK: call i32 @__kmpc_omp_task(%
323 // UNTIEDRT: br label %[[EXIT]]
324 
325 // s1 = S();
326 // UNTIEDRT: call void [[CONSTR]](%struct.S* [[TMP:%.+]])
327 // UNTIEDRT: [[DST:%.+]] = bitcast %struct.S* [[S1_ADDR]] to i8*
328 // UNTIEDRT: [[SRC:%.+]] = bitcast %struct.S* [[TMP]] to i8*
329 // UNTIEDRT: call void @llvm.memcpy.{{.+}}(i8* {{.*}}[[DST]], i8* {{.*}}[[SRC]], i64 4, i1 false)
330 // UNTIEDRT: call void [[DESTR:@.+]](%struct.S* [[TMP]])
331 
332 // CHECK: call i32 @__kmpc_omp_taskwait(%
333 // CHECK: load i32*, i32** %
334 // CHECK: store i32 4, i32* %
335 // CHECK: call i32 @__kmpc_omp_task(%
336 // UNTIEDRT: br label %[[EXIT]]
337 
338 // UNTIEDRT: call void [[DESTR]](%struct.S* [[S1_ADDR]])
339 // CHECK: br label %[[CLEANUP]]
340 
341 // CHECK: [[CLEANUP]]:
342 // UNTIEDRT: br label %[[EXIT]]
343 
344 // UNTIEDRT:      [[EXIT]]:
345 // UNTIEDRT-NEXT: ret i32 0
346 
// Global object whose dynamic initializer (the S1 constructor) creates a
// task: verifies that task allocation codegen is emitted correctly for code
// reached from a global-initialization context (see the taskinit label
// checks below).
struct S1 {
  int a;
  S1() { taskinit(); }
  void taskinit() {
#pragma omp task
    a = 0;
  }
} s1; // global of type S1; its ctor runs before main
355 
356 // CHECK-LABEL: taskinit
357 // CHECK: call i8* @__kmpc_omp_task_alloc(
358 
359 #endif
360