1 // RUN: %clang_cc1 -no-opaque-pointers -verify -triple x86_64-apple-darwin10 -fopenmp -fopenmp-version=50 -x c -emit-llvm %s -o - | FileCheck %s
2 // RUN: %clang_cc1 -no-opaque-pointers -fopenmp -fopenmp-version=50 -x c -triple x86_64-apple-darwin10 -emit-pch -o %t %s
3 // RUN: %clang_cc1 -no-opaque-pointers -fopenmp -fopenmp-version=50 -x c -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s
4 
5 // RUN: %clang_cc1 -no-opaque-pointers -verify -triple x86_64-apple-darwin10 -fopenmp-simd -fopenmp-version=50 -x c -emit-llvm %s -o - | FileCheck --check-prefix SIMD-ONLY0 %s
6 // RUN: %clang_cc1 -no-opaque-pointers -fopenmp-simd -fopenmp-version=50 -x c -triple x86_64-apple-darwin10 -emit-pch -o %t %s
7 // RUN: %clang_cc1 -no-opaque-pointers -fopenmp-simd -fopenmp-version=50 -x c -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck --check-prefix SIMD-ONLY0 %s
8 // SIMD-ONLY0-NOT: {{__kmpc|__tgt}}
9 // expected-no-diagnostics
10 #ifndef HEADER
11 #define HEADER
12 
13 typedef void *omp_depend_t;
14 typedef __UINTPTR_TYPE__ omp_event_handle_t;
15 
16 void foo(void);
17 
18 // CHECK-LABEL: @main
int main(void) {
  // Two opaque dependency objects (built elsewhere via `omp depobj`),
  // the detach event handle, and plain variables used as dependences.
  omp_depend_t d, x;
  omp_event_handle_t evt;
  int a, *b;
  // The CHECK lines below pin the expected IR: task allocation, the
  // allow-completion-event call for detach(evt), computation of the
  // merged dependence-array size (2 static deps + sizes read from the
  // depobj headers at index -1), population of the VLA of
  // kmp_depend_info records, memcpy of each depobj's records into the
  // VLA, and finally the __kmpc_omp_task_with_deps call.
  // CHECK: [[D_ADDR:%.+]] = alloca i8*,
  // CHECK: [[X_ADDR:%.+]] = alloca i8*,
  // CHECK: [[EVT_ADDR:%.+]] = alloca i64,
  // CHECK: [[A_ADDR:%.+]] = alloca i32,
  // CHECK: [[DEPOBJ_SIZE_ADDR:%.+]] = alloca i64,
  // CHECK: [[DEPOBJ_SIZE_ADDR1:%.+]] = alloca i64,
  // CHECK: = alloca i64,
  // CHECK: [[DEP_COUNTER_ADDR:%.+]] = alloca i64,
  // CHECK: [[GTID:%.+]] = call i32 @__kmpc_global_thread_num(
  // CHECK: [[ALLOC:%.+]] = call i8* @__kmpc_omp_task_alloc(%struct.ident_t* @{{.+}}, i32 [[GTID]], i32 65, i64 48, i64 0, i32 (i32, i8*)* bitcast (i32 (i32, [[PRIVATES_TY:%.+]]*)* [[TASK_ENTRY:@.+]] to i32 (i32, i8*)*))
  // CHECK: [[EVT_VAL:%.+]] = call i8* @__kmpc_task_allow_completion_event(%struct.ident_t* @{{.+}}, i32 [[GTID]], i8* [[ALLOC]])
  // CHECK: [[CAST_EVT_VAL:%.+]] = ptrtoint i8* [[EVT_VAL]] to i64
  // CHECK: store i64 [[CAST_EVT_VAL]], i64* [[EVT_ADDR]], align 8
  // CHECK: [[DATA:%.+]] = bitcast i8* [[ALLOC]] to [[PRIVATES_TY]]*
  // CHECK: [[D_ADDR_CAST:%.+]] = bitcast i8** [[D_ADDR]] to %struct.kmp_depend_info**
  // CHECK: [[D_DEP:%.+]] = load %struct.kmp_depend_info*, %struct.kmp_depend_info** [[D_ADDR_CAST]], align 8
  // CHECK: [[D_DEP_BASE:%.+]] = getelementptr %struct.kmp_depend_info, %struct.kmp_depend_info* [[D_DEP]], i{{.+}} -1
  // CHECK: [[D_DEP_BASE_SIZE:%.+]] = getelementptr inbounds %struct.kmp_depend_info, %struct.kmp_depend_info* [[D_DEP_BASE]], i{{.+}} 0, i{{.+}} 0
  // CHECK: [[SIZE1:%.+]] = load i64, i64* [[D_DEP_BASE_SIZE]], align 8
  // CHECK-DAG: store i64 0, i64* [[DEPOBJ_SIZE_ADDR]], align 8
  // CHECK: [[SZ:%.+]] = load i64, i64* [[DEPOBJ_SIZE_ADDR]], align 8
  // CHECK: [[SIZE:%.+]] = add nuw i64 [[SZ]], [[SIZE1]]
  // CHECK: store i64 [[SIZE]], i64* [[DEPOBJ_SIZE_ADDR]], align 8
  // CHECK: [[X_ADDR_CAST:%.+]] = bitcast i8** [[X_ADDR]] to %struct.kmp_depend_info**
  // CHECK: [[X_DEP:%.+]] = load %struct.kmp_depend_info*, %struct.kmp_depend_info** [[X_ADDR_CAST]], align 8
  // CHECK: [[X_DEP_BASE:%.+]] = getelementptr %struct.kmp_depend_info, %struct.kmp_depend_info* [[X_DEP]], i{{.+}} -1
  // CHECK: [[X_DEP_BASE_SIZE:%.+]] = getelementptr inbounds %struct.kmp_depend_info, %struct.kmp_depend_info* [[X_DEP_BASE]], i{{.+}} 0, i{{.+}} 0
  // CHECK: [[SIZE2:%.+]] = load i64, i64* [[X_DEP_BASE_SIZE]], align 8
  // CHECK-DAG: store i64 0, i64* [[DEPOBJ_SIZE_ADDR1]], align 8
  // CHECK: [[SZ:%.+]] = load i64, i64* [[DEPOBJ_SIZE_ADDR1]], align 8
  // CHECK: [[SIZE3:%.+]] = add nuw i64 [[SZ]], [[SIZE2]]
  // CHECK: store i64 [[SIZE3]], i64* [[DEPOBJ_SIZE_ADDR1]], align 8
  // CHECK: [[SZ:%.+]] = load i64, i64* [[DEPOBJ_SIZE_ADDR]], align 8
  // CHECK: [[SZ1:%.+]] = load i64, i64* [[DEPOBJ_SIZE_ADDR1]], align 8
  // CHECK: [[SIZE1:%.+]] = add nuw i64 0, [[SZ]]
  // CHECK: [[SIZE2:%.+]] = add nuw i64 [[SIZE1]], [[SZ1]]
  // CHECK: [[SIZE:%.+]] = add nuw i64 [[SIZE2]], 2
  // CHECK: [[SV:%.+]] = call i8* @llvm.stacksave()
  // CHECK: store i8* [[SV]], i8** [[SV_ADDR:%.+]], align 8
  // CHECK: [[VLA:%.+]] = alloca %struct.kmp_depend_info, i64 [[SIZE]],
  // CHECK: [[SIZE32:%.+]] = trunc i64 [[SIZE]] to i32
  // CHECK: [[A_ADDR_CAST:%.+]] = ptrtoint i32* [[A_ADDR]] to i64
  // CHECK: [[VLA0:%.+]] = getelementptr %struct.kmp_depend_info, %struct.kmp_depend_info* [[VLA]], i64 0
  // CHECK: [[BASE_ADDR:%.+]] = getelementptr inbounds %struct.kmp_depend_info, %struct.kmp_depend_info* [[VLA0]], i{{.+}} 0, i{{.+}} 0
  // CHECK: store i64 [[A_ADDR_CAST]], i64* [[BASE_ADDR]], align 16
  // CHECK: [[SIZE_ADDR:%.+]] = getelementptr inbounds %struct.kmp_depend_info, %struct.kmp_depend_info* [[VLA0]], i{{.+}} 0, i{{.+}} 1
  // CHECK: store i64 4, i64* [[SIZE_ADDR]], align 8
  // CHECK: [[FLAGS_ADDR:%.+]] = getelementptr inbounds %struct.kmp_depend_info, %struct.kmp_depend_info* [[VLA0]], i{{.+}} 0, i{{.+}} 2
  // CHECK: store i8 1, i8* [[FLAGS_ADDR]], align 1
  // CHECK: [[A:%.+]] = load i32, i32* [[A_ADDR]], align 4
  // CHECK: [[A_CAST:%.+]] = sext i32 [[A]] to i64
  // CHECK: [[SZ1:%.+]] = mul nuw i64 24, [[A_CAST]]
  // CHECK: [[A:%.+]] = load i32, i32* [[A_ADDR]], align 4
  // CHECK: [[A_CAST:%.+]] = sext i32 [[A]] to i64
  // CHECK: [[SZ:%.+]] = mul nuw i64 [[SZ1]], [[A_CAST]]
  // CHECK: [[B_ADDR_CAST:%.+]] = ptrtoint i32** %{{.+}} to i64
  // CHECK: [[VLA1:%.+]] = getelementptr %struct.kmp_depend_info, %struct.kmp_depend_info* [[VLA]], i64 1
  // CHECK: [[BASE_ADDR:%.+]] = getelementptr inbounds %struct.kmp_depend_info, %struct.kmp_depend_info* [[VLA1]], i{{.+}} 0, i{{.+}} 0
  // CHECK: store i64 [[B_ADDR_CAST]], i64* [[BASE_ADDR]], align 8
  // CHECK: [[SIZE_ADDR:%.+]] = getelementptr inbounds %struct.kmp_depend_info, %struct.kmp_depend_info* [[VLA1]], i{{.+}} 0, i{{.+}} 1
  // CHECK: store i64 [[SZ]], i64* [[SIZE_ADDR]], align 8
  // CHECK: [[FLAGS_ADDR:%.+]] = getelementptr inbounds %struct.kmp_depend_info, %struct.kmp_depend_info* [[VLA1]], i{{.+}} 0, i{{.+}} 2
  // CHECK: store i8 1, i8* [[FLAGS_ADDR]], align 8
  // CHECK: store i64 2, i64* [[DEP_COUNTER_ADDR]], align 8
  // CHECK: [[D_ADDR_CAST:%.+]] = bitcast i8** [[D_ADDR]] to %struct.kmp_depend_info**
  // CHECK: [[BC:%.+]] = load %struct.kmp_depend_info*, %struct.kmp_depend_info** [[D_ADDR_CAST]], align 8
  // CHECK: [[PREV:%.+]] = getelementptr %struct.kmp_depend_info, %struct.kmp_depend_info* [[BC]], i64 -1
  // CHECK: [[SIZE_ADDR:%.+]] = getelementptr inbounds %struct.kmp_depend_info, %struct.kmp_depend_info* [[PREV]], i{{.+}} 0, i{{.+}} 0
  // CHECK: [[SIZE:%.+]] = load i64, i64* [[SIZE_ADDR]], align 8
  // CHECK: [[BYTES:%.+]] = mul nuw i64 24, [[SIZE]]
  // CHECK: [[POS:%.+]] = load i64, i64* [[DEP_COUNTER_ADDR]], align 8
  // CHECK: [[VLA_D:%.+]] = getelementptr %struct.kmp_depend_info, %struct.kmp_depend_info* [[VLA]], i64 [[POS]]
  // CHECK: [[DEST:%.+]] = bitcast %struct.kmp_depend_info* [[VLA_D]] to i8*
  // CHECK: [[SRC:%.+]] = bitcast %struct.kmp_depend_info* [[BC]] to i8*
  // CHECK: call void @llvm.memcpy.p0i8.p0i8.i64(i8* align {{.+}} [[DEST]], i8* align {{.+}} [[SRC]], i64 [[BYTES]], i1 false)
  // CHECK: [[ADD:%.+]] = add nuw i64 [[POS]], [[SIZE]]
  // CHECK: store i64 [[ADD]], i64* [[DEP_COUNTER_ADDR]], align 8
  // CHECK: [[X_ADDR_CAST:%.+]] = bitcast i8** [[X_ADDR]] to %struct.kmp_depend_info**
  // CHECK: [[BC:%.+]] = load %struct.kmp_depend_info*, %struct.kmp_depend_info** [[X_ADDR_CAST]], align 8
  // CHECK: [[PREV:%.+]] = getelementptr %struct.kmp_depend_info, %struct.kmp_depend_info* [[BC]], i64 -1
  // CHECK: [[SIZE_ADDR:%.+]] = getelementptr inbounds %struct.kmp_depend_info, %struct.kmp_depend_info* [[PREV]], i{{.+}} 0, i{{.+}} 0
  // CHECK: [[SIZE:%.+]] = load i64, i64* [[SIZE_ADDR]], align 8
  // CHECK: [[BYTES:%.+]] = mul nuw i64 24, [[SIZE]]
  // CHECK: [[POS:%.+]] = load i64, i64* [[DEP_COUNTER_ADDR]], align 8
  // CHECK: [[VLA_X:%.+]] = getelementptr %struct.kmp_depend_info, %struct.kmp_depend_info* [[VLA]], i64 [[POS]]
  // CHECK: [[DEST:%.+]] = bitcast %struct.kmp_depend_info* [[VLA_X]] to i8*
  // CHECK: [[SRC:%.+]] = bitcast %struct.kmp_depend_info* [[BC]] to i8*
  // CHECK: call void @llvm.memcpy.p0i8.p0i8.i64(i8* align {{.+}} [[DEST]], i8* align {{.+}} [[SRC]], i64 [[BYTES]], i1 false)
  // CHECK: [[ADD:%.+]] = add nuw i64 [[POS]], [[SIZE]]
  // CHECK: store i64 [[ADD]], i64* [[DEP_COUNTER_ADDR]], align 8
  // CHECK: [[BC:%.+]] = bitcast %struct.kmp_depend_info* [[VLA]] to i8*
  // CHECK: call i32 @__kmpc_omp_task_with_deps(%struct.ident_t* @{{.+}}, i32 [[GTID]], i8* [[ALLOC]], i32 [[SIZE32]], i8* [[BC]], i32 0, i8* null)
  // CHECK: [[SV:%.+]] = load i8*, i8** [[SV_ADDR]], align 8
  // CHECK: call void @llvm.stackrestore(i8* [[SV]])
  // The task under test combines three dependence-related features:
  //  - depend(in: ...) with a scalar (a) and an array-shaped dependence on b,
  //  - depend(depobj: d, x) merging two externally built dependence arrays,
  //  - detach(evt) requesting a completion event stored into evt.
#pragma omp task depend(in: a, ([3][a][a])&b) depend(depobj: d, x) detach(evt)
  {
#pragma omp taskgroup
    {
#pragma omp task
      foo();
    }
  }
  // CHECK: ret i32 0
  return 0;
}
128 // CHECK: call void @__kmpc_taskgroup(
129 // CHECK: call i8* @__kmpc_omp_task_alloc(
130 // CHECK: call i32 @__kmpc_omp_task(
131 // CHECK: call void @__kmpc_end_taskgroup(
132 
// CHECK-LABEL: @bar
void bar(void) {
  // Verifies codegen for the iterator modifier on a depend clause:
  // depend(iterator(unsigned k=0:i:2), in: a[k][i]) expands at runtime
  // into ceil((i - 0) / 2) dependence records, filled in by an emitted
  // counting loop before __kmpc_omp_task_with_deps is called.
  int **a;
  // CHECK: call void @__kmpc_for_static_init_4(
#pragma omp for
for (int i = 0; i < 10; ++i)
  // CHECK: [[BUF:%.+]] = call i8* @__kmpc_omp_task_alloc(%struct.ident_t* @{{.+}}, i32 %{{.+}}, i32 1, i64 48,
  // CHECK: [[BC_BUF:%.+]] = bitcast i8* [[BUF]] to [[TT_WITH_PRIVS:%.+]]*
  // CHECK: [[PRIVS:%.+]] = getelementptr inbounds [[TT_WITH_PRIVS]], [[TT_WITH_PRIVS]]* [[BC_BUF]], i32 0, i32 1
  // CHECK: [[I_PRIV:%.+]] = getelementptr inbounds %{{.+}}, %{{.+}} [[PRIVS]], i32 0, i32 0
  // CHECK: [[I:%.+]] = load i32, i32* [[I_ADDR:%.+]],
  // CHECK: store i32 %{{.+}}, i32* [[I_PRIV]],

  // NELEMS = 1 * ((i - 0 + 2 - 1) / 2);
  // CHECK: [[END:%.+]] = load i32, i32* [[I_ADDR]],
  // CHECK: [[EB_SUB:%.+]] = sub i32 [[END]], 0
  // CHECK: [[EB_SUB_2_ADD:%.+]] = add i32 [[EB_SUB]], 2
  // CHECK: [[EB_SUB_2_ADD_1_SUB:%.+]] = sub i32 [[EB_SUB_2_ADD]], 1
  // CHECK: [[EB_SUB_2_ADD_1_SUB_2_DIV:%.+]] = udiv i32 [[EB_SUB_2_ADD_1_SUB]], 2
  // CHECK: [[ELEMS:%.+]] = zext i32 [[EB_SUB_2_ADD_1_SUB_2_DIV]] to i64
  // CHECK: [[NELEMS:%.+]] = mul nuw i64 [[ELEMS]], 1

  // ITERATOR_TOTAL = NELEMS + 0;
  // CHECK: [[ITERATOR_TOTAL:%.+]] = add nuw i64 0, [[NELEMS]]
  // NELEMS = ITERATOR_TOTAL + non-iterator-deps (=0)
  // CHECK: [[TOTAL:%.+]] = add nuw i64 [[ITERATOR_TOTAL]], 0

  // %struct.kmp_depend_info DEPS[TOTAL];
  // CHECK: [[DEPS:%.+]] = alloca %struct.kmp_depend_info, i64 [[TOTAL]],
  // CHECK: [[NDEPS:%.+]] = trunc i64 [[TOTAL]] to i32

  // i64 DEP_COUNTER = 0;
  // CHECK: store i64 0, i64* [[DEP_COUNTER_ADDR:%.+]],

  // NELEMS = ((i - 0 + 2 - 1) / 2);
  // CHECK: [[END:%.+]] = load i32, i32* [[I_ADDR]],
  // CHECK: [[EB_SUB:%.+]] = sub i32 [[END]], 0
  // CHECK: [[EB_SUB_2_ADD:%.+]] = add i32 [[EB_SUB]], 2
  // CHECK: [[EB_SUB_2_ADD_1_SUB:%.+]] = sub i32 [[EB_SUB_2_ADD]], 1
  // CHECK: [[ELEMS:%.+]] = udiv i32 [[EB_SUB_2_ADD_1_SUB]], 2

  // i32 COUNTER = 0;
  // CHECK: store i32 0, i32* [[COUNTER_ADDR:%.+]],
  // CHECK: br label %[[CONT:.+]]

  // Loop.
  // CHECK: [[CONT]]:
  // CHECK: [[COUNTER:%.+]] = load i32, i32* [[COUNTER_ADDR]],
  // CHECK: [[CMP:%.+]] = icmp ult i32 [[COUNTER]], [[ELEMS]]
  // CHECK: br i1 [[CMP]], label %[[BODY:.+]], label %[[EXIT:.+]]

  // CHECK: [[BODY]]:

  // k = 0 + 2*COUNTER;
  // CHECK: [[COUNTER:%.+]] = load i32, i32* [[COUNTER_ADDR]],
  // CHECK: [[C2_MUL:%.+]] = mul i32 [[COUNTER]], 2
  // CHECK: [[C2_MUL_0_ADD:%.+]] = add i32 0, [[C2_MUL]]
  // CHECK: store i32 [[C2_MUL_0_ADD]], i32* [[K_ADDR:%.+]],

  // &a[k][i]
  // CHECK: [[A:%.+]] = load i32**, i32*** [[A_ADDR:%.+]],
  // CHECK: [[K:%.+]] = load i32, i32* [[K_ADDR]],
  // CHECK: [[IDX:%.+]] = zext i32 [[K]] to i64
  // CHECK: [[AK_ADDR:%.+]] = getelementptr inbounds i32*, i32** [[A]], i64 [[IDX]]
  // CHECK: [[AK:%.+]] = load i32*, i32** [[AK_ADDR]],
  // CHECK: [[I:%.+]] = load i32, i32* [[I_ADDR]],
  // CHECK: [[IDX:%.+]] = sext i32 [[I]] to i64
  // CHECK: [[AKI_ADDR:%.+]] = getelementptr inbounds i32, i32* [[AK]], i64 [[IDX]]
  // CHECK: [[AKI_INT:%.+]] = ptrtoint i32* [[AKI_ADDR]] to i64

  // DEPS[DEP_COUNTER].base_addr = &a[k][i];
  // CHECK: [[DEP_COUNTER:%.+]] = load i64, i64* [[DEP_COUNTER_ADDR]],
  // CHECK: [[DEPS_DC:%.+]] = getelementptr %struct.kmp_depend_info, %struct.kmp_depend_info* [[DEPS]], i64 [[DEP_COUNTER]]
  // CHECK: [[DEPS_DC_BASE_ADDR:%.+]] = getelementptr inbounds %struct.kmp_depend_info, %struct.kmp_depend_info* [[DEPS_DC]], i{{.+}} 0, i{{.+}} 0
  // CHECK: store i64 [[AKI_INT]], i64* [[DEPS_DC_BASE_ADDR]],

  // DEPS[DEP_COUNTER].size = sizeof(a[k][i]);
  // CHECK: [[DEPS_DC_SIZE:%.+]] = getelementptr inbounds %struct.kmp_depend_info, %struct.kmp_depend_info* [[DEPS_DC]], i{{.+}} 0, i{{.+}} 1
  // CHECK: store i64 4, i64* [[DEPS_DC_SIZE]],

  // DEPS[DEP_COUNTER].flags = in;
  // CHECK: [[DEPS_DC_FLAGS:%.+]] = getelementptr inbounds %struct.kmp_depend_info, %struct.kmp_depend_info* [[DEPS_DC]], i{{.+}} 0, i{{.+}} 2
  // CHECK: store i8 1, i8* [[DEPS_DC_FLAGS]],

  // DEP_COUNTER = DEP_COUNTER + 1;
  // CHECK: [[DEP_COUNTER:%.+]] = load i64, i64* [[DEP_COUNTER_ADDR]],
  // CHECK: [[INC:%.+]] = add nuw i64 [[DEP_COUNTER]], 1
  // CHECK: store i64 [[INC]], i64* [[DEP_COUNTER_ADDR]],

  // COUNTER = COUNTER + 1;
  // CHECK: [[COUNTER:%.+]] = load i32, i32* [[COUNTER_ADDR]],
  // CHECK: [[INC:%.+]] = add i32 [[COUNTER]], 1
  // CHECK: store i32 [[INC]], i32* [[COUNTER_ADDR]],
  // CHECK: br label %[[CONT]]

  // CHECK: [[EXIT]]:
  // CHECK: [[DEP_BEGIN:%.+]] = bitcast %struct.kmp_depend_info* [[DEPS]] to i8*
  // CHECK: = call i32 @__kmpc_omp_task_with_deps(%struct.ident_t* @{{.+}}, i32 %{{.+}}, i8* [[BUF]], i32 [[NDEPS]], i8* [[DEP_BEGIN]], i32 0, i8* null)
#pragma omp task depend(iterator(unsigned k=0:i:2), in: a[k][i])
++i;
}
234 #endif
235