// RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp -x c++ -emit-llvm %s -o - | FileCheck %s
// RUN: %clang_cc1 -fopenmp -x c++ -triple x86_64-apple-darwin10 -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp -x c++ -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s
//
// RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp -fopenmp-enable-irbuilder -x c++ -emit-llvm %s -o - | FileCheck %s
// RUN: %clang_cc1 -fopenmp -fopenmp-enable-irbuilder -x c++ -triple x86_64-apple-darwin10 -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp -fopenmp-enable-irbuilder -x c++ -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s

// RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp-simd -x c++ -emit-llvm %s -o - | FileCheck --check-prefix SIMD-ONLY0 %s
// RUN: %clang_cc1 -fopenmp-simd -x c++ -triple x86_64-apple-darwin10 -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp-simd -x c++ -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck --check-prefix SIMD-ONLY0 %s
// SIMD-ONLY0-NOT: {{__kmpc|__tgt}}
// expected-no-diagnostics

// Verifies the LLVM IR emitted for '#pragma omp task' and its clauses
// (shared, priority, depend, untied, final), plus taskyield/taskwait and a
// task fired from a global object's constructor.  Under -fopenmp-simd no
// runtime (__kmpc/__tgt) calls may be emitted at all.
#ifndef HEADER
#define HEADER

// Runtime-interface record layouts matched below: ident_t, the per-task
// capture structs, kmp_task_t, and kmp_depend_info.
// CHECK-DAG: [[IDENT_T:%.+]] = type { i32, i32, i32, i32, i8* }
// CHECK-DAG: [[STRUCT_SHAREDS:%.+]] = type { i8*, [2 x [[STRUCT_S:%.+]]]* }
// CHECK-DAG: [[STRUCT_SHAREDS1:%.+]] = type { [2 x [[STRUCT_S:%.+]]]* }
// CHECK-DAG: [[KMP_TASK_T:%.+]] = type { i8*, i32 (i32, i8*)*, i32, %union{{.+}}, %union{{.+}} }
// CHECK-DAG: [[KMP_DEPEND_INFO:%.+]] = type { i64, i64, i8 }

// Type with user-provided default ctor, copy ctor, and dtor; arrays of S are
// captured shared by the tasks below.
struct S {
  int a;
  S() : a(0) {}
  S(const S &s) : a(s.a) {}
  ~S() {}
};
int a;
// CHECK-LABEL: @main
int main() {
// CHECK: [[B:%.+]] = alloca i8
// CHECK: [[S:%.+]] = alloca [2 x [[STRUCT_S]]]
  char b;
  S s[2];
  int arr[10][a];
// CHECK: [[GTID:%.+]] = call i32 @__kmpc_global_thread_num([[IDENT_T]]* @{{.+}})
// CHECK: [[B_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS]], [[STRUCT_SHAREDS]]* [[CAPTURES:%.+]], i32 0, i32 0
// CHECK: store i8* [[B]], i8** [[B_REF]]
// CHECK: [[S_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS]], [[STRUCT_SHAREDS]]* [[CAPTURES]], i32 0, i32 1
// CHECK: store [2 x [[STRUCT_S]]]* [[S]], [2 x [[STRUCT_S]]]** [[S_REF]]
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 33, i64 40, i64 16, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY1:@.+]] to i32 (i32, i8*)*))
// CHECK: [[SHAREDS_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR:%.+]], i32 0, i32 0
// CHECK: [[SHAREDS_REF:%.+]] = load i8*, i8** [[SHAREDS_REF_PTR]]
// CHECK: [[BITCAST:%.+]] = bitcast [[STRUCT_SHAREDS]]* [[CAPTURES]] to i8*
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i64(i8* align 8 [[SHAREDS_REF]], i8* align 8 [[BITCAST]], i64 16, i1 false)
// CHECK: [[PRIORITY_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR]], i32 0, i32 4
// CHECK: [[PRIORITY:%.+]] = bitcast %union{{.+}}* [[PRIORITY_REF_PTR]] to i32*
// CHECK: store i32 {{.+}}, i32* [[PRIORITY]]
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
#pragma omp task shared(a, b, s) priority(b)
  {
    a = 15;
    b = a;
    s[0].a = 10;
  }
// Task with 'depend(in: ...)': a 4-element kmp_depend_info array is filled
// (address, length, flags) and passed to __kmpc_omp_task_with_deps.
// CHECK: [[S_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS1]], [[STRUCT_SHAREDS1]]* [[CAPTURES:%.+]], i32 0, i32 0
// CHECK: store [2 x [[STRUCT_S]]]* [[S]], [2 x [[STRUCT_S]]]** [[S_REF]]
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{[^,]+}}, i32 [[GTID]], i32 1, i64 40, i64 8,
// CHECK: [[SHAREDS_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR:%.+]], i32 0, i32 0
// CHECK: [[SHAREDS_REF:%.+]] = load i8*, i8** [[SHAREDS_REF_PTR]]
// CHECK: [[BITCAST:%.+]] = bitcast [[STRUCT_SHAREDS1]]* [[CAPTURES]] to i8*
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i64(i8* align 8 [[SHAREDS_REF]], i8* align 8 [[BITCAST]], i64 8, i1 false)
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES:%.*]], i64 0, i64 0
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: store i64 ptrtoint (i32* @{{.+}} to i64), i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 4, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES]], i64 0, i64 1
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: ptrtoint i8* [[B]] to i64
// CHECK: store i64 %{{[^,]+}}, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 1, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES]], i64 0, i64 2
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: ptrtoint [2 x [[STRUCT_S]]]* [[S]] to i64
// CHECK: store i64 %{{[^,]+}}, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 8, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 0, [[A_VAL:%.+]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 9, [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr i32, i32* [[END]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END1]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 3
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: [[T1:%.*]] = ptrtoint i32* [[START]] to i64
// CHECK: store i64 [[T1]], i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEPS:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES]], i{{32|64}} 0, i{{32|64}} 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* [[DEPS]] to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 4, i8* %{{[^,]+}}, i32 0, i8* null)
#pragma omp task shared(a, s) depend(in : a, b, s, arr[:])
  {
    a = 15;
    s[1].a = 10;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY2:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
#pragma omp task untied
  {
#pragma omp critical
    a = 1;
  }
// Untied task with 'depend(out: ...)': flags byte is 3 for out dependencies.
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1,
// CHECK: getelementptr inbounds [2 x [[STRUCT_S]]], [2 x [[STRUCT_S]]]* [[S]], i64 0, i64 0
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 0
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint [[STRUCT_S]]* %{{.+}} to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 4, [[A_VAL]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[START1:%.+]] = getelementptr inbounds i32, i32* [[START]], i64 [[IDX2]]
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 9, [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr inbounds i32, i32* [[END]], i64 [[IDX2]]
// CHECK: [[END2:%.+]] = getelementptr i32, i32* [[END1]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START1]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END2]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 1
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint i32* [[START1]] to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i{{32|64}} 0, i{{32|64}} 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* %{{.+}} to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 2, i8* %{{[^,]+}}, i32 0, i8* null)
#pragma omp task untied depend(out : s[0], arr[4:][b])
  {
    a = 1;
  }
// Same shape with 'depend(mutexinoutset: ...)': flags byte is 4.
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1,
// CHECK: getelementptr inbounds [2 x [[STRUCT_S]]], [2 x [[STRUCT_S]]]* [[S]], i64 0, i64 0
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 0
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint [[STRUCT_S]]* %{{.+}} to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 4, i8*
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 4, [[A_VAL]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[START1:%.+]] = getelementptr inbounds i32, i32* [[START]], i64 [[IDX2]]
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 9, [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr inbounds i32, i32* [[END]], i64 [[IDX2]]
// CHECK: [[END2:%.+]] = getelementptr i32, i32* [[END1]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START1]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END2]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 1
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint i32* [[START1]] to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 4, i8*
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i{{32|64}} 0, i{{32|64}} 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* %{{.+}} to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 2, i8* %{{[^,]+}}, i32 0, i8* null)
#pragma omp task untied depend(mutexinoutset: s[0], arr[4:][b])
  {
    a = 1;
  }
// 'final(true)' with 'depend(inout: ...)': 3 dependencies, flags byte 3.
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 3, i64 40, i64 1,
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 0
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: store i64 ptrtoint (i32* @{{.+}} to i64), i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: getelementptr inbounds [2 x [[STRUCT_S]]], [2 x [[STRUCT_S]]]* [[S]], i64 0, i64 1
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 1
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint [[STRUCT_S]]* %{{.+}} to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: [[IDX1:%.+]] = mul nsw i64 0, [[A_VAL]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[START1:%.+]] = getelementptr inbounds i32, i32* [[START]], i64 3
// CHECK: [[NEW_A_VAL:%.+]] = load i32, i32* @{{.+}},
// CHECK: [[NEW_A_VAL_I64:%.+]] = sext i32 [[NEW_A_VAL]] to i64
// CHECK: [[IDX2:%.+]] = sub nsw i64 [[NEW_A_VAL_I64]], 1
// CHECK: [[NEW_A_VAL:%.+]] = load i32, i32* @{{.+}},
// CHECK: [[NEW_A_VAL_I64:%.+]] = sext i32 [[NEW_A_VAL]] to i64
// CHECK: [[SUB:%.+]] = add nsw i64 -1, [[NEW_A_VAL_I64]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 [[SUB]], [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr inbounds i32, i32* [[END]], i64 [[IDX2]]
// CHECK: [[END2:%.+]] = getelementptr i32, i32* [[END1]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START1]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END2]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 2
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint i32* [[START1]] to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i{{32|64}} 0, i{{32|64}} 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* %{{.+}} to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 3, i8* %{{[^,]+}}, i32 0, i8* null)
#pragma omp task final(true) depend(inout: a, s[1], arr[:a][3:])
  {
    a = 2;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 3, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY3:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
#pragma omp task final(true)
  {
    a = 2;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 1, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY4:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
  const bool flag = false;
#pragma omp task final(flag)
  {
    a = 3;
  }
// Non-constant 'final(b)': the final bit is selected at run time and OR'ed
// into the task-alloc flags.
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[CMP:%.+]] = icmp ne i8 [[B_VAL]], 0
// CHECK: [[FINAL:%.+]] = select i1 [[CMP]], i32 2, i32 0
// CHECK: [[FLAGS:%.+]] = or i32 [[FINAL]], 1
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 [[FLAGS]], i64 40, i64 8, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY5:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
  int c __attribute__((aligned(128)));
#pragma omp task final(b) shared(c)
  {
    a = 4;
    c = 5;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY6:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
// Untied task containing a nested task, taskyield, and taskwait; its entry
// point (TASK_ENTRY6 below) is checked to dispatch through a switch.
#pragma omp task untied
  {
    S s1;
#pragma omp task
    a = 4;
#pragma omp taskyield
    s1 = S();
#pragma omp taskwait
  }
  return a;
}
// CHECK: define internal i32 [[TASK_ENTRY1]](i32 %0, [[KMP_TASK_T]]{{.*}}* noalias %1)
// CHECK: store i32 15, i32* [[A_PTR:@.+]]
// CHECK: [[A_VAL:%.+]] = load i32, i32* [[A_PTR]]
// CHECK: [[A_VAL_I8:%.+]] = trunc i32 [[A_VAL]] to i8
// CHECK: store i8 [[A_VAL_I8]], i8* %{{.+}}
// CHECK: store i32 10, i32* %{{.+}}

// CHECK: define internal i32 [[TASK_ENTRY2]](i32 %0, [[KMP_TASK_T]]{{.*}}* noalias %1)
// CHECK: store i32 1, i32* [[A_PTR]]

// CHECK: define internal i32 [[TASK_ENTRY3]](i32 %0, [[KMP_TASK_T]]{{.*}}* noalias %1)
// CHECK: store i32 2, i32* [[A_PTR]]

// CHECK: define internal i32 [[TASK_ENTRY4]](i32 %0, [[KMP_TASK_T]]{{.*}}* noalias %1)
// CHECK: store i32 3, i32* [[A_PTR]]

// CHECK: define internal i32 [[TASK_ENTRY5]](i32 %0, [[KMP_TASK_T]]{{.*}}* noalias %1)
// CHECK: store i32 4, i32* [[A_PTR]]
// CHECK: store i32 5, i32* [[C_PTR:%.+]], align 128

// CHECK: define internal i32
// CHECK: store i32 4, i32* [[A_PTR]]

// CHECK: define internal i32 [[TASK_ENTRY6]](i32 %0, [[KMP_TASK_T]]{{.*}}* noalias %1)
// CHECK: switch i32 %{{.+}}, label
// CHECK: load i32*, i32** %
// CHECK: store i32 1, i32* %
// CHECK: call i32 @__kmpc_omp_task(%

// CHECK: call i8* @__kmpc_omp_task_alloc(
// CHECK: call i32 @__kmpc_omp_task(%
// CHECK: load i32*, i32** %
// CHECK: store i32 2, i32* %
// CHECK: call i32 @__kmpc_omp_task(%

// CHECK: call i32 @__kmpc_omp_taskyield(%
// CHECK: load i32*, i32** %
// CHECK: store i32 3, i32* %
// CHECK: call i32 @__kmpc_omp_task(%

// CHECK: call i32 @__kmpc_omp_taskwait(%
// CHECK: load i32*, i32** %
// CHECK: store i32 4, i32* %
// CHECK: call i32 @__kmpc_omp_task(%

// Global object whose constructor spawns a task, so task allocation must
// also be emitted inside the constructor path (checked under 'taskinit').
struct S1 {
  int a;
  S1() { taskinit(); }
  void taskinit() {
#pragma omp task
    a = 0;
  }
} s1;

// CHECK-LABEL: taskinit
// CHECK: call i8* @__kmpc_omp_task_alloc(

#endif