; RUN: llc -mtriple=x86_64-pc-linux-gnu -start-before=stack-protector -stop-after=stack-protector -o - < %s | FileCheck %s
; Bugs 42238/43308: Test some additional situations not caught previously.

; A plain store that captures the address of a local alloca (%a stored into
; %j) must trigger sspstrong instrumentation: guard slot setup on entry and
; the guard re-check/branch to @__stack_chk_fail before returning.
define void @store_captures() #0 {
; CHECK-LABEL: @store_captures(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[STACKGUARDSLOT:%.*]] = alloca i8*
; CHECK-NEXT:    [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
; CHECK-NEXT:    [[RETVAL:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[A:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[J:%.*]] = alloca i32*, align 8
; CHECK-NEXT:    store i32 0, i32* [[RETVAL]]
; CHECK-NEXT:    [[LOAD:%.*]] = load i32, i32* [[A]], align 4
; CHECK-NEXT:    [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
; CHECK-NEXT:    store i32 [[ADD]], i32* [[A]], align 4
; CHECK-NEXT:    store i32* [[A]], i32** [[J]], align 8
; CHECK-NEXT:    [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    [[TMP0:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
; CHECK-NEXT:    [[TMP1:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP0]]
; CHECK-NEXT:    br i1 [[TMP1]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
; CHECK:       SP_return:
; CHECK-NEXT:    ret void
; CHECK:       CallStackCheckFailBlk:
; CHECK-NEXT:    call void @__stack_chk_fail()
; CHECK-NEXT:    unreachable
;
entry:
  %retval = alloca i32, align 4
  %a = alloca i32, align 4
  %j = alloca i32*, align 8
  store i32 0, i32* %retval
  %load = load i32, i32* %a, align 4
  %add = add nsw i32 %load, 1
  store i32 %add, i32* %a, align 4
  store i32* %a, i32** %j, align 8
  ret void
}

; Negative test: loading from, atomically updating, and returning an alloca's
; address are not instrumented here — no guard slot, no guard check emitted.
define i32* @non_captures() #0 {
; load, atomicrmw, and ret do not trigger a stack protector.
; CHECK-LABEL: @non_captures(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[A:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[LOAD:%.*]] = load i32, i32* [[A]], align 4
; CHECK-NEXT:    [[ATOM:%.*]] = atomicrmw add i32* [[A]], i32 1 seq_cst
; CHECK-NEXT:    ret i32* [[A]]
;
entry:
  %a = alloca i32, align 4
  %load = load i32, i32* %a, align 4
  %atom = atomicrmw add i32* %a, i32 1 seq_cst
  ret i32* %a
}

; Storing the alloca's address through an addrspacecast still counts as a
; capture and must trigger the same sspstrong guard setup and re-check.
define void @store_addrspacecast_captures() #0 {
; CHECK-LABEL: @store_addrspacecast_captures(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[STACKGUARDSLOT:%.*]] = alloca i8*
; CHECK-NEXT:    [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
; CHECK-NEXT:    [[RETVAL:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[A:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[J:%.*]] = alloca i32 addrspace(1)*, align 8
; CHECK-NEXT:    store i32 0, i32* [[RETVAL]]
; CHECK-NEXT:    [[LOAD:%.*]] = load i32, i32* [[A]], align 4
; CHECK-NEXT:    [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
; CHECK-NEXT:    store i32 [[ADD]], i32* [[A]], align 4
; CHECK-NEXT:    [[A_ADDRSPACECAST:%.*]] = addrspacecast i32* [[A]] to i32 addrspace(1)*
; CHECK-NEXT:    store i32 addrspace(1)* [[A_ADDRSPACECAST]], i32 addrspace(1)** [[J]], align 8
; CHECK-NEXT:    [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    [[TMP0:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
; CHECK-NEXT:    [[TMP1:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP0]]
; CHECK-NEXT:    br i1 [[TMP1]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
; CHECK:       SP_return:
; CHECK-NEXT:    ret void
; CHECK:       CallStackCheckFailBlk:
; CHECK-NEXT:    call void @__stack_chk_fail()
; CHECK-NEXT:    unreachable
;
entry:
  %retval = alloca i32, align 4
  %a = alloca i32, align 4
  %j = alloca i32 addrspace(1)*, align 8
  store i32 0, i32* %retval
  %load = load i32, i32* %a, align 4
  %add = add nsw i32 %load, 1
  store i32 %add, i32* %a, align 4
  %a.addrspacecast = addrspacecast i32* %a to i32 addrspace(1)*
  store i32 addrspace(1)* %a.addrspacecast, i32 addrspace(1)** %j, align 8
  ret void
}

; Passing the alloca's address as a cmpxchg operand captures it, so the
; sspstrong guard setup and pre-return re-check must be emitted.
define void @cmpxchg_captures() #0 {
; CHECK-LABEL: @cmpxchg_captures(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[STACKGUARDSLOT:%.*]] = alloca i8*
; CHECK-NEXT:    [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
; CHECK-NEXT:    [[RETVAL:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[A:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[J:%.*]] = alloca i32*, align 8
; CHECK-NEXT:    store i32 0, i32* [[RETVAL]]
; CHECK-NEXT:    [[LOAD:%.*]] = load i32, i32* [[A]], align 4
; CHECK-NEXT:    [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
; CHECK-NEXT:    store i32 [[ADD]], i32* [[A]], align 4
; CHECK-NEXT:    [[TMP0:%.*]] = cmpxchg i32** [[J]], i32* null, i32* [[A]] seq_cst monotonic
; CHECK-NEXT:    [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    [[TMP1:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
; CHECK-NEXT:    [[TMP2:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP1]]
; CHECK-NEXT:    br i1 [[TMP2]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
; CHECK:       SP_return:
; CHECK-NEXT:    ret void
; CHECK:       CallStackCheckFailBlk:
; CHECK-NEXT:    call void @__stack_chk_fail()
; CHECK-NEXT:    unreachable
;
entry:
  %retval = alloca i32, align 4
  %a = alloca i32, align 4
  %j = alloca i32*, align 8
  store i32 0, i32* %retval
  %load = load i32, i32* %a, align 4
  %add = add nsw i32 %load, 1
  store i32 %add, i32* %a, align 4

  cmpxchg i32** %j, i32* null, i32* %a seq_cst monotonic
  ret void
}

; A memset with a non-constant length over a bitcast of the alloca counts as
; a capture, so the sspstrong guard setup and re-check must be emitted.
define void @memset_captures(i64 %c) #0 {
; CHECK-LABEL: @memset_captures(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[STACKGUARDSLOT:%.*]] = alloca i8*
; CHECK-NEXT:    [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
; CHECK-NEXT:    [[CADDR:%.*]] = alloca i64, align 8
; CHECK-NEXT:    store i64 %c, i64* [[CADDR]], align 8
; CHECK-NEXT:    [[I:%.*]] = alloca i32, align 4
; CHECK-NEXT:    [[IPTR:%.*]] = bitcast i32* [[I]] to i8*
; CHECK-NEXT:    [[COUNT:%.*]] = load i64, i64* [[CADDR]], align 8
; CHECK-NEXT:    call void @llvm.memset.p0i8.i64(i8* align 4 [[IPTR]], i8 0, i64 [[COUNT]], i1 false)
; CHECK-NEXT:    [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
; CHECK-NEXT:    [[TMP1:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
; CHECK-NEXT:    [[TMP2:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP1]]
; CHECK-NEXT:    br i1 [[TMP2]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
; CHECK:       SP_return:
; CHECK-NEXT:    ret void
; CHECK:       CallStackCheckFailBlk:
; CHECK-NEXT:    call void @__stack_chk_fail()
; CHECK-NEXT:    unreachable
;
entry:
  %c.addr = alloca i64, align 8
  store i64 %c, i64* %c.addr, align 8
  %i = alloca i32, align 4
  %i.ptr = bitcast i32* %i to i8*
  %count = load i64, i64* %c.addr, align 8
  call void @llvm.memset.p0i8.i64(i8* align 4 %i.ptr, i8 0, i64 %count, i1 false)
  ret void
}

declare void @llvm.memset.p0i8.i64(i8* nocapture writeonly, i8, i64, i1 immarg)

; Intentionally does not have any fn attrs.
declare dso_local void @foo(i8*)

; @bar_sspstrong and @bar_nossp are the same function, but differ only in
; function attributes. Test that a callee without stack protector function
; attribute does not trigger a stack guard slot in a caller that also does not
; have a stack protector slot.
; sspstrong caller with a dynamic alloca: expects a guard slot on entry.
define dso_local void @bar_sspstrong(i64 %0) #0 {
; CHECK-LABEL: @bar_sspstrong
; CHECK-NEXT: %StackGuardSlot = alloca i8*
  %2 = alloca i64, align 8
  store i64 %0, i64* %2, align 8
  %3 = load i64, i64* %2, align 8
  %4 = alloca i8, i64 %3, align 16
  call void @foo(i8* %4)
  ret void
}

; Intentionally does not have any fn attrs.
; Same body as @bar_sspstrong but without the sspstrong attribute: no guard
; slot may be inserted, so the first instruction is still the i64 alloca.
define dso_local void @bar_nossp(i64 %0) {
; CHECK-LABEL: @bar_nossp
; CHECK-NEXT: %2 = alloca i64
  %2 = alloca i64, align 8
  store i64 %0, i64* %2, align 8
  %3 = load i64, i64* %2, align 8
  %4 = alloca i8, i64 %3, align 16
  call void @foo(i8* %4)
  ret void
}

attributes #0 = { sspstrong }