Skip to content

Commit f29366b

Browse files
committed
StackProtector: Use PointerMayBeCaptured
This was using its own, outdated list of possible captures. This was at minimum not catching cmpxchg and addrspacecast captures. One change is that now any volatile access is treated as capturing. The test coverage for this pass is quite inadequate, but this required removing volatile in the lifetime capture test. Also fixes some infrastructure issues to allow running just the IR pass. Fixes bug 42238. llvm-svn: 363169
1 parent 61f6395 commit f29366b

File tree

6 files changed

+149
-43
lines changed

6 files changed

+149
-43
lines changed

llvm/include/llvm/CodeGen/StackProtector.h

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -61,12 +61,6 @@ class StackProtector : public FunctionPass {
6161
/// protection when -fstack-protection is used.
6262
unsigned SSPBufferSize = 0;
6363

64-
/// VisitedPHIs - The set of PHI nodes visited when determining
65-
/// if a variable's reference has been taken. This set
66-
/// is maintained to ensure we don't visit the same PHI node multiple
67-
/// times.
68-
SmallPtrSet<const PHINode *, 16> VisitedPHIs;
69-
7064
// A prologue is generated.
7165
bool HasPrologue = false;
7266

llvm/lib/CodeGen/StackProtector.cpp

Lines changed: 4 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
#include "llvm/ADT/SmallPtrSet.h"
1818
#include "llvm/ADT/Statistic.h"
1919
#include "llvm/Analysis/BranchProbabilityInfo.h"
20+
#include "llvm/Analysis/CaptureTracking.h"
2021
#include "llvm/Analysis/EHPersonalities.h"
2122
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
2223
#include "llvm/CodeGen/Passes.h"
@@ -156,40 +157,6 @@ bool StackProtector::ContainsProtectableArray(Type *Ty, bool &IsLarge,
156157
return NeedsProtector;
157158
}
158159

159-
bool StackProtector::HasAddressTaken(const Instruction *AI) {
160-
for (const User *U : AI->users()) {
161-
if (const StoreInst *SI = dyn_cast<StoreInst>(U)) {
162-
if (AI == SI->getValueOperand())
163-
return true;
164-
} else if (const PtrToIntInst *SI = dyn_cast<PtrToIntInst>(U)) {
165-
if (AI == SI->getOperand(0))
166-
return true;
167-
} else if (const CallInst *CI = dyn_cast<CallInst>(U)) {
168-
// Ignore intrinsics that are not calls. TODO: Use isLoweredToCall().
169-
if (!isa<DbgInfoIntrinsic>(CI) && !CI->isLifetimeStartOrEnd())
170-
return true;
171-
} else if (isa<InvokeInst>(U)) {
172-
return true;
173-
} else if (const SelectInst *SI = dyn_cast<SelectInst>(U)) {
174-
if (HasAddressTaken(SI))
175-
return true;
176-
} else if (const PHINode *PN = dyn_cast<PHINode>(U)) {
177-
// Keep track of what PHI nodes we have already visited to ensure
178-
// they are only visited once.
179-
if (VisitedPHIs.insert(PN).second)
180-
if (HasAddressTaken(PN))
181-
return true;
182-
} else if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(U)) {
183-
if (HasAddressTaken(GEP))
184-
return true;
185-
} else if (const BitCastInst *BI = dyn_cast<BitCastInst>(U)) {
186-
if (HasAddressTaken(BI))
187-
return true;
188-
}
189-
}
190-
return false;
191-
}
192-
193160
/// Search for the first call to the llvm.stackprotector intrinsic and return it
194161
/// if present.
195162
static const CallInst *findStackProtectorIntrinsic(Function &F) {
@@ -297,7 +264,9 @@ bool StackProtector::RequiresStackProtector() {
297264
continue;
298265
}
299266

300-
if (Strong && HasAddressTaken(AI)) {
267+
if (Strong && PointerMayBeCaptured(AI,
268+
/* ReturnCaptures */ false,
269+
/* StoreCaptures */ true)) {
301270
++NumAddrTaken;
302271
Layout.insert(std::make_pair(AI, MachineFrameInfo::SSPLK_AddrOf));
303272
ORE.emit([&]() {

llvm/test/CodeGen/X86/stack-protector.ll

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4087,8 +4087,8 @@ define i32 @IgnoreIntrinsicTest() #1 {
40874087
%1 = alloca i32, align 4
40884088
%2 = bitcast i32* %1 to i8*
40894089
call void @llvm.lifetime.start.p0i8(i64 4, i8* nonnull %2)
4090-
store volatile i32 1, i32* %1, align 4
4091-
%3 = load volatile i32, i32* %1, align 4
4090+
store i32 1, i32* %1, align 4
4091+
%3 = load i32, i32* %1, align 4
40924092
%4 = mul nsw i32 %3, 42
40934093
call void @llvm.lifetime.end.p0i8(i64 4, i8* nonnull %2)
40944094
ret i32 %4
Lines changed: 139 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,139 @@
1+
; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
2+
; RUN: opt -S -mtriple=x86_64-pc-linux-gnu -stack-protector < %s | FileCheck %s
3+
; Bug 42238: Test some situations missed by old, custom capture tracking.
4+
5+
define void @store_captures() #0 {
6+
; CHECK-LABEL: @store_captures(
7+
; CHECK-NEXT: entry:
8+
; CHECK-NEXT: [[STACKGUARDSLOT:%.*]] = alloca i8*
9+
; CHECK-NEXT: [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
10+
; CHECK-NEXT: call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
11+
; CHECK-NEXT: [[RETVAL:%.*]] = alloca i32, align 4
12+
; CHECK-NEXT: [[A:%.*]] = alloca i32, align 4
13+
; CHECK-NEXT: [[J:%.*]] = alloca i32*, align 8
14+
; CHECK-NEXT: store i32 0, i32* [[RETVAL]]
15+
; CHECK-NEXT: [[LOAD:%.*]] = load i32, i32* [[A]], align 4
16+
; CHECK-NEXT: [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
17+
; CHECK-NEXT: store i32 [[ADD]], i32* [[A]], align 4
18+
; CHECK-NEXT: store i32* [[A]], i32** [[J]], align 8
19+
; CHECK-NEXT: [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
20+
; CHECK-NEXT: [[TMP0:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
21+
; CHECK-NEXT: [[TMP1:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP0]]
22+
; CHECK-NEXT: br i1 [[TMP1]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
23+
; CHECK: SP_return:
24+
; CHECK-NEXT: ret void
25+
; CHECK: CallStackCheckFailBlk:
26+
; CHECK-NEXT: call void @__stack_chk_fail()
27+
; CHECK-NEXT: unreachable
28+
;
29+
entry:
30+
%retval = alloca i32, align 4
31+
%a = alloca i32, align 4
32+
%j = alloca i32*, align 8
33+
store i32 0, i32* %retval
34+
%load = load i32, i32* %a, align 4
35+
%add = add nsw i32 %load, 1
36+
store i32 %add, i32* %a, align 4
37+
store i32* %a, i32** %j, align 8
38+
ret void
39+
}
40+
41+
define i32* @return_captures() #0 {
42+
; CHECK-LABEL: @return_captures(
43+
; CHECK-NEXT: entry:
44+
; CHECK-NEXT: [[RETVAL:%.*]] = alloca i32, align 4
45+
; CHECK-NEXT: [[A:%.*]] = alloca i32, align 4
46+
; CHECK-NEXT: [[J:%.*]] = alloca i32*, align 8
47+
; CHECK-NEXT: store i32 0, i32* [[RETVAL]]
48+
; CHECK-NEXT: [[LOAD:%.*]] = load i32, i32* [[A]], align 4
49+
; CHECK-NEXT: [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
50+
; CHECK-NEXT: store i32 [[ADD]], i32* [[A]], align 4
51+
; CHECK-NEXT: ret i32* [[A]]
52+
;
53+
entry:
54+
%retval = alloca i32, align 4
55+
%a = alloca i32, align 4
56+
%j = alloca i32*, align 8
57+
store i32 0, i32* %retval
58+
%load = load i32, i32* %a, align 4
59+
%add = add nsw i32 %load, 1
60+
store i32 %add, i32* %a, align 4
61+
ret i32* %a
62+
}
63+
64+
define void @store_addrspacecast_captures() #0 {
65+
; CHECK-LABEL: @store_addrspacecast_captures(
66+
; CHECK-NEXT: entry:
67+
; CHECK-NEXT: [[STACKGUARDSLOT:%.*]] = alloca i8*
68+
; CHECK-NEXT: [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
69+
; CHECK-NEXT: call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
70+
; CHECK-NEXT: [[RETVAL:%.*]] = alloca i32, align 4
71+
; CHECK-NEXT: [[A:%.*]] = alloca i32, align 4
72+
; CHECK-NEXT: [[J:%.*]] = alloca i32 addrspace(1)*, align 8
73+
; CHECK-NEXT: store i32 0, i32* [[RETVAL]]
74+
; CHECK-NEXT: [[LOAD:%.*]] = load i32, i32* [[A]], align 4
75+
; CHECK-NEXT: [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
76+
; CHECK-NEXT: store i32 [[ADD]], i32* [[A]], align 4
77+
; CHECK-NEXT: [[A_ADDRSPACECAST:%.*]] = addrspacecast i32* [[A]] to i32 addrspace(1)*
78+
; CHECK-NEXT: store i32 addrspace(1)* [[A_ADDRSPACECAST]], i32 addrspace(1)** [[J]], align 8
79+
; CHECK-NEXT: [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
80+
; CHECK-NEXT: [[TMP0:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
81+
; CHECK-NEXT: [[TMP1:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP0]]
82+
; CHECK-NEXT: br i1 [[TMP1]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
83+
; CHECK: SP_return:
84+
; CHECK-NEXT: ret void
85+
; CHECK: CallStackCheckFailBlk:
86+
; CHECK-NEXT: call void @__stack_chk_fail()
87+
; CHECK-NEXT: unreachable
88+
;
89+
entry:
90+
%retval = alloca i32, align 4
91+
%a = alloca i32, align 4
92+
%j = alloca i32 addrspace(1)*, align 8
93+
store i32 0, i32* %retval
94+
%load = load i32, i32* %a, align 4
95+
%add = add nsw i32 %load, 1
96+
store i32 %add, i32* %a, align 4
97+
%a.addrspacecast = addrspacecast i32* %a to i32 addrspace(1)*
98+
store i32 addrspace(1)* %a.addrspacecast, i32 addrspace(1)** %j, align 8
99+
ret void
100+
}
101+
102+
define void @cmpxchg_captures() #0 {
103+
; CHECK-LABEL: @cmpxchg_captures(
104+
; CHECK-NEXT: entry:
105+
; CHECK-NEXT: [[STACKGUARDSLOT:%.*]] = alloca i8*
106+
; CHECK-NEXT: [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
107+
; CHECK-NEXT: call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
108+
; CHECK-NEXT: [[RETVAL:%.*]] = alloca i32, align 4
109+
; CHECK-NEXT: [[A:%.*]] = alloca i32, align 4
110+
; CHECK-NEXT: [[J:%.*]] = alloca i32*, align 8
111+
; CHECK-NEXT: store i32 0, i32* [[RETVAL]]
112+
; CHECK-NEXT: [[LOAD:%.*]] = load i32, i32* [[A]], align 4
113+
; CHECK-NEXT: [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
114+
; CHECK-NEXT: store i32 [[ADD]], i32* [[A]], align 4
115+
; CHECK-NEXT: [[TMP0:%.*]] = cmpxchg i32** [[J]], i32* [[A]], i32* null seq_cst monotonic
116+
; CHECK-NEXT: [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
117+
; CHECK-NEXT: [[TMP1:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
118+
; CHECK-NEXT: [[TMP2:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP1]]
119+
; CHECK-NEXT: br i1 [[TMP2]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
120+
; CHECK: SP_return:
121+
; CHECK-NEXT: ret void
122+
; CHECK: CallStackCheckFailBlk:
123+
; CHECK-NEXT: call void @__stack_chk_fail()
124+
; CHECK-NEXT: unreachable
125+
;
126+
entry:
127+
%retval = alloca i32, align 4
128+
%a = alloca i32, align 4
129+
%j = alloca i32*, align 8
130+
store i32 0, i32* %retval
131+
%load = load i32, i32* %a, align 4
132+
%add = add nsw i32 %load, 1
133+
store i32 %add, i32* %a, align 4
134+
135+
cmpxchg i32** %j, i32* %a, i32* null seq_cst monotonic
136+
ret void
137+
}
138+
139+
attributes #0 = { sspstrong }
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
if not 'X86' in config.root.targets:
2+
config.unsupported = True
3+

llvm/tools/opt/opt.cpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -517,6 +517,7 @@ int main(int argc, char **argv) {
517517
initializeDwarfEHPreparePass(Registry);
518518
initializeSafeStackLegacyPassPass(Registry);
519519
initializeSjLjEHPreparePass(Registry);
520+
initializeStackProtectorPass(Registry);
520521
initializePreISelIntrinsicLoweringLegacyPassPass(Registry);
521522
initializeGlobalMergePass(Registry);
522523
initializeIndirectBrExpandPassPass(Registry);

0 commit comments

Comments
 (0)