; RUN: opt -instcombine -S < %s | FileCheck %s

target datalayout = "e-m:o-i64:64-f80:128-n8:16:32:64-S128"
target triple = "x86_64-apple-macosx10.14.0"

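; With dynamic evaluation enabled (last argument true), objectsize of a pointer
; freshly returned by malloc folds back to the requested size %sz, even though
; the allocation is immediately freed.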
; Function Attrs: nounwind ssp uwtable
define i64 @weird_identity_but_ok(i64 %sz) {
entry:
  %call = tail call i8* @malloc(i64 %sz)
  %calc_size = tail call i64 @llvm.objectsize.i64.p0i8(i8* %call, i1 false, i1 true, i1 true)
  tail call void @free(i8* %call)
  ret i64 %calc_size
}

; CHECK:      define i64 @weird_identity_but_ok(i64 %sz)
; CHECK-NEXT: entry:
; CHECK:   ret i64 %sz
; CHECK-NEXT: }

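; When the queried pointer is a phi over two fixed-size allocations, the
; objectsize call is rewritten as a phi over the two constant sizes.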
define i64 @phis_are_neat(i1 %which) {
entry:
  br i1 %which, label %first_label, label %second_label

first_label:
  %first_call = call i8* @malloc(i64 10)
  br label %join_label

second_label:
  %second_call = call i8* @malloc(i64 30)
  br label %join_label

join_label:
  %joined = phi i8* [ %first_call, %first_label ], [ %second_call, %second_label ]
  %calc_size = tail call i64 @llvm.objectsize.i64.p0i8(i8* %joined, i1 false, i1 true, i1 true)
  ret i64 %calc_size
}

; CHECK:      %0 = phi i64 [ 10, %first_label ], [ 30, %second_label ]
; CHECK-NEXT: ret i64 %0

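; For a pointer offset into the allocation, the remaining size is computed with
; a saturating subtraction so the result cannot wrap below zero.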
define i64 @internal_pointer(i64 %sz) {
entry:
  %ptr = call i8* @malloc(i64 %sz)
  %ptr2 = getelementptr inbounds i8, i8* %ptr, i32 2
  %calc_size = call i64 @llvm.objectsize.i64.p0i8(i8* %ptr2, i1 false, i1 true, i1 true)
  ret i64 %calc_size
}

; CHECK:      define i64 @internal_pointer(i64 %sz)
; CHECK-NEXT: entry:
; CHECK-NEXT:   %0 = call i64 @llvm.usub.sat.i64(i64 %sz, i64 2)
; CHECK-NEXT:   ret i64 %0
; CHECK-NEXT: }

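; With null-is-unknown-size (third argument) set to true, objectsize of a null
; pointer cannot be folded; the call must remain.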
define i64 @uses_nullptr_no_fold() {
entry:
  %res = call i64 @llvm.objectsize.i64.p0i8(i8* null, i1 false, i1 true, i1 true)
  ret i64 %res
}

; CHECK: %res = call i64 @llvm.objectsize.i64.p0i8(i8* null, i1 false, i1 true, i1 true)

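; With null-is-unknown-size set to false, objectsize of a null pointer folds to 0.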
define i64 @uses_nullptr_fold() {
entry:
  ; NOTE: the third parameter to this call is false, unlike above.
  %res = call i64 @llvm.objectsize.i64.p0i8(i8* null, i1 false, i1 false, i1 true)
  ret i64 %res
}

; CHECK: ret i64 0

@d = common global i8 0, align 1
@c = common global i32 0, align 4

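; The pointer here is a loop-carried phi advanced by a variable offset each
; iteration, so the objectsize call cannot be folded and must survive.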
; Function Attrs: nounwind
define void @f() {
entry:
  %.pr = load i32, i32* @c, align 4
  %tobool4 = icmp eq i32 %.pr, 0
  br i1 %tobool4, label %for.end, label %for.body

for.body:                                         ; preds = %entry, %for.body
  %dp.05 = phi i8* [ %add.ptr, %for.body ], [ @d, %entry ]
  %0 = tail call i64 @llvm.objectsize.i64.p0i8(i8* %dp.05, i1 false, i1 true, i1 true)
  %conv = trunc i64 %0 to i32
  tail call void @bury(i32 %conv)
  %1 = load i32, i32* @c, align 4
  %idx.ext = sext i32 %1 to i64
  %add.ptr.offs = add i64 %idx.ext, 0
  %2 = add i64 undef, %add.ptr.offs
  %add.ptr = getelementptr inbounds i8, i8* %dp.05, i64 %idx.ext
  %add = shl nsw i32 %1, 1
  store i32 %add, i32* @c, align 4
  %tobool = icmp eq i32 %1, 0
  br i1 %tobool, label %for.end, label %for.body

for.end:                                          ; preds = %for.body, %entry
  ret void
}

; CHECK:   define void @f()
; CHECK:     call i64 @llvm.objectsize.i64.p0i8(

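; A dynamic objectsize of malloc(%alloc) is rewritten to %alloc, the compare
; against -1 folds to false (so only the fortified path remains reachable), and
; an assume recording that %alloc is not -1 is inserted.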
define void @bdos_cmpm1(i64 %alloc) {
entry:
  %obj = call i8* @malloc(i64 %alloc)
  %objsize = call i64 @llvm.objectsize.i64.p0i8(i8* %obj, i1 0, i1 0, i1 1)
  %cmp.not = icmp eq i64 %objsize, -1
  br i1 %cmp.not, label %if.else, label %if.then

if.then:
  call void @fortified_chk(i8* %obj, i64 %objsize)
  br label %if.end

if.else:
  call void @unfortified(i8* %obj, i64 %objsize)
  br label %if.end

if.end:                                           ; preds = %if.else, %if.then
  ret void
}

; CHECK:  define void @bdos_cmpm1(
; CHECK:    [[TMP:%.*]] = icmp ne i64 %alloc, -1
; CHECK-NEXT:    call void @llvm.assume(i1 [[TMP]])
; CHECK-NEXT:    br i1 false, label %if.else, label %if.then
; CHECK:    call void @fortified_chk(i8* %obj, i64 %alloc)

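; Same as above, but the allocation size is a non-trivial expression; the fold
; still applies to the computed size %sz.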
define void @bdos_cmpm1_expr(i64 %alloc, i64 %part) {
entry:
  %sz = udiv i64 %alloc, %part
  %obj = call i8* @malloc(i64 %sz)
  %objsize = call i64 @llvm.objectsize.i64.p0i8(i8* %obj, i1 0, i1 0, i1 1)
  %cmp.not = icmp eq i64 %objsize, -1
  br i1 %cmp.not, label %if.else, label %if.then

if.then:
  call void @fortified_chk(i8* %obj, i64 %objsize)
  br label %if.end

if.else:
  call void @unfortified(i8* %obj, i64 %objsize)
  br label %if.end

if.end:                                           ; preds = %if.else, %if.then
  ret void
}

; CHECK:  define void @bdos_cmpm1_expr(
; CHECK:    [[TMP:%.*]] = icmp ne i64 [[SZ:%.*]], -1
; CHECK-NEXT:    call void @llvm.assume(i1 [[TMP]])
; CHECK-NEXT:    br i1 false, label %if.else, label %if.then
; CHECK:    call void @fortified_chk(i8* %obj, i64 [[SZ]])

declare void @bury(i32) local_unnamed_addr

; Function Attrs: nounwind allocsize(0)
declare i8* @malloc(i64)

declare i8* @get_unknown_buffer()

; Function Attrs: nounwind
declare void @free(i8* nocapture)

; Function Attrs: nounwind readnone speculatable
declare i64 @llvm.objectsize.i64.p0i8(i8*, i1, i1, i1)

declare void @fortified_chk(i8*, i64)

declare void @unfortified(i8*, i64)