; RUN: llc -verify-machineinstrs < %s -mtriple=aarch64-none-linux-gnu | FileCheck %s --check-prefix=CHECK

; Checks that AArch64 instruction selection produces bitfield operations
; (sxtb/sxth/sxtw, sbfx/ubfx, and immediate-form shift aliases) for
; extensions, truncations and constant shifts. Stores to these globals are
; volatile so no intermediate value is optimized away.
@var32 = global i32 0
@var64 = global i64 0

; i8 -> i32/i64 extensions: sext selects sxtb, zext selects an AND mask.
define void @test_extendb(i8 %var) {
; CHECK-LABEL: test_extendb:

  %sxt32 = sext i8 %var to i32
  store volatile i32 %sxt32, i32* @var32
; CHECK: sxtb {{w[0-9]+}}, {{w[0-9]+}}

  %sxt64 = sext i8 %var to i64
  store volatile i64 %sxt64, i64* @var64
; CHECK: sxtb {{x[0-9]+}}, {{w[0-9]+}}

; N.b. this doesn't actually produce a bitfield instruction at the
; moment, but it's still a good test to have and the semantics are
; correct.
  %uxt32 = zext i8 %var to i32
  store volatile i32 %uxt32, i32* @var32
; CHECK: and {{w[0-9]+}}, {{w[0-9]+}}, #0xff

  %uxt64 = zext i8 %var to i64
  store volatile i64 %uxt64, i64* @var64
; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xff
  ret void
}

; i16 -> i32/i64 extensions: sext selects sxth, zext selects an AND mask.
define void @test_extendh(i16 %var) {
; CHECK-LABEL: test_extendh:

  %sxt32 = sext i16 %var to i32
  store volatile i32 %sxt32, i32* @var32
; CHECK: sxth {{w[0-9]+}}, {{w[0-9]+}}

  %sxt64 = sext i16 %var to i64
  store volatile i64 %sxt64, i64* @var64
; CHECK: sxth {{x[0-9]+}}, {{w[0-9]+}}

; N.b. this doesn't actually produce a bitfield instruction at the
; moment, but it's still a good test to have and the semantics are
; correct.
  %uxt32 = zext i16 %var to i32
  store volatile i32 %uxt32, i32* @var32
; CHECK: and {{w[0-9]+}}, {{w[0-9]+}}, #0xffff

  %uxt64 = zext i16 %var to i64
  store volatile i64 %uxt64, i64* @var64
; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xffff
  ret void
}

; i32 -> i64 extensions: sext selects sxtw, zext selects a 32-bit ubfx.
define void @test_extendw(i32 %var) {
; CHECK-LABEL: test_extendw:

  %sxt64 = sext i32 %var to i64
  store volatile i64 %sxt64, i64* @var64
; CHECK: sxtw {{x[0-9]+}}, {{w[0-9]+}}

  %uxt64 = zext i32 %var to i64
  store volatile i64 %uxt64, i64* @var64
; CHECK: ubfx {{x[0-9]+}}, {{x[0-9]+}}, #0, #32
  ret void
}

; Constant shifts select the asr/lsr/lsl immediate aliases of the bitfield
; instructions, including the boundary amounts (#1, #31, #63).
define void @test_shifts(i32 %val32, i64 %val64) {
; CHECK-LABEL: test_shifts:

  %shift1 = ashr i32 %val32, 31
  store volatile i32 %shift1, i32* @var32
; CHECK: asr {{w[0-9]+}}, {{w[0-9]+}}, #31

  %shift2 = lshr i32 %val32, 8
  store volatile i32 %shift2, i32* @var32
; CHECK: lsr {{w[0-9]+}}, {{w[0-9]+}}, #8

  %shift3 = shl i32 %val32, 1
  store volatile i32 %shift3, i32* @var32
; CHECK: lsl {{w[0-9]+}}, {{w[0-9]+}}, #1

  %shift4 = ashr i64 %val64, 31
  store volatile i64 %shift4, i64* @var64
; CHECK: asr {{x[0-9]+}}, {{x[0-9]+}}, #31

  %shift5 = lshr i64 %val64, 8
  store volatile i64 %shift5, i64* @var64
; CHECK: lsr {{x[0-9]+}}, {{x[0-9]+}}, #8

  %shift6 = shl i64 %val64, 63
  store volatile i64 %shift6, i64* @var64
; CHECK: lsl {{x[0-9]+}}, {{x[0-9]+}}, #63

  %shift7 = ashr i64 %val64, 63
  store volatile i64 %shift7, i64* @var64
; CHECK: asr {{x[0-9]+}}, {{x[0-9]+}}, #63

  %shift8 = lshr i64 %val64, 63
  store volatile i64 %shift8, i64* @var64
; CHECK: lsr {{x[0-9]+}}, {{x[0-9]+}}, #63

  %shift9 = lshr i32 %val32, 31
  store volatile i32 %shift9, i32* @var32
; CHECK: lsr {{w[0-9]+}}, {{w[0-9]+}}, #31

  %shift10 = shl i32 %val32, 31
  store volatile i32 %shift10, i32* @var32
; CHECK: lsl {{w[0-9]+}}, {{w[0-9]+}}, #31

  ret void
}

; LLVM can produce in-register extensions taking place entirely with
; 64-bit registers too.
define void @test_sext_inreg_64(i64 %in) {
; CHECK-LABEL: test_sext_inreg_64:

; i1 doesn't have an official alias, but crops up and is handled by
; the bitfield ops.
  %trunc_i1 = trunc i64 %in to i1
  %sext_i1 = sext i1 %trunc_i1 to i64
  store volatile i64 %sext_i1, i64* @var64
; CHECK: sbfx {{x[0-9]+}}, {{x[0-9]+}}, #0, #1

  %trunc_i8 = trunc i64 %in to i8
  %sext_i8 = sext i8 %trunc_i8 to i64
  store volatile i64 %sext_i8, i64* @var64
; CHECK: sxtb {{x[0-9]+}}, {{w[0-9]+}}

  %trunc_i16 = trunc i64 %in to i16
  %sext_i16 = sext i16 %trunc_i16 to i64
  store volatile i64 %sext_i16, i64* @var64
; CHECK: sxth {{x[0-9]+}}, {{w[0-9]+}}

  %trunc_i32 = trunc i64 %in to i32
  %sext_i32 = sext i32 %trunc_i32 to i64
  store volatile i64 %sext_i32, i64* @var64
; CHECK: sxtw {{x[0-9]+}}, {{w[0-9]+}}
  ret void
}

; These instructions don't actually select to official bitfield
; operations, but it's important that we select them somehow:
define void @test_zext_inreg_64(i64 %in) {
; CHECK-LABEL: test_zext_inreg_64:

  %trunc_i8 = trunc i64 %in to i8
  %zext_i8 = zext i8 %trunc_i8 to i64
  store volatile i64 %zext_i8, i64* @var64
; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xff

  %trunc_i16 = trunc i64 %in to i16
  %zext_i16 = zext i16 %trunc_i16 to i64
  store volatile i64 %zext_i16, i64* @var64
; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xffff

  %trunc_i32 = trunc i64 %in to i32
  %zext_i32 = zext i32 %trunc_i32 to i64
  store volatile i64 %zext_i32, i64* @var64
; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xffffffff

  ret void
}

define i64 @test_sext_inreg_from_32(i32 %in) {
; CHECK-LABEL: test_sext_inreg_from_32:

  %small = trunc i32 %in to i1
  %ext = sext i1 %small to i64

  ; Different registers are of course, possible, though suboptimal. This is
  ; making sure that a 64-bit "(sext_inreg (anyext GPR32), i1)" uses the 64-bit
  ; sbfx rather than just 32-bits.
; CHECK: sbfx x0, x0, #0, #1
  ret i64 %ext
}


; lshr-then-mask of a 32-bit value folds into a single ubfx extract.
define i32 @test_ubfx32(i32* %addr) {
; CHECK-LABEL: test_ubfx32:
; CHECK: ubfx {{w[0-9]+}}, {{w[0-9]+}}, #23, #3

   %fields = load i32* %addr
   %shifted = lshr i32 %fields, 23
   %masked = and i32 %shifted, 7
   ret i32 %masked
}

; Same lshr-then-mask fold as test_ubfx32, on the 64-bit registers.
define i64 @test_ubfx64(i64* %addr) {
; CHECK-LABEL: test_ubfx64:
; CHECK: ubfx {{x[0-9]+}}, {{x[0-9]+}}, #25, #10
   %fields = load i64* %addr
   %shifted = lshr i64 %fields, 25
   %masked = and i64 %shifted, 1023
   ret i64 %masked
}

; shl-then-ashr sign-extraction folds into a single sbfx extract.
define i32 @test_sbfx32(i32* %addr) {
; CHECK-LABEL: test_sbfx32:
; CHECK: sbfx {{w[0-9]+}}, {{w[0-9]+}}, #6, #3

   %fields = load i32* %addr
   %shifted = shl i32 %fields, 23
   %extended = ashr i32 %shifted, 29
   ret i32 %extended
}

; Same shl-then-ashr fold as test_sbfx32, on the 64-bit registers.
define i64 @test_sbfx64(i64* %addr) {
; CHECK-LABEL: test_sbfx64:
; CHECK: sbfx {{x[0-9]+}}, {{x[0-9]+}}, #0, #63

   %fields = load i64* %addr
   %shifted = shl i64 %fields, 1
   %extended = ashr i64 %shifted, 1
   ret i64 %extended
}