1 //! ARM 64-bit Instruction Set Architecture.
2
3 use crate::ir::condcodes::IntCC;
4 use crate::ir::Function;
5 use crate::isa::aarch64::settings as aarch64_settings;
6 use crate::isa::Builder as IsaBuilder;
7 use crate::machinst::{compile, MachBackend, MachCompileResult, TargetIsaAdapter, VCode};
8 use crate::result::CodegenResult;
9 use crate::settings as shared_settings;
10 use alloc::{boxed::Box, vec::Vec};
11 use core::hash::{Hash, Hasher};
12 use regalloc::{PrettyPrint, RealRegUniverse};
13 use target_lexicon::{Aarch64Architecture, Architecture, Triple};
14
15 // New backend:
16 mod abi;
17 pub(crate) mod inst;
18 mod lower;
19 mod lower_inst;
20 mod settings;
21
22 use inst::create_reg_universe;
23
24 use self::inst::EmitInfo;
25
/// An AArch64 backend.
pub struct AArch64Backend {
    /// The target triple this backend was configured for.
    triple: Triple,
    /// Shared (architecture-independent) compilation flags.
    flags: shared_settings::Flags,
    /// AArch64-specific ISA flags.
    isa_flags: aarch64_settings::Flags,
    /// The register universe, precomputed from `flags` at construction time.
    reg_universe: RealRegUniverse,
}
33
34 impl AArch64Backend {
35 /// Create a new AArch64 backend with the given (shared) flags.
new_with_flags( triple: Triple, flags: shared_settings::Flags, isa_flags: aarch64_settings::Flags, ) -> AArch64Backend36 pub fn new_with_flags(
37 triple: Triple,
38 flags: shared_settings::Flags,
39 isa_flags: aarch64_settings::Flags,
40 ) -> AArch64Backend {
41 let reg_universe = create_reg_universe(&flags);
42 AArch64Backend {
43 triple,
44 flags,
45 isa_flags,
46 reg_universe,
47 }
48 }
49
50 /// This performs lowering to VCode, register-allocates the code, computes block layout and
51 /// finalizes branches. The result is ready for binary emission.
compile_vcode( &self, func: &Function, flags: shared_settings::Flags, ) -> CodegenResult<VCode<inst::Inst>>52 fn compile_vcode(
53 &self,
54 func: &Function,
55 flags: shared_settings::Flags,
56 ) -> CodegenResult<VCode<inst::Inst>> {
57 let emit_info = EmitInfo::new(flags.clone());
58 let abi = Box::new(abi::AArch64ABICallee::new(func, flags)?);
59 compile::compile::<AArch64Backend>(func, self, abi, emit_info)
60 }
61 }
62
63 impl MachBackend for AArch64Backend {
compile_function( &self, func: &Function, want_disasm: bool, ) -> CodegenResult<MachCompileResult>64 fn compile_function(
65 &self,
66 func: &Function,
67 want_disasm: bool,
68 ) -> CodegenResult<MachCompileResult> {
69 let flags = self.flags();
70 let vcode = self.compile_vcode(func, flags.clone())?;
71
72 let buffer = vcode.emit();
73 let frame_size = vcode.frame_size();
74 let stackslot_offsets = vcode.stackslot_offsets().clone();
75
76 let disasm = if want_disasm {
77 Some(vcode.show_rru(Some(&create_reg_universe(flags))))
78 } else {
79 None
80 };
81
82 let buffer = buffer.finish();
83
84 Ok(MachCompileResult {
85 buffer,
86 frame_size,
87 disasm,
88 value_labels_ranges: Default::default(),
89 stackslot_offsets,
90 })
91 }
92
name(&self) -> &'static str93 fn name(&self) -> &'static str {
94 "aarch64"
95 }
96
triple(&self) -> Triple97 fn triple(&self) -> Triple {
98 self.triple.clone()
99 }
100
flags(&self) -> &shared_settings::Flags101 fn flags(&self) -> &shared_settings::Flags {
102 &self.flags
103 }
104
isa_flags(&self) -> Vec<shared_settings::Value>105 fn isa_flags(&self) -> Vec<shared_settings::Value> {
106 self.isa_flags.iter().collect()
107 }
108
hash_all_flags(&self, mut hasher: &mut dyn Hasher)109 fn hash_all_flags(&self, mut hasher: &mut dyn Hasher) {
110 self.flags.hash(&mut hasher);
111 self.isa_flags.hash(&mut hasher);
112 }
113
reg_universe(&self) -> &RealRegUniverse114 fn reg_universe(&self) -> &RealRegUniverse {
115 &self.reg_universe
116 }
117
unsigned_add_overflow_condition(&self) -> IntCC118 fn unsigned_add_overflow_condition(&self) -> IntCC {
119 // Unsigned `>=`; this corresponds to the carry flag set on aarch64, which happens on
120 // overflow of an add.
121 IntCC::UnsignedGreaterThanOrEqual
122 }
123
unsigned_sub_overflow_condition(&self) -> IntCC124 fn unsigned_sub_overflow_condition(&self) -> IntCC {
125 // unsigned `<`; this corresponds to the carry flag cleared on aarch64, which happens on
126 // underflow of a subtract (aarch64 follows a carry-cleared-on-borrow convention, the
127 // opposite of x86).
128 IntCC::UnsignedLessThan
129 }
130
131 #[cfg(feature = "unwind")]
emit_unwind_info( &self, result: &MachCompileResult, kind: crate::machinst::UnwindInfoKind, ) -> CodegenResult<Option<crate::isa::unwind::UnwindInfo>>132 fn emit_unwind_info(
133 &self,
134 result: &MachCompileResult,
135 kind: crate::machinst::UnwindInfoKind,
136 ) -> CodegenResult<Option<crate::isa::unwind::UnwindInfo>> {
137 use crate::isa::unwind::UnwindInfo;
138 use crate::machinst::UnwindInfoKind;
139 Ok(match kind {
140 UnwindInfoKind::SystemV => {
141 let mapper = self::inst::unwind::systemv::RegisterMapper;
142 Some(UnwindInfo::SystemV(
143 crate::isa::unwind::systemv::create_unwind_info_from_insts(
144 &result.buffer.unwind_info[..],
145 result.buffer.data.len(),
146 &mapper,
147 )?,
148 ))
149 }
150 UnwindInfoKind::Windows => {
151 // TODO: support Windows unwind info on AArch64
152 None
153 }
154 _ => None,
155 })
156 }
157
158 #[cfg(feature = "unwind")]
create_systemv_cie(&self) -> Option<gimli::write::CommonInformationEntry>159 fn create_systemv_cie(&self) -> Option<gimli::write::CommonInformationEntry> {
160 Some(inst::unwind::systemv::create_cie())
161 }
162 }
163
164 /// Create a new `isa::Builder`.
isa_builder(triple: Triple) -> IsaBuilder165 pub fn isa_builder(triple: Triple) -> IsaBuilder {
166 assert!(triple.architecture == Architecture::Aarch64(Aarch64Architecture::Aarch64));
167 IsaBuilder {
168 triple,
169 setup: aarch64_settings::builder(),
170 constructor: |triple, shared_flags, builder| {
171 let isa_flags = aarch64_settings::Flags::new(&shared_flags, builder);
172 let backend = AArch64Backend::new_with_flags(triple, shared_flags, isa_flags);
173 Box::new(TargetIsaAdapter::new(backend))
174 },
175 }
176 }
177
#[cfg(test)]
mod test {
    use super::*;
    use crate::cursor::{Cursor, FuncCursor};
    use crate::ir::types::*;
    use crate::ir::{AbiParam, ExternalName, Function, InstBuilder, Signature};
    use crate::isa::CallConv;
    use crate::settings;
    use crate::settings::Configurable;
    use core::str::FromStr;
    use target_lexicon::Triple;

    /// Compiles a trivial add-and-return function and checks the emitted
    /// machine code byte-for-byte against a known-good AArch64 encoding.
    #[test]
    fn test_compile_function() {
        // Build: fn test0(arg0: i32) -> i32 { arg0 + 0x1234 }
        let name = ExternalName::testcase("test0");
        let mut sig = Signature::new(CallConv::SystemV);
        sig.params.push(AbiParam::new(I32));
        sig.returns.push(AbiParam::new(I32));
        let mut func = Function::with_name_signature(name, sig);

        let bb0 = func.dfg.make_block();
        let arg0 = func.dfg.append_block_param(bb0, I32);

        let mut pos = FuncCursor::new(&mut func);
        pos.insert_block(bb0);
        let v0 = pos.ins().iconst(I32, 0x1234);
        let v1 = pos.ins().iadd(arg0, v0);
        pos.ins().return_(&[v1]);

        // Compile with optimizations off so the output is deterministic and
        // matches the golden bytes below.
        let mut shared_flags_builder = settings::builder();
        shared_flags_builder.set("opt_level", "none").unwrap();
        let shared_flags = settings::Flags::new(shared_flags_builder);
        let isa_flags = aarch64_settings::Flags::new(&shared_flags, aarch64_settings::builder());
        let backend = AArch64Backend::new_with_flags(
            Triple::from_str("aarch64").unwrap(),
            shared_flags,
            isa_flags,
        );
        let buffer = backend.compile_function(&mut func, false).unwrap().buffer;
        let code = &buffer.data[..];

        // Expected disassembly of the golden bytes:
        // stp x29, x30, [sp, #-16]!
        // mov x29, sp
        // mov x1, #0x1234
        // add w0, w0, w1
        // ldp x29, x30, [sp], #16
        // ret
        let golden = vec![
            0xfd, 0x7b, 0xbf, 0xa9, 0xfd, 0x03, 0x00, 0x91, 0x81, 0x46, 0x82, 0xd2, 0x00, 0x00,
            0x01, 0x0b, 0xfd, 0x7b, 0xc1, 0xa8, 0xc0, 0x03, 0x5f, 0xd6,
        ];

        assert_eq!(code, &golden[..]);
    }

    /// Compiles a small CFG with conditional branches (including a
    /// self-loop and a back-edge) and checks the emitted code against a
    /// golden encoding, exercising branch lowering and block layout.
    #[test]
    fn test_branch_lowering() {
        let name = ExternalName::testcase("test0");
        let mut sig = Signature::new(CallConv::SystemV);
        sig.params.push(AbiParam::new(I32));
        sig.returns.push(AbiParam::new(I32));
        let mut func = Function::with_name_signature(name, sig);

        // Four blocks: bb0 branches to bb1/bb2; bb1 to bb2/bb3; bb2 loops on
        // itself or jumps back to bb1; bb3 returns.
        let bb0 = func.dfg.make_block();
        let arg0 = func.dfg.append_block_param(bb0, I32);
        let bb1 = func.dfg.make_block();
        let bb2 = func.dfg.make_block();
        let bb3 = func.dfg.make_block();

        let mut pos = FuncCursor::new(&mut func);
        pos.insert_block(bb0);
        let v0 = pos.ins().iconst(I32, 0x1234);
        let v1 = pos.ins().iadd(arg0, v0);
        pos.ins().brnz(v1, bb1, &[]);
        pos.ins().jump(bb2, &[]);
        pos.insert_block(bb1);
        pos.ins().brnz(v1, bb2, &[]);
        pos.ins().jump(bb3, &[]);
        pos.insert_block(bb2);
        let v2 = pos.ins().iadd(v1, v0);
        pos.ins().brnz(v2, bb2, &[]);
        pos.ins().jump(bb1, &[]);
        pos.insert_block(bb3);
        let v3 = pos.ins().isub(v1, v0);
        pos.ins().return_(&[v3]);

        // Compile with optimizations off so the output is deterministic and
        // matches the golden bytes below.
        let mut shared_flags_builder = settings::builder();
        shared_flags_builder.set("opt_level", "none").unwrap();
        let shared_flags = settings::Flags::new(shared_flags_builder);
        let isa_flags = aarch64_settings::Flags::new(&shared_flags, aarch64_settings::builder());
        let backend = AArch64Backend::new_with_flags(
            Triple::from_str("aarch64").unwrap(),
            shared_flags,
            isa_flags,
        );
        let result = backend
            .compile_function(&mut func, /* want_disasm = */ false)
            .unwrap();
        let code = &result.buffer.data[..];

        // Expected disassembly of the golden bytes (note: listed in decimal
        // below, unlike the hex in the test above):
        // stp x29, x30, [sp, #-16]!
        // mov x29, sp
        // mov x1, #0x1234 // #4660
        // add w0, w0, w1
        // mov w1, w0
        // cbnz x1, 0x28
        // mov x1, #0x1234 // #4660
        // add w1, w0, w1
        // mov w1, w1
        // cbnz x1, 0x18
        // mov w1, w0
        // cbnz x1, 0x18
        // mov x1, #0x1234 // #4660
        // sub w0, w0, w1
        // ldp x29, x30, [sp], #16
        // ret
        let golden = vec![
            253, 123, 191, 169, 253, 3, 0, 145, 129, 70, 130, 210, 0, 0, 1, 11, 225, 3, 0, 42, 161,
            0, 0, 181, 129, 70, 130, 210, 1, 0, 1, 11, 225, 3, 1, 42, 161, 255, 255, 181, 225, 3,
            0, 42, 97, 255, 255, 181, 129, 70, 130, 210, 0, 0, 1, 75, 253, 123, 193, 168, 192, 3,
            95, 214,
        ];

        assert_eq!(code, &golden[..]);
    }
}
304