1 //! Implementation of the 32-bit ARM ABI.
2
3 use crate::ir;
4 use crate::ir::types::*;
5 use crate::isa;
6 use crate::isa::arm32::inst::*;
7 use crate::machinst::*;
8 use crate::settings;
9 use crate::{CodegenError, CodegenResult};
10 use alloc::boxed::Box;
11 use alloc::vec::Vec;
12 use regalloc::{RealReg, Reg, RegClass, Set, Writable};
13 use smallvec::{smallvec, SmallVec};
14
15 /// Support for the ARM ABI from the callee side (within a function body).
16 pub(crate) type Arm32ABICallee = ABICalleeImpl<Arm32MachineDeps>;
17
18 /// Support for the ARM ABI from the caller side (at a callsite).
19 pub(crate) type Arm32ABICaller = ABICallerImpl<Arm32MachineDeps>;
20
21 /// This is the limit for the size of argument and return-value areas on the
22 /// stack. We place a reasonable limit here to avoid integer overflow issues
23 /// with 32-bit arithmetic: for now, 128 MB.
24 static STACK_ARG_RET_SIZE_LIMIT: u64 = 128 * 1024 * 1024;
25
26 /// ARM-specific ABI behavior. This struct just serves as an implementation
27 /// point for the trait; it is never actually instantiated.
28 pub(crate) struct Arm32MachineDeps;
29
30 impl Into<AMode> for StackAMode {
into(self) -> AMode31 fn into(self) -> AMode {
32 match self {
33 StackAMode::FPOffset(off, ty) => AMode::FPOffset(off, ty),
34 StackAMode::NominalSPOffset(off, ty) => AMode::NominalSPOffset(off, ty),
35 StackAMode::SPOffset(off, ty) => AMode::SPOffset(off, ty),
36 }
37 }
38 }
39
40 impl ABIMachineSpec for Arm32MachineDeps {
41 type I = Inst;
42
    /// ARM32 machine words (and general-purpose registers) are 32 bits wide.
    fn word_bits() -> u32 {
        32
    }
46
    /// Return required stack alignment in bytes.
    ///
    /// Always 8 bytes regardless of calling convention (AAPCS32 requires
    /// 8-byte stack alignment at public interfaces).
    fn stack_align(_call_conv: isa::CallConv) -> u32 {
        8
    }
51
compute_arg_locs( _call_conv: isa::CallConv, _flags: &settings::Flags, params: &[ir::AbiParam], args_or_rets: ArgsOrRets, add_ret_area_ptr: bool, ) -> CodegenResult<(Vec<ABIArg>, i64, Option<usize>)>52 fn compute_arg_locs(
53 _call_conv: isa::CallConv,
54 _flags: &settings::Flags,
55 params: &[ir::AbiParam],
56 args_or_rets: ArgsOrRets,
57 add_ret_area_ptr: bool,
58 ) -> CodegenResult<(Vec<ABIArg>, i64, Option<usize>)> {
59 let mut next_rreg = 0;
60 let mut next_stack: u64 = 0;
61 let mut ret = vec![];
62 let mut stack_args = vec![];
63
64 let max_reg_val = 4; // r0-r3
65
66 for i in 0..params.len() {
67 let param = params[i];
68
69 // Validate "purpose".
70 match ¶m.purpose {
71 &ir::ArgumentPurpose::VMContext
72 | &ir::ArgumentPurpose::Normal
73 | &ir::ArgumentPurpose::StackLimit
74 | &ir::ArgumentPurpose::SignatureId => {}
75 _ => panic!(
76 "Unsupported argument purpose {:?} in signature: {:?}",
77 param.purpose, params
78 ),
79 }
80 assert!(param.value_type.bits() <= 32);
81
82 if next_rreg < max_reg_val {
83 let reg = rreg(next_rreg);
84
85 ret.push(ABIArg::reg(
86 reg.to_real_reg(),
87 param.value_type,
88 param.extension,
89 param.purpose,
90 ));
91 next_rreg += 1;
92 } else {
93 // Arguments are stored on stack in reversed order.
94 // https://static.docs.arm.com/ihi0042/g/aapcs32.pdf
95
96 // Stack offset is not known yet. Store param info for later.
97 stack_args.push((param.value_type, param.extension, param.purpose));
98 next_stack += 4;
99 }
100 }
101
102 let extra_arg = if add_ret_area_ptr {
103 debug_assert!(args_or_rets == ArgsOrRets::Args);
104 if next_rreg < max_reg_val {
105 ret.push(ABIArg::reg(
106 rreg(next_rreg).to_real_reg(),
107 I32,
108 ir::ArgumentExtension::None,
109 ir::ArgumentPurpose::Normal,
110 ));
111 } else {
112 stack_args.push((
113 I32,
114 ir::ArgumentExtension::None,
115 ir::ArgumentPurpose::Normal,
116 ));
117 next_stack += 4;
118 }
119 Some(ret.len() - 1)
120 } else {
121 None
122 };
123
124 // Now we can assign proper stack offsets to params.
125 let max_stack = next_stack;
126 for (ty, ext, purpose) in stack_args.into_iter().rev() {
127 next_stack -= 4;
128 ret.push(ABIArg::stack(
129 (max_stack - next_stack) as i64,
130 ty,
131 ext,
132 purpose,
133 ));
134 }
135 assert_eq!(next_stack, 0);
136
137 next_stack = (next_stack + 7) & !7;
138
139 // To avoid overflow issues, limit the arg/return size to something
140 // reasonable -- here, 128 MB.
141 if next_stack > STACK_ARG_RET_SIZE_LIMIT {
142 return Err(CodegenError::ImplLimitExceeded);
143 }
144
145 Ok((ret, next_stack as i64, extra_arg))
146 }
147
    /// Offset from the frame pointer to the first incoming stack argument.
    ///
    /// The prologue pushes {fp, lr} (2 registers * 4 bytes) below the
    /// incoming arguments, so they start 8 bytes above the saved FP.
    fn fp_to_arg_offset(_call_conv: isa::CallConv, _flags: &settings::Flags) -> i64 {
        8 // frame pointer and link register
    }
151
gen_load_stack(mem: StackAMode, into_reg: Writable<Reg>, ty: Type) -> Inst152 fn gen_load_stack(mem: StackAMode, into_reg: Writable<Reg>, ty: Type) -> Inst {
153 Inst::gen_load(into_reg, mem.into(), ty)
154 }
155
gen_store_stack(mem: StackAMode, from_reg: Reg, ty: Type) -> Inst156 fn gen_store_stack(mem: StackAMode, from_reg: Reg, ty: Type) -> Inst {
157 Inst::gen_store(from_reg, mem.into(), ty)
158 }
159
    /// Generate a register-to-register move of a value of type `ty`.
    fn gen_move(to_reg: Writable<Reg>, from_reg: Reg, ty: Type) -> Inst {
        Inst::gen_move(to_reg, from_reg, ty)
    }
163
gen_extend( to_reg: Writable<Reg>, from_reg: Reg, is_signed: bool, from_bits: u8, to_bits: u8, ) -> Inst164 fn gen_extend(
165 to_reg: Writable<Reg>,
166 from_reg: Reg,
167 is_signed: bool,
168 from_bits: u8,
169 to_bits: u8,
170 ) -> Inst {
171 assert!(to_bits == 32);
172 assert!(from_bits < 32);
173 Inst::Extend {
174 rd: to_reg,
175 rm: from_reg,
176 signed: is_signed,
177 from_bits,
178 }
179 }
180
    /// Generate a plain return instruction.
    fn gen_ret() -> Inst {
        Inst::Ret
    }
184
    /// Generate a placeholder instruction that is replaced by the real
    /// epilogue once the final frame layout is known.
    fn gen_epilogue_placeholder() -> Inst {
        Inst::EpiloguePlaceholder
    }
188
gen_add_imm(into_reg: Writable<Reg>, from_reg: Reg, imm: u32) -> SmallInstVec<Inst>189 fn gen_add_imm(into_reg: Writable<Reg>, from_reg: Reg, imm: u32) -> SmallInstVec<Inst> {
190 let mut insts = SmallVec::new();
191
192 if let Some(imm12) = UImm12::maybe_from_i64(imm as i64) {
193 insts.push(Inst::AluRRImm12 {
194 alu_op: ALUOp::Add,
195 rd: into_reg,
196 rn: from_reg,
197 imm12,
198 });
199 } else {
200 let scratch2 = writable_tmp2_reg();
201 insts.extend(Inst::load_constant(scratch2, imm));
202 insts.push(Inst::AluRRRShift {
203 alu_op: ALUOp::Add,
204 rd: into_reg,
205 rn: from_reg,
206 rm: scratch2.to_reg(),
207 shift: None,
208 });
209 }
210 insts
211 }
212
gen_stack_lower_bound_trap(limit_reg: Reg) -> SmallInstVec<Inst>213 fn gen_stack_lower_bound_trap(limit_reg: Reg) -> SmallInstVec<Inst> {
214 let mut insts = SmallVec::new();
215 insts.push(Inst::Cmp {
216 rn: sp_reg(),
217 rm: limit_reg,
218 });
219 insts.push(Inst::TrapIf {
220 trap_info: ir::TrapCode::StackOverflow,
221 // Here `Lo` == "less than" when interpreting the two
222 // operands as unsigned integers.
223 cond: Cond::Lo,
224 });
225 insts
226 }
227
gen_get_stack_addr(mem: StackAMode, into_reg: Writable<Reg>, _ty: Type) -> Inst228 fn gen_get_stack_addr(mem: StackAMode, into_reg: Writable<Reg>, _ty: Type) -> Inst {
229 let mem = mem.into();
230 Inst::LoadAddr { rd: into_reg, mem }
231 }
232
    /// Scratch register used while computing the stack limit: ip (r12).
    fn get_stacklimit_reg() -> Reg {
        ip_reg()
    }
236
gen_load_base_offset(into_reg: Writable<Reg>, base: Reg, offset: i32, ty: Type) -> Inst237 fn gen_load_base_offset(into_reg: Writable<Reg>, base: Reg, offset: i32, ty: Type) -> Inst {
238 let mem = AMode::RegOffset(base, offset as i64);
239 Inst::gen_load(into_reg, mem, ty)
240 }
241
gen_store_base_offset(base: Reg, offset: i32, from_reg: Reg, ty: Type) -> Inst242 fn gen_store_base_offset(base: Reg, offset: i32, from_reg: Reg, ty: Type) -> Inst {
243 let mem = AMode::RegOffset(base, offset as i64);
244 Inst::gen_store(from_reg, mem, ty)
245 }
246
gen_sp_reg_adjust(amount: i32) -> SmallInstVec<Inst>247 fn gen_sp_reg_adjust(amount: i32) -> SmallInstVec<Inst> {
248 let mut ret = SmallVec::new();
249
250 if amount == 0 {
251 return ret;
252 }
253 let (amount, is_sub) = if amount > 0 {
254 (amount, false)
255 } else {
256 (-amount, true)
257 };
258
259 let alu_op = if is_sub { ALUOp::Sub } else { ALUOp::Add };
260
261 if let Some(imm12) = UImm12::maybe_from_i64(amount as i64) {
262 ret.push(Inst::AluRRImm12 {
263 alu_op,
264 rd: writable_sp_reg(),
265 rn: sp_reg(),
266 imm12,
267 });
268 } else {
269 let tmp = writable_ip_reg();
270 ret.extend(Inst::load_constant(tmp, amount as u32));
271 ret.push(Inst::AluRRRShift {
272 alu_op,
273 rd: writable_sp_reg(),
274 rn: sp_reg(),
275 rm: tmp.to_reg(),
276 shift: None,
277 });
278 }
279 ret
280 }
281
gen_nominal_sp_adj(offset: i32) -> Inst282 fn gen_nominal_sp_adj(offset: i32) -> Inst {
283 let offset = i64::from(offset);
284 Inst::VirtualSPOffsetAdj { offset }
285 }
286
gen_prologue_frame_setup(_: &settings::Flags) -> SmallInstVec<Inst>287 fn gen_prologue_frame_setup(_: &settings::Flags) -> SmallInstVec<Inst> {
288 let mut ret = SmallVec::new();
289 let reg_list = vec![fp_reg(), lr_reg()];
290 ret.push(Inst::Push { reg_list });
291 ret.push(Inst::Mov {
292 rd: writable_fp_reg(),
293 rm: sp_reg(),
294 });
295 ret
296 }
297
gen_epilogue_frame_restore(_: &settings::Flags) -> SmallInstVec<Inst>298 fn gen_epilogue_frame_restore(_: &settings::Flags) -> SmallInstVec<Inst> {
299 let mut ret = SmallVec::new();
300 ret.push(Inst::Mov {
301 rd: writable_sp_reg(),
302 rm: fp_reg(),
303 });
304 let reg_list = vec![writable_fp_reg(), writable_lr_reg()];
305 ret.push(Inst::Pop { reg_list });
306 ret
307 }
308
    /// Generate a stack-probe sequence. Not implemented for ARM32: emits no
    /// instructions.
    fn gen_probestack(_: u32) -> SmallInstVec<Self::I> {
        // TODO: implement if we ever require stack probes on ARM32 (unlikely
        // unless Lucet is ported)
        smallvec![]
    }
314
315 /// Returns stack bytes used as well as instructions. Does not adjust
316 /// nominal SP offset; caller will do that.
gen_clobber_save( _call_conv: isa::CallConv, _flags: &settings::Flags, clobbers: &Set<Writable<RealReg>>, fixed_frame_storage_size: u32, _outgoing_args_size: u32, ) -> (u64, SmallVec<[Inst; 16]>)317 fn gen_clobber_save(
318 _call_conv: isa::CallConv,
319 _flags: &settings::Flags,
320 clobbers: &Set<Writable<RealReg>>,
321 fixed_frame_storage_size: u32,
322 _outgoing_args_size: u32,
323 ) -> (u64, SmallVec<[Inst; 16]>) {
324 let mut insts = SmallVec::new();
325 if fixed_frame_storage_size > 0 {
326 insts.extend(Self::gen_sp_reg_adjust(-(fixed_frame_storage_size as i32)).into_iter());
327 }
328 let clobbered_vec = get_callee_saves(clobbers);
329 let mut clobbered_vec: Vec<_> = clobbered_vec
330 .into_iter()
331 .map(|r| r.to_reg().to_reg())
332 .collect();
333 if clobbered_vec.len() % 2 == 1 {
334 // For alignment purposes.
335 clobbered_vec.push(ip_reg());
336 }
337 let stack_used = clobbered_vec.len() * 4;
338 if !clobbered_vec.is_empty() {
339 insts.push(Inst::Push {
340 reg_list: clobbered_vec,
341 });
342 }
343
344 (stack_used as u64, insts)
345 }
346
gen_clobber_restore( _call_conv: isa::CallConv, _flags: &settings::Flags, clobbers: &Set<Writable<RealReg>>, _fixed_frame_storage_size: u32, _outgoing_args_size: u32, ) -> SmallVec<[Inst; 16]>347 fn gen_clobber_restore(
348 _call_conv: isa::CallConv,
349 _flags: &settings::Flags,
350 clobbers: &Set<Writable<RealReg>>,
351 _fixed_frame_storage_size: u32,
352 _outgoing_args_size: u32,
353 ) -> SmallVec<[Inst; 16]> {
354 let mut insts = SmallVec::new();
355 let clobbered_vec = get_callee_saves(clobbers);
356 let mut clobbered_vec: Vec<_> = clobbered_vec
357 .into_iter()
358 .map(|r| Writable::from_reg(r.to_reg().to_reg()))
359 .collect();
360 if clobbered_vec.len() % 2 == 1 {
361 clobbered_vec.push(writable_ip_reg());
362 }
363 if !clobbered_vec.is_empty() {
364 insts.push(Inst::Pop {
365 reg_list: clobbered_vec,
366 });
367 }
368 insts
369 }
370
gen_call( dest: &CallDest, uses: Vec<Reg>, defs: Vec<Writable<Reg>>, opcode: ir::Opcode, tmp: Writable<Reg>, _callee_conv: isa::CallConv, _caller_conv: isa::CallConv, ) -> SmallVec<[(InstIsSafepoint, Inst); 2]>371 fn gen_call(
372 dest: &CallDest,
373 uses: Vec<Reg>,
374 defs: Vec<Writable<Reg>>,
375 opcode: ir::Opcode,
376 tmp: Writable<Reg>,
377 _callee_conv: isa::CallConv,
378 _caller_conv: isa::CallConv,
379 ) -> SmallVec<[(InstIsSafepoint, Inst); 2]> {
380 let mut insts = SmallVec::new();
381 match &dest {
382 &CallDest::ExtName(ref name, RelocDistance::Near) => insts.push((
383 InstIsSafepoint::Yes,
384 Inst::Call {
385 info: Box::new(CallInfo {
386 dest: name.clone(),
387 uses,
388 defs,
389 opcode,
390 }),
391 },
392 )),
393 &CallDest::ExtName(ref name, RelocDistance::Far) => {
394 insts.push((
395 InstIsSafepoint::No,
396 Inst::LoadExtName {
397 rt: tmp,
398 name: Box::new(name.clone()),
399 offset: 0,
400 },
401 ));
402 insts.push((
403 InstIsSafepoint::Yes,
404 Inst::CallInd {
405 info: Box::new(CallIndInfo {
406 rm: tmp.to_reg(),
407 uses,
408 defs,
409 opcode,
410 }),
411 },
412 ));
413 }
414 &CallDest::Reg(reg) => insts.push((
415 InstIsSafepoint::Yes,
416 Inst::CallInd {
417 info: Box::new(CallIndInfo {
418 rm: *reg,
419 uses,
420 defs,
421 opcode,
422 }),
423 },
424 )),
425 }
426
427 insts
428 }
429
    /// Generate a memory copy for struct arguments. Struct arguments are
    /// not supported by this backend yet, so this always panics.
    fn gen_memcpy(
        _call_conv: isa::CallConv,
        _dst: Reg,
        _src: Reg,
        _size: usize,
    ) -> SmallVec<[Self::I; 8]> {
        unimplemented!("StructArgs not implemented for ARM32 yet");
    }
438
    /// Number of spillslots needed for a value of the given register class:
    /// one 32-bit slot per integer register. No other register class is
    /// supported by this backend.
    fn get_number_of_spillslots_for_value(rc: RegClass, _ty: Type) -> u32 {
        match rc {
            RegClass::I32 => 1,
            _ => panic!("Unexpected register class!"),
        }
    }
445
    /// Read the current virtual-SP offset from the emission state.
    fn get_virtual_sp_offset_from_state(s: &EmitState) -> i64 {
        s.virtual_sp_offset
    }
449
    /// Read the nominal-SP-to-FP distance from the emission state.
    fn get_nominal_sp_to_fp(s: &EmitState) -> i64 {
        s.nominal_sp_to_fp
    }
453
get_regs_clobbered_by_call(_: isa::CallConv) -> Vec<Writable<Reg>>454 fn get_regs_clobbered_by_call(_: isa::CallConv) -> Vec<Writable<Reg>> {
455 let mut caller_saved = Vec::new();
456 for i in 0..15 {
457 let r = writable_rreg(i);
458 if is_reg_clobbered_by_call(r.to_reg().to_real_reg()) {
459 caller_saved.push(r);
460 }
461 }
462 caller_saved
463 }
464
    /// ARM32 imposes no extra argument-extension requirements beyond what
    /// the signature itself specifies.
    fn get_ext_mode(
        _call_conv: isa::CallConv,
        specified: ir::ArgumentExtension,
    ) -> ir::ArgumentExtension {
        specified
    }
471 }
472
is_callee_save(r: RealReg) -> bool473 fn is_callee_save(r: RealReg) -> bool {
474 let enc = r.get_hw_encoding();
475 4 <= enc && enc <= 10
476 }
477
get_callee_saves(regs: &Set<Writable<RealReg>>) -> Vec<Writable<RealReg>>478 fn get_callee_saves(regs: &Set<Writable<RealReg>>) -> Vec<Writable<RealReg>> {
479 let mut ret = Vec::new();
480 for ® in regs.iter() {
481 if is_callee_save(reg.to_reg()) {
482 ret.push(reg);
483 }
484 }
485
486 // Sort registers for deterministic code output.
487 ret.sort_by_key(|r| r.to_reg().get_index());
488 ret
489 }
490
is_reg_clobbered_by_call(r: RealReg) -> bool491 fn is_reg_clobbered_by_call(r: RealReg) -> bool {
492 let enc = r.get_hw_encoding();
493 enc <= 3
494 }
495