#include "sanitizer_common/sanitizer_asm.h"

// The content of this file is AArch64-only:
#if defined(__aarch64__)

// The responsibility of the HWASan entry point in compiler-rt is to primarily
// readjust the stack from the callee and save the current register values to
// the stack.
// This entry point function should be called from a __hwasan_check_* symbol.
// These are generated during a lowering pass in the backend, and are found in
// AArch64AsmPrinter::EmitHwasanMemaccessSymbols(). Please look there for
// further information.
// The __hwasan_check_* caller of this function should have expanded the stack
// and saved the previous values of x0, x1, x29, and x30. This function will
// "consume" these saved values and treats it as part of its own stack frame.
// In this sense, the __hwasan_check_* callee and this function "share" a stack
// frame. This allows us to omit having unwinding information (.cfi_*) present
// in every __hwasan_check_* function, therefore reducing binary size. This is
// particularly important as hwasan_check_* instances are duplicated in every
// translation unit where HWASan is enabled.
// This function calls HwasanTagMismatch to step back into the C++ code that
// completes the stack unwinding and error printing. This function is not
// permitted to return.


// Frame from __hwasan_check_:
// |              ...                |
// |              ...                |
// | Previous stack frames...        |
// +=================================+
// | Unused 8-bytes for maintaining  |
// | 16-byte SP alignment.           |
// +---------------------------------+
// | Return address (x30) for caller |
// | of __hwasan_check_*.            |
// +---------------------------------+
// | Frame address (x29) for caller  |
// | of __hwasan_check_*             |
// +---------------------------------+ <-- [SP + 232]
// |              ...                |
// |                                 |
// | Stack frame space for x2 - x28. |
// |                                 |
// |              ...                |
// +---------------------------------+ <-- [SP + 16]
// |                                 |
// | Saved x1, as __hwasan_check_*   |
// | clobbers it.                    |
// +---------------------------------+
// | Saved x0, likewise above.       |
// +---------------------------------+ <-- [x30 / SP]

// This function takes two arguments:
//   * x0: The data address.
//   * x1: The encoded access info for the failing access.
// NOTE(review): x9 also appears to carry the shadow-memory base set up by the
// __hwasan_check_* caller (it is read below without being initialized here) --
// confirm against AArch64AsmPrinter::EmitHwasanMemaccessSymbols().

// This function has two entry points. The first, __hwasan_tag_mismatch, is used
// by clients that were compiled without short tag checks (i.e. binaries built
// by older compilers and binaries targeting older runtimes). In this case the
// outlined tag check will be missing the code handling short tags (which won't
// be used in the binary's own stack variables but may be used on the heap
// or stack variables in other binaries), so the check needs to be done here.
//
// The second, __hwasan_tag_mismatch_v2, is used by binaries targeting newer
// runtimes. This entry point bypasses the short tag check since it will have
// already been done as part of the outlined tag check. Since tag mismatches are
// uncommon, there isn't a significant performance benefit to being able to
// bypass the check; the main benefits are that we can sometimes avoid
// clobbering the x17 register in error reports, and that the program will have
// a runtime dependency on the __hwasan_tag_mismatch_v2 symbol therefore it will
// fail to start up given an older (i.e. incompatible) runtime.
.section .text
.file "hwasan_tag_mismatch_aarch64.S"
.global __hwasan_tag_mismatch
.type __hwasan_tag_mismatch, %function
__hwasan_tag_mismatch:
  // Compute the granule position one past the end of the access:
  //   x17 = (1 << (access_info & 0xf)) + (address & 0xf)
  // i.e. the access size (encoded as log2 in the low nibble of x1) plus the
  // offset of the access within its 16-byte granule.
  mov x16, #1
  and x17, x1, #0xf
  lsl x16, x16, x17
  and x17, x0, #0xf
  add x17, x16, x17

  // Load the shadow byte again and check whether it is a short tag within the
  // range of the granule position computed above. Shadow address is
  // x9 + (untagged_address >> 4); ubfx extracts address bits [4, 55], which
  // both strips the granule offset and drops the tag byte in bits [56, 63].
  ubfx x16, x0, #4, #52
  ldrb w16, [x9, x16]
  // Shadow values > 0xf are full tags, not short-granule sizes: real mismatch.
  cmp w16, #0xf
  b.hi __hwasan_tag_mismatch_v2
  // Access ends beyond the short granule's valid size: real mismatch.
  cmp w16, w17
  b.lo __hwasan_tag_mismatch_v2

  // Load the real tag from the last byte of the granule and compare against
  // the pointer tag (stored in the top byte of x0).
  orr x16, x0, #0xf
  ldrb w16, [x16]
  cmp x16, x0, lsr #56
  b.ne __hwasan_tag_mismatch_v2

  // Short-granule check passed: this was a valid access after all.
  // Restore x0, x1 and sp to their values from before the __hwasan_tag_mismatch
  // call and resume execution. The post-indexed #256 pops the entire shared
  // frame pictured above (saved x0/x1 plus the x2-x28/x29/x30/padding space).
  ldp x0, x1, [sp], #256
  ret

.global __hwasan_tag_mismatch_v2
.type __hwasan_tag_mismatch_v2, %function
__hwasan_tag_mismatch_v2:
  CFI_STARTPROC

  // Set the CFA to be the return address for caller of __hwasan_check_*. Note
  // that we do not emit CFI predicates to describe the contents of this stack
  // frame, as this proxy entry point should never be debugged. The contents
  // are static and are handled by the unwinder after calling
  // __hwasan_tag_mismatch. The frame pointer is already correctly setup
  // by __hwasan_check_*.
  add x29, sp, #232
  CFI_DEF_CFA(w29, 24)
  CFI_OFFSET(w30, -16)
  CFI_OFFSET(w29, -24)

  // Save the rest of the registers into the preallocated space left by
  // __hwasan_check (offsets match the frame diagram: x2 at [sp, #16] up
  // through x28 at [sp, #224]).
  str x28,      [sp, #224]
  stp x26, x27, [sp, #208]
  stp x24, x25, [sp, #192]
  stp x22, x23, [sp, #176]
  stp x20, x21, [sp, #160]
  stp x18, x19, [sp, #144]
  stp x16, x17, [sp, #128]
  stp x14, x15, [sp, #112]
  stp x12, x13, [sp, #96]
  stp x10, x11, [sp, #80]
  stp x8,  x9,  [sp, #64]
  stp x6,  x7,  [sp, #48]
  stp x4,  x5,  [sp, #32]
  stp x2,  x3,  [sp, #16]

  // Pass the address of the frame to __hwasan_tag_mismatch4, so that it can
  // extract the saved registers from this frame without having to worry about
  // finding this frame.
  mov x2, sp

  // Does not return (see the header comment); hence no epilogue.
  bl __hwasan_tag_mismatch4
  CFI_ENDPROC

.Lfunc_end0:
  .size __hwasan_tag_mismatch, .Lfunc_end0-__hwasan_tag_mismatch

#endif  // defined(__aarch64__)

// We do not need executable stack.
NO_EXEC_STACK_DIRECTIVE