1 // Copyright 2018 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include <algorithm>
6 
7 #include "include/v8.h"
8 #include "src/common/globals.h"
9 #include "src/execution/frame-constants.h"
10 #include "src/execution/pointer-authentication.h"
11 
12 namespace v8 {
13 
14 namespace {
15 
CalculateEnd(const void * start,size_t length_in_bytes)16 const i::byte* CalculateEnd(const void* start, size_t length_in_bytes) {
17   // Given that the length of the memory range is in bytes and it is not
18   // necessarily aligned, we need to do the pointer arithmetic in byte* here.
19   const i::byte* start_as_byte = reinterpret_cast<const i::byte*>(start);
20   return start_as_byte + length_in_bytes;
21 }
22 
PCIsInCodeRange(const v8::MemoryRange & code_range,void * pc)23 bool PCIsInCodeRange(const v8::MemoryRange& code_range, void* pc) {
24   return pc >= code_range.start &&
25          pc < CalculateEnd(code_range.start, code_range.length_in_bytes);
26 }
27 
28 // This relies on the fact that the code pages are ordered, and that they don't
29 // overlap.
PCIsInCodePages(size_t code_pages_length,const MemoryRange * code_pages,void * pc)30 bool PCIsInCodePages(size_t code_pages_length, const MemoryRange* code_pages,
31                      void* pc) {
32   DCHECK(std::is_sorted(code_pages, code_pages + code_pages_length,
33                         [](const MemoryRange& a, const MemoryRange& b) {
34                           return a.start < b.start;
35                         }));
36 
37   MemoryRange fake_range{pc, 1};
38   auto it =
39       std::upper_bound(code_pages, code_pages + code_pages_length, fake_range,
40                        [](const MemoryRange& a, const MemoryRange& b) {
41                          return a.start < b.start;
42                        });
43   DCHECK_IMPLIES(it != code_pages + code_pages_length, pc < it->start);
44   if (it == code_pages) return false;
45   --it;
46   return it->start <= pc && pc < CalculateEnd(it->start, it->length_in_bytes);
47 }
48 
IsInJSEntryRange(const UnwindState & unwind_state,void * pc)49 bool IsInJSEntryRange(const UnwindState& unwind_state, void* pc) {
50   return PCIsInCodeRange(unwind_state.js_entry_stub.code, pc) ||
51          PCIsInCodeRange(unwind_state.js_construct_entry_stub.code, pc) ||
52          PCIsInCodeRange(unwind_state.js_run_microtasks_entry_stub.code, pc);
53 }
54 
IsInUnsafeJSEntryRange(const UnwindState & unwind_state,void * pc)55 bool IsInUnsafeJSEntryRange(const UnwindState& unwind_state, void* pc) {
56   return IsInJSEntryRange(unwind_state, pc);
57 
58   // TODO(petermarshall): We can be more precise by checking whether we are
59   // in JSEntry but after frame setup and before frame teardown, in which case
60   // we are safe to unwind the stack. For now, we bail out if the PC is anywhere
61   // within JSEntry.
62 }
63 
IsInJSEntryRange(const JSEntryStubs & entry_stubs,void * pc)64 bool IsInJSEntryRange(const JSEntryStubs& entry_stubs, void* pc) {
65   return PCIsInCodeRange(entry_stubs.js_entry_stub.code, pc) ||
66          PCIsInCodeRange(entry_stubs.js_construct_entry_stub.code, pc) ||
67          PCIsInCodeRange(entry_stubs.js_run_microtasks_entry_stub.code, pc);
68 }
69 
IsInUnsafeJSEntryRange(const JSEntryStubs & entry_stubs,void * pc)70 bool IsInUnsafeJSEntryRange(const JSEntryStubs& entry_stubs, void* pc) {
71   return IsInJSEntryRange(entry_stubs, pc);
72 
73   // TODO(petermarshall): We can be more precise by checking whether we are
74   // in JSEntry but after frame setup and before frame teardown, in which case
75   // we are safe to unwind the stack. For now, we bail out if the PC is anywhere
76   // within JSEntry.
77 }
78 
Load(i::Address address)79 i::Address Load(i::Address address) {
80   return *reinterpret_cast<i::Address*>(address);
81 }
82 
GetReturnAddressFromFP(void * fp,void * pc,const v8::UnwindState & unwind_state)83 void* GetReturnAddressFromFP(void* fp, void* pc,
84                              const v8::UnwindState& unwind_state) {
85   int caller_pc_offset = i::CommonFrameConstants::kCallerPCOffset;
86 #ifdef V8_TARGET_ARCH_ARM64
87   if (IsInJSEntryRange(unwind_state, pc)) {
88     caller_pc_offset = i::EntryFrameConstants::kDirectCallerPCOffset;
89   }
90 #endif
91   i::Address ret_addr =
92       Load(reinterpret_cast<i::Address>(fp) + caller_pc_offset);
93   return reinterpret_cast<void*>(i::PointerAuthentication::StripPAC(ret_addr));
94 }
95 
GetReturnAddressFromFP(void * fp,void * pc,const JSEntryStubs & entry_stubs)96 void* GetReturnAddressFromFP(void* fp, void* pc,
97                              const JSEntryStubs& entry_stubs) {
98   int caller_pc_offset = i::CommonFrameConstants::kCallerPCOffset;
99 #ifdef V8_TARGET_ARCH_ARM64
100   if (IsInJSEntryRange(entry_stubs, pc)) {
101     caller_pc_offset = i::EntryFrameConstants::kDirectCallerPCOffset;
102   }
103 #endif
104   i::Address ret_addr =
105       Load(reinterpret_cast<i::Address>(fp) + caller_pc_offset);
106   return reinterpret_cast<void*>(i::PointerAuthentication::StripPAC(ret_addr));
107 }
108 
GetCallerFPFromFP(void * fp,void * pc,const v8::UnwindState & unwind_state)109 void* GetCallerFPFromFP(void* fp, void* pc,
110                         const v8::UnwindState& unwind_state) {
111   int caller_fp_offset = i::CommonFrameConstants::kCallerFPOffset;
112 #ifdef V8_TARGET_ARCH_ARM64
113   if (IsInJSEntryRange(unwind_state, pc)) {
114     caller_fp_offset = i::EntryFrameConstants::kDirectCallerFPOffset;
115   }
116 #endif
117   return reinterpret_cast<void*>(
118       Load(reinterpret_cast<i::Address>(fp) + caller_fp_offset));
119 }
120 
GetCallerFPFromFP(void * fp,void * pc,const JSEntryStubs & entry_stubs)121 void* GetCallerFPFromFP(void* fp, void* pc, const JSEntryStubs& entry_stubs) {
122   int caller_fp_offset = i::CommonFrameConstants::kCallerFPOffset;
123 #ifdef V8_TARGET_ARCH_ARM64
124   if (IsInJSEntryRange(entry_stubs, pc)) {
125     caller_fp_offset = i::EntryFrameConstants::kDirectCallerFPOffset;
126   }
127 #endif
128   return reinterpret_cast<void*>(
129       Load(reinterpret_cast<i::Address>(fp) + caller_fp_offset));
130 }
131 
GetCallerSPFromFP(void * fp,void * pc,const v8::UnwindState & unwind_state)132 void* GetCallerSPFromFP(void* fp, void* pc,
133                         const v8::UnwindState& unwind_state) {
134   int caller_sp_offset = i::CommonFrameConstants::kCallerSPOffset;
135 #ifdef V8_TARGET_ARCH_ARM64
136   if (IsInJSEntryRange(unwind_state, pc)) {
137     caller_sp_offset = i::EntryFrameConstants::kDirectCallerSPOffset;
138   }
139 #endif
140   return reinterpret_cast<void*>(reinterpret_cast<i::Address>(fp) +
141                                  caller_sp_offset);
142 }
143 
GetCallerSPFromFP(void * fp,void * pc,const JSEntryStubs & entry_stubs)144 void* GetCallerSPFromFP(void* fp, void* pc, const JSEntryStubs& entry_stubs) {
145   int caller_sp_offset = i::CommonFrameConstants::kCallerSPOffset;
146 #ifdef V8_TARGET_ARCH_ARM64
147   if (IsInJSEntryRange(entry_stubs, pc)) {
148     caller_sp_offset = i::EntryFrameConstants::kDirectCallerSPOffset;
149   }
150 #endif
151   return reinterpret_cast<void*>(reinterpret_cast<i::Address>(fp) +
152                                  caller_sp_offset);
153 }
154 
// Returns true if |address| lies within [stack_top, stack_base]. The stack
// grows downwards, so |stack_top| (the current SP) is the lowest valid
// address and |stack_base| the highest.
bool AddressIsInStack(const void* address, const void* stack_base,
                      const void* stack_top) {
  return stack_top <= address && address <= stack_base;
}
159 
160 }  // namespace
161 
// Attempts to unwind all consecutive V8 frames starting from the state in
// |register_state|. On success, rewrites |register_state| (pc/sp/fp/lr) to
// describe the first caller frame outside V8 and returns true. Returns false
// (leaving |register_state| possibly partially updated only on the sp/fp
// fields it already validated) if the PC is not in V8 code, the PC is in the
// unsafe part of JSEntry, or any derived pointer falls outside
// [stack_top, stack_base].
bool Unwinder::TryUnwindV8Frames(const UnwindState& unwind_state,
                                 RegisterState* register_state,
                                 const void* stack_base) {
  // The current SP bounds the stack from below for all validity checks.
  const void* stack_top = register_state->sp;

  void* pc = register_state->pc;
  // Only unwind when the PC is in V8 code and the JSEntry frame (if any) is
  // fully set up; see IsInUnsafeJSEntryRange.
  if (PCIsInV8(unwind_state, pc) && !IsInUnsafeJSEntryRange(unwind_state, pc)) {
    void* current_fp = register_state->fp;
    if (!AddressIsInStack(current_fp, stack_base, stack_top)) return false;

    // Peek at the return address that the caller pushed. If it's in V8, then we
    // assume the caller frame is a JS frame and continue to unwind.
    void* next_pc = GetReturnAddressFromFP(current_fp, pc, unwind_state);
    while (PCIsInV8(unwind_state, next_pc)) {
      // Walk one frame up the FP chain, validating each FP against the stack
      // bounds before dereferencing it on the next iteration.
      current_fp = GetCallerFPFromFP(current_fp, pc, unwind_state);
      if (!AddressIsInStack(current_fp, stack_base, stack_top)) return false;
      pc = next_pc;
      next_pc = GetReturnAddressFromFP(current_fp, pc, unwind_state);
    }

    // |next_pc| is now outside V8, so |current_fp| is the last V8 frame;
    // compute the caller's SP from it.
    void* final_sp = GetCallerSPFromFP(current_fp, pc, unwind_state);
    if (!AddressIsInStack(final_sp, stack_base, stack_top)) return false;
    register_state->sp = final_sp;

    // We don't check that the final FP value is within the stack bounds because
    // this is just the rbp value that JSEntryStub pushed. On platforms like
    // Win64 this is not used as a dedicated FP register, and could contain
    // anything.
    void* final_fp = GetCallerFPFromFP(current_fp, pc, unwind_state);
    register_state->fp = final_fp;

    register_state->pc = next_pc;

    // Link register no longer valid after unwinding.
    register_state->lr = nullptr;
    return true;
  }
  return false;
}
201 
// Code-pages variant of TryUnwindV8Frames: identical unwinding logic, but
// PC-in-V8 checks use the |code_pages| array (sorted, non-overlapping; see
// PCIsInCodePages) and JSEntry checks use |entry_stubs|. On success, rewrites
// |register_state| to describe the first caller frame outside V8 and returns
// true; otherwise returns false.
bool Unwinder::TryUnwindV8Frames(const JSEntryStubs& entry_stubs,
                                 size_t code_pages_length,
                                 const MemoryRange* code_pages,
                                 RegisterState* register_state,
                                 const void* stack_base) {
  // The current SP bounds the stack from below for all validity checks.
  const void* stack_top = register_state->sp;

  void* pc = register_state->pc;
  // Only unwind when the PC is in V8 code and the JSEntry frame (if any) is
  // fully set up; see IsInUnsafeJSEntryRange.
  if (PCIsInV8(code_pages_length, code_pages, pc) &&
      !IsInUnsafeJSEntryRange(entry_stubs, pc)) {
    void* current_fp = register_state->fp;
    if (!AddressIsInStack(current_fp, stack_base, stack_top)) return false;

    // Peek at the return address that the caller pushed. If it's in V8, then we
    // assume the caller frame is a JS frame and continue to unwind.
    void* next_pc = GetReturnAddressFromFP(current_fp, pc, entry_stubs);
    while (PCIsInV8(code_pages_length, code_pages, next_pc)) {
      // Walk one frame up the FP chain, validating each FP against the stack
      // bounds before dereferencing it on the next iteration.
      current_fp = GetCallerFPFromFP(current_fp, pc, entry_stubs);
      if (!AddressIsInStack(current_fp, stack_base, stack_top)) return false;
      pc = next_pc;
      next_pc = GetReturnAddressFromFP(current_fp, pc, entry_stubs);
    }

    // |next_pc| is now outside V8, so |current_fp| is the last V8 frame;
    // compute the caller's SP from it.
    void* final_sp = GetCallerSPFromFP(current_fp, pc, entry_stubs);
    if (!AddressIsInStack(final_sp, stack_base, stack_top)) return false;
    register_state->sp = final_sp;

    // We don't check that the final FP value is within the stack bounds because
    // this is just the rbp value that JSEntryStub pushed. On platforms like
    // Win64 this is not used as a dedicated FP register, and could contain
    // anything.
    void* final_fp = GetCallerFPFromFP(current_fp, pc, entry_stubs);
    register_state->fp = final_fp;

    register_state->pc = next_pc;

    // Link register no longer valid after unwinding.
    register_state->lr = nullptr;
    return true;
  }
  return false;
}
244 
PCIsInV8(const UnwindState & unwind_state,void * pc)245 bool Unwinder::PCIsInV8(const UnwindState& unwind_state, void* pc) {
246   return pc && (PCIsInCodeRange(unwind_state.code_range, pc) ||
247                 PCIsInCodeRange(unwind_state.embedded_code_range, pc));
248 }
249 
PCIsInV8(size_t code_pages_length,const MemoryRange * code_pages,void * pc)250 bool Unwinder::PCIsInV8(size_t code_pages_length, const MemoryRange* code_pages,
251                         void* pc) {
252   return pc && PCIsInCodePages(code_pages_length, code_pages, pc);
253 }
254 
255 }  // namespace v8
256