1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 *
4 * Copyright 2016 Mozilla Foundation
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18
19 #include "wasm/WasmDebug.h"
20
21 #include "mozilla/BinarySearch.h"
22
23 #include "debugger/Debugger.h"
24 #include "ds/Sort.h"
25 #include "jit/AutoWritableJitCode.h"
26 #include "jit/MacroAssembler.h"
27 #include "wasm/WasmInstance.h"
28 #include "wasm/WasmJS.h"
29 #include "wasm/WasmStubs.h"
30 #include "wasm/WasmValidate.h"
31
32 #include "gc/FreeOp-inl.h"
33
34 using namespace js;
35 using namespace js::jit;
36 using namespace js::wasm;
37
38 using mozilla::BinarySearchIf;
39
// A DebugState pairs the debug-tier code with its originating module and
// tracks debugger-driven patching state (breakpoints, stepping, frame traps).
DebugState::DebugState(const Code& code, const Module& module)
    : code_(&code),
      module_(&module),
      enterFrameTrapsEnabled_(false),
      enterAndLeaveFrameTrapsCounter_(0) {
  // DebugState only exists for code compiled with debugging support.
  MOZ_ASSERT(code.metadata().debugEnabled);
}
47
trace(JSTracer * trc)48 void DebugState::trace(JSTracer* trc) {
49 for (auto iter = breakpointSites_.iter(); !iter.done(); iter.next()) {
50 WasmBreakpointSite* site = iter.get().value();
51 site->trace(trc);
52 }
53 }
54
finalize(JSFreeOp * fop)55 void DebugState::finalize(JSFreeOp* fop) {
56 for (auto iter = breakpointSites_.iter(); !iter.done(); iter.next()) {
57 WasmBreakpointSite* site = iter.get().value();
58 site->delete_(fop);
59 }
60 }
61
62 static const uint32_t DefaultBinarySourceColumnNumber = 1;
63
SlowCallSiteSearchByOffset(const MetadataTier & metadata,uint32_t offset)64 static const CallSite* SlowCallSiteSearchByOffset(const MetadataTier& metadata,
65 uint32_t offset) {
66 for (const CallSite& callSite : metadata.callSites) {
67 if (callSite.lineOrBytecode() == offset &&
68 callSite.kind() == CallSiteDesc::Breakpoint) {
69 return &callSite;
70 }
71 }
72 return nullptr;
73 }
74
getLineOffsets(size_t lineno,Vector<uint32_t> * offsets)75 bool DebugState::getLineOffsets(size_t lineno, Vector<uint32_t>* offsets) {
76 const CallSite* callsite =
77 SlowCallSiteSearchByOffset(metadata(Tier::Debug), lineno);
78 return !(callsite && !offsets->append(lineno));
79 }
80
getAllColumnOffsets(Vector<ExprLoc> * offsets)81 bool DebugState::getAllColumnOffsets(Vector<ExprLoc>* offsets) {
82 for (const CallSite& callSite : metadata(Tier::Debug).callSites) {
83 if (callSite.kind() != CallSite::Breakpoint) {
84 continue;
85 }
86 uint32_t offset = callSite.lineOrBytecode();
87 if (!offsets->emplaceBack(offset, DefaultBinarySourceColumnNumber,
88 offset)) {
89 return false;
90 }
91 }
92 return true;
93 }
94
getOffsetLocation(uint32_t offset,size_t * lineno,size_t * column)95 bool DebugState::getOffsetLocation(uint32_t offset, size_t* lineno,
96 size_t* column) {
97 if (!SlowCallSiteSearchByOffset(metadata(Tier::Debug), offset)) {
98 return false;
99 }
100 *lineno = offset;
101 *column = DefaultBinarySourceColumnNumber;
102 return true;
103 }
104
stepModeEnabled(uint32_t funcIndex) const105 bool DebugState::stepModeEnabled(uint32_t funcIndex) const {
106 return stepperCounters_.lookup(funcIndex).found();
107 }
108
// Enables single-stepping for |funcIndex| by reference-counting step mode.
// On the 0 -> 1 transition, every breakpoint call site inside the function's
// code range is patched to call the debug trap. Returns false on OOM.
bool DebugState::incrementStepperCount(JSContext* cx, uint32_t funcIndex) {
  const CodeRange& codeRange =
      codeRanges(Tier::Debug)[funcToCodeRangeIndex(funcIndex)];
  MOZ_ASSERT(codeRange.isFunction());

  StepperCounters::AddPtr p = stepperCounters_.lookupForAdd(funcIndex);
  if (p) {
    // Step mode is already on for this function: the traps are already
    // patched, so just bump the reference count.
    MOZ_ASSERT(p->value() > 0);
    p->value()++;
    return true;
  }
  if (!stepperCounters_.add(p, funcIndex, 1)) {
    ReportOutOfMemory(cx);
    return false;
  }

  // Make only this function's span of the debug segment writable while
  // patching.
  AutoWritableJitCode awjc(
      cx->runtime(), code_->segment(Tier::Debug).base() + codeRange.begin(),
      codeRange.end() - codeRange.begin());

  // Arm every breakpoint site whose return address falls in this function.
  for (const CallSite& callSite : callSites(Tier::Debug)) {
    if (callSite.kind() != CallSite::Breakpoint) {
      continue;
    }
    uint32_t offset = callSite.returnAddressOffset();
    if (codeRange.begin() <= offset && offset <= codeRange.end()) {
      toggleDebugTrap(offset, true);
    }
  }
  return true;
}
140
// Balances incrementStepperCount. On the 1 -> 0 transition, each breakpoint
// call site in the function is re-patched: sites with a registered breakpoint
// stay armed, all others revert to nops.
void DebugState::decrementStepperCount(JSFreeOp* fop, uint32_t funcIndex) {
  const CodeRange& codeRange =
      codeRanges(Tier::Debug)[funcToCodeRangeIndex(funcIndex)];
  MOZ_ASSERT(codeRange.isFunction());

  MOZ_ASSERT(!stepperCounters_.empty());
  StepperCounters::Ptr p = stepperCounters_.lookup(funcIndex);
  MOZ_ASSERT(p);
  if (--p->value()) {
    // Other clients still want stepping for this function; keep traps armed.
    return;
  }

  stepperCounters_.remove(p);

  // Make only this function's span of the debug segment writable while
  // patching.
  AutoWritableJitCode awjc(
      fop->runtime(), code_->segment(Tier::Debug).base() + codeRange.begin(),
      codeRange.end() - codeRange.begin());

  for (const CallSite& callSite : callSites(Tier::Debug)) {
    if (callSite.kind() != CallSite::Breakpoint) {
      continue;
    }
    uint32_t offset = callSite.returnAddressOffset();
    if (codeRange.begin() <= offset && offset <= codeRange.end()) {
      // A site stays armed only if an explicit breakpoint is set there.
      bool enabled = breakpointSites_.has(offset);
      toggleDebugTrap(offset, enabled);
    }
  }
}
170
hasBreakpointTrapAtOffset(uint32_t offset)171 bool DebugState::hasBreakpointTrapAtOffset(uint32_t offset) {
172 return SlowCallSiteSearchByOffset(metadata(Tier::Debug), offset);
173 }
174
// Arms or disarms the debug trap backing the breakpoint at bytecode |offset|.
// A no-op when no breakpoint call site exists at that offset, or when the
// enclosing function is already fully trapping because step mode is active.
void DebugState::toggleBreakpointTrap(JSRuntime* rt, uint32_t offset,
                                      bool enabled) {
  const CallSite* callSite =
      SlowCallSiteSearchByOffset(metadata(Tier::Debug), offset);
  if (!callSite) {
    return;
  }
  size_t debugTrapOffset = callSite->returnAddressOffset();

  // Find the function containing the trap so we can consult its step state.
  const ModuleSegment& codeSegment = code_->segment(Tier::Debug);
  const CodeRange* codeRange =
      code_->lookupFuncRange(codeSegment.base() + debugTrapOffset);
  MOZ_ASSERT(codeRange);

  if (stepperCounters_.lookup(codeRange->funcIndex())) {
    return;  // no need to toggle when step mode is enabled
  }

  AutoWritableJitCode awjc(rt, codeSegment.base(), codeSegment.length());
  toggleDebugTrap(debugTrapOffset, enabled);
}
196
getBreakpointSite(uint32_t offset) const197 WasmBreakpointSite* DebugState::getBreakpointSite(uint32_t offset) const {
198 WasmBreakpointSiteMap::Ptr p = breakpointSites_.lookup(offset);
199 if (!p) {
200 return nullptr;
201 }
202
203 return p->value();
204 }
205
getOrCreateBreakpointSite(JSContext * cx,Instance * instance,uint32_t offset)206 WasmBreakpointSite* DebugState::getOrCreateBreakpointSite(JSContext* cx,
207 Instance* instance,
208 uint32_t offset) {
209 WasmBreakpointSite* site;
210
211 WasmBreakpointSiteMap::AddPtr p = breakpointSites_.lookupForAdd(offset);
212 if (!p) {
213 site = cx->new_<WasmBreakpointSite>(instance->object(), offset);
214 if (!site) {
215 return nullptr;
216 }
217
218 if (!breakpointSites_.add(p, offset, site)) {
219 js_delete(site);
220 ReportOutOfMemory(cx);
221 return nullptr;
222 }
223
224 AddCellMemory(instance->object(), sizeof(WasmBreakpointSite),
225 MemoryUse::BreakpointSite);
226
227 toggleBreakpointTrap(cx->runtime(), offset, true);
228 } else {
229 site = p->value();
230 }
231 return site;
232 }
233
hasBreakpointSite(uint32_t offset)234 bool DebugState::hasBreakpointSite(uint32_t offset) {
235 return breakpointSites_.has(offset);
236 }
237
// Removes and frees the breakpoint site at |offset| (which must exist), then
// disarms the corresponding debug trap.
void DebugState::destroyBreakpointSite(JSFreeOp* fop, Instance* instance,
                                       uint32_t offset) {
  WasmBreakpointSiteMap::Ptr p = breakpointSites_.lookup(offset);
  MOZ_ASSERT(p);
  // delete_ also retires the cell memory accounted when the site was created
  // in getOrCreateBreakpointSite.
  fop->delete_(instance->objectUnbarriered(), p->value(),
               MemoryUse::BreakpointSite);
  breakpointSites_.remove(p);
  toggleBreakpointTrap(fop->runtime(), offset, false);
}
247
// Deletes every breakpoint in |instance| that belongs to |dbg| (all debuggers
// when |dbg| is null) and whose handler is |handler| (any handler when null).
// Sites left with no breakpoints are freed and their map entries removed.
void DebugState::clearBreakpointsIn(JSFreeOp* fop, WasmInstanceObject* instance,
                                    js::Debugger* dbg, JSObject* handler) {
  MOZ_ASSERT(instance);

  // Breakpoints hold wrappers in the instance's compartment for the handler.
  // Make sure we don't try to search for the unwrapped handler.
  MOZ_ASSERT_IF(handler, instance->compartment() == handler->compartment());

  if (breakpointSites_.empty()) {
    return;
  }
  // Enum (unlike Range) permits removeFront() while iterating.
  for (WasmBreakpointSiteMap::Enum e(breakpointSites_); !e.empty();
       e.popFront()) {
    WasmBreakpointSite* site = e.front().value();
    MOZ_ASSERT(site->instanceObject == instance);

    // Capture the next link before delete_ frees the current breakpoint.
    Breakpoint* nextbp;
    for (Breakpoint* bp = site->firstBreakpoint(); bp; bp = nextbp) {
      nextbp = bp->nextInSite();
      MOZ_ASSERT(bp->site == site);
      if ((!dbg || bp->debugger == dbg) &&
          (!handler || bp->getHandler() == handler)) {
        bp->delete_(fop);
      }
    }
    if (site->isEmpty()) {
      fop->delete_(instance, site, MemoryUse::BreakpointSite);
      e.removeFront();
    }
  }
}
279
// Patches the instruction at |offset| in the debug code segment: when
// |enabled|, a nop is turned into a call through a debug-trap far jump;
// otherwise the call is reverted to a nop. The far-jump table exists so the
// patched call's displacement can always reach the trap from anywhere in the
// segment.
void DebugState::toggleDebugTrap(uint32_t offset, bool enabled) {
  MOZ_ASSERT(offset);
  uint8_t* trap = code_->segment(Tier::Debug).base() + offset;
  const Uint32Vector& farJumpOffsets =
      metadata(Tier::Debug).debugTrapFarJumpOffsets;
  if (enabled) {
    MOZ_ASSERT(farJumpOffsets.length() > 0);
    // Linear search for a far jump near |offset| so the call displacement
    // stays in range. NOTE(review): the scan direction presumes a particular
    // ordering of debugTrapFarJumpOffsets — confirm against the emitter.
    size_t i = 0;
    while (i < farJumpOffsets.length() && offset < farJumpOffsets[i]) {
      i++;
    }
    // Step back when we ran off the end, or when the previous entry is
    // strictly closer to |offset| than the one we stopped at.
    if (i >= farJumpOffsets.length() ||
        (i > 0 &&
         offset - farJumpOffsets[i - 1] < farJumpOffsets[i] - offset)) {
      i--;
    }
    uint8_t* farJump = code_->segment(Tier::Debug).base() + farJumpOffsets[i];
    MacroAssembler::patchNopToCall(trap, farJump);
  } else {
    MacroAssembler::patchCallToNop(trap);
  }
}
302
// Reference-counted switch for the enter/leave-frame debug traps. Each
// enabled=true call increments the counter and each enabled=false call
// decrements it; the traps are only (un)patched on 0 <-> 1 transitions.
void DebugState::adjustEnterAndLeaveFrameTrapsState(JSContext* cx,
                                                    bool enabled) {
  // Disabling without a prior enable would underflow the counter.
  MOZ_ASSERT_IF(!enabled, enterAndLeaveFrameTrapsCounter_ > 0);

  bool wasEnabled = enterAndLeaveFrameTrapsCounter_ > 0;
  if (enabled) {
    ++enterAndLeaveFrameTrapsCounter_;
  } else {
    --enterAndLeaveFrameTrapsCounter_;
  }
  bool stillEnabled = enterAndLeaveFrameTrapsCounter_ > 0;
  if (wasEnabled == stillEnabled) {
    return;  // no transition: the patched state is already correct
  }

  // Patch every EnterFrame/LeaveFrame call site in the debug code segment.
  const ModuleSegment& codeSegment = code_->segment(Tier::Debug);
  AutoWritableJitCode awjc(cx->runtime(), codeSegment.base(),
                           codeSegment.length());
  for (const CallSite& callSite : callSites(Tier::Debug)) {
    if (callSite.kind() != CallSite::EnterFrame &&
        callSite.kind() != CallSite::LeaveFrame) {
      continue;
    }
    toggleDebugTrap(callSite.returnAddressOffset(), stillEnabled);
  }
}
329
ensureEnterFrameTrapsState(JSContext * cx,bool enabled)330 void DebugState::ensureEnterFrameTrapsState(JSContext* cx, bool enabled) {
331 if (enterFrameTrapsEnabled_ == enabled) {
332 return;
333 }
334
335 adjustEnterAndLeaveFrameTrapsState(cx, enabled);
336
337 enterFrameTrapsEnabled_ = enabled;
338 }
339
// Reports the types of a function's parameters and locals for the debugger.
// On success, |locals| holds the argument types followed by the declared
// local types, |argsLength| is the number of arguments at the front of
// |locals|, and |stackResults| says whether results are returned on the
// stack. Returns false on OOM or decode failure.
bool DebugState::debugGetLocalTypes(uint32_t funcIndex, ValTypeVector* locals,
                                    size_t* argsLength,
                                    StackResults* stackResults) {
  const ValTypeVector& args = metadata().debugFuncArgTypes[funcIndex];
  const ValTypeVector& results = metadata().debugFuncReturnTypes[funcIndex];
  ResultType resultType(ResultType::Vector(results));
  *argsLength = args.length();
  *stackResults = ABIResultIter::HasStackResults(resultType)
                      ? StackResults::HasStackResults
                      : StackResults::NoStackResults;
  if (!locals->appendAll(args)) {
    return false;
  }

  // Decode local var types from wasm binary function body.
  const CodeRange& range =
      codeRanges(Tier::Debug)[funcToCodeRangeIndex(funcIndex)];
  // In wasm, the Code points to the function start via funcLineOrBytecode.
  size_t offsetInModule = range.funcLineOrBytecode();
  // The bytecode was already validated, so decode without collecting errors.
  Decoder d(bytecode().begin() + offsetInModule, bytecode().end(),
            offsetInModule,
            /* error = */ nullptr);
  return DecodeValidatedLocalEntries(d, locals);
}
364
// Reflects the current value of global |globalIndex| into |vp| for the
// debugger. i64 values are approximated as doubles; reference and v128
// values are hidden behind JS_OPTIMIZED_OUT. Always returns true.
bool DebugState::getGlobal(Instance& instance, uint32_t globalIndex,
                           MutableHandleValue vp) {
  const GlobalDesc& global = metadata().globals[globalIndex];

  // Constant globals carry their value in the metadata; no instance data to
  // read.
  if (global.isConstant()) {
    LitVal value = global.constantValue();
    switch (value.type().kind()) {
      case ValType::I32:
        vp.set(Int32Value(value.i32()));
        break;
      case ValType::I64:
        // Just display as a Number; it's ok if we lose some precision
        vp.set(NumberValue((double)value.i64()));
        break;
      case ValType::F32:
        vp.set(NumberValue(JS::CanonicalizeNaN(value.f32())));
        break;
      case ValType::F64:
        vp.set(NumberValue(JS::CanonicalizeNaN(value.f64())));
        break;
      case ValType::Ref:
        // It's possible to do better. We could try some kind of hashing
        // scheme, to make the pointer recognizable without revealing it.
        vp.set(MagicValue(JS_OPTIMIZED_OUT));
        break;
      case ValType::V128:
        // Debugger must be updated to handle this, and should be updated to
        // handle i64 in any case.
        vp.set(MagicValue(JS_OPTIMIZED_OUT));
        break;
      default:
        MOZ_CRASH("Global constant type");
    }
    return true;
  }

  // Non-constant globals live in the instance's global data area; indirect
  // globals store a pointer to the actual cell there.
  uint8_t* globalData = instance.globalData();
  void* dataPtr = globalData + global.offset();
  if (global.isIndirect()) {
    dataPtr = *static_cast<void**>(dataPtr);
  }
  switch (global.type().kind()) {
    case ValType::I32: {
      vp.set(Int32Value(*static_cast<int32_t*>(dataPtr)));
      break;
    }
    case ValType::I64: {
      // Just display as a Number; it's ok if we lose some precision
      vp.set(NumberValue((double)*static_cast<int64_t*>(dataPtr)));
      break;
    }
    case ValType::F32: {
      vp.set(NumberValue(JS::CanonicalizeNaN(*static_cast<float*>(dataPtr))));
      break;
    }
    case ValType::F64: {
      vp.set(NumberValue(JS::CanonicalizeNaN(*static_cast<double*>(dataPtr))));
      break;
    }
    case ValType::Ref: {
      // Just hide it. See above.
      vp.set(MagicValue(JS_OPTIMIZED_OUT));
      break;
    }
    case ValType::V128: {
      // Just hide it. See above.
      vp.set(MagicValue(JS_OPTIMIZED_OUT));
      break;
    }
    default: {
      MOZ_CRASH("Global variable type");
      break;
    }
  }
  return true;
}
441
// Looks up the module's source map URL: first from a "sourceMappingURL"
// custom section, then from the source-map URL recorded in the metadata
// (e.g. from a "SourceMap:" HTTP response header). Leaves |result| null when
// neither is present; returns false only on string-allocation failure.
bool DebugState::getSourceMappingURL(JSContext* cx,
                                     MutableHandleString result) const {
  result.set(nullptr);

  for (const CustomSection& customSection : module_->customSections()) {
    // Skip sections whose name is not exactly "sourceMappingURL".
    const Bytes& sectionName = customSection.name;
    if (strlen(SourceMappingURLSectionName) != sectionName.length() ||
        memcmp(SourceMappingURLSectionName, sectionName.begin(),
               sectionName.length()) != 0) {
      continue;
    }

    // Parse found "SourceMappingURL" custom section.
    // Payload layout: varU32 length followed by that many UTF-8 bytes, and
    // nothing else.
    Decoder d(customSection.payload->begin(), customSection.payload->end(), 0,
              /* error = */ nullptr);
    uint32_t nchars;
    if (!d.readVarU32(&nchars)) {
      return true;  // ignoring invalid section data
    }
    const uint8_t* chars;
    if (!d.readBytes(nchars, &chars) || d.currentPosition() != d.end()) {
      return true;  // ignoring invalid section data
    }

    JS::UTF8Chars utf8Chars(reinterpret_cast<const char*>(chars), nchars);
    JSString* str = JS_NewStringCopyUTF8N(cx, utf8Chars);
    if (!str) {
      return false;
    }
    result.set(str);
    return true;
  }

  // Check presence of "SourceMap:" HTTP response header.
  char* sourceMapURL = metadata().sourceMapURL.get();
  if (sourceMapURL && strlen(sourceMapURL)) {
    JS::UTF8Chars utf8Chars(sourceMapURL, strlen(sourceMapURL));
    JSString* str = JS_NewStringCopyUTF8N(cx, utf8Chars);
    if (!str) {
      return false;
    }
    result.set(str);
  }
  return true;
}
487
// Accumulates the malloc'd memory attributable to this DebugState into
// |code| and |data|. The seen-sets prevent double-counting metadata and code
// shared with other consumers.
void DebugState::addSizeOfMisc(MallocSizeOf mallocSizeOf,
                               Metadata::SeenSet* seenMetadata,
                               Code::SeenSet* seenCode, size_t* code,
                               size_t* data) const {
  code_->addSizeOfMiscIfNotSeen(mallocSizeOf, seenMetadata, seenCode, code,
                                data);
  module_->addSizeOfMisc(mallocSizeOf, seenMetadata, seenCode, code, data);
}
496