1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6
7 #ifndef vm_BytecodeUtil_h
8 #define vm_BytecodeUtil_h
9
10 /*
11 * JS bytecode definitions.
12 */
13
14 #include "mozilla/Assertions.h"
15 #include "mozilla/Attributes.h"
16 #include "mozilla/EndianUtils.h"
17
18 #include <algorithm>
19 #include <stddef.h>
20 #include <stdint.h>
21
22 #include "jstypes.h"
23 #include "NamespaceImports.h"
24
25 #include "js/TypeDecls.h"
26 #include "js/UniquePtr.h"
27 #include "js/Utility.h"
28 #include "js/Value.h"
29 #include "vm/BytecodeFormatFlags.h" // JOF_*
30 #include "vm/GeneratorResumeKind.h"
31 #include "vm/Opcodes.h"
32 #include "vm/SharedStencil.h" // js::GCThingIndex
33 #include "vm/ThrowMsgKind.h" // ThrowMsgKind, ThrowCondition
34
35 namespace js {
36 class Sprinter;
37 } // namespace js
38
39 /*
40 * JS operation bytecodes.
41 */
// The enumerators are generated from FOR_EACH_OPCODE (vm/Opcodes.h), so each
// JSOp's numeric value is its position in that list and fits in one byte.
enum class JSOp : uint8_t {
#define ENUMERATE_OPCODE(op, ...) op,
  FOR_EACH_OPCODE(ENUMERATE_OPCODE)
#undef ENUMERATE_OPCODE
};
47
48 /* Shorthand for type from format. */
49
JOF_TYPE(uint32_t fmt)50 static inline uint32_t JOF_TYPE(uint32_t fmt) { return fmt & JOF_TYPEMASK; }
51
52 /* Shorthand for mode from format. */
53
JOF_MODE(uint32_t fmt)54 static inline uint32_t JOF_MODE(uint32_t fmt) { return fmt & JOF_MODEMASK; }
55
56 /*
57 * Immediate operand getters, setters, and bounds.
58 */
59
GET_UINT8(jsbytecode * pc)60 static MOZ_ALWAYS_INLINE uint8_t GET_UINT8(jsbytecode* pc) {
61 return uint8_t(pc[1]);
62 }
63
SET_UINT8(jsbytecode * pc,uint8_t u)64 static MOZ_ALWAYS_INLINE void SET_UINT8(jsbytecode* pc, uint8_t u) {
65 pc[1] = jsbytecode(u);
66 }
67
68 /* Common uint16_t immediate format helpers. */
69
UINT16_HI(uint16_t i)70 static inline jsbytecode UINT16_HI(uint16_t i) { return jsbytecode(i >> 8); }
71
UINT16_LO(uint16_t i)72 static inline jsbytecode UINT16_LO(uint16_t i) { return jsbytecode(i); }
73
GET_UINT16(const jsbytecode * pc)74 static MOZ_ALWAYS_INLINE uint16_t GET_UINT16(const jsbytecode* pc) {
75 uint16_t result;
76 mozilla::NativeEndian::copyAndSwapFromLittleEndian(&result, pc + 1, 1);
77 return result;
78 }
79
SET_UINT16(jsbytecode * pc,uint16_t i)80 static MOZ_ALWAYS_INLINE void SET_UINT16(jsbytecode* pc, uint16_t i) {
81 mozilla::NativeEndian::copyAndSwapToLittleEndian(pc + 1, &i, 1);
82 }
83
// One past the largest value expressible in a uint16 immediate (2^16).
static const unsigned UINT16_LIMIT = 1 << 16;

/* Helpers for accessing the offsets of jump opcodes. */
// Jump offsets are signed 32-bit immediates: 4 operand bytes after the opcode.
static const unsigned JUMP_OFFSET_LEN = 4;
static const int32_t JUMP_OFFSET_MIN = INT32_MIN;
static const int32_t JUMP_OFFSET_MAX = INT32_MAX;
90
// Read a 24-bit unsigned immediate stored in the three bytes after the opcode.
static MOZ_ALWAYS_INLINE uint32_t GET_UINT24(const jsbytecode* pc) {
#if MOZ_LITTLE_ENDIAN()
  // Do a single 32-bit load (for opcode and operand), then shift off the
  // opcode.
  uint32_t result;
  memcpy(&result, pc, 4);
  return result >> 8;
#else
  // Assemble the three little-endian operand bytes manually.
  return uint32_t((pc[3] << 16) | (pc[2] << 8) | pc[1]);
#endif
}

// Write a 24-bit unsigned immediate (little-endian) after the opcode byte.
// |i| must fit in 24 bits.
static MOZ_ALWAYS_INLINE void SET_UINT24(jsbytecode* pc, uint32_t i) {
  MOZ_ASSERT(i < (1 << 24));

#if MOZ_LITTLE_ENDIAN()
  // The low three bytes of |i| are already in operand byte order.
  memcpy(pc + 1, &i, 3);
#else
  pc[1] = jsbytecode(i);
  pc[2] = jsbytecode(i >> 8);
  pc[3] = jsbytecode(i >> 16);
#endif
}
114
GET_INT8(const jsbytecode * pc)115 static MOZ_ALWAYS_INLINE int8_t GET_INT8(const jsbytecode* pc) {
116 return int8_t(pc[1]);
117 }
118
GET_UINT32(const jsbytecode * pc)119 static MOZ_ALWAYS_INLINE uint32_t GET_UINT32(const jsbytecode* pc) {
120 uint32_t result;
121 mozilla::NativeEndian::copyAndSwapFromLittleEndian(&result, pc + 1, 1);
122 return result;
123 }
124
SET_UINT32(jsbytecode * pc,uint32_t u)125 static MOZ_ALWAYS_INLINE void SET_UINT32(jsbytecode* pc, uint32_t u) {
126 mozilla::NativeEndian::copyAndSwapToLittleEndian(pc + 1, &u, 1);
127 }
128
// Read a 64-bit Value immediate (stored little-endian after the opcode) and
// rebuild the JS::Value from its raw bit pattern.
static MOZ_ALWAYS_INLINE JS::Value GET_INLINE_VALUE(const jsbytecode* pc) {
  uint64_t raw;
  mozilla::NativeEndian::copyAndSwapFromLittleEndian(&raw, pc + 1, 1);
  return JS::Value::fromRawBits(raw);
}

// Store |v|'s raw 64-bit representation little-endian after the opcode byte.
static MOZ_ALWAYS_INLINE void SET_INLINE_VALUE(jsbytecode* pc,
                                               const JS::Value& v) {
  uint64_t raw = v.asRawBits();
  mozilla::NativeEndian::copyAndSwapToLittleEndian(pc + 1, &raw, 1);
}
140
GET_INT32(const jsbytecode * pc)141 static MOZ_ALWAYS_INLINE int32_t GET_INT32(const jsbytecode* pc) {
142 return static_cast<int32_t>(GET_UINT32(pc));
143 }
144
SET_INT32(jsbytecode * pc,int32_t i)145 static MOZ_ALWAYS_INLINE void SET_INT32(jsbytecode* pc, int32_t i) {
146 SET_UINT32(pc, static_cast<uint32_t>(i));
147 }
148
GET_JUMP_OFFSET(jsbytecode * pc)149 static MOZ_ALWAYS_INLINE int32_t GET_JUMP_OFFSET(jsbytecode* pc) {
150 return GET_INT32(pc);
151 }
152
SET_JUMP_OFFSET(jsbytecode * pc,int32_t off)153 static MOZ_ALWAYS_INLINE void SET_JUMP_OFFSET(jsbytecode* pc, int32_t off) {
154 SET_INT32(pc, off);
155 }
156
static const unsigned GCTHING_INDEX_LEN = 4;

// Read a GC-thing index: a uint32 immediate wrapped in js::GCThingIndex.
static MOZ_ALWAYS_INLINE js::GCThingIndex GET_GCTHING_INDEX(
    const jsbytecode* pc) {
  return js::GCThingIndex(GET_UINT32(pc));
}

static MOZ_ALWAYS_INLINE void SET_GCTHING_INDEX(jsbytecode* pc,
                                                js::GCThingIndex index) {
  SET_UINT32(pc, index.index);
}

// Index limit is determined by SrcNote::FourByteOffsetFlag, see
// frontend/BytecodeEmitter.h.
static const unsigned INDEX_LIMIT_LOG2 = 31;
static const uint32_t INDEX_LIMIT = uint32_t(1) << INDEX_LIMIT_LOG2;
173
ARGC_HI(uint16_t argc)174 static inline jsbytecode ARGC_HI(uint16_t argc) { return UINT16_HI(argc); }
175
ARGC_LO(uint16_t argc)176 static inline jsbytecode ARGC_LO(uint16_t argc) { return UINT16_LO(argc); }
177
GET_ARGC(const jsbytecode * pc)178 static inline uint16_t GET_ARGC(const jsbytecode* pc) { return GET_UINT16(pc); }
179
180 static const unsigned ARGC_LIMIT = UINT16_LIMIT;
181
GET_ARGNO(const jsbytecode * pc)182 static inline uint16_t GET_ARGNO(const jsbytecode* pc) {
183 return GET_UINT16(pc);
184 }
185
SET_ARGNO(jsbytecode * pc,uint16_t argno)186 static inline void SET_ARGNO(jsbytecode* pc, uint16_t argno) {
187 SET_UINT16(pc, argno);
188 }
189
190 static const unsigned ARGNO_LEN = 2;
191 static const unsigned ARGNO_LIMIT = UINT16_LIMIT;
192
GET_LOCALNO(const jsbytecode * pc)193 static inline uint32_t GET_LOCALNO(const jsbytecode* pc) {
194 return GET_UINT24(pc);
195 }
196
SET_LOCALNO(jsbytecode * pc,uint32_t varno)197 static inline void SET_LOCALNO(jsbytecode* pc, uint32_t varno) {
198 SET_UINT24(pc, varno);
199 }
200
201 static const unsigned LOCALNO_LEN = 3;
202 static const unsigned LOCALNO_BITS = 24;
203 static const uint32_t LOCALNO_LIMIT = 1 << LOCALNO_BITS;
204
GET_RESUMEINDEX(const jsbytecode * pc)205 static inline uint32_t GET_RESUMEINDEX(const jsbytecode* pc) {
206 return GET_UINT24(pc);
207 }
208
SET_RESUMEINDEX(jsbytecode * pc,uint32_t resumeIndex)209 static inline void SET_RESUMEINDEX(jsbytecode* pc, uint32_t resumeIndex) {
210 SET_UINT24(pc, resumeIndex);
211 }
212
213 static const unsigned ICINDEX_LEN = 4;
214
GET_ICINDEX(const jsbytecode * pc)215 static inline uint32_t GET_ICINDEX(const jsbytecode* pc) {
216 return GET_UINT32(pc);
217 }
218
SET_ICINDEX(jsbytecode * pc,uint32_t icIndex)219 static inline void SET_ICINDEX(jsbytecode* pc, uint32_t icIndex) {
220 SET_UINT32(pc, icIndex);
221 }
222
// Depth hint of a LoopHead op: a uint8 immediate read via GET_UINT8 at pc + 4.
static inline unsigned LoopHeadDepthHint(jsbytecode* pc) {
  MOZ_ASSERT(JSOp(*pc) == JSOp::LoopHead);
  return GET_UINT8(pc + 4);
}

// Store the loop depth hint, clamped to the uint8 range.
static inline void SetLoopHeadDepthHint(jsbytecode* pc, unsigned loopDepth) {
  MOZ_ASSERT(JSOp(*pc) == JSOp::LoopHead);
  uint8_t data = std::min(loopDepth, unsigned(UINT8_MAX));
  SET_UINT8(pc + 4, data);
}
233
IsBackedgePC(jsbytecode * pc)234 static inline bool IsBackedgePC(jsbytecode* pc) {
235 switch (JSOp(*pc)) {
236 case JSOp::Goto:
237 case JSOp::JumpIfTrue:
238 return GET_JUMP_OFFSET(pc) < 0;
239 default:
240 return false;
241 }
242 }
243
IsBackedgeForLoopHead(jsbytecode * pc,jsbytecode * loopHead)244 static inline bool IsBackedgeForLoopHead(jsbytecode* pc, jsbytecode* loopHead) {
245 MOZ_ASSERT(JSOp(*loopHead) == JSOp::LoopHead);
246 return IsBackedgePC(pc) && pc + GET_JUMP_OFFSET(pc) == loopHead;
247 }
248
249 /*
250 * Describes the 'hops' component of a JOF_ENVCOORD opcode.
251 *
252 * Note: this component is only 8 bits wide, limiting the maximum number of
253 * scopes between a use and def to roughly 255. This is a pretty small limit but
254 * note that SpiderMonkey's recursive descent parser can only parse about this
255 * many functions before hitting the C-stack recursion limit so this shouldn't
256 * be a significant limitation in practice.
257 */
258
GET_ENVCOORD_HOPS(jsbytecode * pc)259 static inline uint8_t GET_ENVCOORD_HOPS(jsbytecode* pc) {
260 return GET_UINT8(pc);
261 }
262
SET_ENVCOORD_HOPS(jsbytecode * pc,uint8_t hops)263 static inline void SET_ENVCOORD_HOPS(jsbytecode* pc, uint8_t hops) {
264 SET_UINT8(pc, hops);
265 }
266
267 static const unsigned ENVCOORD_HOPS_LEN = 1;
268 static const unsigned ENVCOORD_HOPS_BITS = 8;
269 static const unsigned ENVCOORD_HOPS_LIMIT = 1 << ENVCOORD_HOPS_BITS;
270
271 /* Describes the 'slot' component of a JOF_ENVCOORD opcode. */
GET_ENVCOORD_SLOT(const jsbytecode * pc)272 static inline uint32_t GET_ENVCOORD_SLOT(const jsbytecode* pc) {
273 return GET_UINT24(pc);
274 }
275
SET_ENVCOORD_SLOT(jsbytecode * pc,uint32_t slot)276 static inline void SET_ENVCOORD_SLOT(jsbytecode* pc, uint32_t slot) {
277 SET_UINT24(pc, slot);
278 }
279
280 static const unsigned ENVCOORD_SLOT_LEN = 3;
281 static const unsigned ENVCOORD_SLOT_BITS = 24;
282 static const uint32_t ENVCOORD_SLOT_LIMIT = 1 << ENVCOORD_SLOT_BITS;
283
// Static metadata for a single opcode: instruction length, stack effects,
// and format flags. Indexed by opcode value via js::CodeSpec().
struct JSCodeSpec {
  uint8_t length; /* length including opcode byte */
  int8_t nuses;   /* arity, -1 if variadic */
  int8_t ndefs;   /* number of stack results */
  uint32_t format; /* immediate operand format */
};
290
291 namespace js {
292
// Per-opcode metadata, indexed by opcode value.
extern const JSCodeSpec CodeSpecTable[];

// Look up the JSCodeSpec entry for |op|.
inline const JSCodeSpec& CodeSpec(JSOp op) {
  return CodeSpecTable[uint8_t(op)];
}

// Human-readable opcode names, indexed by opcode value.
extern const char* const CodeNameTable[];

// Name of |op|.
inline const char* CodeName(JSOp op) { return CodeNameTable[uint8_t(op)]; }
302
303 /* Shorthand for type from opcode. */
304
JOF_OPTYPE(JSOp op)305 static inline uint32_t JOF_OPTYPE(JSOp op) {
306 return JOF_TYPE(CodeSpec(op).format);
307 }
308
IsJumpOpcode(JSOp op)309 static inline bool IsJumpOpcode(JSOp op) { return JOF_OPTYPE(op) == JOF_JUMP; }
310
// Whether execution can continue to the next instruction in the bytecode
// stream after |op| (i.e. |op| does not unconditionally jump, return, or
// throw).
static inline bool BytecodeFallsThrough(JSOp op) {
  // Note:
  // * JSOp::Yield/JSOp::Await is considered to fall through, like JSOp::Call.
  // * JSOp::Gosub falls through indirectly, after executing a 'finally'.
  switch (op) {
    case JSOp::Goto:
    case JSOp::Default:
    case JSOp::Return:
    case JSOp::RetRval:
    case JSOp::Retsub:
    case JSOp::FinalYieldRval:
    case JSOp::Throw:
    case JSOp::ThrowMsg:
    case JSOp::ThrowSetConst:
    case JSOp::TableSwitch:
      return false;
    default:
      return true;
  }
}
331
BytecodeIsJumpTarget(JSOp op)332 static inline bool BytecodeIsJumpTarget(JSOp op) {
333 switch (op) {
334 case JSOp::JumpTarget:
335 case JSOp::LoopHead:
336 case JSOp::AfterYield:
337 return true;
338 default:
339 return false;
340 }
341 }
342
// Number of stack values consumed by the instruction at |pc|. Most opcodes
// have a fixed arity in the CodeSpec table; variadic opcodes (nuses == -1)
// encode their arity in an immediate operand instead.
MOZ_ALWAYS_INLINE unsigned StackUses(jsbytecode* pc) {
  JSOp op = JSOp(*pc);
  int nuses = CodeSpec(op).nuses;
  if (nuses >= 0) {
    return nuses;
  }

  MOZ_ASSERT(nuses == -1);
  switch (op) {
    case JSOp::PopN:
      // Pops exactly as many values as its uint16 immediate says.
      return GET_UINT16(pc);
    case JSOp::New:
    case JSOp::SuperCall:
      // Like a call, plus one extra stack slot (presumably new.target —
      // confirm against the bytecode emitter).
      return 2 + GET_ARGC(pc) + 1;
    default:
      /* stack: fun, this, [argc arguments] */
      MOZ_ASSERT(op == JSOp::Call || op == JSOp::CallIgnoresRv ||
                 op == JSOp::Eval || op == JSOp::CallIter ||
                 op == JSOp::StrictEval || op == JSOp::FunCall ||
                 op == JSOp::FunApply);
      return 2 + GET_ARGC(pc);
  }
}
366
StackDefs(jsbytecode * pc)367 MOZ_ALWAYS_INLINE unsigned StackDefs(jsbytecode* pc) {
368 int ndefs = CodeSpec(JSOp(*pc)).ndefs;
369 MOZ_ASSERT(ndefs >= 0);
370 return ndefs;
371 }
372
#if defined(DEBUG) || defined(JS_JITSPEW)
/*
 * Given bytecode address pc in script's main program code, compute the operand
 * stack depth just before (JSOp) *pc executes. If *pc is not reachable, return
 * false.
 */
// NOTE(review): |depth| and |reachablePC| are out-params — confirm exact
// success/return semantics against the out-of-line definition.
extern bool ReconstructStackDepth(JSContext* cx, JSScript* script,
                                  jsbytecode* pc, uint32_t* depth,
                                  bool* reachablePC);
#endif
383
384 } /* namespace js */
385
// Sentinel values for the |spindex| parameter of DecompileValueGenerator
// (see the comment on that function below).
#define JSDVG_IGNORE_STACK 0
#define JSDVG_SEARCH_STACK 1

namespace js {

/*
 * Find the source expression that resulted in v, and return a newly allocated
 * C-string containing it. Fall back on v's string conversion (fallback) if we
 * can't find the bytecode that generated and pushed v on the operand stack.
 *
 * Search the current stack frame if spindex is JSDVG_SEARCH_STACK. Don't
 * look for v on the stack if spindex is JSDVG_IGNORE_STACK. Otherwise,
 * spindex is the negative index of v, measured from cx->fp->sp, or from a
 * lower frame's sp if cx->fp is native.
 *
 * The optional argument skipStackHits can be used to skip a hit in the stack
 * frame. This can be useful in self-hosted code that wants to report value
 * errors containing decompiled values that are useful for the user, instead of
 * values used internally by the self-hosted code.
 *
 * The caller must call JS_free on the result after a successful call.
 */
UniqueChars DecompileValueGenerator(JSContext* cx, int spindex, HandleValue v,
                                    HandleString fallback,
                                    int skipStackHits = 0);

/*
 * Decompile the formal argument at formalIndex in the nearest non-builtin
 * stack frame, falling back with converting v to source.
 */
JSString* DecompileArgument(JSContext* cx, int formalIndex, HandleValue v);
417
GetOpLength(JSOp op)418 static inline unsigned GetOpLength(JSOp op) {
419 MOZ_ASSERT(uint8_t(op) < JSOP_LIMIT);
420 MOZ_ASSERT(CodeSpec(op).length > 0);
421 return CodeSpec(op).length;
422 }
423
GetBytecodeLength(const jsbytecode * pc)424 static inline unsigned GetBytecodeLength(const jsbytecode* pc) {
425 JSOp op = (JSOp)*pc;
426 return GetOpLength(op);
427 }
428
BytecodeIsPopped(jsbytecode * pc)429 static inline bool BytecodeIsPopped(jsbytecode* pc) {
430 jsbytecode* next = pc + GetBytecodeLength(pc);
431 return JSOp(*next) == JSOp::Pop;
432 }
433
// NOTE(review): presumably validates that |offset| refers to an instruction
// within |script|'s bytecode — confirm with the out-of-line definition.
extern bool IsValidBytecodeOffset(JSContext* cx, JSScript* script,
                                  size_t offset);
436
IsArgOp(JSOp op)437 inline bool IsArgOp(JSOp op) { return JOF_OPTYPE(op) == JOF_QARG; }
438
IsLocalOp(JSOp op)439 inline bool IsLocalOp(JSOp op) { return JOF_OPTYPE(op) == JOF_LOCAL; }
440
IsAliasedVarOp(JSOp op)441 inline bool IsAliasedVarOp(JSOp op) { return JOF_OPTYPE(op) == JOF_ENVCOORD; }
442
IsGlobalOp(JSOp op)443 inline bool IsGlobalOp(JSOp op) { return CodeSpec(op).format & JOF_GNAME; }
444
IsPropertySetOp(JSOp op)445 inline bool IsPropertySetOp(JSOp op) {
446 return CodeSpec(op).format & JOF_PROPSET;
447 }
448
IsPropertyInitOp(JSOp op)449 inline bool IsPropertyInitOp(JSOp op) {
450 return CodeSpec(op).format & JOF_PROPINIT;
451 }
452
IsLooseEqualityOp(JSOp op)453 inline bool IsLooseEqualityOp(JSOp op) {
454 return op == JSOp::Eq || op == JSOp::Ne;
455 }
456
IsStrictEqualityOp(JSOp op)457 inline bool IsStrictEqualityOp(JSOp op) {
458 return op == JSOp::StrictEq || op == JSOp::StrictNe;
459 }
460
IsEqualityOp(JSOp op)461 inline bool IsEqualityOp(JSOp op) {
462 return IsLooseEqualityOp(op) || IsStrictEqualityOp(op);
463 }
464
IsRelationalOp(JSOp op)465 inline bool IsRelationalOp(JSOp op) {
466 return op == JSOp::Lt || op == JSOp::Le || op == JSOp::Gt || op == JSOp::Ge;
467 }
468
IsCheckStrictOp(JSOp op)469 inline bool IsCheckStrictOp(JSOp op) {
470 return CodeSpec(op).format & JOF_CHECKSTRICT;
471 }
472
IsNameOp(JSOp op)473 inline bool IsNameOp(JSOp op) { return CodeSpec(op).format & JOF_NAME; }
474
#ifdef DEBUG
// Ops flagged as needing a sloppy-mode check (JOF_CHECKSLOPPY); debug-only.
inline bool IsCheckSloppyOp(JSOp op) {
  return CodeSpec(op).format & JOF_CHECKSLOPPY;
}
#endif
480
IsAtomOp(JSOp op)481 inline bool IsAtomOp(JSOp op) { return JOF_OPTYPE(op) == JOF_ATOM; }
482
IsGetPropOp(JSOp op)483 inline bool IsGetPropOp(JSOp op) { return op == JSOp::GetProp; }
484
IsGetPropPC(const jsbytecode * pc)485 inline bool IsGetPropPC(const jsbytecode* pc) { return IsGetPropOp(JSOp(*pc)); }
486
IsHiddenInitOp(JSOp op)487 inline bool IsHiddenInitOp(JSOp op) {
488 return op == JSOp::InitHiddenProp || op == JSOp::InitHiddenElem ||
489 op == JSOp::InitHiddenPropGetter || op == JSOp::InitHiddenElemGetter ||
490 op == JSOp::InitHiddenPropSetter || op == JSOp::InitHiddenElemSetter ||
491 op == JSOp::InitLockedElem;
492 }
493
IsStrictSetPC(jsbytecode * pc)494 inline bool IsStrictSetPC(jsbytecode* pc) {
495 JSOp op = JSOp(*pc);
496 return op == JSOp::StrictSetProp || op == JSOp::StrictSetName ||
497 op == JSOp::StrictSetGName || op == JSOp::StrictSetElem;
498 }
499
IsSetPropOp(JSOp op)500 inline bool IsSetPropOp(JSOp op) {
501 return op == JSOp::SetProp || op == JSOp::StrictSetProp ||
502 op == JSOp::SetName || op == JSOp::StrictSetName ||
503 op == JSOp::SetGName || op == JSOp::StrictSetGName;
504 }
505
IsSetPropPC(const jsbytecode * pc)506 inline bool IsSetPropPC(const jsbytecode* pc) { return IsSetPropOp(JSOp(*pc)); }
507
IsGetElemOp(JSOp op)508 inline bool IsGetElemOp(JSOp op) { return op == JSOp::GetElem; }
509
IsGetElemPC(const jsbytecode * pc)510 inline bool IsGetElemPC(const jsbytecode* pc) { return IsGetElemOp(JSOp(*pc)); }
511
IsSetElemOp(JSOp op)512 inline bool IsSetElemOp(JSOp op) {
513 return op == JSOp::SetElem || op == JSOp::StrictSetElem;
514 }
515
IsSetElemPC(const jsbytecode * pc)516 inline bool IsSetElemPC(const jsbytecode* pc) { return IsSetElemOp(JSOp(*pc)); }
517
IsElemPC(const jsbytecode * pc)518 inline bool IsElemPC(const jsbytecode* pc) {
519 return CodeSpec(JSOp(*pc)).format & JOF_ELEM;
520 }
521
IsInvokeOp(JSOp op)522 inline bool IsInvokeOp(JSOp op) { return CodeSpec(op).format & JOF_INVOKE; }
523
IsInvokePC(jsbytecode * pc)524 inline bool IsInvokePC(jsbytecode* pc) { return IsInvokeOp(JSOp(*pc)); }
525
IsStrictEvalPC(jsbytecode * pc)526 inline bool IsStrictEvalPC(jsbytecode* pc) {
527 JSOp op = JSOp(*pc);
528 return op == JSOp::StrictEval || op == JSOp::StrictSpreadEval;
529 }
530
IsConstructOp(JSOp op)531 inline bool IsConstructOp(JSOp op) {
532 return CodeSpec(op).format & JOF_CONSTRUCT;
533 }
IsConstructPC(const jsbytecode * pc)534 inline bool IsConstructPC(const jsbytecode* pc) {
535 return IsConstructOp(JSOp(*pc));
536 }
537
IsSpreadOp(JSOp op)538 inline bool IsSpreadOp(JSOp op) { return CodeSpec(op).format & JOF_SPREAD; }
539
IsSpreadPC(const jsbytecode * pc)540 inline bool IsSpreadPC(const jsbytecode* pc) { return IsSpreadOp(JSOp(*pc)); }
541
GetBytecodeInteger(jsbytecode * pc)542 static inline int32_t GetBytecodeInteger(jsbytecode* pc) {
543 switch (JSOp(*pc)) {
544 case JSOp::Zero:
545 return 0;
546 case JSOp::One:
547 return 1;
548 case JSOp::Uint16:
549 return GET_UINT16(pc);
550 case JSOp::Uint24:
551 return GET_UINT24(pc);
552 case JSOp::Int8:
553 return GET_INT8(pc);
554 case JSOp::Int32:
555 return GET_INT32(pc);
556 default:
557 MOZ_CRASH("Bad op");
558 }
559 }
560
BytecodeOpHasIC(JSOp op)561 inline bool BytecodeOpHasIC(JSOp op) { return CodeSpec(op).format & JOF_IC; }
562
// Decode the two uint8 immediates of JSOp::CheckPrivateField: the condition
// under which the op throws, and the message kind it throws with.
inline void GetCheckPrivateFieldOperands(jsbytecode* pc,
                                         ThrowCondition* throwCondition,
                                         ThrowMsgKind* throwKind) {
  // Both operands are stored as single bytes; keep the enums byte-sized.
  static_assert(sizeof(ThrowCondition) == sizeof(uint8_t));
  static_assert(sizeof(ThrowMsgKind) == sizeof(uint8_t));

  MOZ_ASSERT(JSOp(*pc) == JSOp::CheckPrivateField);
  uint8_t throwConditionByte = GET_UINT8(pc);
  uint8_t throwKindByte = GET_UINT8(pc + 1);

  *throwCondition = static_cast<ThrowCondition>(throwConditionByte);
  *throwKind = static_cast<ThrowMsgKind>(throwKindByte);

  // Validate the decoded bytes against the known enumerator values.
  MOZ_ASSERT(*throwCondition == ThrowCondition::ThrowHas ||
             *throwCondition == ThrowCondition::ThrowHasNot ||
             *throwCondition == ThrowCondition::OnlyCheckRhs);

  MOZ_ASSERT(*throwKind == ThrowMsgKind::PrivateDoubleInit ||
             *throwKind == ThrowMsgKind::MissingPrivateOnGet ||
             *throwKind == ThrowMsgKind::MissingPrivateOnSet);
}
584
585 // Return true iff the combination of the ThrowCondition and hasOwn result
586 // will throw an exception.
CheckPrivateFieldWillThrow(ThrowCondition condition,bool hasOwn)587 static inline bool CheckPrivateFieldWillThrow(ThrowCondition condition,
588 bool hasOwn) {
589 if ((condition == ThrowCondition::ThrowHasNot && !hasOwn) ||
590 (condition == ThrowCondition::ThrowHas && hasOwn)) {
591 // Met a throw condition.
592 return true;
593 }
594
595 return false;
596 }
597
/*
 * Counts accumulated for a single opcode in a script. The counts tracked vary
 * between opcodes, and this structure ensures that counts are accessed in a
 * coherent fashion.
 */
class PCCounts {
  /*
   * Offset of the pc inside the script. This field is used to look up opcodes
   * that have annotations.
   */
  size_t pcOffset_;

  /*
   * Record the number of executions of one instruction, or the number of
   * throws executed.
   */
  uint64_t numExec_;

 public:
  explicit PCCounts(size_t off) : pcOffset_(off), numExec_(0) {}

  size_t pcOffset() const { return pcOffset_; }

  // Used for sorting and searching (ordered by bytecode offset).
  bool operator<(const PCCounts& rhs) const {
    return pcOffset_ < rhs.pcOffset_;
  }

  // Mutable and read-only access to the execution counter.
  uint64_t& numExec() { return numExec_; }
  uint64_t numExec() const { return numExec_; }

  // Label used when reporting this counter; defined out of line.
  static const char numExecName[];
};
631
GetNextPc(jsbytecode * pc)632 static inline jsbytecode* GetNextPc(jsbytecode* pc) {
633 return pc + GetBytecodeLength(pc);
634 }
635
IntToResumeKind(int32_t value)636 inline GeneratorResumeKind IntToResumeKind(int32_t value) {
637 MOZ_ASSERT(uint32_t(value) <= uint32_t(GeneratorResumeKind::Return));
638 return static_cast<GeneratorResumeKind>(value);
639 }
640
ResumeKindFromPC(jsbytecode * pc)641 inline GeneratorResumeKind ResumeKindFromPC(jsbytecode* pc) {
642 MOZ_ASSERT(JSOp(*pc) == JSOp::ResumeKind);
643 return IntToResumeKind(GET_UINT8(pc));
644 }
645
#if defined(DEBUG) || defined(JS_JITSPEW)

enum class DisassembleSkeptically { No, Yes };

/*
 * Disassemblers, for debugging only.
 */
[[nodiscard]] extern bool Disassemble(
    JSContext* cx, JS::Handle<JSScript*> script, bool lines, Sprinter* sp,
    DisassembleSkeptically skeptically = DisassembleSkeptically::No);

// Disassemble a single instruction at |pc| into |sp|. NOTE(review): the
// meaning of |loc| and of the returned unsigned should be confirmed against
// the out-of-line definition.
unsigned Disassemble1(JSContext* cx, JS::Handle<JSScript*> script,
                      jsbytecode* pc, unsigned loc, bool lines, Sprinter* sp);

#endif

// Dump PC counts for the current realm.
[[nodiscard]] extern bool DumpRealmPCCounts(JSContext* cx);
663
664 } // namespace js
665
666 #endif /* vm_BytecodeUtil_h */
667