1 /* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 *
3 * ***** BEGIN LICENSE BLOCK *****
4 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
5 *
6 * The contents of this file are subject to the Mozilla Public License Version
7 * 1.1 (the "License"); you may not use this file except in compliance with
8 * the License. You may obtain a copy of the License at
9 * http://www.mozilla.org/MPL/
10 *
11 * Software distributed under the License is distributed on an "AS IS" basis,
12 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
13 * for the specific language governing rights and limitations under the
14 * License.
15 *
16 * The Original Code is Mozilla Communicator client code, released
17 * March 31, 1998.
18 *
19 * The Initial Developer of the Original Code is
20 * Netscape Communications Corporation.
21 * Portions created by the Initial Developer are Copyright (C) 1998
22 * the Initial Developer. All Rights Reserved.
23 *
24 * Contributor(s):
25 *
26 * Alternatively, the contents of this file may be used under the terms of
27 * either of the GNU General Public License Version 2 or later (the "GPL"),
28 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
29 * in which case the provisions of the GPL or the LGPL are applicable instead
30 * of those above. If you wish to allow use of your version of this file only
31 * under the terms of either the GPL or the LGPL, and not to allow others to
32 * use your version of this file under the terms of the MPL, indicate your
33 * decision by deleting the provisions above and replace them with the notice
34 * and other provisions required by the GPL or the LGPL. If you do not delete
35 * the provisions above, a recipient may use your version of this file under
36 * the terms of any one of the MPL, the GPL or the LGPL.
37 *
38 * ***** END LICENSE BLOCK ***** */
39
40 /*
41 * JS bytecode generation.
42 */
43 #include "jsstddef.h"
44 #ifdef HAVE_MEMORY_H
45 #include <memory.h>
46 #endif
47 #include <string.h>
48 #include "jstypes.h"
49 #include "jsarena.h" /* Added by JSIFY */
50 #include "jsutil.h" /* Added by JSIFY */
51 #include "jsbit.h"
52 #include "jsprf.h"
53 #include "jsapi.h"
54 #include "jsatom.h"
55 #include "jscntxt.h"
56 #include "jsconfig.h"
57 #include "jsemit.h"
58 #include "jsfun.h"
59 #include "jsnum.h"
60 #include "jsopcode.h"
61 #include "jsparse.h"
62 #include "jsregexp.h"
63 #include "jsscan.h"
64 #include "jsscope.h"
65 #include "jsscript.h"
66
67 /* Allocation chunk counts, must be powers of two in general. */
68 #define BYTECODE_CHUNK 256 /* code allocation increment */
69 #define SRCNOTE_CHUNK 64 /* initial srcnote allocation increment */
70 #define TRYNOTE_CHUNK 64 /* trynote allocation increment */
71
72 /* Macros to compute byte sizes from typed element counts. */
73 #define BYTECODE_SIZE(n) ((n) * sizeof(jsbytecode))
74 #define SRCNOTE_SIZE(n) ((n) * sizeof(jssrcnote))
75 #define TRYNOTE_SIZE(n) ((n) * sizeof(JSTryNote))
76
77 JS_FRIEND_API(JSBool)
78 js_InitCodeGenerator(JSContext *cx, JSCodeGenerator *cg,
79 JSArenaPool *codePool, JSArenaPool *notePool,
80 const char *filename, uintN lineno,
81 JSPrincipals *principals)
82 {
83 memset(cg, 0, sizeof *cg);
84 TREE_CONTEXT_INIT(&cg->treeContext);
85 cg->treeContext.flags |= TCF_COMPILING;
86 cg->codePool = codePool;
87 cg->notePool = notePool;
88 cg->codeMark = JS_ARENA_MARK(codePool);
89 cg->noteMark = JS_ARENA_MARK(notePool);
90 cg->tempMark = JS_ARENA_MARK(&cx->tempPool);
91 cg->current = &cg->main;
92 cg->filename = filename;
93 cg->firstLine = cg->prolog.currentLine = cg->main.currentLine = lineno;
94 cg->principals = principals;
95 ATOM_LIST_INIT(&cg->atomList);
96 cg->prolog.noteMask = cg->main.noteMask = SRCNOTE_CHUNK - 1;
97 ATOM_LIST_INIT(&cg->constList);
98 return JS_TRUE;
99 }
100
101 JS_FRIEND_API(void)
102 js_FinishCodeGenerator(JSContext *cx, JSCodeGenerator *cg)
103 {
104 TREE_CONTEXT_FINISH(&cg->treeContext);
105 JS_ARENA_RELEASE(cg->codePool, cg->codeMark);
106 JS_ARENA_RELEASE(cg->notePool, cg->noteMark);
107 JS_ARENA_RELEASE(&cx->tempPool, cg->tempMark);
108 }
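/*
 * A minimal caller sketch, for orientation only.  The real drivers are the
 * compiler entry points that own the arena pools; parseTree is a placeholder
 * name, and js_EmitTree (declared in jsemit.h) is assumed to take (cx, cg,
 * parse node) as elsewhere in this file:
 *
 *   JSCodeGenerator cg;
 *
 *   if (!js_InitCodeGenerator(cx, &cg, &codePool, &notePool,
 *                             filename, lineno, principals))
 *       return JS_FALSE;
 *   ok = js_EmitTree(cx, &cg, parseTree);   // emit bytecode and srcnotes
 *   js_FinishCodeGenerator(cx, &cg);        // release arena marks
 */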
109
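/*
 * Ensure the current code vector (prolog or main) has room for delta more
 * bytecodes, growing the arena-backed allocation in power-of-two chunks as
 * needed.  Returns the offset at which the new bytecodes should be written,
 * or -1 after reporting out-of-memory.
 */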
110 static ptrdiff_t
111 EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
112 {
113 jsbytecode *base, *limit, *next;
114 ptrdiff_t offset, length;
115 size_t incr, size;
116
117 base = CG_BASE(cg);
118 next = CG_NEXT(cg);
119 limit = CG_LIMIT(cg);
120 offset = PTRDIFF(next, base, jsbytecode);
121 if (next + delta > limit) {
122 length = offset + delta;
123 length = (length <= BYTECODE_CHUNK)
124 ? BYTECODE_CHUNK
125 : JS_BIT(JS_CeilingLog2(length));
126 incr = BYTECODE_SIZE(length);
127 if (!base) {
128 JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
129 } else {
130 size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
131 incr -= size;
132 JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
133 }
134 if (!base) {
135 JS_ReportOutOfMemory(cx);
136 return -1;
137 }
138 CG_BASE(cg) = base;
139 CG_LIMIT(cg) = base + length;
140 CG_NEXT(cg) = base + offset;
141 }
142 return offset;
143 }
144
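/*
 * Model the stack effect of the opcode just emitted at offset target: pop
 * cs->nuses operands (or 2 + argc for call-style ops whose nuses is -1),
 * report underflow if the depth would go negative, then push cs->ndefs
 * results and track cg->maxStackDepth.
 */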
145 static void
146 UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
147 {
148 jsbytecode *pc;
149 const JSCodeSpec *cs;
150 intN nuses;
151
152 pc = CG_CODE(cg, target);
153 cs = &js_CodeSpec[pc[0]];
154 nuses = cs->nuses;
155 if (nuses < 0)
156 nuses = 2 + GET_ARGC(pc); /* stack: fun, this, [argc arguments] */
157 cg->stackDepth -= nuses;
158 if (cg->stackDepth < 0) {
159 char numBuf[12];
160 JS_snprintf(numBuf, sizeof numBuf, "%d", target);
161 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
162 JSMSG_STACK_UNDERFLOW,
163 cg->filename ? cg->filename : "stdin", numBuf);
164 }
165 cg->stackDepth += cs->ndefs;
166 if ((uintN)cg->stackDepth > cg->maxStackDepth)
167 cg->maxStackDepth = cg->stackDepth;
168 }
169
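/*
 * js_Emit1, js_Emit2, and js_Emit3 append an opcode with zero, one, or two
 * immediate bytes respectively, returning the bytecode offset of the opcode
 * or -1 on error.
 */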
170 ptrdiff_t
171 js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
172 {
173 ptrdiff_t offset = EmitCheck(cx, cg, op, 1);
174
175 if (offset >= 0) {
176 *CG_NEXT(cg)++ = (jsbytecode)op;
177 UpdateDepth(cx, cg, offset);
178 }
179 return offset;
180 }
181
182 ptrdiff_t
183 js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
184 {
185 ptrdiff_t offset = EmitCheck(cx, cg, op, 2);
186
187 if (offset >= 0) {
188 jsbytecode *next = CG_NEXT(cg);
189 next[0] = (jsbytecode)op;
190 next[1] = op1;
191 CG_NEXT(cg) = next + 2;
192 UpdateDepth(cx, cg, offset);
193 }
194 return offset;
195 }
196
197 ptrdiff_t
198 js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
199 jsbytecode op2)
200 {
201 ptrdiff_t offset = EmitCheck(cx, cg, op, 3);
202
203 if (offset >= 0) {
204 jsbytecode *next = CG_NEXT(cg);
205 next[0] = (jsbytecode)op;
206 next[1] = op1;
207 next[2] = op2;
208 CG_NEXT(cg) = next + 3;
209 UpdateDepth(cx, cg, offset);
210 }
211 return offset;
212 }
213
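/*
 * Append op followed by extra zeroed immediate bytes that the caller will
 * patch in later.  Returns the bytecode offset of the opcode or -1 on error.
 */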
214 ptrdiff_t
215 js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
216 {
217 ptrdiff_t length = 1 + (ptrdiff_t)extra;
218 ptrdiff_t offset = EmitCheck(cx, cg, op, length);
219
220 if (offset >= 0) {
221 jsbytecode *next = CG_NEXT(cg);
222 *next = (jsbytecode)op;
223 memset(next + 1, 0, BYTECODE_SIZE(extra));
224 CG_NEXT(cg) = next + length;
225 UpdateDepth(cx, cg, offset);
226 }
227 return offset;
228 }
229
230 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
231 const char js_with_statement_str[] = "with statement";
232 const char js_script_str[] = "script";
233
234 static const char *statementName[] = {
235 "block", /* BLOCK */
236 "label statement", /* LABEL */
237 "if statement", /* IF */
238 "else statement", /* ELSE */
239 "switch statement", /* SWITCH */
240 js_with_statement_str, /* WITH */
241 "try statement", /* TRY */
242 "catch block", /* CATCH */
243 "finally statement", /* FINALLY */
244 "do loop", /* DO_LOOP */
245 "for loop", /* FOR_LOOP */
246 "for/in loop", /* FOR_IN_LOOP */
247 "while loop", /* WHILE_LOOP */
248 };
249
250 static const char *
251 StatementName(JSCodeGenerator *cg)
252 {
253 if (!cg->treeContext.topStmt)
254 return js_script_str;
255 return statementName[cg->treeContext.topStmt->type];
256 }
257
258 static void
259 ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
260 {
261 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
262 StatementName(cg));
263 }
264
265 /**
266 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
267 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
268 into unconditional (gotos and gosubs), and conditional jumps or branches
269 (which pop a value, test it, and jump depending on its value). Most jumps
270 have just one immediate operand, a signed offset from the jump opcode's pc
271 to the target bytecode. The lookup and table switch opcodes may contain
272 many jump offsets.
273
274 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
275 fixed by adding extended "X" counterparts to the opcodes/formats (NB: the
276 X is a suffix, so the extended form of the JSOP_OR branch opcode is named
277 JSOP_ORX rather than colliding with the JSOP_XOR name).  The unextended or short
278 formats have 16-bit signed immediate offset operands, the extended or long
279 formats have 32-bit signed immediates. The span-dependency problem consists
280 of selecting as few long instructions as possible, or about as few -- since
281 jumps can span other jumps, extending one jump may cause another to need to
282 be extended.
283
284 Most JS scripts are short, so need no extended jumps. We optimize for this
285 case by generating short jumps until we know a long jump is needed. After
286 that point, we keep generating short jumps, but each jump's 16-bit immediate
287 offset operand is actually an unsigned index into cg->spanDeps, an array of
288 JSSpanDep structs. Each struct tells the top offset in the script of the
289 opcode, the "before" offset of the jump (which will be the same as top for
290 simplex jumps, but which will index further into the bytecode array for a
291 non-initial jump offset in a lookup or table switch), the "after" offset
292 adjusted during span-dependent instruction selection (initially the same
293 value as the "before" offset), and the jump target (more below).
294
295 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
296 ensure that all bytecode generated so far can be inspected to discover where
297 the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
298 that we generate span-dependency records sorted by their offsets, so we can
299 binary-search when trying to find a JSSpanDep for a given bytecode offset,
300 or the nearest JSSpanDep at or above a given pc.
301
302 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
303 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
304 tells us that we need to binary-search for the cg->spanDeps entry by the
305 jump opcode's bytecode offset (sd->before).
306
307 Jump targets need to be maintained in a data structure that lets us look
308 up an already-known target by its address (jumps may have a common target),
309 and that also lets us update the addresses (script-relative, a.k.a. absolute
310 offsets) of targets that come after a jump target (for when a jump below
311 that target needs to be extended). We use an AVL tree, implemented using
312 recursion, but with some tricky optimizations to its height-balancing code
313 (see http://www.enteract.com/~bradapp/ftp/src/libs/C++/AvlTrees.html).
314
315 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
316 positive sign, even though they link "backward" (i.e., toward lower bytecode
317 address). We don't want to waste space and search time in the AVL tree for
318 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
319 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
320 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
321 target, or is still awaiting backpatching.
322
323 Note that backpatch chains would present a problem for BuildSpanDepTable,
324 which inspects bytecode to build cg->spanDeps on demand, when the first
325 short jump offset overflows. To solve this temporary problem, we emit a
326 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_PUSH for jumps that push a
327 result on the interpreter's stack, namely JSOP_GOSUB; or JSOP_BACKPATCH_POP
328 for branch ops) whose nuses/ndefs counts help keep the stack balanced, but
329 whose opcode format distinguishes its backpatch delta immediate operand from
330 a normal jump offset.
331 */
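/*
 * A worked illustration of the problem described above (sizes follow from
 * the 16- vs. 32-bit immediates; this is a sketch, not emitted code):
 *
 *   JSOP_GOTO  <16-bit offset>    1 + 2 bytes, span must fit in [-32768, 32767]
 *   JSOP_GOTOX <32-bit offset>    1 + 4 bytes, span may use the full 32 bits
 *
 * Widening one goto inserts 2 bytes, which lengthens every span that crosses
 * it; a previously short jump may then overflow 16 bits and need widening
 * too, so OptimizeSpanDeps below iterates until no more jumps grow.
 */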
332 static int
333 BalanceJumpTargets(JSJumpTarget **jtp)
334 {
335 JSJumpTarget *jt, *jt2, *root;
336 int dir, otherDir, heightChanged;
337 JSBool doubleRotate;
338
339 jt = *jtp;
340 JS_ASSERT(jt->balance != 0);
341
342 if (jt->balance < -1) {
343 dir = JT_RIGHT;
344 doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
345 } else if (jt->balance > 1) {
346 dir = JT_LEFT;
347 doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
348 } else {
349 return 0;
350 }
351
352 otherDir = JT_OTHER_DIR(dir);
353 if (doubleRotate) {
354 jt2 = jt->kids[otherDir];
355 *jtp = root = jt2->kids[dir];
356
357 jt->kids[otherDir] = root->kids[dir];
358 root->kids[dir] = jt;
359
360 jt2->kids[dir] = root->kids[otherDir];
361 root->kids[otherDir] = jt2;
362
363 heightChanged = 1;
364 root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
365 root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
366 root->balance = 0;
367 } else {
368 *jtp = root = jt->kids[otherDir];
369 jt->kids[otherDir] = root->kids[dir];
370 root->kids[dir] = jt;
371
372 heightChanged = (root->balance != 0);
373 jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
374 }
375
376 return heightChanged;
377 }
378
379 typedef struct AddJumpTargetArgs {
380 JSContext *cx;
381 JSCodeGenerator *cg;
382 ptrdiff_t offset;
383 JSJumpTarget *node;
384 } AddJumpTargetArgs;
385
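/*
 * Recursively insert args->offset into the AVL tree rooted at *jtp, reusing
 * nodes from cg->jtFreeList when possible.  On success args->node points at
 * the new or pre-existing target node (it stays null on out-of-memory); the
 * int result is the height change of the subtree (0 or 1), used to rebalance
 * on the way back up.
 */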
386 static int
387 AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
388 {
389 JSJumpTarget *jt;
390 int balanceDelta;
391
392 jt = *jtp;
393 if (!jt) {
394 JSCodeGenerator *cg = args->cg;
395
396 jt = cg->jtFreeList;
397 if (jt) {
398 cg->jtFreeList = jt->kids[JT_LEFT];
399 } else {
400 JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
401 sizeof *jt);
402 if (!jt) {
403 JS_ReportOutOfMemory(args->cx);
404 return 0;
405 }
406 }
407 jt->offset = args->offset;
408 jt->balance = 0;
409 jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
410 cg->numJumpTargets++;
411 args->node = jt;
412 *jtp = jt;
413 return 1;
414 }
415
416 if (jt->offset == args->offset) {
417 args->node = jt;
418 return 0;
419 }
420
421 if (args->offset < jt->offset)
422 balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
423 else
424 balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
425 if (!args->node)
426 return 0;
427
428 jt->balance += balanceDelta;
429 return (balanceDelta && jt->balance)
430 ? 1 - BalanceJumpTargets(jtp)
431 : 0;
432 }
433
434 #ifdef DEBUG_brendan
435 static int AVLCheck(JSJumpTarget *jt)
436 {
437 int lh, rh;
438
439 if (!jt) return 0;
440 JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
441 lh = AVLCheck(jt->kids[JT_LEFT]);
442 rh = AVLCheck(jt->kids[JT_RIGHT]);
443 JS_ASSERT(jt->balance == rh - lh);
444 return 1 + JS_MAX(lh, rh);
445 }
446 #endif
447
448 static JSBool
449 SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
450 ptrdiff_t off)
451 {
452 AddJumpTargetArgs args;
453
454 if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
455 ReportStatementTooLarge(cx, cg);
456 return JS_FALSE;
457 }
458
459 args.cx = cx;
460 args.cg = cg;
461 args.offset = sd->top + off;
462 args.node = NULL;
463 AddJumpTarget(&args, &cg->jumpTargets);
464 if (!args.node)
465 return JS_FALSE;
466
467 #ifdef DEBUG_brendan
468 AVLCheck(cg->jumpTargets);
469 #endif
470
471 SD_SET_TARGET(sd, args.node);
472 return JS_TRUE;
473 }
474
475 #define SPANDEPS_MIN 256
476 #define SPANDEPS_SIZE(n) ((n) * sizeof(JSSpanDep))
477 #define SPANDEPS_SIZE_MIN SPANDEPS_SIZE(SPANDEPS_MIN)
478
479 static JSBool
480 AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
481 ptrdiff_t off)
482 {
483 uintN index;
484 JSSpanDep *sdbase, *sd;
485 size_t size;
486
487 index = cg->numSpanDeps;
488 if (index + 1 == 0) {
489 ReportStatementTooLarge(cx, cg);
490 return JS_FALSE;
491 }
492
493 if ((index & (index - 1)) == 0 &&
494 (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
495 if (!sdbase) {
496 size = SPANDEPS_SIZE_MIN;
497 JS_ARENA_ALLOCATE_CAST(sdbase, JSSpanDep *, &cx->tempPool, size);
498 } else {
499 size = SPANDEPS_SIZE(index);
500 JS_ARENA_GROW_CAST(sdbase, JSSpanDep *, &cx->tempPool, size, size);
501 }
502 if (!sdbase)
503 return JS_FALSE;
504 cg->spanDeps = sdbase;
505 }
506
507 cg->numSpanDeps = index + 1;
508 sd = cg->spanDeps + index;
509 sd->top = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
510 sd->offset = sd->before = PTRDIFF(pc2, CG_BASE(cg), jsbytecode);
511
512 if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
513 /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
514 if (off != 0) {
515 JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
516 if (off > BPDELTA_MAX) {
517 ReportStatementTooLarge(cx, cg);
518 return JS_FALSE;
519 }
520 }
521 SD_SET_BPDELTA(sd, off);
522 } else if (off == 0) {
523 /* Jump offset will be patched directly, without backpatch chaining. */
524 SD_SET_TARGET(sd, NULL);
525 } else {
526 /* The jump offset in off is non-zero, therefore it's already known. */
527 if (!SetSpanDepTarget(cx, cg, sd, off))
528 return JS_FALSE;
529 }
530
531 if (index > SPANDEP_INDEX_MAX)
532 index = SPANDEP_INDEX_HUGE;
533 SET_SPANDEP_INDEX(pc2, index);
534 return JS_TRUE;
535 }
536
537 static JSBool
538 BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
539 {
540 jsbytecode *pc, *end;
541 JSOp op;
542 const JSCodeSpec *cs;
543 ptrdiff_t len, off;
544
545 pc = CG_BASE(cg);
546 end = CG_NEXT(cg);
547 while (pc < end) {
548 op = (JSOp)*pc;
549 cs = &js_CodeSpec[op];
550 len = (ptrdiff_t)cs->length;
551
552 switch (cs->format & JOF_TYPEMASK) {
553 case JOF_JUMP:
554 off = GET_JUMP_OFFSET(pc);
555 if (!AddSpanDep(cx, cg, pc, pc, off))
556 return JS_FALSE;
557 break;
558
559 #if JS_HAS_SWITCH_STATEMENT
560 case JOF_TABLESWITCH:
561 {
562 jsbytecode *pc2;
563 jsint i, low, high;
564
565 pc2 = pc;
566 off = GET_JUMP_OFFSET(pc2);
567 if (!AddSpanDep(cx, cg, pc, pc2, off))
568 return JS_FALSE;
569 pc2 += JUMP_OFFSET_LEN;
570 low = GET_JUMP_OFFSET(pc2);
571 pc2 += JUMP_OFFSET_LEN;
572 high = GET_JUMP_OFFSET(pc2);
573 pc2 += JUMP_OFFSET_LEN;
574 for (i = low; i <= high; i++) {
575 off = GET_JUMP_OFFSET(pc2);
576 if (!AddSpanDep(cx, cg, pc, pc2, off))
577 return JS_FALSE;
578 pc2 += JUMP_OFFSET_LEN;
579 }
580 len = 1 + pc2 - pc;
581 break;
582 }
583
584 case JOF_LOOKUPSWITCH:
585 {
586 jsbytecode *pc2;
587 jsint npairs;
588
589 pc2 = pc;
590 off = GET_JUMP_OFFSET(pc2);
591 if (!AddSpanDep(cx, cg, pc, pc2, off))
592 return JS_FALSE;
593 pc2 += JUMP_OFFSET_LEN;
594 npairs = (jsint) GET_ATOM_INDEX(pc2);
595 pc2 += ATOM_INDEX_LEN;
596 while (npairs) {
597 pc2 += ATOM_INDEX_LEN;
598 off = GET_JUMP_OFFSET(pc2);
599 if (!AddSpanDep(cx, cg, pc, pc2, off))
600 return JS_FALSE;
601 pc2 += JUMP_OFFSET_LEN;
602 npairs--;
603 }
604 len = 1 + pc2 - pc;
605 break;
606 }
607 #endif /* JS_HAS_SWITCH_STATEMENT */
608 }
609
610 pc += len;
611 }
612
613 return JS_TRUE;
614 }
615
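/*
 * Map a jump offset operand at pc back to its JSSpanDep: normally via the
 * spandep index stored in the 16-bit immediate, or by binary search on
 * sd->before when that index did not fit (SPANDEP_INDEX_HUGE).
 */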
616 static JSSpanDep *
617 GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
618 {
619 uintN index;
620 ptrdiff_t offset;
621 int lo, hi, mid;
622 JSSpanDep *sd;
623
624 index = GET_SPANDEP_INDEX(pc);
625 if (index != SPANDEP_INDEX_HUGE)
626 return cg->spanDeps + index;
627
628 offset = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
629 lo = 0;
630 hi = cg->numSpanDeps - 1;
631 while (lo <= hi) {
632 mid = (lo + hi) / 2;
633 sd = cg->spanDeps + mid;
634 if (sd->before == offset)
635 return sd;
636 if (sd->before < offset)
637 lo = mid + 1;
638 else
639 hi = mid - 1;
640 }
641
642 JS_ASSERT(0);
643 return NULL;
644 }
645
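/*
 * Record a backpatch-chain delta for the jump at pc: directly in its 16-bit
 * immediate while no span-dependency table exists and delta still fits,
 * otherwise in the jump's JSSpanDep (building the table on demand).
 */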
646 static JSBool
647 SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
648 ptrdiff_t delta)
649 {
650 JSSpanDep *sd;
651
652 JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
653 if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
654 SET_JUMP_OFFSET(pc, delta);
655 return JS_TRUE;
656 }
657
658 if (delta > BPDELTA_MAX) {
659 ReportStatementTooLarge(cx, cg);
660 return JS_FALSE;
661 }
662
663 if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
664 return JS_FALSE;
665
666 sd = GetSpanDep(cg, pc);
667 JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
668 SD_SET_BPDELTA(sd, delta);
669 return JS_TRUE;
670 }
671
672 static void
673 UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
674 {
675 if (jt->offset > pivot) {
676 jt->offset += delta;
677 if (jt->kids[JT_LEFT])
678 UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
679 }
680 if (jt->kids[JT_RIGHT])
681 UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
682 }
683
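/*
 * Binary-search cg->spanDeps, starting at index lo, for the entry whose
 * "before" offset is at or just above offset.  Returns the caller's guard
 * fencepost if every spandep lies below offset.
 */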
684 static JSSpanDep *
685 FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
686 JSSpanDep *guard)
687 {
688 int num, hi, mid;
689 JSSpanDep *sdbase, *sd;
690
691 num = cg->numSpanDeps;
692 JS_ASSERT(num > 0);
693 hi = num - 1;
694 sdbase = cg->spanDeps;
695 while (lo <= hi) {
696 mid = (lo + hi) / 2;
697 sd = sdbase + mid;
698 if (sd->before == offset)
699 return sd;
700 if (sd->before < offset)
701 lo = mid + 1;
702 else
703 hi = mid - 1;
704 }
705 if (lo == num)
706 return guard;
707 sd = sdbase + lo;
708 JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
709 return sd;
710 }
711
712 static void
713 FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
714 {
715 if (jt->kids[JT_LEFT])
716 FreeJumpTargets(cg, jt->kids[JT_LEFT]);
717 if (jt->kids[JT_RIGHT])
718 FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
719 jt->kids[JT_LEFT] = cg->jtFreeList;
720 cg->jtFreeList = jt;
721 }
722
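/*
 * Resolve all span dependencies: repeatedly widen jumps whose spans no
 * longer fit in 16 bits (iterating to a fixed point, since each widening can
 * push other spans out of range), then grow the bytecode vector, slide code
 * into place while writing final short or extended offsets, and adjust
 * source notes and try notes for the inserted bytes.  Finally free the
 * spandep and jump-target structures so cg can be reused.
 */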
723 static JSBool
724 OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
725 {
726 jsbytecode *pc, *oldpc, *base, *limit, *next;
727 JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
728 ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
729 JSBool done;
730 JSOp op;
731 uint32 type;
732 size_t size, incr;
733 jssrcnote *sn, *snlimit;
734 JSSrcNoteSpec *spec;
735 uintN i, n, noteIndex;
736 JSTryNote *tn, *tnlimit;
737 #ifdef DEBUG_brendan
738 int passes = 0;
739 #endif
740
741 base = CG_BASE(cg);
742 sdbase = cg->spanDeps;
743 sdlimit = sdbase + cg->numSpanDeps;
744 offset = CG_OFFSET(cg);
745 growth = 0;
746
747 do {
748 done = JS_TRUE;
749 delta = 0;
750 top = pivot = -1;
751 sdtop = NULL;
752 pc = NULL;
753 op = JSOP_NOP;
754 type = 0;
755 #ifdef DEBUG_brendan
756 passes++;
757 #endif
758
759 for (sd = sdbase; sd < sdlimit; sd++) {
760 JS_ASSERT(JT_HAS_TAG(sd->target));
761 sd->offset += delta;
762
763 if (sd->top != top) {
764 sdtop = sd;
765 top = sd->top;
766 JS_ASSERT(top == sd->before);
767 pivot = sd->offset;
768 pc = base + top;
769 op = (JSOp) *pc;
770 type = (js_CodeSpec[op].format & JOF_TYPEMASK);
771 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
772 /*
773 * We already extended all the jump offset operands for
774 * the opcode at sd->top. Jumps and branches have only
775 * one jump offset operand, but switches have many, all
776 * of which are adjacent in cg->spanDeps.
777 */
778 continue;
779 }
780
781 JS_ASSERT(type == JOF_JUMP ||
782 type == JOF_TABLESWITCH ||
783 type == JOF_LOOKUPSWITCH);
784 }
785
786 if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
787 span = SD_TARGET_OFFSET(sd) - pivot;
788 if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
789 ptrdiff_t deltaFromTop = 0;
790
791 done = JS_FALSE;
792
793 switch (op) {
794 case JSOP_GOTO: op = JSOP_GOTOX; break;
795 case JSOP_IFEQ: op = JSOP_IFEQX; break;
796 case JSOP_IFNE: op = JSOP_IFNEX; break;
797 case JSOP_OR: op = JSOP_ORX; break;
798 case JSOP_AND: op = JSOP_ANDX; break;
799 case JSOP_GOSUB: op = JSOP_GOSUBX; break;
800 case JSOP_CASE: op = JSOP_CASEX; break;
801 case JSOP_DEFAULT: op = JSOP_DEFAULTX; break;
802 case JSOP_TABLESWITCH: op = JSOP_TABLESWITCHX; break;
803 case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
804 default: JS_ASSERT(0);
805 }
806 *pc = (jsbytecode) op;
807
808 for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
809 if (sd2 <= sd) {
810 /*
811 * sd2->offset already includes delta as it stood
812 * before we entered this loop, but it must also
813 * include the delta relative to top due to all the
814 * extended jump offset immediates for the opcode
815 * starting at top, which we extend in this loop.
816 *
817 * If there is only one extended jump offset, then
818 * sd2->offset won't change and this for loop will
819 * iterate once only.
820 */
821 sd2->offset += deltaFromTop;
822 deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
823 } else {
824 /*
825 * sd2 comes after sd, and won't be revisited by
826 * the outer for loop, so we have to increase its
827 * offset by delta, not merely by deltaFromTop.
828 */
829 sd2->offset += delta;
830 }
831
832 delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
833 UpdateJumpTargets(cg->jumpTargets, sd2->offset,
834 JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
835 }
836 sd = sd2 - 1;
837 }
838 }
839 }
840
841 growth += delta;
842 } while (!done);
843
844 if (growth) {
845 #ifdef DEBUG_brendan
846 printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
847 cg->filename ? cg->filename : "stdin", cg->firstLine,
848 growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
849 passes, offset + growth, offset, growth);
850 #endif
851
852 /*
853 * Ensure that we have room for the extended jumps, but don't round up
854 * to a power of two -- we're done generating code, so we cut to fit.
855 */
856 limit = CG_LIMIT(cg);
857 length = offset + growth;
858 next = base + length;
859 if (next > limit) {
860 JS_ASSERT(length > BYTECODE_CHUNK);
861 size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
862 incr = BYTECODE_SIZE(length) - size;
863 JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
864 if (!base) {
865 JS_ReportOutOfMemory(cx);
866 return JS_FALSE;
867 }
868 CG_BASE(cg) = base;
869 CG_LIMIT(cg) = next = base + length;
870 }
871 CG_NEXT(cg) = next;
872
873 /*
874 * Set up a fake span dependency record to guard the end of the code
875 * being generated. This guard record is returned as a fencepost by
876 * FindNearestSpanDep if there is no real spandep at or above a given
877 * unextended code offset.
878 */
879 guard.top = -1;
880 guard.offset = offset + growth;
881 guard.before = offset;
882 guard.target = NULL;
883 }
884
885 /*
886 * Now work backwards through the span dependencies, copying chunks of
887 * bytecode between each extended jump toward the end of the grown code
888 * space, and restoring immediate offset operands for all jump bytecodes.
889 * The first chunk of bytecodes, starting at base and ending at the first
890 * extended jump offset (NB: this chunk includes the operation bytecode
891 * just before that immediate jump offset), doesn't need to be copied.
892 */
893 JS_ASSERT(sd == sdlimit);
894 top = -1;
895 while (--sd >= sdbase) {
896 if (sd->top != top) {
897 top = sd->top;
898 op = (JSOp) base[top];
899 type = (js_CodeSpec[op].format & JOF_TYPEMASK);
900
901 for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
902 continue;
903 sd2++;
904 pivot = sd2->offset;
905 JS_ASSERT(top == sd2->before);
906 }
907
908 oldpc = base + sd->before;
909 span = SD_TARGET_OFFSET(sd) - pivot;
910
911 /*
912 * If this jump didn't need to be extended, restore its span immediate
913 * offset operand now, overwriting the index of sd within cg->spanDeps
914 * that was stored temporarily after *pc when BuildSpanDepTable ran.
915 *
916 * Note that span might fit in 16 bits even for an extended jump op,
917 * if the op has multiple span operands, not all of which overflowed
918 * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
919 * range for a short jump, but others are not).
920 */
921 if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
922 JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
923 SET_JUMP_OFFSET(oldpc, span);
924 continue;
925 }
926
927 /*
928 * Set up parameters needed to copy the next run of bytecode starting
929 * at offset (which is a cursor into the unextended, original bytecode
930 * vector), down to sd->before (a cursor of the same scale as offset,
931 * it's the index of the original jump pc). Reuse delta to count the
932 * nominal number of bytes to copy.
933 */
934 pc = base + sd->offset;
935 delta = offset - sd->before;
936 JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
937
938 /*
939 * Don't bother copying the jump offset we're about to reset, but do
940 * copy the bytecode at oldpc (which comes just before its immediate
941 * jump offset operand), on the next iteration through the loop, by
942 * including it in offset's new value.
943 */
944 offset = sd->before + 1;
945 size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
946 if (size) {
947 memmove(pc + 1 + JUMPX_OFFSET_LEN,
948 oldpc + 1 + JUMP_OFFSET_LEN,
949 size);
950 }
951
952 SET_JUMPX_OFFSET(pc, span);
953 }
954
955 if (growth) {
956 /*
957 * Fix source note deltas. Don't hardwire the delta fixup adjustment,
958 * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
959 * at each sd that moved. The future may bring different offset sizes
960 * for span-dependent instruction operands. However, we fix only main
961 * notes here, not prolog notes -- we know that prolog opcodes are not
962 * span-dependent, and aren't likely ever to be.
963 */
964 offset = growth = 0;
965 sd = sdbase;
966 for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
967 sn < snlimit;
968 sn = SN_NEXT(sn)) {
969 /*
970 * Recall that the offset of a given note includes its delta, and
971 * tells the offset of the annotated bytecode from the main entry
972 * point of the script.
973 */
974 offset += SN_DELTA(sn);
975 while (sd < sdlimit && sd->before < offset) {
976 /*
977 * To compute the delta to add to sn, we need to look at the
978 * spandep after sd, whose offset - (before + growth) tells by
979 * how many bytes sd's instruction grew.
980 */
981 sd2 = sd + 1;
982 if (sd2 == sdlimit)
983 sd2 = &guard;
984 delta = sd2->offset - (sd2->before + growth);
985 if (delta > 0) {
986 JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
987 sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
988 if (!sn)
989 return JS_FALSE;
990 snlimit = cg->main.notes + cg->main.noteCount;
991 growth += delta;
992 }
993 sd++;
994 }
995
996 /*
997 * If sn has span-dependent offset operands, check whether each
998 * covers further span-dependencies, and increase those operands
999 * accordingly. Some source notes measure offset not from the
1000 * annotated pc, but from that pc plus some small bias. NB: we
1001 * assume that spec->offsetBias can't itself span span-dependent
1002 * instructions!
1003 */
1004 spec = &js_SrcNoteSpec[SN_TYPE(sn)];
1005 if (spec->isSpanDep) {
1006 pivot = offset + spec->offsetBias;
1007 n = spec->arity;
1008 for (i = 0; i < n; i++) {
1009 span = js_GetSrcNoteOffset(sn, i);
1010 if (span == 0)
1011 continue;
1012 target = pivot + span * spec->isSpanDep;
1013 sd2 = FindNearestSpanDep(cg, target,
1014 (target >= pivot)
1015 ? sd - sdbase
1016 : 0,
1017 &guard);
1018
1019 /*
1020 * Increase target by sd2's before-vs-after offset delta,
1021 * which is absolute (i.e., relative to start of script,
1022 * as is target). Recompute the span by subtracting its
1023 * adjusted pivot from target.
1024 */
1025 target += sd2->offset - sd2->before;
1026 span = target - (pivot + growth);
1027 span *= spec->isSpanDep;
1028 noteIndex = sn - cg->main.notes;
1029 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
1030 return JS_FALSE;
1031 sn = cg->main.notes + noteIndex;
1032 snlimit = cg->main.notes + cg->main.noteCount;
1033 }
1034 }
1035 }
1036
1037 /*
1038 * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
1039 * not clear how we can beat that).
1040 */
1041 for (tn = cg->tryBase, tnlimit = cg->tryNext; tn < tnlimit; tn++) {
1042 /*
1043 * First, look for the nearest span dependency at/above tn->start.
1044 * There may not be any such spandep, in which case the guard will
1045 * be returned.
1046 */
1047 offset = tn->start;
1048 sd = FindNearestSpanDep(cg, offset, 0, &guard);
1049 delta = sd->offset - sd->before;
1050 tn->start = offset + delta;
1051
1052 /*
1053 * Next, find the nearest spandep at/above tn->start + tn->length.
1054 * Use its delta minus tn->start's delta to increase tn->length.
1055 */
1056 length = tn->length;
1057 sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
1058 if (sd2 != sd)
1059 tn->length = length + sd2->offset - sd2->before - delta;
1060
1061 /*
1062 * Finally, adjust tn->catchStart upward only if it is non-zero,
1063 * and provided there are spandeps below it that grew.
1064 */
1065 offset = tn->catchStart;
1066 if (offset != 0) {
1067 sd = FindNearestSpanDep(cg, offset, sd2 - sdbase, &guard);
1068 tn->catchStart = offset + sd->offset - sd->before;
1069 }
1070 }
1071 }
1072
1073 #ifdef DEBUG_brendan
1074 {
1075 uintN bigspans = 0;
1076 top = -1;
1077 for (sd = sdbase; sd < sdlimit; sd++) {
1078 offset = sd->offset;
1079
1080 /* NB: sd->top cursors into the original, unextended bytecode vector. */
1081 if (sd->top != top) {
1082 JS_ASSERT(top == -1 ||
1083 !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
1084 bigspans != 0);
1085 bigspans = 0;
1086 top = sd->top;
1087 JS_ASSERT(top == sd->before);
1088 op = (JSOp) base[offset];
1089 type = (js_CodeSpec[op].format & JOF_TYPEMASK);
1090 JS_ASSERT(type == JOF_JUMP ||
1091 type == JOF_JUMPX ||
1092 type == JOF_TABLESWITCH ||
1093 type == JOF_TABLESWITCHX ||
1094 type == JOF_LOOKUPSWITCH ||
1095 type == JOF_LOOKUPSWITCHX);
1096 pivot = offset;
1097 }
1098
1099 pc = base + offset;
1100 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
1101 span = GET_JUMPX_OFFSET(pc);
1102 if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
1103 bigspans++;
1104 } else {
1105 JS_ASSERT(type == JOF_TABLESWITCHX ||
1106 type == JOF_LOOKUPSWITCHX);
1107 }
1108 } else {
1109 span = GET_JUMP_OFFSET(pc);
1110 }
1111 JS_ASSERT(SD_TARGET_OFFSET(sd) == pivot + span);
1112 }
1113 JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
1114 }
1115 #endif
1116
1117 /*
1118 * Reset so we optimize at most once -- cg may be used for further code
1119 * generation of successive, independent, top-level statements. No jump
1120 * can span top-level statements, because JS lacks goto.
1121 */
1122 size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
1123 JS_ArenaFreeAllocation(&cx->tempPool, cg->spanDeps,
1124 JS_MAX(size, SPANDEPS_SIZE_MIN));
1125 cg->spanDeps = NULL;
1126 FreeJumpTargets(cg, cg->jumpTargets);
1127 cg->jumpTargets = NULL;
1128 cg->numSpanDeps = cg->numJumpTargets = 0;
1129 return JS_TRUE;
1130 }
1131
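/*
 * Emit op with a 16-bit offset immediate.  If off cannot be encoded in 16
 * bits, build the span-dependency table first; once that table exists, each
 * emitted jump gets a JSSpanDep and its immediate holds the spandep index
 * rather than a final offset.
 */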
1132 static ptrdiff_t
1133 EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
1134 {
1135 ptrdiff_t jmp;
1136 jsbytecode *pc;
1137
1138 if (off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off) {
1139 if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
1140 return -1;
1141 }
1142
1143 jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
1144 if (jmp >= 0 && cg->spanDeps) {
1145 pc = CG_CODE(cg, jmp);
1146 if (!AddSpanDep(cx, cg, pc, pc, off))
1147 return -1;
1148 }
1149 return jmp;
1150 }
1151
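/*
 * Fetch the current target offset (or backpatch delta) for the jump at pc.
 * With no span-dependency table this is just the 16-bit immediate; otherwise
 * it is recovered from the jump's JSSpanDep, measured from the first spandep
 * of the owning opcode.
 */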
1152 static ptrdiff_t
1153 GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
1154 {
1155 JSSpanDep *sd;
1156 JSJumpTarget *jt;
1157 ptrdiff_t top;
1158
1159 if (!cg->spanDeps)
1160 return GET_JUMP_OFFSET(pc);
1161
1162 sd = GetSpanDep(cg, pc);
1163 jt = sd->target;
1164 if (!JT_HAS_TAG(jt))
1165 return JT_TO_BPDELTA(jt);
1166
1167 top = sd->top;
1168 while (--sd >= cg->spanDeps && sd->top == top)
1169 continue;
1170 sd++;
1171 return JT_CLR_TAG(jt)->offset - sd->offset;
1172 }
1173
1174 JSBool
1175 js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
1176 ptrdiff_t off)
1177 {
1178 if (!cg->spanDeps) {
1179 if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
1180 SET_JUMP_OFFSET(pc, off);
1181 return JS_TRUE;
1182 }
1183
1184 if (!BuildSpanDepTable(cx, cg))
1185 return JS_FALSE;
1186 }
1187
1188 return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
1189 }
1190
1191 JSBool
1192 js_InWithStatement(JSTreeContext *tc)
1193 {
1194 JSStmtInfo *stmt;
1195
1196 for (stmt = tc->topStmt; stmt; stmt = stmt->down) {
1197 if (stmt->type == STMT_WITH)
1198 return JS_TRUE;
1199 }
1200 return JS_FALSE;
1201 }
1202
1203 JSBool
1204 js_InCatchBlock(JSTreeContext *tc, JSAtom *atom)
1205 {
1206 JSStmtInfo *stmt;
1207
1208 for (stmt = tc->topStmt; stmt; stmt = stmt->down) {
1209 if (stmt->type == STMT_CATCH && stmt->label == atom)
1210 return JS_TRUE;
1211 }
1212 return JS_FALSE;
1213 }
1214
1215 void
1216 js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
1217 ptrdiff_t top)
1218 {
1219 stmt->type = type;
1220 SET_STATEMENT_TOP(stmt, top);
1221 stmt->label = NULL;
1222 stmt->down = tc->topStmt;
1223 tc->topStmt = stmt;
1224 }
1225
1226 /*
1227 * Emit a backpatch op with offset pointing to the previous jump of this type,
1228 * so that we can walk back up the chain fixing up the op and jump offset.
1229 */
1230 #define EMIT_BACKPATCH_OP(cx, cg, last, op, jmp) \
1231 JS_BEGIN_MACRO \
1232 ptrdiff_t offset, delta; \
1233 offset = CG_OFFSET(cg); \
1234 delta = offset - (last); \
1235 last = offset; \
1236 JS_ASSERT(delta > 0); \
1237 jmp = EmitJump((cx), (cg), (op), (delta)); \
1238 JS_END_MACRO
1239
1240 /* Emit additional bytecode(s) for non-local jumps. */
1241 static JSBool
1242 EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
1243 JSOp *returnop)
1244 {
1245 intN depth;
1246 JSStmtInfo *stmt;
1247 ptrdiff_t jmp;
1248
1249 /*
1250 * Return from within a try block that has a finally clause must be split
1251 * into two ops: JSOP_SETRVAL, to pop the r.v. and store it in fp->rval;
1252 * and JSOP_RETRVAL, which makes control flow go back to the caller, who
1253 * picks up fp->rval as usual. Otherwise, the stack will be unbalanced
1254 * when executing the finally clause.
1255 *
1256 * We mutate *returnop once only if we find an enclosing try-block (viz,
1257 * STMT_FINALLY) to ensure that we emit just one JSOP_SETRVAL before one
1258 * or more JSOP_GOSUBs and other fixup opcodes emitted by this function.
1259 * Our caller (the TOK_RETURN case of js_EmitTree) then emits *returnop.
1260 * The fixup opcodes and gosubs must interleave in the proper order, from
1261 * inner statement to outer, so that finally clauses run at the correct
1262 * stack depth.
1263 */
1264 if (returnop) {
1265 JS_ASSERT(*returnop == JSOP_RETURN);
1266 for (stmt = cg->treeContext.topStmt; stmt != toStmt;
1267 stmt = stmt->down) {
1268 if (stmt->type == STMT_FINALLY) {
1269 if (js_Emit1(cx, cg, JSOP_SETRVAL) < 0)
1270 return JS_FALSE;
1271 *returnop = JSOP_RETRVAL;
1272 break;
1273 }
1274 }
1275
1276 /*
1277 * If there are no try-with-finally blocks open around this return
1278 * statement, we can generate a return forthwith and skip generating
1279 * any fixup code.
1280 */
1281 if (*returnop == JSOP_RETURN)
1282 return JS_TRUE;
1283 }
1284
1285 /*
1286 * The non-local jump fixup we emit will unbalance cg->stackDepth, because
1287 * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
1288 * end of a with statement, so we save cg->stackDepth here and restore it
1289 * just before a successful return.
1290 */
1291 depth = cg->stackDepth;
1292 for (stmt = cg->treeContext.topStmt; stmt != toStmt; stmt = stmt->down) {
1293 switch (stmt->type) {
1294 case STMT_FINALLY:
1295 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1296 return JS_FALSE;
1297 EMIT_BACKPATCH_OP(cx, cg, stmt->gosub, JSOP_BACKPATCH_PUSH, jmp);
1298 if (jmp < 0)
1299 return JS_FALSE;
1300 break;
1301
1302 case STMT_WITH:
1303 case STMT_CATCH:
1304 /* There's a With object on the stack that we need to pop. */
1305 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1306 return JS_FALSE;
1307 if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
1308 return JS_FALSE;
1309 break;
1310
1311 case STMT_FOR_IN_LOOP:
1312 /*
1313 * The iterator and the object being iterated need to be popped.
1314 * JSOP_POP2 isn't decompiled, so it doesn't need to be HIDDEN.
1315 */
1316 if (js_Emit1(cx, cg, JSOP_POP2) < 0)
1317 return JS_FALSE;
1318 break;
1319
1320 case STMT_SUBROUTINE:
1321 /* There's a retsub pc-offset on the stack that we need to pop. */
1322 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1323 return JS_FALSE;
1324 if (js_Emit1(cx, cg, JSOP_POP) < 0)
1325 return JS_FALSE;
1326 break;
1327
1328 default:;
1329 }
1330 }
1331
1332 cg->stackDepth = depth;
1333 return JS_TRUE;
1334 }
1335
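/*
 * Emit a break- or continue-style goto as a JSOP_BACKPATCH op linked into
 * the chain rooted at *last, after emitting any non-local-jump fixups and
 * the requested source note.  Returns the jump offset or -1 on error.
 */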
1336 static ptrdiff_t
1337 EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
1338 ptrdiff_t *last, JSAtomListElement *label, JSSrcNoteType noteType)
1339 {
1340 intN index;
1341 ptrdiff_t jmp;
1342
1343 if (!EmitNonLocalJumpFixup(cx, cg, toStmt, NULL))
1344 return -1;
1345
1346 if (label) {
1347 index = js_NewSrcNote(cx, cg, noteType);
1348 if (index < 0)
1349 return -1;
1350 if (!js_SetSrcNoteOffset(cx, cg, (uintN)index, 0,
1351 (ptrdiff_t) ALE_INDEX(label))) {
1352 return -1;
1353 }
1354 } else if (noteType != SRC_NULL) {
1355 if (js_NewSrcNote(cx, cg, noteType) < 0)
1356 return -1;
1357 }
1358
1359 EMIT_BACKPATCH_OP(cx, cg, *last, JSOP_BACKPATCH, jmp);
1360 return jmp;
1361 }
1362
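/*
 * Walk the backpatch chain that starts at offset last (the most recently
 * emitted op in the chain), following each stored delta back to earlier
 * ops, rewriting every JSOP_BACKPATCH* opcode to op and pointing its jump
 * offset at target.
 */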
1363 static JSBool
1364 BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
1365 jsbytecode *target, jsbytecode op)
1366 {
1367 jsbytecode *pc, *stop;
1368 ptrdiff_t delta, span;
1369
1370 pc = CG_CODE(cg, last);
1371 stop = CG_CODE(cg, -1);
1372 while (pc != stop) {
1373 delta = GetJumpOffset(cg, pc);
1374 span = PTRDIFF(target, pc, jsbytecode);
1375 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);
1376
1377 /*
1378 * Set *pc after jump offset in case bpdelta didn't overflow, but span
1379 * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
1380 * and need to see the JSOP_BACKPATCH* op at *pc).
1381 */
1382 *pc = op;
1383 pc -= delta;
1384 }
1385 return JS_TRUE;
1386 }
1387
1388 void
1389 js_PopStatement(JSTreeContext *tc)
1390 {
1391 tc->topStmt = tc->topStmt->down;
1392 }
1393
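/*
 * Pop the current statement from cg's tree context after back-patching its
 * break chain to the current offset and its continue chain to the loop's
 * update point.
 */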
1394 JSBool
1395 js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
1396 {
1397 JSStmtInfo *stmt;
1398
1399 stmt = cg->treeContext.topStmt;
1400 if (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
1401 !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
1402 JSOP_GOTO)) {
1403 return JS_FALSE;
1404 }
1405 js_PopStatement(&cg->treeContext);
1406 return JS_TRUE;
1407 }
1408
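/*
 * Record a const declaration's compile-time value (numeric initializers
 * only, for now) in cg->constList, keyed by atom, so later uses of the name
 * can be resolved at compile time by js_LookupCompileTimeConstant.
 */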
1409 JSBool
1410 js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1411 JSParseNode *pn)
1412 {
1413 jsdouble dval;
1414 jsint ival;
1415 JSAtom *valueAtom;
1416 JSAtomListElement *ale;
1417
1418 /* XXX just do numbers for now */
1419 if (pn->pn_type == TOK_NUMBER) {
1420 dval = pn->pn_dval;
1421 valueAtom = (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival))
1422 ? js_AtomizeInt(cx, ival, 0)
1423 : js_AtomizeDouble(cx, dval, 0);
1424 if (!valueAtom)
1425 return JS_FALSE;
1426 ale = js_IndexAtom(cx, atom, &cg->constList);
1427 if (!ale)
1428 return JS_FALSE;
1429 ALE_SET_VALUE(ale, ATOM_KEY(valueAtom));
1430 }
1431 return JS_TRUE;
1432 }
1433
1434 JSBool
1435 js_LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1436 jsval *vp)
1437 {
1438 JSBool ok;
1439 JSStackFrame *fp;
1440 JSAtomListElement *ale;
1441 JSObject *obj, *pobj;
1442 JSProperty *prop;
1443 uintN attrs;
1444
1445 /*
1446 * fp chases cg down the stack, but only until we reach the outermost cg.
1447 * This enables propagating consts from top-level into switch cases in a
1448 * function compiled along with the top-level script. All stack frames
1449 * with matching code generators should be flagged with JSFRAME_COMPILING;
1450 * we check sanity here.
1451 */
1452 *vp = JSVAL_VOID;
1453 ok = JS_TRUE;
1454 fp = cx->fp;
1455 do {
1456 JS_ASSERT(fp->flags & JSFRAME_COMPILING);
1457
1458 obj = fp->varobj;
1459 if (obj == fp->scopeChain &&
1460 !js_InWithStatement(&cg->treeContext) &&
1461 !js_InCatchBlock(&cg->treeContext, atom)) {
1462 ATOM_LIST_SEARCH(ale, &cg->constList, atom);
1463 if (ale) {
1464 *vp = ALE_VALUE(ale);
1465 return JS_TRUE;
1466 }
1467
1468 /*
1469 * Try looking in the variable object for a direct property that
1470 * is readonly and permanent. We know such a property can't be
1471 * shadowed by another property on obj's prototype chain, or a
1472 * with object or catch variable; nor can prop's value be changed,
1473 * nor can prop be deleted.
1474 */
1475 prop = NULL;
1476 ok = OBJ_LOOKUP_PROPERTY(cx, obj, (jsid)atom, &pobj, &prop);
1477 if (ok) {
1478 if (pobj == obj &&
1479 (fp->flags & (JSFRAME_EVAL | JSFRAME_COMPILE_N_GO))) {
1480 /*
1481 * We're compiling code that will be executed immediately,
1482 * not re-executed against a different scope chain and/or
1483 * variable object. Therefore we can get constant values
1484 * from our variable object here.
1485 */
1486 ok = OBJ_GET_ATTRIBUTES(cx, obj, (jsid)atom, prop, &attrs);
1487 if (ok && !(~attrs & (JSPROP_READONLY | JSPROP_PERMANENT)))
1488 ok = OBJ_GET_PROPERTY(cx, obj, (jsid)atom, vp);
1489 }
1490 if (prop)
1491 OBJ_DROP_PROPERTY(cx, pobj, prop);
1492 }
1493 if (!ok || prop)
1494 break;
1495 }
1496 fp = fp->down;
1497 } while ((cg = cg->parent) != NULL);
1498 return ok;
1499 }
1500
1501 /*
1502 * Allocate an index invariant for all activations of the code being compiled
1503 * in cg, that can be used to store and fetch a reference to a cloned RegExp
1504 * object that shares the same JSRegExp private data created for the object
1505 * literal in pn->pn_atom. We need clones to hold lastIndex and other direct
1506 * properties that should not be shared among threads sharing a precompiled
1507 * function or script.
1508 *
1509 * If the code being compiled is function code, allocate a reserved slot in
1510 * the cloned function object that shares its precompiled script with other
1511 * cloned function objects and with the compiler-created clone-parent. There
1512 * are fun->nregexps such reserved slots in each function object cloned from
1513 * fun->object. NB: during compilation, funobj slots must never be allocated,
1514 * because js_AllocSlot could hand out one of the slots that should be given
1515 * to a regexp clone.
1516 *
1517 * If the code being compiled is global code, reserve the fp->vars slot at
1518 * ALE_INDEX(ale), by ensuring that cg->treeContext.numGlobalVars is at least
1519 * one more than this index. For global code, fp->vars is parallel to the
1520 * global script->atomMap.vector array, but possibly shorter for the common
1521 * case (where var declarations and regexp literals cluster toward the front
1522 * of the script or function body).
1523 *
1524 * Global variable name literals in script->atomMap have fast-global slot
1525 * numbers (stored as int-tagged jsvals) in the corresponding fp->vars array
1526 * element. The atomIndex for a regexp object literal thus also addresses an
1527 * fp->vars element that is not used by any optimized global variable, so we
1528 * use that GC-scanned element to keep the regexp object clone alive, as well
1529 * as to lazily create and find it at run-time for the JSOP_REGEXP bytecode.
1530 *
1531 * In no case can cx->fp->varobj be a Call object here, because that implies
1532 * we are compiling eval code, in which case (cx->fp->flags & JSFRAME_EVAL)
1533 * is true, and js_GetToken will have already selected JSOP_OBJECT instead of
1534 * JSOP_REGEXP, to avoid all this RegExp object cloning business.
1535 *
1536 * Why clone regexp objects? ECMA specifies that when a regular expression
1537 * literal is scanned, a RegExp object is created. In the spec, compilation
1538 * and execution happen indivisibly, but in this implementation and many of
1539 * its embeddings, code is precompiled early and re-executed in multiple
1540 * threads, or using multiple global objects, or both, for efficiency.
1541 *
1542 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
1543 * objects, which makes for collisions on the lastIndex property (especially
1544 * for global regexps) and on any ad-hoc properties. Also, __proto__ and
1545 * __parent__ refer to the pre-compilation prototype and global objects, a
1546 * pigeon-hole problem for instanceof tests.
1547 */
1548 static JSBool
1549 IndexRegExpClone(JSContext *cx, JSParseNode *pn, JSAtomListElement *ale,
1550 JSCodeGenerator *cg)
1551 {
1552 JSObject *varobj, *reobj;
1553 JSClass *clasp;
1554 JSFunction *fun;
1555 JSRegExp *re;
1556 uint16 *countPtr;
1557 uintN cloneIndex;
1558
1559 JS_ASSERT(!(cx->fp->flags & (JSFRAME_EVAL | JSFRAME_COMPILE_N_GO)));
1560
1561 varobj = cx->fp->varobj;
1562 clasp = OBJ_GET_CLASS(cx, varobj);
1563 if (clasp == &js_FunctionClass) {
1564 fun = (JSFunction *) JS_GetPrivate(cx, varobj);
1565 countPtr = &fun->nregexps;
1566 cloneIndex = *countPtr;
1567 } else {
1568 JS_ASSERT(clasp != &js_CallClass);
1569 countPtr = &cg->treeContext.numGlobalVars;
1570 cloneIndex = ALE_INDEX(ale);
1571 }
1572
1573 if ((cloneIndex + 1) >> 16) {
1574 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
1575 JSMSG_NEED_DIET, js_script_str);
1576 return JS_FALSE;
1577 }
1578 if (cloneIndex >= *countPtr)
1579 *countPtr = cloneIndex + 1;
1580
1581 reobj = ATOM_TO_OBJECT(pn->pn_atom);
1582 JS_ASSERT(OBJ_GET_CLASS(cx, reobj) == &js_RegExpClass);
1583 re = (JSRegExp *) JS_GetPrivate(cx, reobj);
1584 re->cloneIndex = cloneIndex;
1585 return JS_TRUE;
1586 }
1587
1588 /*
1589 * Emit a bytecode and its 2-byte constant (atom) index immediate operand.
1590 * NB: We use cx and cg from our caller's lexical environment, and return
1591 * false on error.
1592 */
1593 #define EMIT_ATOM_INDEX_OP(op, atomIndex) \
1594 JS_BEGIN_MACRO \
1595 if (js_Emit3(cx, cg, op, ATOM_INDEX_HI(atomIndex), \
1596 ATOM_INDEX_LO(atomIndex)) < 0) { \
1597 return JS_FALSE; \
1598 } \
1599 JS_END_MACRO
1600
1601 static JSBool
1602 EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
1603 {
1604 JSAtomListElement *ale;
1605
1606 ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
1607 if (!ale)
1608 return JS_FALSE;
1609 if (op == JSOP_REGEXP && !IndexRegExpClone(cx, pn, ale, cg))
1610 return JS_FALSE;
1611 EMIT_ATOM_INDEX_OP(op, ALE_INDEX(ale));
1612 return JS_TRUE;
1613 }
1614
1615 /*
1616 * This routine tries to optimize name gets and sets to stack slot loads and
1617 * stores, given the variables object and scope chain in cx's top frame, the
1618 * compile-time context in tc, and a TOK_NAME node pn. It returns false on
1619 * error, true on success.
1620 *
1621 * The caller can inspect pn->pn_slot for a non-negative slot number to tell
1622 * whether optimization occurred, in which case LookupArgOrVar also updated
1623 * pn->pn_op. If pn->pn_slot is still -1 on return, pn->pn_op nevertheless
1624 * may have been optimized, e.g., from JSOP_NAME to JSOP_ARGUMENTS. Whether
1625 * or not pn->pn_op was modified, if this function finds an argument or local
1626 * variable name, pn->pn_attrs will contain the property's attributes after a
1627 * successful return.
1628 */
1629 static JSBool
1630 LookupArgOrVar(JSContext *cx, JSTreeContext *tc, JSParseNode *pn)
1631 {
1632 JSStackFrame *fp;
1633 JSObject *obj, *pobj;
1634 JSClass *clasp;
1635 JSBool optimizeGlobals;
1636 JSAtom *atom;
1637 JSProperty *prop;
1638 JSScopeProperty *sprop;
1639 JSOp op;
1640 JSPropertyOp getter;
1641 uintN attrs;
1642 jsint slot;
1643 JSAtomListElement *ale;
1644
1645 JS_ASSERT(pn->pn_type == TOK_NAME);
1646 if (pn->pn_slot >= 0 || pn->pn_op == JSOP_ARGUMENTS)
1647 return JS_TRUE;
1648
1649 /*
1650 * We can't optimize if var and closure (a local function not in a larger
1651 * expression and not at top-level within another's body) collide.
1652 * XXX suboptimal: keep track of colliding names and deoptimize only those
1653 */
1654 if (tc->flags & TCF_FUN_CLOSURE_VS_VAR)
1655 return JS_TRUE;
1656
1657 /*
1658 * We can't optimize if we're not compiling a function body, whether via
1659 * eval, or directly when compiling a function statement or expression.
1660 */
1661 fp = cx->fp;
1662 obj = fp->varobj;
1663 clasp = OBJ_GET_CLASS(cx, obj);
1664 if (clasp != &js_FunctionClass && clasp != &js_CallClass) {
1665 /*
1666 * Optimize global variable accesses if there are at least 100 uses
1667 * in unambiguous contexts, or failing that, if at least half of all the
1668 * uses of global vars/consts/functions are in loops.
1669 */
1670 optimizeGlobals = (tc->globalUses >= 100 ||
1671 (tc->loopyGlobalUses &&
1672 tc->loopyGlobalUses >= tc->globalUses / 2));
1673 if (!optimizeGlobals)
1674 return JS_TRUE;
1675 } else {
1676 optimizeGlobals = JS_FALSE;
1677 }
1678
1679 /*
1680 * We can't optimize if we're in an eval called inside a with statement,
1681 * or we're compiling a with statement and its body, or we're in a catch
1682 * block whose exception variable has the same name as pn.
1683 */
1684 atom = pn->pn_atom;
1685 if (fp->scopeChain != obj ||
1686 js_InWithStatement(tc) ||
1687 js_InCatchBlock(tc, atom)) {
1688 return JS_TRUE;
1689 }
1690
1691 op = pn->pn_op;
1692 getter = NULL;
1693 #ifdef __GNUC__
1694 attrs = slot = 0; /* quell GCC overwarning */
1695 #endif
1696 if (optimizeGlobals) {
1697 /*
1698 * We are optimizing global variables, and there is no pre-existing
1699 * global property named atom. If atom was declared via const or var,
1700 * optimize pn to access fp->vars using the appropriate JOF_QVAR op.
1701 */
1702 ATOM_LIST_SEARCH(ale, &tc->decls, atom);
1703 if (!ale) {
1704 /* Use precedes declaration, or name is never declared. */
1705 return JS_TRUE;
1706 }
1707
1708 attrs = (ALE_JSOP(ale) == JSOP_DEFCONST)
1709 ? JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT
1710 : JSPROP_ENUMERATE | JSPROP_PERMANENT;
1711
1712 /* Index atom so we can map fast global number to name. */
1713 JS_ASSERT(tc->flags & TCF_COMPILING);
1714 ale = js_IndexAtom(cx, atom, &((JSCodeGenerator *) tc)->atomList);
1715 if (!ale)
1716 return JS_FALSE;
1717
1718 /* Defend against tc->numGlobalVars 16-bit overflow. */
1719 slot = ALE_INDEX(ale);
1720 if ((slot + 1) >> 16)
1721 return JS_TRUE;
1722
1723 if ((uint16)(slot + 1) > tc->numGlobalVars)
1724 tc->numGlobalVars = (uint16)(slot + 1);
1725 } else {
1726 /*
1727 * We may be able to optimize name to stack slot. Look for an argument
1728 * or variable property in the function, or its call object, not found
1729 * in any prototype object. Rewrite pn_op and update pn accordingly.
1730 * NB: We know that JSOP_DELNAME on an argument or variable evaluates
1731 * to false, due to JSPROP_PERMANENT.
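 * For example, "delete x" where x is an argument or local therefore
 * compiles to JSOP_FALSE (see the op rewrites below) instead of a runtime
 * JSOP_DELNAME lookup.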
1732 */
1733 if (!js_LookupProperty(cx, obj, (jsid)atom, &pobj, &prop))
1734 return JS_FALSE;
1735 sprop = (JSScopeProperty *) prop;
1736 if (sprop) {
1737 if (pobj == obj) {
1738 getter = sprop->getter;
1739 attrs = sprop->attrs;
1740 slot = (sprop->flags & SPROP_HAS_SHORTID) ? sprop->shortid : -1;
1741 }
1742 OBJ_DROP_PROPERTY(cx, pobj, prop);
1743 }
1744 }
1745
1746 if (optimizeGlobals || getter) {
1747 if (optimizeGlobals) {
1748 switch (op) {
1749 case JSOP_NAME: op = JSOP_GETGVAR; break;
1750 case JSOP_SETNAME: op = JSOP_SETGVAR; break;
1751 case JSOP_SETCONST: /* NB: no change */ break;
1752 case JSOP_INCNAME: op = JSOP_INCGVAR; break;
1753 case JSOP_NAMEINC: op = JSOP_GVARINC; break;
1754 case JSOP_DECNAME: op = JSOP_DECGVAR; break;
1755 case JSOP_NAMEDEC: op = JSOP_GVARDEC; break;
1756 case JSOP_FORNAME: /* NB: no change */ break;
1757 case JSOP_DELNAME: /* NB: no change */ break;
1758 default: JS_ASSERT(0);
1759 }
1760 } else if (getter == js_GetLocalVariable ||
1761 getter == js_GetCallVariable) {
1762 switch (op) {
1763 case JSOP_NAME: op = JSOP_GETVAR; break;
1764 case JSOP_SETNAME: op = JSOP_SETVAR; break;
1765 case JSOP_SETCONST: op = JSOP_SETVAR; break;
1766 case JSOP_INCNAME: op = JSOP_INCVAR; break;
1767 case JSOP_NAMEINC: op = JSOP_VARINC; break;
1768 case JSOP_DECNAME: op = JSOP_DECVAR; break;
1769 case JSOP_NAMEDEC: op = JSOP_VARDEC; break;
1770 case JSOP_FORNAME: op = JSOP_FORVAR; break;
1771 case JSOP_DELNAME: op = JSOP_FALSE; break;
1772 default: JS_ASSERT(0);
1773 }
1774 } else if (getter == js_GetArgument ||
1775 (getter == js_CallClass.getProperty &&
1776 fp->fun && (uintN) slot < fp->fun->nargs)) {
1777 switch (op) {
1778 case JSOP_NAME: op = JSOP_GETARG; break;
1779 case JSOP_SETNAME: op = JSOP_SETARG; break;
1780 case JSOP_INCNAME: op = JSOP_INCARG; break;
1781 case JSOP_NAMEINC: op = JSOP_ARGINC; break;
1782 case JSOP_DECNAME: op = JSOP_DECARG; break;
1783 case JSOP_NAMEDEC: op = JSOP_ARGDEC; break;
1784 case JSOP_FORNAME: op = JSOP_FORARG; break;
1785 case JSOP_DELNAME: op = JSOP_FALSE; break;
1786 default: JS_ASSERT(0);
1787 }
1788 }
1789 if (op != pn->pn_op) {
1790 pn->pn_op = op;
1791 pn->pn_slot = slot;
1792 }
1793 pn->pn_attrs = attrs;
1794 }
1795
1796 if (pn->pn_slot < 0) {
1797 /*
1798 * We couldn't optimize pn, so it's not a global or local slot name.
1799 * Now we must check for the predefined arguments variable. It may be
1800 * overridden by assignment, in which case the function is heavyweight
1801 * and the interpreter will look up 'arguments' in the function's call
1802 * object.
1803 */
1804 if (pn->pn_op == JSOP_NAME &&
1805 atom == cx->runtime->atomState.argumentsAtom) {
1806 pn->pn_op = JSOP_ARGUMENTS;
1807 return JS_TRUE;
1808 }
1809
1810 tc->flags |= TCF_FUN_USES_NONLOCALS;
1811 }
1812 return JS_TRUE;
1813 }
1814
1815 /*
1816 * If pn contains a useful expression, return true with *answer set to true.
1817 * If pn contains a useless expression, return true with *answer set to false.
1818 * Return false on error.
1819 *
1820 * The caller should initialize *answer to false and invoke this function on
1821 * an expression statement or similar subtree to decide whether the tree could
1822 * produce code that has any side effects. For an expression statement, we
1823 * define useless code as code with no side effects, because the main effect,
1824 * the value left on the stack after the code executes, will be discarded by a
1825 * pop bytecode.
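 *
 * For example, the expression statement "x;" where x is a known argument or
 * local variable is useless, since its value is simply popped, whereas
 * "obj.prop;" must be presumed useful because reading the property may run
 * a getter with side effects.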
1826 */
1827 static JSBool
1828 CheckSideEffects(JSContext *cx, JSTreeContext *tc, JSParseNode *pn,
1829 JSBool *answer)
1830 {
1831 JSBool ok;
1832 JSFunction *fun;
1833 JSParseNode *pn2;
1834
1835 ok = JS_TRUE;
1836 if (!pn || *answer)
1837 return ok;
1838
1839 switch (pn->pn_arity) {
1840 case PN_FUNC:
1841 /*
1842 * A named function is presumed useful: we can't yet know that it is
1843 * not called. The side effects are the creation of a scope object
1844 * to parent this function object, and the binding of the function's
1845 * name in that scope object. See comments at case JSOP_NAMEDFUNOBJ:
1846 * in jsinterp.c.
1847 */
1848 fun = (JSFunction *) JS_GetPrivate(cx, ATOM_TO_OBJECT(pn->pn_funAtom));
1849 if (fun->atom)
1850 *answer = JS_TRUE;
1851 break;
1852
1853 case PN_LIST:
1854 if (pn->pn_type == TOK_NEW ||
1855 pn->pn_type == TOK_LP ||
1856 pn->pn_type == TOK_LB) {
1857 /*
1858 * All invocation operations (construct: TOK_NEW, call: TOK_LP)
1859 * are presumed to be useful, because they may have side effects
1860 * even if their main effect (their return value) is discarded.
1861 *
1862 * TOK_LB binary trees of 3 or more nodes are flattened into lists
1863 * to avoid too much recursion. All such lists must be presumed
1864 * to be useful because each index operation could invoke a getter
1865 * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
1866 * does not apply here: arguments[i][j] might invoke a getter).
1867 */
1868 *answer = JS_TRUE;
1869 } else {
1870 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
1871 ok &= CheckSideEffects(cx, tc, pn2, answer);
1872 }
1873 break;
1874
1875 case PN_TERNARY:
1876 ok = CheckSideEffects(cx, tc, pn->pn_kid1, answer) &&
1877 CheckSideEffects(cx, tc, pn->pn_kid2, answer) &&
1878 CheckSideEffects(cx, tc, pn->pn_kid3, answer);
1879 break;
1880
1881 case PN_BINARY:
1882 if (pn->pn_type == TOK_ASSIGN) {
1883 /*
1884 * Assignment is presumed to be useful, even if the next operation
1885 * is another assignment overwriting this one's ostensible effect,
1886 * because the left operand may be a property with a setter that
1887 * has side effects.
1888 */
1889 *answer = JS_TRUE;
1890 } else {
1891 if (pn->pn_type == TOK_LB) {
1892 pn2 = pn->pn_left;
1893 if (pn2->pn_type == TOK_NAME && !LookupArgOrVar(cx, tc, pn2))
1894 return JS_FALSE;
1895 if (pn2->pn_op != JSOP_ARGUMENTS) {
1896 /*
1897 * Any indexed property reference could call a getter with
1898 * side effects, except for arguments[i] where arguments is
1899 * unambiguous.
1900 */
1901 *answer = JS_TRUE;
1902 }
1903 }
1904 ok = CheckSideEffects(cx, tc, pn->pn_left, answer) &&
1905 CheckSideEffects(cx, tc, pn->pn_right, answer);
1906 }
1907 break;
1908
1909 case PN_UNARY:
1910 if (pn->pn_type == TOK_INC || pn->pn_type == TOK_DEC ||
1911 pn->pn_type == TOK_DELETE ||
1912 pn->pn_type == TOK_THROW ||
1913 pn->pn_type == TOK_DEFSHARP) {
1914 /* All these operations have effects that we must commit. */
1915 *answer = JS_TRUE;
1916 } else {
1917 ok = CheckSideEffects(cx, tc, pn->pn_kid, answer);
1918 }
1919 break;
1920
1921 case PN_NAME:
1922 if (pn->pn_type == TOK_NAME) {
1923 if (!LookupArgOrVar(cx, tc, pn))
1924 return JS_FALSE;
1925 if (pn->pn_slot < 0 && pn->pn_op != JSOP_ARGUMENTS) {
1926 /*
1927 * Not an argument or local variable use, so this expression
1928 * could invoke a getter that has side effects.
1929 */
1930 *answer = JS_TRUE;
1931 }
1932 }
1933 pn2 = pn->pn_expr;
1934 if (pn->pn_type == TOK_DOT && pn2->pn_type == TOK_NAME) {
1935 if (!LookupArgOrVar(cx, tc, pn2))
1936 return JS_FALSE;
1937 if (!(pn2->pn_op == JSOP_ARGUMENTS &&
1938 pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
1939 /*
1940 * Any dotted property reference could call a getter, except
1941 * for arguments.length where arguments is unambiguous.
1942 */
1943 *answer = JS_TRUE;
1944 }
1945 }
1946 ok = CheckSideEffects(cx, tc, pn2, answer);
1947 break;
1948
1949 case PN_NULLARY:
1950 if (pn->pn_type == TOK_DEBUGGER)
1951 *answer = JS_TRUE;
1952 break;
1953 }
1954 return ok;
1955 }
1956
1957 static JSBool
1958 EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
1959 {
1960 JSParseNode *pn2, *pndot, *pnup, *pndown;
1961 ptrdiff_t top;
1962 JSAtomListElement *ale;
1963
1964 pn2 = pn->pn_expr;
1965 if (op == JSOP_GETPROP &&
1966 pn->pn_type == TOK_DOT &&
1967 pn2->pn_type == TOK_NAME) {
1968 /* Try to optimize arguments.length into JSOP_ARGCNT. */
1969 if (!LookupArgOrVar(cx, &cg->treeContext, pn2))
1970 return JS_FALSE;
1971 if (pn2->pn_op == JSOP_ARGUMENTS &&
1972 pn->pn_atom == cx->runtime->atomState.lengthAtom) {
1973 return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
1974 }
1975 }
1976
1977 /*
1978 * If the object operand is also a dotted property reference, reverse the
1979 * list linked via pn_expr temporarily so we can iterate over it from the
1980 * bottom up (reversing again as we go), to avoid excessive recursion.
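 *
 * For a.b.c.d, the loop below leaves pndown at the node for 'a'; we emit
 * 'a', then walk back up emitting annotated getprop ops for 'b' and 'c'
 * (restoring the pn_expr links as we go), and finally emit the op for 'd'
 * at the bottom of this function.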
1981 */
1982 if (pn2->pn_type == TOK_DOT) {
1983 pndot = pn2;
1984 pnup = NULL;
1985 top = CG_OFFSET(cg);
1986 for (;;) {
1987 /* Reverse pndot->pn_expr to point up, not down. */
1988 pndot->pn_offset = top;
1989 pndown = pndot->pn_expr;
1990 pndot->pn_expr = pnup;
1991 if (pndown->pn_type != TOK_DOT)
1992 break;
1993 pnup = pndot;
1994 pndot = pndown;
1995 }
1996
1997 /* pndown is a primary expression, not a dotted property reference. */
1998 if (!js_EmitTree(cx, cg, pndown))
1999 return JS_FALSE;
2000
2001 do {
2002 /* Walk back up the list, emitting annotated name ops. */
2003 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2004 CG_OFFSET(cg) - pndown->pn_offset) < 0) {
2005 return JS_FALSE;
2006 }
2007 ale = js_IndexAtom(cx, pndot->pn_atom, &cg->atomList);
2008 if (!ale)
2009 return JS_FALSE;
2010 EMIT_ATOM_INDEX_OP(pndot->pn_op, ALE_INDEX(ale));
2011
2012 /* Reverse the pn_expr link again. */
2013 pnup = pndot->pn_expr;
2014 pndot->pn_expr = pndown;
2015 pndown = pndot;
2016 } while ((pndot = pnup) != NULL);
2017 } else {
2018 if (!js_EmitTree(cx, cg, pn2))
2019 return JS_FALSE;
2020 }
2021
2022 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - pn2->pn_offset) < 0)
2023 return JS_FALSE;
2024 if (!pn->pn_atom) {
2025 JS_ASSERT(op == JSOP_IMPORTALL);
2026 if (js_Emit1(cx, cg, op) < 0)
2027 return JS_FALSE;
2028 } else {
2029 ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
2030 if (!ale)
2031 return JS_FALSE;
2032 EMIT_ATOM_INDEX_OP(op, ALE_INDEX(ale));
2033 }
2034 return JS_TRUE;
2035 }
2036
2037 static JSBool
2038 EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2039 {
2040 ptrdiff_t top;
2041 JSParseNode *left, *right, *next;
2042 jsint slot;
2043
2044 top = CG_OFFSET(cg);
2045 if (pn->pn_arity == PN_LIST) {
2046 /* Left-associative operator chain to avoid too much recursion. */
2047 JS_ASSERT(pn->pn_op == JSOP_GETELEM);
2048 JS_ASSERT(pn->pn_count >= 3);
2049 left = pn->pn_head;
2050 right = PN_LAST(pn);
2051 next = left->pn_next;
2052 JS_ASSERT(next != right);
2053
2054 /*
2055 * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2056 * one or more index-expression and JSOP_GETELEM op pairs.
2057 */
2058 if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
2059 if (!LookupArgOrVar(cx, &cg->treeContext, left))
2060 return JS_FALSE;
2061 if (left->pn_op == JSOP_ARGUMENTS &&
2062 JSDOUBLE_IS_INT(next->pn_dval, slot) &&
2063 (jsuint)slot < ATOM_INDEX_LIMIT) {
2064 left->pn_offset = next->pn_offset = top;
2065 EMIT_ATOM_INDEX_OP(JSOP_ARGSUB, (jsatomid)slot);
2066 left = next;
2067 next = left->pn_next;
2068 }
2069 }
2070
2071 /*
2072 * Check whether we generated JSOP_ARGSUB, just above, and have only
2073 * one more index expression to emit. Given arguments[0][j], we must
2074 * skip the while loop altogether, falling through to emit code for j
2075 * (in the subtree referenced by right), followed by the annotated op,
2076 * at the bottom of this function.
2077 */
2078 JS_ASSERT(next != right || pn->pn_count == 3);
2079 if (left == pn->pn_head) {
2080 if (!js_EmitTree(cx, cg, left))
2081 return JS_FALSE;
2082 }
2083 while (next != right) {
2084 if (!js_EmitTree(cx, cg, next))
2085 return JS_FALSE;
2086 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2087 return JS_FALSE;
2088 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
2089 return JS_FALSE;
2090 next = next->pn_next;
2091 }
2092 } else {
2093 JS_ASSERT(pn->pn_arity == PN_BINARY);
2094 left = pn->pn_left;
2095 right = pn->pn_right;
2096
2097 /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2098 if (op == JSOP_GETELEM &&
2099 left->pn_type == TOK_NAME &&
2100 right->pn_type == TOK_NUMBER) {
2101 if (!LookupArgOrVar(cx, &cg->treeContext, left))
2102 return JS_FALSE;
2103 if (left->pn_op == JSOP_ARGUMENTS &&
2104 JSDOUBLE_IS_INT(right->pn_dval, slot) &&
2105 (jsuint)slot < ATOM_INDEX_LIMIT) {
2106 left->pn_offset = right->pn_offset = top;
2107 EMIT_ATOM_INDEX_OP(JSOP_ARGSUB, (jsatomid)slot);
2108 return JS_TRUE;
2109 }
2110 }
2111
2112 if (!js_EmitTree(cx, cg, left))
2113 return JS_FALSE;
2114 }
2115 if (!js_EmitTree(cx, cg, right))
2116 return JS_FALSE;
2117 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2118 return JS_FALSE;
2119 return js_Emit1(cx, cg, op) >= 0;
2120 }
2121
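/*
 * Emit the most compact bytecode for a numeric literal: JSOP_ZERO or JSOP_ONE
 * for 0 and 1, JSOP_UINT16 for other non-negative ints that fit in an
 * atom-index immediate, and otherwise JSOP_NUMBER with an index into the
 * atom list for an atomized int or double.
 */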
2122 static JSBool
2123 EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
2124 {
2125 jsint ival;
2126 jsatomid atomIndex;
2127 JSAtom *atom;
2128 JSAtomListElement *ale;
2129
2130 if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
2131 if (ival == 0)
2132 return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
2133 if (ival == 1)
2134 return js_Emit1(cx, cg, JSOP_ONE) >= 0;
2135 if ((jsuint)ival < (jsuint)ATOM_INDEX_LIMIT) {
2136 atomIndex = (jsatomid)ival;
2137 EMIT_ATOM_INDEX_OP(JSOP_UINT16, atomIndex);
2138 return JS_TRUE;
2139 }
2140 atom = js_AtomizeInt(cx, ival, 0);
2141 } else {
2142 atom = js_AtomizeDouble(cx, dval, 0);
2143 }
2144 if (!atom)
2145 return JS_FALSE;
2146 ale = js_IndexAtom(cx, atom, &cg->atomList);
2147 if (!ale)
2148 return JS_FALSE;
2149 EMIT_ATOM_INDEX_OP(JSOP_NUMBER, ALE_INDEX(ale));
2150 return JS_TRUE;
2151 }
2152
2153 #if JS_HAS_SWITCH_STATEMENT
2154 static JSBool
2155 EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2156 JSStmtInfo *stmtInfo)
2157 {
2158 JSOp switchOp;
2159 JSBool ok, hasDefault, constPropagated;
2160 ptrdiff_t top, off, defaultOffset;
2161 JSParseNode *pn2, *pn3, *pn4;
2162 uint32 caseCount, tableLength;
2163 JSParseNode **table;
2164 jsdouble d;
2165 jsint i, low, high;
2166 jsval v;
2167 JSAtom *atom;
2168 JSAtomListElement *ale;
2169 intN noteIndex;
2170 size_t switchSize, tableSize;
2171 jsbytecode *pc, *savepc;
2172
2173 /* Try JSOP_TABLESWITCH first; fall back to JSOP_LOOKUPSWITCH if the cases are not dense ints, or to JSOP_CONDSWITCH (per ECMAv2) if any case is not a constant. */
2174 switchOp = JSOP_TABLESWITCH;
2175 ok = JS_TRUE;
2176 hasDefault = constPropagated = JS_FALSE;
2177 defaultOffset = -1;
2178
2179 /* Emit code for the discriminant first. */
2180 if (!js_EmitTree(cx, cg, pn->pn_kid1))
2181 return JS_FALSE;
2182
2183 /* Switch bytecodes run from here till end of final case. */
2184 top = CG_OFFSET(cg);
2185 js_PushStatement(&cg->treeContext, stmtInfo, STMT_SWITCH, top);
2186
2187 pn2 = pn->pn_kid2;
2188 caseCount = pn2->pn_count;
2189 tableLength = 0;
2190 table = NULL;
2191
2192 if (caseCount == 0 ||
2193 (caseCount == 1 &&
2194 (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
2195 caseCount = 0;
2196 low = 0;
2197 high = -1;
2198 } else {
2199 #define INTMAP_LENGTH 256
2200 jsbitmap intmap_space[INTMAP_LENGTH];
2201 jsbitmap *intmap = NULL;
2202 int32 intmap_bitlen = 0;
2203
2204 low = JSVAL_INT_MAX;
2205 high = JSVAL_INT_MIN;
2206
2207 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2208 if (pn3->pn_type == TOK_DEFAULT) {
2209 hasDefault = JS_TRUE;
2210 caseCount--; /* one of the "cases" was the default */
2211 continue;
2212 }
2213
2214 JS_ASSERT(pn3->pn_type == TOK_CASE);
2215 if (switchOp == JSOP_CONDSWITCH)
2216 continue;
2217
2218 pn4 = pn3->pn_left;
2219 switch (pn4->pn_type) {
2220 case TOK_NUMBER:
2221 d = pn4->pn_dval;
2222 if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) {
2223 pn3->pn_val = INT_TO_JSVAL(i);
2224 } else {
2225 atom = js_AtomizeDouble(cx, d, 0);
2226 if (!atom) {
2227 ok = JS_FALSE;
2228 goto release;
2229 }
2230 pn3->pn_val = ATOM_KEY(atom);
2231 }
2232 break;
2233 case TOK_STRING:
2234 pn3->pn_val = ATOM_KEY(pn4->pn_atom);
2235 break;
2236 case TOK_NAME:
2237 if (!pn4->pn_expr) {
2238 ok = js_LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &v);
2239 if (!ok)
2240 goto release;
2241 if (!JSVAL_IS_VOID(v)) {
2242 pn3->pn_val = v;
2243 constPropagated = JS_TRUE;
2244 break;
2245 }
2246 }
2247 /* FALL THROUGH */
2248 case TOK_PRIMARY:
2249 if (pn4->pn_op == JSOP_TRUE) {
2250 pn3->pn_val = JSVAL_TRUE;
2251 break;
2252 }
2253 if (pn4->pn_op == JSOP_FALSE) {
2254 pn3->pn_val = JSVAL_FALSE;
2255 break;
2256 }
2257 /* FALL THROUGH */
2258 default:
2259 switchOp = JSOP_CONDSWITCH;
2260 continue;
2261 }
2262
2263 JS_ASSERT(JSVAL_IS_NUMBER(pn3->pn_val) ||
2264 JSVAL_IS_STRING(pn3->pn_val) ||
2265 JSVAL_IS_BOOLEAN(pn3->pn_val));
2266
2267 if (switchOp != JSOP_TABLESWITCH)
2268 continue;
2269 if (!JSVAL_IS_INT(pn3->pn_val)) {
2270 switchOp = JSOP_LOOKUPSWITCH;
2271 continue;
2272 }
2273 i = JSVAL_TO_INT(pn3->pn_val);
2274 if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
2275 switchOp = JSOP_LOOKUPSWITCH;
2276 continue;
2277 }
2278 if (i < low)
2279 low = i;
2280 if (high < i)
2281 high = i;
2282
2283 /*
2284 * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
2285 * We bias i by 65536 if it's negative, and hope that's a rare
2286 * case (because it requires a malloc'd bitmap).
2287 */
2288 if (i < 0)
2289 i += JS_BIT(16);
2290 if (i >= intmap_bitlen) {
2291 if (!intmap &&
2292 i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
2293 intmap = intmap_space;
2294 intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
2295 } else {
2296 /* Just grab 8K for the worst-case bitmap. */
2297 intmap_bitlen = JS_BIT(16);
2298 intmap = (jsbitmap *)
2299 JS_malloc(cx,
2300 (JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
2301 * sizeof(jsbitmap));
2302 if (!intmap) {
2303 JS_ReportOutOfMemory(cx);
2304 return JS_FALSE;
2305 }
2306 }
2307 memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
2308 }
2309 if (JS_TEST_BIT(intmap, i)) {
2310 switchOp = JSOP_LOOKUPSWITCH;
2311 continue;
2312 }
2313 JS_SET_BIT(intmap, i);
2314 }
2315
2316 release:
2317 if (intmap && intmap != intmap_space)
2318 JS_free(cx, intmap);
2319 if (!ok)
2320 return JS_FALSE;
2321
2322 /*
2323 * Compute table length and select lookup instead if overlarge or
2324 * more than half-sparse.
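 * For example, cases 0, 1, and 1000 give tableLength 1001, which exceeds
 * twice the case count, so we emit JSOP_LOOKUPSWITCH rather than a
 * mostly-empty jump table.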
2325 */
2326 if (switchOp == JSOP_TABLESWITCH) {
2327 tableLength = (uint32)(high - low + 1);
2328 if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
2329 switchOp = JSOP_LOOKUPSWITCH;
2330 }
2331 }
2332
2333 /*
2334 * Emit a note with two offsets: first tells total switch code length,
2335 * second tells offset to first JSOP_CASE if condswitch.
2336 */
2337 noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
2338 if (noteIndex < 0)
2339 return JS_FALSE;
2340
2341 if (switchOp == JSOP_CONDSWITCH) {
2342 /*
2343 * 0 bytes of immediate for unoptimized ECMAv2 switch.
2344 */
2345 switchSize = 0;
2346 } else if (switchOp == JSOP_TABLESWITCH) {
2347 /*
2348 * 3 offsets (len, low, high) before the table, 1 per entry.
2349 */
2350 switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
2351 } else {
2352 /*
2353 * JSOP_LOOKUPSWITCH:
2354 * 1 offset (len) and 1 atom index (npairs) before the table,
2355 * 1 atom index and 1 jump offset per entry.
2356 */
2357 switchSize = (size_t)(JUMP_OFFSET_LEN + ATOM_INDEX_LEN +
2358 (ATOM_INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
2359 }
2360
2361 /*
2362 * Emit switchOp followed by switchSize bytes of jump or lookup table.
2363 *
2364 * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
2365 * to emit the immediate operand(s) by which bytecode readers such as
2366 * BuildSpanDepTable discover the length of the switch opcode *before*
2367 * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
2368 * also important to zero all unknown jump offset immediate operands,
2369 * so they can be converted to span dependencies with null targets to
2370 * be computed later (js_EmitN zeros switchSize bytes after switchOp).
2371 */
2372 if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
2373 return JS_FALSE;
2374
2375 off = -1;
2376 if (switchOp == JSOP_CONDSWITCH) {
2377 intN caseNoteIndex = -1;
2378
2379 /* Emit code for evaluating cases and jumping to case statements. */
2380 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2381 pn4 = pn3->pn_left;
2382 if (pn4 && !js_EmitTree(cx, cg, pn4))
2383 return JS_FALSE;
2384 if (caseNoteIndex >= 0) {
2385 /* off is the previous JSOP_CASE's bytecode offset. */
2386 if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
2387 CG_OFFSET(cg) - off)) {
2388 return JS_FALSE;
2389 }
2390 }
2391 if (pn3->pn_type == TOK_DEFAULT)
2392 continue;
2393 caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
2394 if (caseNoteIndex < 0)
2395 return JS_FALSE;
2396 off = EmitJump(cx, cg, JSOP_CASE, 0);
2397 if (off < 0)
2398 return JS_FALSE;
2399 pn3->pn_offset = off;
2400 if (pn3 == pn2->pn_head) {
2401 /* Switch note's second offset is to first JSOP_CASE. */
2402 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
2403 off - top)) {
2404 return JS_FALSE;
2405 }
2406 }
2407 }
2408
2409 /* Emit default even if no explicit default statement. */
2410 defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
2411 if (defaultOffset < 0)
2412 return JS_FALSE;
2413 } else {
2414 pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);
2415
2416 if (switchOp == JSOP_TABLESWITCH) {
2417 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
2418 SET_JUMP_OFFSET(pc, low);
2419 pc += JUMP_OFFSET_LEN;
2420 SET_JUMP_OFFSET(pc, high);
2421 pc += JUMP_OFFSET_LEN;
2422
2423 /*
2424 * Use malloc to avoid arena bloat for programs with many switches.
2425 * We free table if non-null at label out, so all control flow must
2426 * exit this function through goto out or goto bad.
2427 */
2428 if (tableLength != 0) {
2429 tableSize = (size_t)tableLength * sizeof *table;
2430 table = (JSParseNode **) JS_malloc(cx, tableSize);
2431 if (!table)
2432 return JS_FALSE;
2433 memset(table, 0, tableSize);
2434 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2435 if (pn3->pn_type == TOK_DEFAULT)
2436 continue;
2437 i = JSVAL_TO_INT(pn3->pn_val);
2438 i -= low;
2439 JS_ASSERT((uint32)i < tableLength);
2440 table[i] = pn3;
2441 }
2442 }
2443 } else {
2444 JS_ASSERT(switchOp == JSOP_LOOKUPSWITCH);
2445
2446 /* Fill in the number of cases. */
2447 SET_ATOM_INDEX(pc, caseCount);
2448 pc += ATOM_INDEX_LEN;
2449 }
2450
2451 /*
2452 * After this point, all control flow involving JSOP_TABLESWITCH
2453 * must set ok and goto out to exit this function. To keep things
2454 * simple, all switchOp cases exit that way.
2455 */
2456 if (constPropagated) {
2457 /*
2458 * Skip switchOp, as we are not setting jump offsets in the two
2459 * for loops below. We'll restore CG_NEXT(cg) from savepc after,
2460 * unless there was an error.
2461 */
2462 savepc = CG_NEXT(cg);
2463 CG_NEXT(cg) = pc + 1;
2464 if (switchOp == JSOP_TABLESWITCH) {
2465 for (i = 0; i < (jsint)tableLength; i++) {
2466 pn3 = table[i];
2467 if (pn3 &&
2468 (pn4 = pn3->pn_left) != NULL &&
2469 pn4->pn_type == TOK_NAME) {
2470 /* Note a propagated constant with the const's name. */
2471 JS_ASSERT(!pn4->pn_expr);
2472 ale = js_IndexAtom(cx, pn4->pn_atom, &cg->atomList);
2473 if (!ale)
2474 goto bad;
2475 CG_NEXT(cg) = pc;
2476 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
2477 ALE_INDEX(ale)) < 0) {
2478 goto bad;
2479 }
2480 }
2481 pc += JUMP_OFFSET_LEN;
2482 }
2483 } else {
2484 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2485 pn4 = pn3->pn_left;
2486 if (pn4 && pn4->pn_type == TOK_NAME) {
2487 /* Note a propagated constant with the const's name. */
2488 JS_ASSERT(!pn4->pn_expr);
2489 ale = js_IndexAtom(cx, pn4->pn_atom, &cg->atomList);
2490 if (!ale)
2491 goto bad;
2492 CG_NEXT(cg) = pc;
2493 if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
2494 ALE_INDEX(ale)) < 0) {
2495 goto bad;
2496 }
2497 }
2498 pc += ATOM_INDEX_LEN + JUMP_OFFSET_LEN;
2499 }
2500 }
2501 CG_NEXT(cg) = savepc;
2502 }
2503 }
2504
2505 /* Emit code for each case's statements, copying pn_offset up to pn3. */
2506 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2507 if (switchOp == JSOP_CONDSWITCH && pn3->pn_type != TOK_DEFAULT)
2508 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, pn3->pn_offset);
2509 pn4 = pn3->pn_right;
2510 ok = js_EmitTree(cx, cg, pn4);
2511 if (!ok)
2512 goto out;
2513 pn3->pn_offset = pn4->pn_offset;
2514 if (pn3->pn_type == TOK_DEFAULT)
2515 off = pn3->pn_offset - top;
2516 }
2517
2518 if (!hasDefault) {
2519 /* If no default case, offset for default is to end of switch. */
2520 off = CG_OFFSET(cg) - top;
2521 }
2522
2523 /* We better have set "off" by now. */
2524 JS_ASSERT(off != -1);
2525
2526 /* Set the default offset (to end of switch if no default). */
2527 if (switchOp == JSOP_CONDSWITCH) {
2528 pc = NULL;
2529 JS_ASSERT(defaultOffset != -1);
2530 ok = js_SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset),
2531 off - (defaultOffset - top));
2532 if (!ok)
2533 goto out;
2534 } else {
2535 pc = CG_CODE(cg, top);
2536 ok = js_SetJumpOffset(cx, cg, pc, off);
2537 if (!ok)
2538 goto out;
2539 pc += JUMP_OFFSET_LEN;
2540 }
2541
2542 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
2543 off = CG_OFFSET(cg) - top;
2544 ok = js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off);
2545 if (!ok)
2546 goto out;
2547
2548 if (switchOp == JSOP_TABLESWITCH) {
2549 /* Skip over the already-initialized switch bounds. */
2550 pc += 2 * JUMP_OFFSET_LEN;
2551
2552 /* Fill in the jump table, if there is one. */
2553 for (i = 0; i < (jsint)tableLength; i++) {
2554 pn3 = table[i];
2555 off = pn3 ? pn3->pn_offset - top : 0;
2556 ok = js_SetJumpOffset(cx, cg, pc, off);
2557 if (!ok)
2558 goto out;
2559 pc += JUMP_OFFSET_LEN;
2560 }
2561 } else if (switchOp == JSOP_LOOKUPSWITCH) {
2562 /* Skip over the already-initialized number of cases. */
2563 pc += ATOM_INDEX_LEN;
2564
2565 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2566 if (pn3->pn_type == TOK_DEFAULT)
2567 continue;
2568 atom = js_AtomizeValue(cx, pn3->pn_val, 0);
2569 if (!atom)
2570 goto bad;
2571 ale = js_IndexAtom(cx, atom, &cg->atomList);
2572 if (!ale)
2573 goto bad;
2574 SET_ATOM_INDEX(pc, ALE_INDEX(ale));
2575 pc += ATOM_INDEX_LEN;
2576
2577 off = pn3->pn_offset - top;
2578 ok = js_SetJumpOffset(cx, cg, pc, off);
2579 if (!ok)
2580 goto out;
2581 pc += JUMP_OFFSET_LEN;
2582 }
2583 }
2584
2585 out:
2586 if (table)
2587 JS_free(cx, table);
2588 return ok && js_PopStatementCG(cx, cg);
2589
2590 bad:
2591 ok = JS_FALSE;
2592 goto out;
2593 }
2594 #endif /* JS_HAS_SWITCH_STATEMENT */
2595
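/*
 * Emit bytecode for a function's body: push a dummy compiling frame whose
 * variables object and scope chain are the function object itself, emit the
 * body via js_EmitTree, then package the generated code as fun->u.script,
 * propagating the heavyweight flag from the tree context onto the function.
 */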
2596 JSBool
2597 js_EmitFunctionBody(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body,
2598 JSFunction *fun)
2599 {
2600 JSStackFrame *fp, frame;
2601 JSObject *funobj;
2602 JSBool ok;
2603
2604 if (!js_AllocTryNotes(cx, cg))
2605 return JS_FALSE;
2606
2607 fp = cx->fp;
2608 funobj = fun->object;
2609 JS_ASSERT(!fp || (fp->fun != fun && fp->varobj != funobj &&
2610 fp->scopeChain != funobj));
2611 memset(&frame, 0, sizeof frame);
2612 frame.fun = fun;
2613 frame.varobj = frame.scopeChain = funobj;
2614 frame.down = fp;
2615 frame.flags = JS_HAS_COMPILE_N_GO_OPTION(cx)
2616 ? JSFRAME_COMPILING | JSFRAME_COMPILE_N_GO
2617 : JSFRAME_COMPILING;
2618 cx->fp = &frame;
2619 ok = js_EmitTree(cx, cg, body);
2620 cx->fp = fp;
2621 if (!ok)
2622 return JS_FALSE;
2623
2624 fun->u.script = js_NewScriptFromCG(cx, cg, fun);
2625 if (!fun->u.script)
2626 return JS_FALSE;
2627 fun->interpreted = JS_TRUE;
2628 if (cg->treeContext.flags & TCF_FUN_HEAVYWEIGHT)
2629 fun->flags |= JSFUN_HEAVYWEIGHT;
2630 return JS_TRUE;
2631 }
2632
2633 /* A macro for inlining at the top of js_EmitTree (whence it came). */
2634 #define UPDATE_LINE_NUMBER_NOTES(cx, cg, pn) \
2635 JS_BEGIN_MACRO \
2636 uintN line_ = (pn)->pn_pos.begin.lineno; \
2637 uintN delta_ = line_ - CG_CURRENT_LINE(cg); \
2638 if (delta_ != 0) { \
2639 /* \
2640 * Encode any change in the current source line number by using \
2641 * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
2642 * whichever consumes less space. \
2643 * \
2644 * NB: We handle backward line number deltas (possible with for \
2645 * loops where the update part is emitted after the body, but its \
2646 * line number is <= any line number in the body) here by letting \
2647 * unsigned delta_ wrap to a very large number, which triggers a \
2648 * SRC_SETLINE. \
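 * \
 * For example, stepping to the next source line emits a single \
 * SRC_NEWLINE note; larger or backward deltas normally emit one \
 * SRC_SETLINE carrying the new absolute line number, unless line_ \
 * needs a 3-byte note offset, in which case up to three SRC_NEWLINE \
 * notes are still the cheaper encoding. \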
2649 */ \
2650 CG_CURRENT_LINE(cg) = line_; \
2651 if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
2652 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
2653 return JS_FALSE; \
2654 } else { \
2655 do { \
2656 if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0) \
2657 return JS_FALSE; \
2658 } while (--delta_ != 0); \
2659 } \
2660 } \
2661 JS_END_MACRO
2662
2663 /* A function, so that we avoid macro-bloating all the other callsites. */
2664 static JSBool
2665 UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
2666 {
2667 UPDATE_LINE_NUMBER_NOTES(cx, cg, pn);
2668 return JS_TRUE;
2669 }
2670
2671 JSBool
2672 js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
2673 {
2674 JSBool ok, useful, wantval;
2675 JSStmtInfo *stmt, stmtInfo;
2676 ptrdiff_t top, off, tmp, beq, jmp;
2677 JSParseNode *pn2, *pn3;
2678 JSAtom *atom;
2679 JSAtomListElement *ale;
2680 jsatomid atomIndex;
2681 intN noteIndex;
2682 JSSrcNoteType noteType;
2683 jsbytecode *pc;
2684 JSOp op;
2685 uint32 argc;
2686 int stackDummy;
2687
2688 if (!JS_CHECK_STACK_SIZE(cx, stackDummy)) {
2689 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_OVER_RECURSED);
2690 return JS_FALSE;
2691 }
2692
2693 ok = JS_TRUE;
2694 cg->emitLevel++;
2695 pn->pn_offset = top = CG_OFFSET(cg);
2696
2697 /* Emit notes to tell the current bytecode's source line number. */
2698 UPDATE_LINE_NUMBER_NOTES(cx, cg, pn);
2699
2700 switch (pn->pn_type) {
2701 case TOK_FUNCTION:
2702 {
2703 void *cg2mark;
2704 JSCodeGenerator *cg2;
2705 JSFunction *fun;
2706
2707 /* Generate code for the function's body. */
2708 cg2mark = JS_ARENA_MARK(&cx->tempPool);
2709 JS_ARENA_ALLOCATE_TYPE(cg2, JSCodeGenerator, &cx->tempPool);
2710 if (!cg2) {
2711 JS_ReportOutOfMemory(cx);
2712 return JS_FALSE;
2713 }
2714 if (!js_InitCodeGenerator(cx, cg2, cg->codePool, cg->notePool,
2715 cg->filename, pn->pn_pos.begin.lineno,
2716 cg->principals)) {
2717 return JS_FALSE;
2718 }
2719 cg2->treeContext.flags = pn->pn_flags | TCF_IN_FUNCTION;
2720 cg2->treeContext.tryCount = pn->pn_tryCount;
2721 cg2->parent = cg;
2722 fun = (JSFunction *) JS_GetPrivate(cx, ATOM_TO_OBJECT(pn->pn_funAtom));
2723 if (!js_EmitFunctionBody(cx, cg2, pn->pn_body, fun))
2724 return JS_FALSE;
2725
2726 /*
2727 * We need an activation object if an inner peeks out, or if such
2728 * inner-peeking caused one of our inners to become heavyweight.
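 * For example, in "function f() { var x; function g() { return x; } }",
 * emitting g sets TCF_FUN_USES_NONLOCALS on g's code generator because g
 * reads f's x, and we therefore mark f heavyweight here.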
2729 */
2730 if (cg2->treeContext.flags &
2731 (TCF_FUN_USES_NONLOCALS | TCF_FUN_HEAVYWEIGHT)) {
2732 cg->treeContext.flags |= TCF_FUN_HEAVYWEIGHT;
2733 }
2734 js_FinishCodeGenerator(cx, cg2);
2735 JS_ARENA_RELEASE(&cx->tempPool, cg2mark);
2736
2737 /* Make the function object a literal in the outer script's pool. */
2738 ale = js_IndexAtom(cx, pn->pn_funAtom, &cg->atomList);
2739 if (!ale)
2740 return JS_FALSE;
2741 atomIndex = ALE_INDEX(ale);
2742
2743 #if JS_HAS_LEXICAL_CLOSURE
2744 /* Emit a bytecode pointing to the closure object in its immediate. */
2745 if (pn->pn_op != JSOP_NOP) {
2746 EMIT_ATOM_INDEX_OP(pn->pn_op, atomIndex);
2747 break;
2748 }
2749 #endif
2750
2751 /* Top-level named functions need a nop for decompilation. */
2752 noteIndex = js_NewSrcNote2(cx, cg, SRC_FUNCDEF, (ptrdiff_t)atomIndex);
2753 if (noteIndex < 0 ||
2754 js_Emit1(cx, cg, JSOP_NOP) < 0) {
2755 return JS_FALSE;
2756 }
2757
2758 /*
2759 * Top-levels also need a prolog op to predefine their names in the
2760 * variable object, or if local, to fill their stack slots.
2761 */
2762 CG_SWITCH_TO_PROLOG(cg);
2763
2764 #if JS_HAS_LEXICAL_CLOSURE
2765 if (cg->treeContext.flags & TCF_IN_FUNCTION) {
2766 JSObject *obj, *pobj;
2767 JSProperty *prop;
2768 uintN slot;
2769
2770 obj = OBJ_GET_PARENT(cx, fun->object);
2771 if (!js_LookupProperty(cx, obj, (jsid)fun->atom, &pobj,
2772 &prop)) {
2773 return JS_FALSE;
2774 }
2775 JS_ASSERT(prop && pobj == obj);
2776 slot = ((JSScopeProperty *) prop)->shortid;
2777 OBJ_DROP_PROPERTY(cx, pobj, prop);
2778
2779 /* Emit [JSOP_DEFLOCALFUN, local variable slot, atomIndex]. */
2780 off = js_EmitN(cx, cg, JSOP_DEFLOCALFUN, VARNO_LEN+ATOM_INDEX_LEN);
2781 if (off < 0)
2782 return JS_FALSE;
2783 pc = CG_CODE(cg, off);
2784 SET_VARNO(pc, slot);
2785 pc += VARNO_LEN;
2786 SET_ATOM_INDEX(pc, atomIndex);
2787 } else
2788 #endif
2789 EMIT_ATOM_INDEX_OP(JSOP_DEFFUN, atomIndex);
2790
2791 CG_SWITCH_TO_MAIN(cg);
2792 break;
2793 }
2794
2795 #if JS_HAS_EXPORT_IMPORT
2796 case TOK_EXPORT:
2797 pn2 = pn->pn_head;
2798 if (pn2->pn_type == TOK_STAR) {
2799 /*
2800 * 'export *' must have no other elements in the list (what would
2801 * be the point?).
2802 */
2803 if (js_Emit1(cx, cg, JSOP_EXPORTALL) < 0)
2804 return JS_FALSE;
2805 } else {
2806 /*
2807 * If not 'export *', the list consists of NAME nodes identifying
2808 * properties of the variables object to flag as exported.
2809 */
2810 do {
2811 ale = js_IndexAtom(cx, pn2->pn_atom, &cg->atomList);
2812 if (!ale)
2813 return JS_FALSE;
2814 EMIT_ATOM_INDEX_OP(JSOP_EXPORTNAME, ALE_INDEX(ale));
2815 } while ((pn2 = pn2->pn_next) != NULL);
2816 }
2817 break;
2818
2819 case TOK_IMPORT:
2820 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
2821 /*
2822 * Each subtree on an import list is rooted by a DOT or LB node.
2823 * A DOT may have a null pn_atom member, in which case pn_op must
2824 * be JSOP_IMPORTALL -- see EmitPropOp above.
2825 */
2826 if (!js_EmitTree(cx, cg, pn2))
2827 return JS_FALSE;
2828 }
2829 break;
2830 #endif /* JS_HAS_EXPORT_IMPORT */
2831
2832 case TOK_IF:
2833 /* Initialize so we can detect else-if chains and avoid recursion. */
2834 stmtInfo.type = STMT_IF;
2835 beq = jmp = -1;
2836 noteIndex = -1;
2837
2838 if_again:
2839 /* Emit code for the condition before pushing stmtInfo. */
2840 if (!js_EmitTree(cx, cg, pn->pn_kid1))
2841 return JS_FALSE;
2842 if (stmtInfo.type == STMT_IF) {
2843 js_PushStatement(&cg->treeContext, &stmtInfo, STMT_IF,
2844 CG_OFFSET(cg));
2845 } else {
2846 /*
2847 * We came here from the goto further below that detects else-if
2848 * chains, so we must mutate stmtInfo back into a STMT_IF record.
2849 * Also (see below for why) we need a note offset for SRC_IF_ELSE
2850 * to help the decompiler.
2851 */
2852 JS_ASSERT(stmtInfo.type == STMT_ELSE);
2853 stmtInfo.type = STMT_IF;
2854 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
2855 return JS_FALSE;
2856 }
2857
2858 /* Emit an annotated branch-if-false around the then part. */
2859 pn3 = pn->pn_kid3;
2860 noteIndex = js_NewSrcNote(cx, cg, pn3 ? SRC_IF_ELSE : SRC_IF);
2861 if (noteIndex < 0)
2862 return JS_FALSE;
2863 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
2864 if (beq < 0)
2865 return JS_FALSE;
2866
2867 /* Emit code for the then and optional else parts. */
2868 if (!js_EmitTree(cx, cg, pn->pn_kid2))
2869 return JS_FALSE;
2870 if (pn3) {
2871 /* Modify stmtInfo so we know we're in the else part. */
2872 stmtInfo.type = STMT_ELSE;
2873
2874 /*
2875 * Emit a JSOP_BACKPATCH op to jump from the end of our then part
2876 * around the else part. The js_PopStatementCG call at the bottom
2877 * of this switch case will fix up the backpatch chain linked from
2878 * stmtInfo.breaks.
2879 */
2880 jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks, NULL, SRC_NULL);
2881 if (jmp < 0)
2882 return JS_FALSE;
2883
2884 /* Ensure the branch-if-false comes here, then emit the else. */
2885 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
2886 if (pn3->pn_type == TOK_IF) {
2887 pn = pn3;
2888 goto if_again;
2889 }
2890
2891 if (!js_EmitTree(cx, cg, pn3))
2892 return JS_FALSE;
2893
2894 /*
2895 * Annotate SRC_IF_ELSE with the offset from branch to jump, for
2896 * the decompiler's benefit. We can't just "back up" from the pc
2897 * of the else clause, because we don't know whether an extended
2898 * jump was required to leap from the end of the then clause over
2899 * the else clause.
2900 */
2901 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
2902 return JS_FALSE;
2903 } else {
2904 /* No else part, fixup the branch-if-false to come here. */
2905 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
2906 }
2907 ok = js_PopStatementCG(cx, cg);
2908 break;
2909
2910 #if JS_HAS_SWITCH_STATEMENT
2911 case TOK_SWITCH:
2912 /* Out of line to avoid bloating js_EmitTree's stack frame size. */
2913 ok = EmitSwitch(cx, cg, pn, &stmtInfo);
2914 break;
2915 #endif /* JS_HAS_SWITCH_STATEMENT */
2916
2917 case TOK_WHILE:
2918 js_PushStatement(&cg->treeContext, &stmtInfo, STMT_WHILE_LOOP, top);
2919 if (!js_EmitTree(cx, cg, pn->pn_left))
2920 return JS_FALSE;
2921 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
2922 if (noteIndex < 0)
2923 return JS_FALSE;
2924 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
2925 if (beq < 0)
2926 return JS_FALSE;
2927 if (!js_EmitTree(cx, cg, pn->pn_right))
2928 return JS_FALSE;
2929 jmp = EmitJump(cx, cg, JSOP_GOTO, top - CG_OFFSET(cg));
2930 if (jmp < 0)
2931 return JS_FALSE;
2932 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
2933 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
2934 return JS_FALSE;
2935 ok = js_PopStatementCG(cx, cg);
2936 break;
2937
2938 #if JS_HAS_DO_WHILE_LOOP
2939 case TOK_DO:
2940 /* Emit an annotated nop so we know to decompile a 'do' keyword. */
2941 if (js_NewSrcNote(cx, cg, SRC_WHILE) < 0 ||
2942 js_Emit1(cx, cg, JSOP_NOP) < 0) {
2943 return JS_FALSE;
2944 }
2945
2946 /* Compile the loop body. */
2947 top = CG_OFFSET(cg);
2948 js_PushStatement(&cg->treeContext, &stmtInfo, STMT_DO_LOOP, top);
2949 if (!js_EmitTree(cx, cg, pn->pn_left))
2950 return JS_FALSE;
2951
2952 /* Set loop and enclosing label update offsets, for continue. */
2953 stmt = &stmtInfo;
2954 do {
2955 stmt->update = CG_OFFSET(cg);
2956 } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
2957
2958 /* Compile the loop condition, now that continues know where to go. */
2959 if (!js_EmitTree(cx, cg, pn->pn_right))
2960 return JS_FALSE;
2961
2962 /*
2963 * No source note needed, because JSOP_IFNE is used only for do-while.
2964 * If we ever use JSOP_IFNE for other purposes, we can still avoid yet
2965 * another note here, by storing (jmp - top) in the SRC_WHILE note's
2966 * offset, and fetching that delta in order to decompile recursively.
2967 */
2968 if (EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg)) < 0)
2969 return JS_FALSE;
2970 ok = js_PopStatementCG(cx, cg);
2971 break;
2972 #endif /* JS_HAS_DO_WHILE_LOOP */
2973
2974 case TOK_FOR:
2975 beq = 0; /* suppress gcc warnings */
2976 pn2 = pn->pn_left;
2977 js_PushStatement(&cg->treeContext, &stmtInfo, STMT_FOR_LOOP, top);
2978
2979 if (pn2->pn_type == TOK_IN) {
2980 /* Set stmtInfo type for later testing. */
2981 stmtInfo.type = STMT_FOR_IN_LOOP;
2982 noteIndex = -1;
2983
2984 /*
2985 * If the left part is 'var x', emit code to define x if necessary
2986 * using a prolog opcode, but do not emit a pop. If the left part
2987 * is 'var x = i', emit prolog code to define x if necessary; then
2988 * emit code to evaluate i, assign the result to x, and pop the
2989 * result off the stack.
2990 *
2991 * All the logic to do this is implemented in the outer switch's
2992 * TOK_VAR case, conditioned on pn_extra flags set by the parser.
2993 *
2994 * In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
2995 * called here will generate the SRC_VAR note for the assignment
2996 * op that sets x = i, hoisting the initialized var declaration
2997 * out of the loop: 'var x = i; for (x in o) ...'.
2998 *
2999 * In the 'for (var x in o) ...' case, nothing but the prolog op
3000 * (if needed) should be generated here, we must emit the SRC_VAR
3001 * just before the JSOP_FOR* opcode in the switch on pn3->pn_type
3002 * a bit below, so nothing is hoisted: 'for (var x in o) ...'.
3003 */
3004 pn3 = pn2->pn_left;
3005 if (pn3->pn_type == TOK_VAR && !js_EmitTree(cx, cg, pn3))
3006 return JS_FALSE;
3007
3008 /* Emit a push to allocate the iterator. */
3009 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
3010 return JS_FALSE;
3011
3012 /* Compile the object expression to the right of 'in'. */
3013 if (!js_EmitTree(cx, cg, pn2->pn_right))
3014 return JS_FALSE;
3015 if (js_Emit1(cx, cg, JSOP_TOOBJECT) < 0)
3016 return JS_FALSE;
3017
3018 top = CG_OFFSET(cg);
3019 SET_STATEMENT_TOP(&stmtInfo, top);
3020
3021 /* Compile a JSOP_FOR* bytecode based on the left hand side. */
3022 switch (pn3->pn_type) {
3023 case TOK_VAR:
3024 pn3 = pn3->pn_head;
3025 JS_ASSERT(pn3->pn_type == TOK_NAME);
3026 if (!pn3->pn_expr && js_NewSrcNote(cx, cg, SRC_VAR) < 0)
3027 return JS_FALSE;
3028 /* FALL THROUGH */
3029 case TOK_NAME:
3030 if (pn3->pn_slot >= 0) {
3031 op = pn3->pn_op;
3032 switch (op) {
3033 case JSOP_GETARG: /* FALL THROUGH */
3034 case JSOP_SETARG: op = JSOP_FORARG; break;
3035 case JSOP_GETVAR: /* FALL THROUGH */
3036 case JSOP_SETVAR: op = JSOP_FORVAR; break;
3037 case JSOP_GETGVAR:
3038 case JSOP_SETGVAR: op = JSOP_FORNAME; break;
3039 default: JS_ASSERT(0);
3040 }
3041 } else {
3042 pn3->pn_op = JSOP_FORNAME;
3043 if (!LookupArgOrVar(cx, &cg->treeContext, pn3))
3044 return JS_FALSE;
3045 op = pn3->pn_op;
3046 }
3047 if (pn3->pn_slot >= 0) {
3048 if (pn3->pn_attrs & JSPROP_READONLY)
3049 op = JSOP_GETVAR;
3050 atomIndex = (jsatomid) pn3->pn_slot;
3051 EMIT_ATOM_INDEX_OP(op, atomIndex);
3052 } else {
3053 if (!EmitAtomOp(cx, pn3, op, cg))
3054 return JS_FALSE;
3055 }
3056 break;
3057
3058 case TOK_DOT:
3059 if (!EmitPropOp(cx, pn3, JSOP_FORPROP, cg))
3060 return JS_FALSE;
3061 break;
3062
3063 case TOK_LB:
3064 /*
3065 * We separate the first/next bytecode from the enumerator
3066 * variable binding to avoid any side-effects in the index
3067 * expression (e.g., for (x[i++] in {}) should not bind x[i]
3068 * or increment i at all).
3069 */
3070 if (!js_Emit1(cx, cg, JSOP_FORELEM))
3071 return JS_FALSE;
3072
3073 /*
3074 * Emit a SRC_WHILE note with offset telling the distance to
3075 * the loop-closing jump (we can't reckon from the branch at
3076 * the top of the loop, because the loop-closing jump might
3077 * need to be an extended jump, independent of whether the
3078 * branch is short or long).
3079 */
3080 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
3081 if (noteIndex < 0)
3082 return JS_FALSE;
3083 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
3084 if (beq < 0)
3085 return JS_FALSE;
3086
3087 /* Now that we're safely past the IFEQ, commit side effects. */
3088 if (!EmitElemOp(cx, pn3, JSOP_ENUMELEM, cg))
3089 return JS_FALSE;
3090 break;
3091
3092 default:
3093 JS_ASSERT(0);
3094 }
3095 if (pn3->pn_type != TOK_LB) {
3096 /* Annotate so the decompiler can find the loop-closing jump. */
3097 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
3098 if (noteIndex < 0)
3099 return JS_FALSE;
3100
3101 /* Pop and test the loop condition generated by JSOP_FOR*. */
3102 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
3103 if (beq < 0)
3104 return JS_FALSE;
3105 }
3106 } else {
3107 if (!pn2->pn_kid1) {
3108 /* No initializer: emit an annotated nop for the decompiler. */
3109 op = JSOP_NOP;
3110 } else {
3111 if (!js_EmitTree(cx, cg, pn2->pn_kid1))
3112 return JS_FALSE;
3113 op = JSOP_POP;
3114 }
3115 noteIndex = js_NewSrcNote(cx, cg, SRC_FOR);
3116 if (noteIndex < 0 ||
3117 js_Emit1(cx, cg, op) < 0) {
3118 return JS_FALSE;
3119 }
3120
3121 top = CG_OFFSET(cg);
3122 SET_STATEMENT_TOP(&stmtInfo, top);
3123 if (!pn2->pn_kid2) {
3124 /* No loop condition: flag this fact in the source notes. */
3125 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, 0))
3126 return JS_FALSE;
3127 } else {
3128 if (!js_EmitTree(cx, cg, pn2->pn_kid2))
3129 return JS_FALSE;
3130 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
3131 CG_OFFSET(cg) - top)) {
3132 return JS_FALSE;
3133 }
3134 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
3135 if (beq < 0)
3136 return JS_FALSE;
3137 }
3138
3139 /* Set pn3 (used below) here to avoid spurious gcc warnings. */
3140 pn3 = pn2->pn_kid3;
3141 }
3142
3143 /* Emit code for the loop body. */
3144 if (!js_EmitTree(cx, cg, pn->pn_right))
3145 return JS_FALSE;
3146
3147 if (pn2->pn_type != TOK_IN) {
3148 /* Set the second note offset so we can find the update part. */
3149 JS_ASSERT(noteIndex != -1);
3150 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
3151 CG_OFFSET(cg) - top)) {
3152 return JS_FALSE;
3153 }
3154
3155 if (pn3) {
3156 /* Set loop and enclosing "update" offsets, for continue. */
3157 stmt = &stmtInfo;
3158 do {
3159 stmt->update = CG_OFFSET(cg);
3160 } while ((stmt = stmt->down) != NULL &&
3161 stmt->type == STMT_LABEL);
3162
3163 if (!js_EmitTree(cx, cg, pn3))
3164 return JS_FALSE;
3165 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3166 return JS_FALSE;
3167
3168 /* Restore the absolute line number for source note readers. */
3169 off = (ptrdiff_t) pn->pn_pos.end.lineno;
3170 if (CG_CURRENT_LINE(cg) != (uintN) off) {
3171 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, off) < 0)
3172 return JS_FALSE;
3173 CG_CURRENT_LINE(cg) = (uintN) off;
3174 }
3175 }
3176
3177 /* The third note offset helps us find the loop-closing jump. */
3178 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 2,
3179 CG_OFFSET(cg) - top)) {
3180 return JS_FALSE;
3181 }
3182 }
3183
3184 /* Emit the loop-closing jump and fixup all jump offsets. */
3185 jmp = EmitJump(cx, cg, JSOP_GOTO, top - CG_OFFSET(cg));
3186 if (jmp < 0)
3187 return JS_FALSE;
3188 if (beq > 0)
3189 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
3190 if (pn2->pn_type == TOK_IN) {
3191 /* Set the SRC_WHILE note offset so we can find the closing jump. */
3192 JS_ASSERT(noteIndex != -1);
3193 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, jmp - beq))
3194 return JS_FALSE;
3195 }
3196
3197 /* Now fixup all breaks and continues (before for/in's final POP2). */
3198 if (!js_PopStatementCG(cx, cg))
3199 return JS_FALSE;
3200
3201 if (pn2->pn_type == TOK_IN) {
3202 if (js_Emit1(cx, cg, JSOP_POP2) < 0)
3203 return JS_FALSE;
3204 }
3205 break;
3206
3207 case TOK_BREAK:
3208 stmt = cg->treeContext.topStmt;
3209 atom = pn->pn_atom;
3210 if (atom) {
3211 ale = js_IndexAtom(cx, atom, &cg->atomList);
3212 if (!ale)
3213 return JS_FALSE;
3214 while (stmt->type != STMT_LABEL || stmt->label != atom)
3215 stmt = stmt->down;
3216 noteType = SRC_BREAK2LABEL;
3217 } else {
3218 ale = NULL;
3219 while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
3220 stmt = stmt->down;
3221 noteType = SRC_NULL;
3222 }
3223
3224 if (EmitGoto(cx, cg, stmt, &stmt->breaks, ale, noteType) < 0)
3225 return JS_FALSE;
3226 break;
3227
3228 case TOK_CONTINUE:
3229 stmt = cg->treeContext.topStmt;
3230 atom = pn->pn_atom;
3231 if (atom) {
3232 /* Find the loop statement enclosed by the matching label. */
3233 JSStmtInfo *loop = NULL;
3234 ale = js_IndexAtom(cx, atom, &cg->atomList);
3235 if (!ale)
3236 return JS_FALSE;
3237 while (stmt->type != STMT_LABEL || stmt->label != atom) {
3238 if (STMT_IS_LOOP(stmt))
3239 loop = stmt;
3240 stmt = stmt->down;
3241 }
3242 stmt = loop;
3243 noteType = SRC_CONT2LABEL;
3244 } else {
3245 ale = NULL;
3246 while (!STMT_IS_LOOP(stmt))
3247 stmt = stmt->down;
3248 noteType = SRC_CONTINUE;
3249 }
3250
3251 if (EmitGoto(cx, cg, stmt, &stmt->continues, ale, noteType) < 0)
3252 return JS_FALSE;
3253 break;
3254
3255 case TOK_WITH:
3256 if (!js_EmitTree(cx, cg, pn->pn_left))
3257 return JS_FALSE;
3258 js_PushStatement(&cg->treeContext, &stmtInfo, STMT_WITH, CG_OFFSET(cg));
3259 if (js_Emit1(cx, cg, JSOP_ENTERWITH) < 0)
3260 return JS_FALSE;
3261 if (!js_EmitTree(cx, cg, pn->pn_right))
3262 return JS_FALSE;
3263 if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
3264 return JS_FALSE;
3265 ok = js_PopStatementCG(cx, cg);
3266 break;
3267
3268 #if JS_HAS_EXCEPTIONS
3269
3270 case TOK_TRY:
3271 {
3272 ptrdiff_t start, end, catchStart, finallyCatch, catchJump;
3273 JSParseNode *iter;
3274 intN depth;
3275
3276 /* Quell GCC overwarnings. */
3277 end = catchStart = finallyCatch = catchJump = -1;
3278
3279 /* Emit JSOP_GOTO that points to the first op after the catch/finally blocks */
3280 #define EMIT_CATCH_GOTO(cx, cg, jmp) \
3281 EMIT_BACKPATCH_OP(cx, cg, stmtInfo.catchJump, JSOP_BACKPATCH, jmp)
3282
3283 /* Emit JSOP_GOSUB that points to the finally block. */
3284 #define EMIT_FINALLY_GOSUB(cx, cg, jmp) \
3285 EMIT_BACKPATCH_OP(cx, cg, stmtInfo.gosub, JSOP_BACKPATCH_PUSH, jmp)
3286
3287 /*
3288 * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
3289 * for later fixup.
3290 *
3291 * When a finally block is `active' (STMT_FINALLY on the treeContext),
3292 * non-local jumps (including jumps-over-catches) result in a GOSUB
3293 * being written into the bytecode stream and fixed-up later (c.f.
3294 * EMIT_BACKPATCH_OP and BackPatch).
3295 */
3296 js_PushStatement(&cg->treeContext, &stmtInfo,
3297 pn->pn_kid3 ? STMT_FINALLY : STMT_BLOCK,
3298 CG_OFFSET(cg));
3299
3300 /*
3301 * About JSOP_SETSP: an exception can be thrown while the stack is in
3302 * an unbalanced state, and this imbalance causes problems with things
3303 * like function invocation later on.
3304 *
3305 * To fix this, we compute the `balanced' stack depth upon try entry,
3306 * and then restore the stack to this depth when we hit the first catch
3307 * or finally block. We can't just zero the stack, because things like
3308 * for/in and with that are active upon entry to the block keep state
3309 * variables on the stack.
3310 */
3311 depth = cg->stackDepth;
3312
3313 /* Mark try location for decompilation, then emit try block. */
3314 if (js_Emit1(cx, cg, JSOP_TRY) < 0)
3315 return JS_FALSE;
3316 start = CG_OFFSET(cg);
3317 if (!js_EmitTree(cx, cg, pn->pn_kid1))
3318 return JS_FALSE;
3319
3320 /* GOSUB to finally, if present. */
3321 if (pn->pn_kid3) {
3322 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
3323 return JS_FALSE;
3324 EMIT_FINALLY_GOSUB(cx, cg, jmp);
3325 if (jmp < 0)
3326 return JS_FALSE;
3327 }
3328
3329 /* Emit (hidden) jump over catch and/or finally. */
3330 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
3331 return JS_FALSE;
3332 EMIT_CATCH_GOTO(cx, cg, jmp);
3333 if (jmp < 0)
3334 return JS_FALSE;
3335
3336 end = CG_OFFSET(cg);
3337
3338 /* If this try has a catch block, emit it. */
3339 iter = pn->pn_kid2;
3340 if (iter) {
3341 catchStart = end;
3342
3343 /*
3344 * The emitted code for a catch block looks like:
3345 *
3346 * [ popscope ] only if 2nd+ catch block
3347 * name Object
3348 * pushobj
3349 * newinit
3350 * exception
3351 * initcatchvar <atom>
3352 * enterwith
3353 * [< catchguard code >] if there's a catchguard
3354 * [ifeq <offset to next catch block>] " "
3355 * < catch block contents >
3356 * leavewith
3357 * goto <end of catch blocks> non-local; finally applies
3358 *
3359 * If there's no catch block without a catchguard, the last
3360 * <offset to next catch block> points to rethrow code. This
3361 * code will GOSUB to the finally code if appropriate, and is
3362 * also used for the catch-all trynote for capturing exceptions
3363 * thrown from catch{} blocks.
3364 */
3365 for (;;) {
3366 JSStmtInfo stmtInfo2;
3367 JSParseNode *disc;
3368 ptrdiff_t guardnote;
3369
3370 if (!UpdateLineNumberNotes(cx, cg, iter))
3371 return JS_FALSE;
3372
3373 if (catchJump != -1) {
3374 /* Fix up and clean up previous catch block. */
3375 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, catchJump);
3376
3377 /* Compensate for the [leavewith]. */
3378 cg->stackDepth++;
3379 JS_ASSERT((uintN) cg->stackDepth <= cg->maxStackDepth);
3380
3381 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
3382 js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0) {
3383 return JS_FALSE;
3384 }
3385 } else {
3386 /* Set stack to original depth (see SETSP comment above). */
3387 EMIT_ATOM_INDEX_OP(JSOP_SETSP, (jsatomid)depth);
3388 cg->stackDepth = depth;
3389 }
3390
3391 /* Non-negative guardnote offset is length of catchguard. */
3392 guardnote = js_NewSrcNote2(cx, cg, SRC_CATCH, 0);
3393 if (guardnote < 0 ||
3394 js_Emit1(cx, cg, JSOP_NOP) < 0) {
3395 return JS_FALSE;
3396 }
3397
3398 /* Construct the scope holder and push it on. */
3399 ale = js_IndexAtom(cx, cx->runtime->atomState.ObjectAtom,
3400 &cg->atomList);
3401 if (!ale)
3402 return JS_FALSE;
3403 EMIT_ATOM_INDEX_OP(JSOP_NAME, ALE_INDEX(ale));
3404
3405 if (js_Emit1(cx, cg, JSOP_PUSHOBJ) < 0 ||
3406 js_Emit1(cx, cg, JSOP_NEWINIT) < 0 ||
3407 js_Emit1(cx, cg, JSOP_EXCEPTION) < 0) {
3408 return JS_FALSE;
3409 }
3410
3411 /* initcatchvar <atomIndex> */
3412 disc = iter->pn_kid1;
3413 ale = js_IndexAtom(cx, disc->pn_atom, &cg->atomList);
3414 if (!ale)
3415 return JS_FALSE;
3416
3417 EMIT_ATOM_INDEX_OP(JSOP_INITCATCHVAR, ALE_INDEX(ale));
3418 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
3419 js_Emit1(cx, cg, JSOP_ENTERWITH) < 0) {
3420 return JS_FALSE;
3421 }
3422
3423 /* boolean_expr */
3424 if (disc->pn_expr) {
3425 ptrdiff_t guardstart = CG_OFFSET(cg);
3426 if (!js_EmitTree(cx, cg, disc->pn_expr))
3427 return JS_FALSE;
3428 if (!js_SetSrcNoteOffset(cx, cg, guardnote, 0,
3429 CG_OFFSET(cg) - guardstart)) {
3430 return JS_FALSE;
3431 }
3432 /* ifeq <next block> */
3433 catchJump = EmitJump(cx, cg, JSOP_IFEQ, 0);
3434 if (catchJump < 0)
3435 return JS_FALSE;
3436 }
3437
3438 /* Emit catch block. */
3439 js_PushStatement(&cg->treeContext, &stmtInfo2, STMT_CATCH,
3440 CG_OFFSET(cg));
3441 stmtInfo2.label = disc->pn_atom;
3442 if (!js_EmitTree(cx, cg, iter->pn_kid3))
3443 return JS_FALSE;
3444 js_PopStatementCG(cx, cg);
3445
3446 /*
3447 * Jump over the remaining catch blocks.
3448 * This counts as a non-local jump, so do the finally thing.
3449 */
3450
3451 /* leavewith, annotated so the decompiler knows to pop */
3452 off = cg->stackDepth - 1;
3453 if (js_NewSrcNote2(cx, cg, SRC_CATCH, off) < 0 ||
3454 js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0) {
3455 return JS_FALSE;
3456 }
3457
3458 /* gosub <finally>, if required */
3459 if (pn->pn_kid3) {
3460 EMIT_FINALLY_GOSUB(cx, cg, jmp);
3461 if (jmp < 0)
3462 return JS_FALSE;
3463 }
3464
3465 /* This will get fixed up to jump to after catch/finally. */
3466 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
3467 return JS_FALSE;
3468 EMIT_CATCH_GOTO(cx, cg, jmp);
3469 if (jmp < 0)
3470 return JS_FALSE;
3471 if (!iter->pn_kid2) /* leave iter at last catch */
3472 break;
3473 iter = iter->pn_kid2;
3474 }
3475 }
3476
3477 /*
3478 * We use a [setsp],[gosub],rethrow block for rethrowing when
3479 * there's no unguarded catch, and also for running finally code
3480 * while letting an uncaught exception pass through.
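 * For example, "try {...} catch (e if cond) {...}" needs this block so an
 * exception that fails cond gets rethrown, and "try {...} finally {...}"
 * needs it so the finally code runs before an uncaught exception
 * propagates.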
3481 */
3482 if (pn->pn_kid3 ||
3483 (catchJump != -1 && iter->pn_kid1->pn_expr)) {
3484 /*
3485 * Emit another stack fixup, because the catch could itself
3486 * throw an exception in an unbalanced state, and the finally
3487 * may need to call functions. If there is no finally, only
3488 * guarded catches, the rethrow code below nevertheless needs
3489 * stack fixup.
3490 */
3491 finallyCatch = CG_OFFSET(cg);
3492 EMIT_ATOM_INDEX_OP(JSOP_SETSP, (jsatomid)depth);
3493 cg->stackDepth = depth;
3494
3495 /* Last discriminant jumps to rethrow if none match. */
3496 if (catchJump != -1 && iter->pn_kid1->pn_expr)
3497 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, catchJump);
3498
3499 if (pn->pn_kid3) {
3500 EMIT_FINALLY_GOSUB(cx, cg, jmp);
3501 if (jmp < 0)
3502 return JS_FALSE;
3503 cg->stackDepth = depth;
3504 }
3505
3506 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
3507 js_Emit1(cx, cg, JSOP_EXCEPTION) < 0 ||
3508 js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
3509 js_Emit1(cx, cg, JSOP_THROW) < 0) {
3510 return JS_FALSE;
3511 }
3512 JS_ASSERT(cg->stackDepth == depth);
3513 }
3514
3515 /*
3516 * If we have a finally, it belongs here, and we have to fix up the
3517 * gosubs that might have been emitted before non-local jumps.
3518 */
3519 if (pn->pn_kid3) {
3520 if (!BackPatch(cx, cg, stmtInfo.gosub, CG_NEXT(cg), JSOP_GOSUB))
3521 return JS_FALSE;
3522
3523 /*
3524 * The stack budget must be balanced at this point, and we need
3525 * one more slot for the JSOP_RETSUB return address pushed by a
3526 * JSOP_GOSUB opcode that calls this finally clause.
3527 */
3528 JS_ASSERT(cg->stackDepth == depth);
3529 if ((uintN)++cg->stackDepth > cg->maxStackDepth)
3530 cg->maxStackDepth = cg->stackDepth;
3531
3532 /* Now indicate that we're emitting a subroutine body. */
3533 stmtInfo.type = STMT_SUBROUTINE;
3534 if (!UpdateLineNumberNotes(cx, cg, pn->pn_kid3))
3535 return JS_FALSE;
3536 if (js_Emit1(cx, cg, JSOP_FINALLY) < 0 ||
3537 !js_EmitTree(cx, cg, pn->pn_kid3) ||
3538 js_Emit1(cx, cg, JSOP_RETSUB) < 0) {
3539 return JS_FALSE;
3540 }
3541 }
3542 js_PopStatementCG(cx, cg);
3543
3544 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
3545 js_Emit1(cx, cg, JSOP_NOP) < 0) {
3546 return JS_FALSE;
3547 }
3548
3549 /* Fix up the end-of-try/catch jumps to come here. */
3550 if (!BackPatch(cx, cg, stmtInfo.catchJump, CG_NEXT(cg), JSOP_GOTO))
3551 return JS_FALSE;
3552
3553 /*
3554 * Add the try note last, to let post-order give us the right ordering
3555 * (first to last for a given nesting level, inner to outer by level).
3556 */
3557 if (pn->pn_kid2) {
3558 JS_ASSERT(end != -1 && catchStart != -1);
3559 if (!js_NewTryNote(cx, cg, start, end, catchStart))
3560 return JS_FALSE;
3561 }
3562
3563 /*
3564 * If we've got a finally, mark try+catch region with additional
3565 * trynote to catch exceptions (re)thrown from a catch block or
3566 * for the try{}finally{} case.
3567 */
3568 if (pn->pn_kid3) {
3569 JS_ASSERT(finallyCatch != -1);
3570 if (!js_NewTryNote(cx, cg, start, finallyCatch, finallyCatch))
3571 return JS_FALSE;
3572 }
3573 break;
3574 }
3575
3576 #endif /* JS_HAS_EXCEPTIONS */
3577
3578 case TOK_VAR:
3579 off = noteIndex = -1;
3580 for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
3581 JS_ASSERT(pn2->pn_type == TOK_NAME);
3582 if (!LookupArgOrVar(cx, &cg->treeContext, pn2))
3583 return JS_FALSE;
3584 op = pn2->pn_op;
3585 if (op == JSOP_ARGUMENTS) {
3586 JS_ASSERT(!pn2->pn_expr); /* JSOP_ARGUMENTS => no initializer */
3587 #ifdef __GNUC__
3588 atomIndex = 0; /* quell GCC overwarning */
3589 #endif
3590 } else {
3591 if (pn2->pn_slot >= 0) {
3592 atomIndex = (jsatomid) pn2->pn_slot;
3593 } else {
3594 ale = js_IndexAtom(cx, pn2->pn_atom, &cg->atomList);
3595 if (!ale)
3596 return JS_FALSE;
3597 atomIndex = ALE_INDEX(ale);
3598 }
3599
3600 if ((js_CodeSpec[op].format & JOF_TYPEMASK) == JOF_CONST &&
3601 (!(cg->treeContext.flags & TCF_IN_FUNCTION) ||
3602 (cg->treeContext.flags & TCF_FUN_HEAVYWEIGHT))) {
3603 /* Emit a prolog bytecode to predefine the variable. */
3604 CG_SWITCH_TO_PROLOG(cg);
3605 if (!UpdateLineNumberNotes(cx, cg, pn2))
3606 return JS_FALSE;
3607 EMIT_ATOM_INDEX_OP(pn->pn_op, atomIndex);
3608 CG_SWITCH_TO_MAIN(cg);
3609 }
3610
3611 if (pn2->pn_expr) {
3612 if (op == JSOP_SETNAME)
3613 EMIT_ATOM_INDEX_OP(JSOP_BINDNAME, atomIndex);
3614 pn3 = pn2->pn_expr;
3615 if (pn->pn_op == JSOP_DEFCONST &&
3616 !js_DefineCompileTimeConstant(cx, cg, pn2->pn_atom,
3617 pn3)) {
3618 return JS_FALSE;
3619 }
3620 if (!js_EmitTree(cx, cg, pn3))
3621 return JS_FALSE;
3622 }
3623 }
3624
3625 /*
3626 * 'for (var x in o) ...' and 'for (var x = i in o) ...' call the
3627 * TOK_VAR case, but only the initialized case (a strange one that
3628 * falls out of ECMA-262's grammar) wants to run past this point.
3629 * Both cases must conditionally emit a JSOP_DEFVAR, above. Note
3630 * that the parser error-checks to ensure that pn->pn_count is 1.
3631 *
3632 * XXX Narcissus keeps track of variable declarations in the node
3633 * for the script being compiled, so there's no need to share any
3634 * conditional prolog code generation there. We could do likewise,
3635 * but it's a big change, requiring extra allocation, so probably
3636 * not worth the trouble for SpiderMonkey.
3637 */
3638 if ((pn->pn_extra & PNX_FORINVAR) && !pn2->pn_expr)
3639 break;
3640
3641 if (pn2 == pn->pn_head &&
3642 js_NewSrcNote(cx, cg,
3643 (pn->pn_op == JSOP_DEFCONST)
3644 ? SRC_CONST
3645 : SRC_VAR) < 0) {
3646 return JS_FALSE;
3647 }
3648 if (op == JSOP_ARGUMENTS) {
3649 if (js_Emit1(cx, cg, op) < 0)
3650 return JS_FALSE;
3651 } else {
3652 EMIT_ATOM_INDEX_OP(op, atomIndex);
3653 }
3654 tmp = CG_OFFSET(cg);
3655 if (noteIndex >= 0) {
3656 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
3657 return JS_FALSE;
3658 }
3659 if (!pn2->pn_next)
3660 break;
3661 off = tmp;
3662 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
3663 if (noteIndex < 0 ||
3664 js_Emit1(cx, cg, JSOP_POP) < 0) {
3665 return JS_FALSE;
3666 }
3667 }
3668 if (pn->pn_extra & PNX_POPVAR) {
3669 if (js_Emit1(cx, cg, JSOP_POP) < 0)
3670 return JS_FALSE;
3671 }
3672 break;
3673
3674 case TOK_RETURN:
3675 /* Push a return value */
3676 pn2 = pn->pn_kid;
3677 if (pn2) {
3678 if (!js_EmitTree(cx, cg, pn2))
3679 return JS_FALSE;
3680 } else {
3681 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
3682 return JS_FALSE;
3683 }
3684
3685 /*
3686 * EmitNonLocalJumpFixup mutates op to JSOP_RETRVAL after emitting a
3687 * JSOP_SETRVAL if there are open try blocks having finally clauses.
3688 * We can't simply transfer control flow to our caller in that case,
3689 * because we must gosub to those clauses from inner to outer, with
3690 * the correct stack pointer (i.e., after popping any with, for/in,
3691 * etc., slots nested inside the finally's try).
3692 */
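      /*
       * Illustrative sketch (not emitted verbatim): for
       *     try { return f(); } finally { g(); }
       * the return compiles along the lines of
       *     <call f> setrval; gosub <finally>; ... retrval
       * so the finally body runs before the frame actually returns.
       */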
3693 op = JSOP_RETURN;
3694 if (!EmitNonLocalJumpFixup(cx, cg, NULL, &op))
3695 return JS_FALSE;
3696 if (js_Emit1(cx, cg, op) < 0)
3697 return JS_FALSE;
3698 break;
3699
3700 case TOK_LC:
3701 js_PushStatement(&cg->treeContext, &stmtInfo, STMT_BLOCK, top);
3702 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3703 if (!js_EmitTree(cx, cg, pn2))
3704 return JS_FALSE;
3705 }
3706 ok = js_PopStatementCG(cx, cg);
3707 break;
3708
3709 case TOK_SEMI:
3710 pn2 = pn->pn_kid;
3711 if (pn2) {
3712 /*
3713 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
3714 * debugger, and eval frames may need the value of the ultimate
3715 * expression statement as the script's result, despite the fact
3716 * that it appears useless to the compiler.
3717 */
3718 useful = wantval = !cx->fp->fun ||
3719 !cx->fp->fun->interpreted ||
3720 (cx->fp->flags & JSFRAME_SPECIAL);
3721 if (!useful) {
3722 if (!CheckSideEffects(cx, &cg->treeContext, pn2, &useful))
3723 return JS_FALSE;
3724 }
3725 if (!useful) {
3726 CG_CURRENT_LINE(cg) = pn2->pn_pos.begin.lineno;
3727 if (!js_ReportCompileErrorNumber(cx, NULL, cg,
3728 JSREPORT_WARNING |
3729 JSREPORT_STRICT,
3730 JSMSG_USELESS_EXPR)) {
3731 return JS_FALSE;
3732 }
3733 } else {
3734 if (!js_EmitTree(cx, cg, pn2))
3735 return JS_FALSE;
3736 if (js_Emit1(cx, cg, wantval ? JSOP_POPV : JSOP_POP) < 0)
3737 return JS_FALSE;
3738 }
3739 }
3740 break;
3741
3742 case TOK_COLON:
3743 /* Emit an annotated nop so we know to decompile a label. */
3744 atom = pn->pn_atom;
3745 ale = js_IndexAtom(cx, atom, &cg->atomList);
3746 if (!ale)
3747 return JS_FALSE;
3748 pn2 = pn->pn_expr;
3749 noteIndex = js_NewSrcNote2(cx, cg,
3750 (pn2->pn_type == TOK_LC)
3751 ? SRC_LABELBRACE
3752 : SRC_LABEL,
3753 (ptrdiff_t) ALE_INDEX(ale));
3754 if (noteIndex < 0 ||
3755 js_Emit1(cx, cg, JSOP_NOP) < 0) {
3756 return JS_FALSE;
3757 }
3758
3759 /* Emit code for the labeled statement. */
3760 js_PushStatement(&cg->treeContext, &stmtInfo, STMT_LABEL,
3761 CG_OFFSET(cg));
3762 stmtInfo.label = atom;
3763 if (!js_EmitTree(cx, cg, pn2))
3764 return JS_FALSE;
3765 if (!js_PopStatementCG(cx, cg))
3766 return JS_FALSE;
3767
3768 /* If the statement was compound, emit a note for the end brace. */
3769 if (pn2->pn_type == TOK_LC) {
3770 if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
3771 js_Emit1(cx, cg, JSOP_NOP) < 0) {
3772 return JS_FALSE;
3773 }
3774 }
3775 break;
3776
3777 case TOK_COMMA:
3778 /*
3779 * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
3780 * These notes help the decompiler bracket the bytecodes generated
3781 * from each sub-expression that follows a comma.
3782 */
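      /*
       * E.g. for 'a, b, c' (sketch): each JSOP_POP between operands carries a
       * SRC_PCDELTA note whose offset, patched below via js_SetSrcNoteOffset,
       * records the pc delta up to the end of the following operand, so the
       * decompiler can bracket that sub-expression.
       */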
3783 off = noteIndex = -1;
3784 for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
3785 if (!js_EmitTree(cx, cg, pn2))
3786 return JS_FALSE;
3787 tmp = CG_OFFSET(cg);
3788 if (noteIndex >= 0) {
3789 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
3790 return JS_FALSE;
3791 }
3792 if (!pn2->pn_next)
3793 break;
3794 off = tmp;
3795 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
3796 if (noteIndex < 0 ||
3797 js_Emit1(cx, cg, JSOP_POP) < 0) {
3798 return JS_FALSE;
3799 }
3800 }
3801 break;
3802
3803 case TOK_ASSIGN:
3804 /*
3805 * Check left operand type and generate specialized code for it.
3806 * Specialize to avoid ECMA "reference type" values on the operand
3807 * stack, which impose pervasive runtime "GetValue" costs.
3808 */
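      /*
       * Sketch: 'a.b = c' emits code for a, then for c, then a single
       * atom-indexed set bytecode (e.g. JSOP_SETPROP for 'b'), rather than
       * pushing an ECMA Reference value and resolving it at runtime.
       */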
3809 pn2 = pn->pn_left;
3810 JS_ASSERT(pn2->pn_type != TOK_RP);
3811 atomIndex = (jsatomid) -1; /* Suppress warning. */
3812 switch (pn2->pn_type) {
3813 case TOK_NAME:
3814 if (!LookupArgOrVar(cx, &cg->treeContext, pn2))
3815 return JS_FALSE;
3816 if (pn2->pn_slot >= 0) {
3817 atomIndex = (jsatomid) pn2->pn_slot;
3818 } else {
3819 ale = js_IndexAtom(cx, pn2->pn_atom, &cg->atomList);
3820 if (!ale)
3821 return JS_FALSE;
3822 atomIndex = ALE_INDEX(ale);
3823 EMIT_ATOM_INDEX_OP(JSOP_BINDNAME, atomIndex);
3824 }
3825 break;
3826 case TOK_DOT:
3827 if (!js_EmitTree(cx, cg, pn2->pn_expr))
3828 return JS_FALSE;
3829 ale = js_IndexAtom(cx, pn2->pn_atom, &cg->atomList);
3830 if (!ale)
3831 return JS_FALSE;
3832 atomIndex = ALE_INDEX(ale);
3833 break;
3834 case TOK_LB:
3835 JS_ASSERT(pn->pn_arity == PN_BINARY);
3836 if (!js_EmitTree(cx, cg, pn2->pn_left))
3837 return JS_FALSE;
3838 if (!js_EmitTree(cx, cg, pn2->pn_right))
3839 return JS_FALSE;
3840 break;
3841 #if JS_HAS_LVALUE_RETURN
3842 case TOK_LP:
3843 if (!js_EmitTree(cx, cg, pn2))
3844 return JS_FALSE;
3845 break;
3846 #endif
3847 default:
3848 JS_ASSERT(0);
3849 }
3850
3851 op = pn->pn_op;
3852 #if JS_HAS_GETTER_SETTER
3853 if (op == JSOP_GETTER || op == JSOP_SETTER) {
3854 /* We'll emit these prefix bytecodes after emitting the r.h.s. */
3855 } else
3856 #endif
3857 /* If += or similar, dup the left operand and get its value. */
3858 if (op != JSOP_NOP) {
3859 switch (pn2->pn_type) {
3860 case TOK_NAME:
3861 if (pn2->pn_op != JSOP_SETNAME) {
3862 EMIT_ATOM_INDEX_OP((pn2->pn_op == JSOP_SETGVAR)
3863 ? JSOP_GETGVAR
3864 : (pn2->pn_op == JSOP_SETARG)
3865 ? JSOP_GETARG
3866 : JSOP_GETVAR,
3867 atomIndex);
3868 break;
3869 }
3870 /* FALL THROUGH */
3871 case TOK_DOT:
3872 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
3873 return JS_FALSE;
3874 EMIT_ATOM_INDEX_OP(JSOP_GETPROP, atomIndex);
3875 break;
3876 case TOK_LB:
3877 #if JS_HAS_LVALUE_RETURN
3878 case TOK_LP:
3879 #endif
3880 if (js_Emit1(cx, cg, JSOP_DUP2) < 0)
3881 return JS_FALSE;
3882 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
3883 return JS_FALSE;
3884 break;
3885 default:;
3886 }
3887 }
3888
3889 /* Now emit the right operand (it may affect the namespace). */
3890 if (!js_EmitTree(cx, cg, pn->pn_right))
3891 return JS_FALSE;
3892
3893 /* If += etc., emit the binary operator with a decompiler note. */
3894 if (op != JSOP_NOP) {
3895 if (js_NewSrcNote(cx, cg, SRC_ASSIGNOP) < 0 ||
3896 js_Emit1(cx, cg, op) < 0) {
3897 return JS_FALSE;
3898 }
3899 }
3900
3901 /* Left parts such as a.b.c and a[b].c need a decompiler note. */
3902 if (pn2->pn_type != TOK_NAME) {
3903 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
3904 return JS_FALSE;
3905 }
3906
3907 /* Finally, emit the specialized assignment bytecode. */
3908 switch (pn2->pn_type) {
3909 case TOK_NAME:
3910 if (pn2->pn_slot < 0 || !(pn2->pn_attrs & JSPROP_READONLY)) {
3911 case TOK_DOT:
3912 EMIT_ATOM_INDEX_OP(pn2->pn_op, atomIndex);
3913 }
3914 break;
3915 case TOK_LB:
3916 #if JS_HAS_LVALUE_RETURN
3917 case TOK_LP:
3918 #endif
3919 if (js_Emit1(cx, cg, JSOP_SETELEM) < 0)
3920 return JS_FALSE;
3921 break;
3922 default:;
3923 }
3924 break;
3925
3926 case TOK_HOOK:
3927 /* Emit the condition, then branch if false to the else part. */
3928 if (!js_EmitTree(cx, cg, pn->pn_kid1))
3929 return JS_FALSE;
3930 noteIndex = js_NewSrcNote(cx, cg, SRC_COND);
3931 if (noteIndex < 0)
3932 return JS_FALSE;
3933 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
3934 if (beq < 0 || !js_EmitTree(cx, cg, pn->pn_kid2))
3935 return JS_FALSE;
3936
3937 /* Jump around else, fixup the branch, emit else, fixup jump. */
3938 jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
3939 if (jmp < 0)
3940 return JS_FALSE;
3941 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
3942 if (!js_EmitTree(cx, cg, pn->pn_kid3))
3943 return JS_FALSE;
3944 CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
3945 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
3946 return JS_FALSE;
3947
3948 /*
3949 * Because each branch pushes a single value, but our stack budgeting
3950 * analysis ignores branches, we now have two values accounted for in
3951 * cg->stackDepth. Execution will follow only one path, so we must
3952 * decrement cg->stackDepth here. Failing to do this will foil code,
3953 * such as the try/catch/finally exception handling code generator,
3954 * that samples cg->stackDepth for use at runtime (JSOP_SETSP).
3955 */
3956 JS_ASSERT(cg->stackDepth > 1);
3957 cg->stackDepth--;
3958 break;
3959
3960 case TOK_OR:
3961 case TOK_AND:
3962 /*
3963 * JSOP_OR converts the operand on the stack to boolean, and if true,
3964 * leaves the original operand value on the stack and jumps; otherwise
3965 * it pops and falls into the next bytecode, which evaluates the right
3966 * operand. The jump goes around the right operand evaluation.
3967 *
3968 * JSOP_AND converts the operand on the stack to boolean, and if false,
3969 * leaves the original operand value on the stack and jumps; otherwise
3970 * it pops and falls into the right operand's bytecode.
3971 *
3972 * Avoid tail recursion for long ||...|| expressions and long &&...&&
3973 * expressions or long mixtures of ||'s and &&'s that can easily blow
3974 * the stack, by forward-linking and then backpatching all the JSOP_OR
3975 * and JSOP_AND bytecodes' immediate jump-offset operands.
3976 */
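      /*
       * Sketch for 'a || b || c': emit a, then a JSOP_BACKPATCH_POP whose
       * operand links to the next backpatch; emit b, another backpatch; then
       * emit c.  The loop below walks that chain, rewriting each placeholder
       * to JSOP_OR (or JSOP_AND) and pointing its jump just past c.
       */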
3977 pn3 = pn;
3978 if (!js_EmitTree(cx, cg, pn->pn_left))
3979 return JS_FALSE;
3980 top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
3981 if (top < 0)
3982 return JS_FALSE;
3983 jmp = top;
3984 pn2 = pn->pn_right;
3985 while (pn2->pn_type == TOK_OR || pn2->pn_type == TOK_AND) {
3986 pn = pn2;
3987 if (!js_EmitTree(cx, cg, pn->pn_left))
3988 return JS_FALSE;
3989 off = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
3990 if (off < 0)
3991 return JS_FALSE;
3992 if (!SetBackPatchDelta(cx, cg, CG_CODE(cg, jmp), off - jmp))
3993 return JS_FALSE;
3994 jmp = off;
3995 pn2 = pn->pn_right;
3996 }
3997 if (!js_EmitTree(cx, cg, pn2))
3998 return JS_FALSE;
3999 off = CG_OFFSET(cg);
4000 do {
4001 pc = CG_CODE(cg, top);
4002 tmp = GetJumpOffset(cg, pc);
4003 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
4004 *pc = pn3->pn_op;
4005 top += tmp;
4006 } while ((pn3 = pn3->pn_right) != pn2);
4007 break;
4008
4009 case TOK_BITOR:
4010 case TOK_BITXOR:
4011 case TOK_BITAND:
4012 case TOK_EQOP:
4013 case TOK_RELOP:
4014 #if JS_HAS_IN_OPERATOR
4015 case TOK_IN:
4016 #endif
4017 #if JS_HAS_INSTANCEOF
4018 case TOK_INSTANCEOF:
4019 #endif
4020 case TOK_SHOP:
4021 case TOK_PLUS:
4022 case TOK_MINUS:
4023 case TOK_STAR:
4024 case TOK_DIVOP:
4025 if (pn->pn_arity == PN_LIST) {
4026 /* Left-associative operator chain: avoid too much recursion. */
4027 pn2 = pn->pn_head;
4028 if (!js_EmitTree(cx, cg, pn2))
4029 return JS_FALSE;
4030 op = pn->pn_op;
4031 while ((pn2 = pn2->pn_next) != NULL) {
4032 if (!js_EmitTree(cx, cg, pn2))
4033 return JS_FALSE;
4034 if (js_Emit1(cx, cg, op) < 0)
4035 return JS_FALSE;
4036 }
4037 } else {
4038 /* Binary operators that evaluate both operands unconditionally. */
4039 if (!js_EmitTree(cx, cg, pn->pn_left))
4040 return JS_FALSE;
4041 if (!js_EmitTree(cx, cg, pn->pn_right))
4042 return JS_FALSE;
4043 if (js_Emit1(cx, cg, pn->pn_op) < 0)
4044 return JS_FALSE;
4045 }
4046 break;
4047
4048 #if JS_HAS_EXCEPTIONS
4049 case TOK_THROW:
4050 #endif
4051 case TOK_UNARYOP:
4052 /* Unary op, including unary +/-. */
4053 if (!js_EmitTree(cx, cg, pn->pn_kid))
4054 return JS_FALSE;
4055 if (js_Emit1(cx, cg, pn->pn_op) < 0)
4056 return JS_FALSE;
4057 break;
4058
4059 case TOK_INC:
4060 case TOK_DEC:
4061 /* Emit lvalue-specialized code for ++/-- operators. */
4062 pn2 = pn->pn_kid;
4063 JS_ASSERT(pn2->pn_type != TOK_RP);
4064 op = pn->pn_op;
4065 switch (pn2->pn_type) {
4066 case TOK_NAME:
4067 pn2->pn_op = op;
4068 if (!LookupArgOrVar(cx, &cg->treeContext, pn2))
4069 return JS_FALSE;
4070 op = pn2->pn_op;
4071 if (pn2->pn_slot >= 0) {
4072 if (pn2->pn_attrs & JSPROP_READONLY) {
4073 /* Incrementing a declared const: just get its value. */
4074 op = ((js_CodeSpec[op].format & JOF_TYPEMASK) == JOF_CONST)
4075 ? JSOP_GETGVAR
4076 : JSOP_GETVAR;
4077 }
4078 atomIndex = (jsatomid) pn2->pn_slot;
4079 EMIT_ATOM_INDEX_OP(op, atomIndex);
4080 } else {
4081 if (!EmitAtomOp(cx, pn2, op, cg))
4082 return JS_FALSE;
4083 }
4084 break;
4085 case TOK_DOT:
4086 if (!EmitPropOp(cx, pn2, op, cg))
4087 return JS_FALSE;
4088 break;
4089 case TOK_LB:
4090 if (!EmitElemOp(cx, pn2, op, cg))
4091 return JS_FALSE;
4092 break;
4093 #if JS_HAS_LVALUE_RETURN
4094 case TOK_LP:
4095 if (!js_EmitTree(cx, cg, pn2))
4096 return JS_FALSE;
4097 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
4098 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
4099 return JS_FALSE;
4100 }
4101 if (js_Emit1(cx, cg, op) < 0)
4102 return JS_FALSE;
4103 break;
4104 #endif
4105 default:
4106 JS_ASSERT(0);
4107 }
4108 break;
4109
4110 case TOK_DELETE:
4111 /* Under ECMA 3, deleting a non-reference returns true. */
4112 pn2 = pn->pn_kid;
4113 switch (pn2->pn_type) {
4114 case TOK_NAME:
4115 pn2->pn_op = JSOP_DELNAME;
4116 if (!LookupArgOrVar(cx, &cg->treeContext, pn2))
4117 return JS_FALSE;
4118 op = pn2->pn_op;
4119 if (op == JSOP_FALSE) {
4120 if (js_Emit1(cx, cg, op) < 0)
4121 return JS_FALSE;
4122 } else {
4123 if (!EmitAtomOp(cx, pn2, op, cg))
4124 return JS_FALSE;
4125 }
4126 break;
4127 case TOK_DOT:
4128 if (!EmitPropOp(cx, pn2, JSOP_DELPROP, cg))
4129 return JS_FALSE;
4130 break;
4131 case TOK_LB:
4132 if (!EmitElemOp(cx, pn2, JSOP_DELELEM, cg))
4133 return JS_FALSE;
4134 break;
4135 default:
4136 if (js_Emit1(cx, cg, JSOP_TRUE) < 0)
4137 return JS_FALSE;
4138 }
4139 break;
4140
4141 case TOK_DOT:
4142 /*
4143 * Pop a stack operand, convert it to object, get a property named by
4144 * this bytecode's immediate-indexed atom operand, and push its value
4145 * (not a reference to it). This bytecode sets the virtual machine's
4146 * "obj" register to the left operand's ToObject conversion result,
4147 * for use by JSOP_PUSHOBJ.
4148 */
4149 ok = EmitPropOp(cx, pn, pn->pn_op, cg);
4150 break;
4151
4152 case TOK_LB:
4153 /*
4154 * Pop two operands, convert the left one to object and the right one
4155 * to property name (atom or tagged int), get the named property, and
4156 * push its value. Set the "obj" register to the result of ToObject
4157 * on the left operand.
4158 */
4159 ok = EmitElemOp(cx, pn, pn->pn_op, cg);
4160 break;
4161
4162 case TOK_NEW:
4163 case TOK_LP:
4164 /*
4165 * Emit function call or operator new (constructor call) code.
4166 * First, emit code for the left operand to evaluate the callable or
4167 * constructable object expression.
4168 */
4169 pn2 = pn->pn_head;
4170 if (!js_EmitTree(cx, cg, pn2))
4171 return JS_FALSE;
4172
4173 /* Remember start of callable-object bytecode for decompilation hint. */
4174 off = pn2->pn_offset;
4175
4176 /*
4177 * Push the virtual machine's "obj" register, which was set by a name,
4178 * property, or element get (or set) bytecode.
4179 */
4180 if (js_Emit1(cx, cg, JSOP_PUSHOBJ) < 0)
4181 return JS_FALSE;
4182
4183 /*
4184 * Emit code for each argument in order, then emit the JSOP_*CALL or
4185 * JSOP_NEW bytecode with a two-byte immediate telling how many args
4186 * were pushed on the operand stack.
4187 */
4188 for (pn2 = pn2->pn_next; pn2; pn2 = pn2->pn_next) {
4189 if (!js_EmitTree(cx, cg, pn2))
4190 return JS_FALSE;
4191 }
4192 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - off) < 0)
4193 return JS_FALSE;
4194 argc = pn->pn_count - 1;
4195 if (js_Emit3(cx, cg, pn->pn_op, ARGC_HI(argc), ARGC_LO(argc)) < 0)
4196 return JS_FALSE;
4197 break;
4198
4199 #if JS_HAS_INITIALIZERS
4200 case TOK_RB:
4201 /*
4202 * Emit code for [a, b, c] of the form:
4203 * t = new Array; t[0] = a; t[1] = b; t[2] = c; t;
4204 * but use a stack slot for t and avoid dup'ing and popping it via
4205 * the JSOP_NEWINIT and JSOP_INITELEM bytecodes.
4206 */
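      /*
       * Rough bytecode shape for [a, b] (illustrative, modulo source notes):
       *     name Array; pushobj; newinit;
       *     zero; <a>; initelem;  one; <b>; initelem;
       *     endinit
       */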
4207 ale = js_IndexAtom(cx, cx->runtime->atomState.ArrayAtom,
4208 &cg->atomList);
4209 if (!ale)
4210 return JS_FALSE;
4211 EMIT_ATOM_INDEX_OP(JSOP_NAME, ALE_INDEX(ale));
4212 if (js_Emit1(cx, cg, JSOP_PUSHOBJ) < 0)
4213 return JS_FALSE;
4214 if (js_Emit1(cx, cg, JSOP_NEWINIT) < 0)
4215 return JS_FALSE;
4216
4217 pn2 = pn->pn_head;
4218 #if JS_HAS_SHARP_VARS
4219 if (pn2 && pn2->pn_type == TOK_DEFSHARP) {
4220 EMIT_ATOM_INDEX_OP(JSOP_DEFSHARP, (jsatomid)pn2->pn_num);
4221 pn2 = pn2->pn_next;
4222 }
4223 #endif
4224
4225 for (atomIndex = 0; pn2; pn2 = pn2->pn_next) {
4226 /* PrimaryExpr enforced ATOM_INDEX_LIMIT, so in-line optimize. */
4227 JS_ASSERT(atomIndex < ATOM_INDEX_LIMIT);
4228 if (atomIndex == 0) {
4229 if (js_Emit1(cx, cg, JSOP_ZERO) < 0)
4230 return JS_FALSE;
4231 } else if (atomIndex == 1) {
4232 if (js_Emit1(cx, cg, JSOP_ONE) < 0)
4233 return JS_FALSE;
4234 } else {
4235 EMIT_ATOM_INDEX_OP(JSOP_UINT16, (jsatomid)atomIndex);
4236 }
4237
4238 /* Sub-optimal: holes in a sparse initializer are void-filled. */
4239 if (pn2->pn_type == TOK_COMMA) {
4240 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
4241 return JS_FALSE;
4242 } else {
4243 if (!js_EmitTree(cx, cg, pn2))
4244 return JS_FALSE;
4245 }
4246 if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
4247 return JS_FALSE;
4248
4249 atomIndex++;
4250 }
4251
4252 if (pn->pn_extra & PNX_ENDCOMMA) {
4253 /* Emit a source note so we know to decompile an extra comma. */
4254 if (js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
4255 return JS_FALSE;
4256 }
4257
4258 /* Emit an op for sharp array cleanup and decompilation. */
4259 if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
4260 return JS_FALSE;
4261 break;
4262
4263 case TOK_RC:
4264 /*
4265 * Emit code for {p:a, '%q':b, 2:c} of the form:
4266 * t = new Object; t.p = a; t['%q'] = b; t[2] = c; t;
4267 * but use a stack slot for t and avoid dup'ing and popping it via
4268 * the JSOP_NEWINIT and JSOP_INITELEM bytecodes.
4269 */
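      /*
       * Rough shape for {p: a, 2: c} (illustrative): name Object; pushobj;
       * newinit; then <a>; initprop "p"; then push 2; <c>; initelem; and
       * finally endinit.
       */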
4270 ale = js_IndexAtom(cx, cx->runtime->atomState.ObjectAtom,
4271 &cg->atomList);
4272 if (!ale)
4273 return JS_FALSE;
4274 EMIT_ATOM_INDEX_OP(JSOP_NAME, ALE_INDEX(ale));
4275
4276 if (js_Emit1(cx, cg, JSOP_PUSHOBJ) < 0)
4277 return JS_FALSE;
4278 if (js_Emit1(cx, cg, JSOP_NEWINIT) < 0)
4279 return JS_FALSE;
4280
4281 pn2 = pn->pn_head;
4282 #if JS_HAS_SHARP_VARS
4283 if (pn2 && pn2->pn_type == TOK_DEFSHARP) {
4284 EMIT_ATOM_INDEX_OP(JSOP_DEFSHARP, (jsatomid)pn2->pn_num);
4285 pn2 = pn2->pn_next;
4286 }
4287 #endif
4288
4289 for (; pn2; pn2 = pn2->pn_next) {
4290 /* Emit an index for t[2], else map an atom for t.p or t['%q']. */
4291 pn3 = pn2->pn_left;
4292 switch (pn3->pn_type) {
4293 case TOK_NUMBER:
4294 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
4295 return JS_FALSE;
4296 break;
4297 case TOK_NAME:
4298 case TOK_STRING:
4299 ale = js_IndexAtom(cx, pn3->pn_atom, &cg->atomList);
4300 if (!ale)
4301 return JS_FALSE;
4302 break;
4303 default:
4304 JS_ASSERT(0);
4305 }
4306
4307 /* Emit code for the property initializer. */
4308 if (!js_EmitTree(cx, cg, pn2->pn_right))
4309 return JS_FALSE;
4310
4311 #if JS_HAS_GETTER_SETTER
4312 op = pn2->pn_op;
4313 if (op == JSOP_GETTER || op == JSOP_SETTER) {
4314 if (js_Emit1(cx, cg, op) < 0)
4315 return JS_FALSE;
4316 }
4317 #endif
4318 /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
4319 if (pn3->pn_type == TOK_NUMBER) {
4320 if (js_NewSrcNote2(cx, cg, SRC_LABEL, 0) < 0)
4321 return JS_FALSE;
4322 if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
4323 return JS_FALSE;
4324 } else {
4325 EMIT_ATOM_INDEX_OP(JSOP_INITPROP, ALE_INDEX(ale));
4326 }
4327 }
4328
4329 /* Emit an op for sharpArray cleanup and decompilation. */
4330 if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
4331 return JS_FALSE;
4332 break;
4333
4334 #if JS_HAS_SHARP_VARS
4335 case TOK_DEFSHARP:
4336 if (!js_EmitTree(cx, cg, pn->pn_kid))
4337 return JS_FALSE;
4338 EMIT_ATOM_INDEX_OP(JSOP_DEFSHARP, (jsatomid) pn->pn_num);
4339 break;
4340
4341 case TOK_USESHARP:
4342 EMIT_ATOM_INDEX_OP(JSOP_USESHARP, (jsatomid) pn->pn_num);
4343 break;
4344 #endif /* JS_HAS_SHARP_VARS */
4345 #endif /* JS_HAS_INITIALIZERS */
4346
4347 case TOK_RP:
4348 /*
4349 * The node for (e) has e as its kid, enabling users who want to nest
4350 * assignment expressions in conditions to avoid the error correction
4351 * done by Condition (from x = y to x == y) by double-parenthesizing.
4352 */
4353 if (!js_EmitTree(cx, cg, pn->pn_kid))
4354 return JS_FALSE;
4355 if (js_Emit1(cx, cg, JSOP_GROUP) < 0)
4356 return JS_FALSE;
4357 break;
4358
4359 case TOK_NAME:
4360 if (!LookupArgOrVar(cx, &cg->treeContext, pn))
4361 return JS_FALSE;
4362 op = pn->pn_op;
4363 if (op == JSOP_ARGUMENTS) {
4364 if (js_Emit1(cx, cg, op) < 0)
4365 return JS_FALSE;
4366 break;
4367 }
4368 if (pn->pn_slot >= 0) {
4369 atomIndex = (jsatomid) pn->pn_slot;
4370 EMIT_ATOM_INDEX_OP(op, atomIndex);
4371 break;
4372 }
4373 /* FALL THROUGH */
4374 case TOK_STRING:
4375 case TOK_OBJECT:
4376 /*
4377 * The scanner and parser associate JSOP_NAME with TOK_NAME, although
4378 * other bytecodes may result instead (JSOP_BINDNAME/JSOP_SETNAME,
4379 * JSOP_FORNAME, etc.). Among JSOP_*NAME* variants, only JSOP_NAME
4380 * may generate the first operand of a call or new expression, so only
4381 * it sets the "obj" virtual machine register to the object along the
4382 * scope chain in which the name was found.
4383 *
4384 * Token types for STRING and OBJECT have corresponding bytecode ops
4385 * in pn_op and emit the same format as NAME, so they share this code.
4386 */
4387 ok = EmitAtomOp(cx, pn, pn->pn_op, cg);
4388 break;
4389
4390 case TOK_NUMBER:
4391 ok = EmitNumberOp(cx, pn->pn_dval, cg);
4392 break;
4393
4394 case TOK_PRIMARY:
4395 if (js_Emit1(cx, cg, pn->pn_op) < 0)
4396 return JS_FALSE;
4397 break;
4398
4399 #if JS_HAS_DEBUGGER_KEYWORD
4400 case TOK_DEBUGGER:
4401 if (js_Emit1(cx, cg, JSOP_DEBUGGER) < 0)
4402 return JS_FALSE;
4403 break;
4404 #endif /* JS_HAS_DEBUGGER_KEYWORD */
4405
4406 default:
4407 JS_ASSERT(0);
4408 }
4409
4410 if (ok && --cg->emitLevel == 0 && cg->spanDeps)
4411 ok = OptimizeSpanDeps(cx, cg);
4412
4413 return ok;
4414 }
4415
4416 JS_FRIEND_DATA(JSSrcNoteSpec) js_SrcNoteSpec[] = {
4417 {"null", 0, 0, 0},
4418 {"if", 0, 0, 0},
4419 {"if-else", 1, 0, 1},
4420 {"while", 1, 0, 1},
4421 {"for", 3, 1, 1},
4422 {"continue", 0, 0, 0},
4423 {"var", 0, 0, 0},
4424 {"pcdelta", 1, 0, 1},
4425 {"assignop", 0, 0, 0},
4426 {"cond", 1, 0, 1},
4427 {"reserved0", 0, 0, 0},
4428 {"hidden", 0, 0, 0},
4429 {"pcbase", 1, 0, -1},
4430 {"label", 1, 0, 0},
4431 {"labelbrace", 1, 0, 0},
4432 {"endbrace", 0, 0, 0},
4433 {"break2label", 1, 0, 0},
4434 {"cont2label", 1, 0, 0},
4435 {"switch", 2, 0, 1},
4436 {"funcdef", 1, 0, 0},
4437 {"catch", 1, 11, 1},
4438 {"const", 0, 0, 0},
4439 {"newline", 0, 0, 0},
4440 {"setline", 1, 0, 0},
4441 {"xdelta", 0, 0, 0},
4442 };
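/*
 * Each entry above pairs a source-note name with its arity, i.e. the number
 * of offset operands that follow the note (js_SrcNoteLength walks them); the
 * trailing columns are additional per-type parameters -- see JSSrcNoteSpec in
 * jsemit.h for their exact meanings.  (Descriptive gloss only.)
 */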
4443
4444 static intN
4445 AllocSrcNote(JSContext *cx, JSCodeGenerator *cg)
4446 {
4447 intN index;
4448 JSArenaPool *pool;
4449 size_t size;
4450
4451 index = CG_NOTE_COUNT(cg);
4452 if (((uintN)index & CG_NOTE_MASK(cg)) == 0) {
4453 pool = cg->notePool;
4454 size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
4455 if (!CG_NOTES(cg)) {
4456 /* Allocate the first note array lazily; leave noteMask alone. */
4457 JS_ARENA_ALLOCATE_CAST(CG_NOTES(cg), jssrcnote *, pool, size);
4458 } else {
4459 /* Grow by doubling note array size; update noteMask on success. */
4460 JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
4461 if (CG_NOTES(cg))
4462 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
4463 }
4464 if (!CG_NOTES(cg)) {
4465 JS_ReportOutOfMemory(cx);
4466 return -1;
4467 }
4468 }
4469
4470 CG_NOTE_COUNT(cg) = index + 1;
4471 return index;
4472 }
4473
4474 intN
4475 js_NewSrcNote(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type)
4476 {
4477 intN index, n;
4478 jssrcnote *sn;
4479 ptrdiff_t offset, delta, xdelta;
4480
4481 /*
4482 * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
4483 * incrementing CG_NOTE_COUNT(cg).
4484 */
4485 index = AllocSrcNote(cx, cg);
4486 if (index < 0)
4487 return -1;
4488 sn = &CG_NOTES(cg)[index];
4489
4490 /*
4491 * Compute delta from the last annotated bytecode's offset. If it's too
4492 * big to fit in sn, allocate one or more xdelta notes and reset sn.
4493 */
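    /*
     * Sketch: a note's built-in delta field is small, so a large gap since
     * the last annotated bytecode is covered by emitting SRC_XDELTA notes,
     * each absorbing up to SN_XDELTA_MASK bytes, until the remainder fits in
     * the real note's delta.
     */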
4494 offset = CG_OFFSET(cg);
4495 delta = offset - CG_LAST_NOTE_OFFSET(cg);
4496 CG_LAST_NOTE_OFFSET(cg) = offset;
4497 if (delta >= SN_DELTA_LIMIT) {
4498 do {
4499 xdelta = JS_MIN(delta, SN_XDELTA_MASK);
4500 SN_MAKE_XDELTA(sn, xdelta);
4501 delta -= xdelta;
4502 index = AllocSrcNote(cx, cg);
4503 if (index < 0)
4504 return -1;
4505 sn = &CG_NOTES(cg)[index];
4506 } while (delta >= SN_DELTA_LIMIT);
4507 }
4508
4509 /*
4510 * Initialize type and delta, then allocate the minimum number of notes
4511 * needed for type's arity. Usually, we won't need more, but if an offset
4512 * does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
4513 */
4514 SN_MAKE_NOTE(sn, type, delta);
4515 for (n = (intN)js_SrcNoteSpec[type].arity; n > 0; n--) {
4516 if (js_NewSrcNote(cx, cg, SRC_NULL) < 0)
4517 return -1;
4518 }
4519 return index;
4520 }
4521
4522 intN
4523 js_NewSrcNote2(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
4524 ptrdiff_t offset)
4525 {
4526 intN index;
4527
4528 index = js_NewSrcNote(cx, cg, type);
4529 if (index >= 0) {
4530 if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset))
4531 return -1;
4532 }
4533 return index;
4534 }
4535
4536 intN
4537 js_NewSrcNote3(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
4538 ptrdiff_t offset1, ptrdiff_t offset2)
4539 {
4540 intN index;
4541
4542 index = js_NewSrcNote(cx, cg, type);
4543 if (index >= 0) {
4544 if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset1))
4545 return -1;
4546 if (!js_SetSrcNoteOffset(cx, cg, index, 1, offset2))
4547 return -1;
4548 }
4549 return index;
4550 }
4551
4552 static JSBool
4553 GrowSrcNotes(JSContext *cx, JSCodeGenerator *cg)
4554 {
4555 JSArenaPool *pool;
4556 size_t size;
4557
4558 /* Grow by doubling note array size; update noteMask on success. */
4559 pool = cg->notePool;
4560 size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
4561 JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
4562 if (!CG_NOTES(cg)) {
4563 JS_ReportOutOfMemory(cx);
4564 return JS_FALSE;
4565 }
4566 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
4567 return JS_TRUE;
4568 }
4569
4570 jssrcnote *
4571 js_AddToSrcNoteDelta(JSContext *cx, JSCodeGenerator *cg, jssrcnote *sn,
4572 ptrdiff_t delta)
4573 {
4574 ptrdiff_t base, limit, newdelta, diff;
4575 intN index;
4576
4577 /*
4578 * Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
4579 * main script note deltas, and only by a small positive amount.
4580 */
4581 JS_ASSERT(cg->current == &cg->main);
4582 JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);
4583
4584 base = SN_DELTA(sn);
4585 limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
4586 newdelta = base + delta;
4587 if (newdelta < limit) {
4588 SN_SET_DELTA(sn, newdelta);
4589 } else {
4590 index = sn - cg->main.notes;
4591 if ((cg->main.noteCount & cg->main.noteMask) == 0) {
4592 if (!GrowSrcNotes(cx, cg))
4593 return NULL;
4594 sn = cg->main.notes + index;
4595 }
4596 diff = cg->main.noteCount - index;
4597 cg->main.noteCount++;
4598 memmove(sn + 1, sn, SRCNOTE_SIZE(diff));
4599 SN_MAKE_XDELTA(sn, delta);
4600 sn++;
4601 }
4602 return sn;
4603 }
4604
4605 uintN
4606 js_SrcNoteLength(jssrcnote *sn)
4607 {
4608 uintN arity;
4609 jssrcnote *base;
4610
4611 arity = (intN)js_SrcNoteSpec[SN_TYPE(sn)].arity;
4612 for (base = sn++; arity; sn++, arity--) {
4613 if (*sn & SN_3BYTE_OFFSET_FLAG)
4614 sn += 2;
4615 }
4616 return sn - base;
4617 }
4618
4619 JS_FRIEND_API(ptrdiff_t)
4620 js_GetSrcNoteOffset(jssrcnote *sn, uintN which)
4621 {
4622 /* Find the offset numbered which (i.e., skip exactly which offsets). */
4623 JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
4624 JS_ASSERT(which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
4625 for (sn++; which; sn++, which--) {
4626 if (*sn & SN_3BYTE_OFFSET_FLAG)
4627 sn += 2;
4628 }
4629 if (*sn & SN_3BYTE_OFFSET_FLAG) {
4630 return (ptrdiff_t)(((uint32)(sn[0] & SN_3BYTE_OFFSET_MASK) << 16)
4631 | (sn[1] << 8)
4632 | sn[2]);
4633 }
4634 return (ptrdiff_t)*sn;
4635 }
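
/*
 * Offset operands that fit in a single srcnote byte are stored in place;
 * larger values use three bytes, most significant first, with
 * SN_3BYTE_OFFSET_FLAG set in the first byte (see js_SetSrcNoteOffset below,
 * which inserts the two extra bytes on demand).
 */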
4636
4637 JSBool
4638 js_SetSrcNoteOffset(JSContext *cx, JSCodeGenerator *cg, uintN index,
4639 uintN which, ptrdiff_t offset)
4640 {
4641 jssrcnote *sn;
4642 ptrdiff_t diff;
4643
4644 if ((jsuword)offset >= (jsuword)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG << 16)) {
4645 ReportStatementTooLarge(cx, cg);
4646 return JS_FALSE;
4647 }
4648
4649 /* Find the offset numbered which (i.e., skip exactly which offsets). */
4650 sn = &CG_NOTES(cg)[index];
4651 JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
4652 JS_ASSERT(which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
4653 for (sn++; which; sn++, which--) {
4654 if (*sn & SN_3BYTE_OFFSET_FLAG)
4655 sn += 2;
4656 }
4657
4658 /* See if the new offset requires three bytes. */
4659 if (offset > (ptrdiff_t)SN_3BYTE_OFFSET_MASK) {
4660 /* Maybe this offset was already set to a three-byte value. */
4661 if (!(*sn & SN_3BYTE_OFFSET_FLAG)) {
4662 /* Losing, need to insert another two bytes for this offset. */
4663 index = PTRDIFF(sn, CG_NOTES(cg), jssrcnote);
4664
4665 /*
4666 * Simultaneously test to see if the source note array must grow to
4667              * accommodate either the first or second byte of additional storage
4668 * required by this 3-byte offset.
4669 */
4670 if (((CG_NOTE_COUNT(cg) + 1) & CG_NOTE_MASK(cg)) <= 1) {
4671 if (!GrowSrcNotes(cx, cg))
4672 return JS_FALSE;
4673 sn = CG_NOTES(cg) + index;
4674 }
4675 CG_NOTE_COUNT(cg) += 2;
4676
4677 diff = CG_NOTE_COUNT(cg) - (index + 3);
4678 JS_ASSERT(diff >= 0);
4679 if (diff > 0)
4680 memmove(sn + 3, sn + 1, SRCNOTE_SIZE(diff));
4681 }
4682 *sn++ = (jssrcnote)(SN_3BYTE_OFFSET_FLAG | (offset >> 16));
4683 *sn++ = (jssrcnote)(offset >> 8);
4684 }
4685 *sn = (jssrcnote)offset;
4686 return JS_TRUE;
4687 }
4688
4689 #ifdef DEBUG_brendan
4690 #define NBINS 10
4691 static uint32 hist[NBINS];
4692
4693 void DumpSrcNoteSizeHist()
4694 {
4695 static FILE *fp;
4696 int i, n;
4697
4698 if (!fp) {
4699 fp = fopen("/tmp/srcnotes.hist", "w");
4700 if (!fp)
4701 return;
4702 setlinebuf(fp);
4703 }
4704 fprintf(fp, "SrcNote size histogram:\n");
4705 for (i = 0; i < NBINS; i++) {
4706 fprintf(fp, "%4u %4u ", JS_BIT(i), hist[i]);
4707 for (n = (int) JS_HOWMANY(hist[i], 10); n > 0; --n)
4708 fputc('*', fp);
4709 fputc('\n', fp);
4710 }
4711 fputc('\n', fp);
4712 }
4713 #endif
4714
4715 /*
4716 * Fill in the storage at notes with prolog and main srcnotes; the space at
4717 * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
4718 * SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
4719 * CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
4720 */
4721 JSBool
4722 js_FinishTakingSrcNotes(JSContext *cx, JSCodeGenerator *cg, jssrcnote *notes)
4723 {
4724 uintN prologCount, mainCount, totalCount;
4725 ptrdiff_t offset, delta;
4726 jssrcnote *sn;
4727
4728 JS_ASSERT(cg->current == &cg->main);
4729
4730 prologCount = cg->prolog.noteCount;
4731 if (prologCount && cg->prolog.currentLine != cg->firstLine) {
4732 CG_SWITCH_TO_PROLOG(cg);
4733 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)cg->firstLine) < 0)
4734 return JS_FALSE;
4735 prologCount = cg->prolog.noteCount;
4736 CG_SWITCH_TO_MAIN(cg);
4737 } else {
4738 /*
4739 * Either no prolog srcnotes, or no line number change over prolog.
4740 * We don't need a SRC_SETLINE, but we may need to adjust the offset
4741 * of the first main note, by adding to its delta and possibly even
4742 * prepending SRC_XDELTA notes to it to account for prolog bytecodes
4743 * that came at and after the last annotated bytecode.
4744 */
4745 offset = CG_PROLOG_OFFSET(cg) - cg->prolog.lastNoteOffset;
4746 JS_ASSERT(offset >= 0);
4747 if (offset > 0) {
4748 /* NB: Use as much of the first main note's delta as we can. */
4749 sn = cg->main.notes;
4750 delta = SN_IS_XDELTA(sn)
4751 ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
4752 : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
4753 if (offset < delta)
4754 delta = offset;
4755 for (;;) {
4756 if (!js_AddToSrcNoteDelta(cx, cg, sn, delta))
4757 return JS_FALSE;
4758 offset -= delta;
4759 if (offset == 0)
4760 break;
4761 delta = JS_MIN(offset, SN_XDELTA_MASK);
4762 sn = cg->main.notes;
4763 }
4764 }
4765 }
4766
4767 mainCount = cg->main.noteCount;
4768 totalCount = prologCount + mainCount;
4769 if (prologCount)
4770 memcpy(notes, cg->prolog.notes, SRCNOTE_SIZE(prologCount));
4771 memcpy(notes + prologCount, cg->main.notes, SRCNOTE_SIZE(mainCount));
4772     SN_MAKE_TERMINATOR(&notes[totalCount]);
4773
4774 #ifdef DEBUG_brendan
4775 { int bin = JS_CeilingLog2(totalCount);
4776 if (bin >= NBINS)
4777 bin = NBINS - 1;
4778 ++hist[bin];
4779 }
4780 #endif
4781 return JS_TRUE;
4782 }
4783
4784 JSBool
4785 js_AllocTryNotes(JSContext *cx, JSCodeGenerator *cg)
4786 {
4787 size_t size, incr;
4788 ptrdiff_t delta;
4789
4790 size = TRYNOTE_SIZE(cg->treeContext.tryCount);
4791 if (size <= cg->tryNoteSpace)
4792 return JS_TRUE;
4793
4794 /*
4795 * Allocate trynotes from cx->tempPool.
4796 * XXX Too much growing and we bloat, as other tempPool allocators block
4797 * in-place growth, and we never recycle old free space in an arena.
4798 * YYY But once we consume an entire arena, we'll realloc it, letting the
4799 * malloc heap recycle old space, while still freeing _en masse_ via the
4800 * arena pool.
4801 */
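    /*
     * Example (illustrative): a script containing three try statements still
     * gets a full TRYNOTE_CHUNK worth of entries in its initial allocation
     * here; later growth is likewise rounded up to whole chunks.
     */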
4802 if (!cg->tryBase) {
4803 size = JS_ROUNDUP(size, TRYNOTE_SIZE(TRYNOTE_CHUNK));
4804 JS_ARENA_ALLOCATE_CAST(cg->tryBase, JSTryNote *, &cx->tempPool, size);
4805 if (!cg->tryBase)
4806 return JS_FALSE;
4807 cg->tryNoteSpace = size;
4808 cg->tryNext = cg->tryBase;
4809 } else {
4810 delta = PTRDIFF((char *)cg->tryNext, (char *)cg->tryBase, char);
4811 incr = size - cg->tryNoteSpace;
4812 incr = JS_ROUNDUP(incr, TRYNOTE_SIZE(TRYNOTE_CHUNK));
4813 size = cg->tryNoteSpace;
4814 JS_ARENA_GROW_CAST(cg->tryBase, JSTryNote *, &cx->tempPool, size, incr);
4815 if (!cg->tryBase)
4816 return JS_FALSE;
4817 cg->tryNoteSpace = size + incr;
4818 cg->tryNext = (JSTryNote *)((char *)cg->tryBase + delta);
4819 }
4820 return JS_TRUE;
4821 }
4822
4823 JSTryNote *
4824 js_NewTryNote(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t start,
4825 ptrdiff_t end, ptrdiff_t catchStart)
4826 {
4827 JSTryNote *tn;
4828
4829 JS_ASSERT(cg->tryBase <= cg->tryNext);
4830 JS_ASSERT(catchStart >= 0);
4831 tn = cg->tryNext++;
4832 tn->start = start;
4833 tn->length = end - start;
4834 tn->catchStart = catchStart;
4835 return tn;
4836 }
4837
4838 void
4839 js_FinishTakingTryNotes(JSContext *cx, JSCodeGenerator *cg, JSTryNote *notes)
4840 {
4841 uintN count;
4842
4843 count = PTRDIFF(cg->tryNext, cg->tryBase, JSTryNote);
4844 if (!count)
4845 return;
4846
4847 memcpy(notes, cg->tryBase, TRYNOTE_SIZE(count));
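    /*
     * Terminate with a sentinel entry: zero start and catchStart, with length
     * set to the total bytecode length (descriptive note on the apparent
     * convention relied on by the exception-handling consumers).
     */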
4848 notes[count].start = 0;
4849 notes[count].length = CG_OFFSET(cg);
4850 notes[count].catchStart = 0;
4851 }
4852