1 /* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2  * vim: set ts=8 sw=4 et tw=78:
3  *
4  * ***** BEGIN LICENSE BLOCK *****
5  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6  *
7  * The contents of this file are subject to the Mozilla Public License Version
8  * 1.1 (the "License"); you may not use this file except in compliance with
9  * the License. You may obtain a copy of the License at
10  * http://www.mozilla.org/MPL/
11  *
12  * Software distributed under the License is distributed on an "AS IS" basis,
13  * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14  * for the specific language governing rights and limitations under the
15  * License.
16  *
17  * The Original Code is Mozilla Communicator client code, released
18  * March 31, 1998.
19  *
20  * The Initial Developer of the Original Code is
21  * Netscape Communications Corporation.
22  * Portions created by the Initial Developer are Copyright (C) 1998
23  * the Initial Developer. All Rights Reserved.
24  *
25  * Contributor(s):
26  *
27  * Alternatively, the contents of this file may be used under the terms of
28  * either of the GNU General Public License Version 2 or later (the "GPL"),
29  * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30  * in which case the provisions of the GPL or the LGPL are applicable instead
31  * of those above. If you wish to allow use of your version of this file only
32  * under the terms of either the GPL or the LGPL, and not to allow others to
33  * use your version of this file under the terms of the MPL, indicate your
34  * decision by deleting the provisions above and replace them with the notice
35  * and other provisions required by the GPL or the LGPL. If you do not delete
36  * the provisions above, a recipient may use your version of this file under
37  * the terms of any one of the MPL, the GPL or the LGPL.
38  *
39  * ***** END LICENSE BLOCK ***** */
40 
41 /*
42  * JS bytecode generation.
43  */
44 #include "jsstddef.h"
45 #ifdef HAVE_MEMORY_H
46 #include <memory.h>
47 #endif
48 #include <string.h>
49 #include "jstypes.h"
50 #include "jsarena.h" /* Added by JSIFY */
51 #include "jsutil.h" /* Added by JSIFY */
52 #include "jsbit.h"
53 #include "jsprf.h"
54 #include "jsapi.h"
55 #include "jsatom.h"
56 #include "jscntxt.h"
57 #include "jsconfig.h"
58 #include "jsemit.h"
59 #include "jsfun.h"
60 #include "jsnum.h"
61 #include "jsopcode.h"
62 #include "jsparse.h"
63 #include "jsregexp.h"
64 #include "jsscan.h"
65 #include "jsscope.h"
66 #include "jsscript.h"
67 
68 /* Allocation chunk counts, must be powers of two in general. */
69 #define BYTECODE_CHUNK  256     /* code allocation increment */
70 #define SRCNOTE_CHUNK   64      /* initial srcnote allocation increment */
71 #define TRYNOTE_CHUNK   64      /* trynote allocation increment */
72 
73 /* Macros to compute byte sizes from typed element counts. */
74 #define BYTECODE_SIZE(n)        ((n) * sizeof(jsbytecode))
75 #define SRCNOTE_SIZE(n)         ((n) * sizeof(jssrcnote))
76 #define TRYNOTE_SIZE(n)         ((n) * sizeof(JSTryNote))
77 
78 JS_FRIEND_API(JSBool)
79 js_InitCodeGenerator(JSContext *cx, JSCodeGenerator *cg,
80                      JSArenaPool *codePool, JSArenaPool *notePool,
81                      const char *filename, uintN lineno,
82                      JSPrincipals *principals)
83 {
84     memset(cg, 0, sizeof *cg);
85     TREE_CONTEXT_INIT(&cg->treeContext);
86     cg->treeContext.flags |= TCF_COMPILING;
87     cg->codePool = codePool;
88     cg->notePool = notePool;
89     cg->codeMark = JS_ARENA_MARK(codePool);
90     cg->noteMark = JS_ARENA_MARK(notePool);
91     cg->tempMark = JS_ARENA_MARK(&cx->tempPool);
92     cg->current = &cg->main;
93     cg->filename = filename;
94     cg->firstLine = cg->prolog.currentLine = cg->main.currentLine = lineno;
95     cg->principals = principals;
96     ATOM_LIST_INIT(&cg->atomList);
97     cg->prolog.noteMask = cg->main.noteMask = SRCNOTE_CHUNK - 1;
98     ATOM_LIST_INIT(&cg->constList);
99     return JS_TRUE;
100 }
101 
102 JS_FRIEND_API(void)
103 js_FinishCodeGenerator(JSContext *cx, JSCodeGenerator *cg)
104 {
105     TREE_CONTEXT_FINISH(&cg->treeContext);
106     JS_ARENA_RELEASE(cg->codePool, cg->codeMark);
107     JS_ARENA_RELEASE(cg->notePool, cg->noteMark);
108     JS_ARENA_RELEASE(&cx->tempPool, cg->tempMark);
109 }
110 
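/*
 * Ensure the current code vector (prolog or main) has room for delta more
 * bytecodes, growing it in power-of-two chunks from cg->codePool as needed.
 * Returns the write offset for the new code, or -1 on out-of-memory.
 */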
111 static ptrdiff_t
112 EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
113 {
114     jsbytecode *base, *limit, *next;
115     ptrdiff_t offset, length;
116     size_t incr, size;
117 
118     base = CG_BASE(cg);
119     next = CG_NEXT(cg);
120     limit = CG_LIMIT(cg);
121     offset = PTRDIFF(next, base, jsbytecode);
122     if (next + delta > limit) {
123         length = offset + delta;
124         length = (length <= BYTECODE_CHUNK)
125                  ? BYTECODE_CHUNK
126                  : JS_BIT(JS_CeilingLog2(length));
127         incr = BYTECODE_SIZE(length);
128         if (!base) {
129             JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
130         } else {
131             size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
132             incr -= size;
133             JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
134         }
135         if (!base) {
136             JS_ReportOutOfMemory(cx);
137             return -1;
138         }
139         CG_BASE(cg) = base;
140         CG_LIMIT(cg) = base + length;
141         CG_NEXT(cg) = base + offset;
142     }
143     return offset;
144 }
145 
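/*
 * Model the stack effect of the opcode just emitted at offset target, using
 * the nuses/ndefs counts from js_CodeSpec.  A negative nuses marks a
 * variable-arity call-like op that pops 2 + argc values (callee, this, and
 * the arguments).
 */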
146 static void
147 UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
148 {
149     jsbytecode *pc;
150     const JSCodeSpec *cs;
151     intN nuses;
152 
153     pc = CG_CODE(cg, target);
154     cs = &js_CodeSpec[pc[0]];
155     nuses = cs->nuses;
156     if (nuses < 0)
157         nuses = 2 + GET_ARGC(pc);       /* stack: fun, this, [argc arguments] */
158     cg->stackDepth -= nuses;
159     JS_ASSERT(cg->stackDepth >= 0);
160     if (cg->stackDepth < 0) {
161         char numBuf[12];
162         JS_snprintf(numBuf, sizeof numBuf, "%d", target);
163         JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
164                                      js_GetErrorMessage, NULL,
165                                      JSMSG_STACK_UNDERFLOW,
166                                      cg->filename ? cg->filename : "stdin",
167                                      numBuf);
168     }
169     cg->stackDepth += cs->ndefs;
170     if ((uintN)cg->stackDepth > cg->maxStackDepth)
171         cg->maxStackDepth = cg->stackDepth;
172 }
173 
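/*
 * js_Emit1/2/3/N reserve space via EmitCheck, write the opcode and its
 * immediate operand bytes, then update the modeled stack depth.  js_EmitN
 * zero-fills its extra immediate bytes for the caller to set afterward.
 */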
174 ptrdiff_t
175 js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
176 {
177     ptrdiff_t offset = EmitCheck(cx, cg, op, 1);
178 
179     if (offset >= 0) {
180         *CG_NEXT(cg)++ = (jsbytecode)op;
181         UpdateDepth(cx, cg, offset);
182     }
183     return offset;
184 }
185 
186 ptrdiff_t
187 js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
188 {
189     ptrdiff_t offset = EmitCheck(cx, cg, op, 2);
190 
191     if (offset >= 0) {
192         jsbytecode *next = CG_NEXT(cg);
193         next[0] = (jsbytecode)op;
194         next[1] = op1;
195         CG_NEXT(cg) = next + 2;
196         UpdateDepth(cx, cg, offset);
197     }
198     return offset;
199 }
200 
201 ptrdiff_t
202 js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
203          jsbytecode op2)
204 {
205     ptrdiff_t offset = EmitCheck(cx, cg, op, 3);
206 
207     if (offset >= 0) {
208         jsbytecode *next = CG_NEXT(cg);
209         next[0] = (jsbytecode)op;
210         next[1] = op1;
211         next[2] = op2;
212         CG_NEXT(cg) = next + 3;
213         UpdateDepth(cx, cg, offset);
214     }
215     return offset;
216 }
217 
218 ptrdiff_t
219 js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
220 {
221     ptrdiff_t length = 1 + (ptrdiff_t)extra;
222     ptrdiff_t offset = EmitCheck(cx, cg, op, length);
223 
224     if (offset >= 0) {
225         jsbytecode *next = CG_NEXT(cg);
226         *next = (jsbytecode)op;
227         memset(next + 1, 0, BYTECODE_SIZE(extra));
228         CG_NEXT(cg) = next + length;
229         UpdateDepth(cx, cg, offset);
230     }
231     return offset;
232 }
233 
234 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
235 const char js_with_statement_str[] = "with statement";
236 const char js_finally_block_str[]  = "finally block";
237 const char js_script_str[]         = "script";
238 
239 static const char *statementName[] = {
240     "label statement",       /* LABEL */
241     "if statement",          /* IF */
242     "else statement",        /* ELSE */
243     "switch statement",      /* SWITCH */
244     "block",                 /* BLOCK */
245     js_with_statement_str,   /* WITH */
246     "catch block",           /* CATCH */
247     "try block",             /* TRY */
248     js_finally_block_str,    /* FINALLY */
249     js_finally_block_str,    /* SUBROUTINE */
250     "do loop",               /* DO_LOOP */
251     "for loop",              /* FOR_LOOP */
252     "for/in loop",           /* FOR_IN_LOOP */
253     "while loop",            /* WHILE_LOOP */
254 };
255 
256 static const char *
257 StatementName(JSCodeGenerator *cg)
258 {
259     if (!cg->treeContext.topStmt)
260         return js_script_str;
261     return statementName[cg->treeContext.topStmt->type];
262 }
263 
264 static void
265 ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
266 {
267     JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
268                          StatementName(cg));
269 }
270 
271 /**
272   Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
273   and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
274   into unconditional (gotos and gosubs), and conditional jumps or branches
275   (which pop a value, test it, and jump depending on its value).  Most jumps
276   have just one immediate operand, a signed offset from the jump opcode's pc
277   to the target bytecode.  The lookup and table switch opcodes may contain
278   many jump offsets.
279 
280   Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
281   fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
282   suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
283   the extended form of the JSOP_OR branch opcode).  The unextended or short
284   formats have 16-bit signed immediate offset operands, the extended or long
285   formats have 32-bit signed immediates.  The span-dependency problem consists
286   of selecting as few long instructions as possible, or about as few -- since
287   jumps can span other jumps, extending one jump may cause another to need to
288   be extended.
289 
290   Most JS scripts are short, so need no extended jumps.  We optimize for this
291   case by generating short jumps until we know a long jump is needed.  After
292   that point, we keep generating short jumps, but each jump's 16-bit immediate
293   offset operand is actually an unsigned index into cg->spanDeps, an array of
294   JSSpanDep structs.  Each struct tells the top offset in the script of the
295   opcode, the "before" offset of the jump (which will be the same as top for
296   simplex jumps, but which will index further into the bytecode array for a
297   non-initial jump offset in a lookup or table switch), the after "offset"
298   adjusted during span-dependent instruction selection (initially the same
299   value as the "before" offset), and the jump target (more below).
300 
301   Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
302   ensure that all bytecode generated so far can be inspected to discover where
303   the jump offset immediate operands lie within CG_CODE(cg).  But the bonus is
304   that we generate span-dependency records sorted by their offsets, so we can
305   binary-search when trying to find a JSSpanDep for a given bytecode offset,
306   or the nearest JSSpanDep at or above a given pc.
307 
308   To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
309   65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand.  This
310   tells us that we need to binary-search for the cg->spanDeps entry by the
311   jump opcode's bytecode offset (sd->before).
312 
313   Jump targets need to be maintained in a data structure that lets us look
314   up an already-known target by its address (jumps may have a common target),
315   and that also lets us update the addresses (script-relative, a.k.a. absolute
316   offsets) of targets that come after a jump target (for when a jump below
317   that target needs to be extended).  We use an AVL tree, implemented using
318   recursion, but with some tricky optimizations to its height-balancing code
319   (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).
320 
321   A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
322   positive sign, even though they link "backward" (i.e., toward lower bytecode
323   address).  We don't want to waste space and search time in the AVL tree for
324   such temporary backpatch deltas, so we use a single-bit wildcard scheme to
325   tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
326   in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
327   target, or is still awaiting backpatching.
328 
329   Note that backpatch chains would present a problem for BuildSpanDepTable,
330   which inspects bytecode to build cg->spanDeps on demand, when the first
331   short jump offset overflows.  To solve this temporary problem, we emit a
332   proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
333   nuses/ndefs counts help keep the stack balanced, but whose opcode format
334   distinguishes its backpatch delta immediate operand from a normal jump
335   offset.
336  */
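/*
 * Illustrative layout (a sketch, not emitted verbatim): a short jump is one
 * opcode byte followed by a 16-bit signed offset immediate,
 *
 *     JSOP_GOTO  <offset-hi> <offset-lo>
 *
 * while its extended counterpart carries a 32-bit signed offset,
 *
 *     JSOP_GOTOX <b3> <b2> <b1> <b0>
 *
 * so each extension grows the script by JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
 * bytes, and every later jump target, source note delta, and try note range
 * must be adjusted accordingly (see OptimizeSpanDeps below).
 */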
337 static int
338 BalanceJumpTargets(JSJumpTarget **jtp)
339 {
340     JSJumpTarget *jt, *jt2, *root;
341     int dir, otherDir, heightChanged;
342     JSBool doubleRotate;
343 
344     jt = *jtp;
345     JS_ASSERT(jt->balance != 0);
346 
347     if (jt->balance < -1) {
348         dir = JT_RIGHT;
349         doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
350     } else if (jt->balance > 1) {
351         dir = JT_LEFT;
352         doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
353     } else {
354         return 0;
355     }
356 
357     otherDir = JT_OTHER_DIR(dir);
358     if (doubleRotate) {
359         jt2 = jt->kids[otherDir];
360         *jtp = root = jt2->kids[dir];
361 
362         jt->kids[otherDir] = root->kids[dir];
363         root->kids[dir] = jt;
364 
365         jt2->kids[dir] = root->kids[otherDir];
366         root->kids[otherDir] = jt2;
367 
368         heightChanged = 1;
369         root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
370         root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
371         root->balance = 0;
372     } else {
373         *jtp = root = jt->kids[otherDir];
374         jt->kids[otherDir] = root->kids[dir];
375         root->kids[dir] = jt;
376 
377         heightChanged = (root->balance != 0);
378         jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
379     }
380 
381     return heightChanged;
382 }
383 
384 typedef struct AddJumpTargetArgs {
385     JSContext           *cx;
386     JSCodeGenerator     *cg;
387     ptrdiff_t           offset;
388     JSJumpTarget        *node;
389 } AddJumpTargetArgs;
390 
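/*
 * Recursively find or insert the jump target node for args->offset in the
 * AVL tree rooted at *jtp, recycling nodes from cg->jtFreeList when we can.
 * The return value is the change in subtree height, so callers can adjust
 * balance factors and rebalance via BalanceJumpTargets.
 */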
391 static int
392 AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
393 {
394     JSJumpTarget *jt;
395     int balanceDelta;
396 
397     jt = *jtp;
398     if (!jt) {
399         JSCodeGenerator *cg = args->cg;
400 
401         jt = cg->jtFreeList;
402         if (jt) {
403             cg->jtFreeList = jt->kids[JT_LEFT];
404         } else {
405             JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
406                                    sizeof *jt);
407             if (!jt) {
408                 JS_ReportOutOfMemory(args->cx);
409                 return 0;
410             }
411         }
412         jt->offset = args->offset;
413         jt->balance = 0;
414         jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
415         cg->numJumpTargets++;
416         args->node = jt;
417         *jtp = jt;
418         return 1;
419     }
420 
421     if (jt->offset == args->offset) {
422         args->node = jt;
423         return 0;
424     }
425 
426     if (args->offset < jt->offset)
427         balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
428     else
429         balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
430     if (!args->node)
431         return 0;
432 
433     jt->balance += balanceDelta;
434     return (balanceDelta && jt->balance)
435            ? 1 - BalanceJumpTargets(jtp)
436            : 0;
437 }
438 
439 #ifdef DEBUG_brendan
440 static int AVLCheck(JSJumpTarget *jt)
441 {
442     int lh, rh;
443 
444     if (!jt) return 0;
445     JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
446     lh = AVLCheck(jt->kids[JT_LEFT]);
447     rh = AVLCheck(jt->kids[JT_RIGHT]);
448     JS_ASSERT(jt->balance == rh - lh);
449     return 1 + JS_MAX(lh, rh);
450 }
451 #endif
452 
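/*
 * Record the now-known absolute target (sd->top + off) for span dependency
 * sd, adding a jump target node for it.  Fails if off exceeds even the
 * extended jump offset range.
 */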
453 static JSBool
454 SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
455                  ptrdiff_t off)
456 {
457     AddJumpTargetArgs args;
458 
459     if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
460         ReportStatementTooLarge(cx, cg);
461         return JS_FALSE;
462     }
463 
464     args.cx = cx;
465     args.cg = cg;
466     args.offset = sd->top + off;
467     args.node = NULL;
468     AddJumpTarget(&args, &cg->jumpTargets);
469     if (!args.node)
470         return JS_FALSE;
471 
472 #ifdef DEBUG_brendan
473     AVLCheck(cg->jumpTargets);
474 #endif
475 
476     SD_SET_TARGET(sd, args.node);
477     return JS_TRUE;
478 }
479 
480 #define SPANDEPS_MIN            256
481 #define SPANDEPS_SIZE(n)        ((n) * sizeof(JSSpanDep))
482 #define SPANDEPS_SIZE_MIN       SPANDEPS_SIZE(SPANDEPS_MIN)
483 
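/*
 * Append a span dependency record for the jump offset immediate at pc2,
 * belonging to the opcode at pc.  The cg->spanDeps vector doubles in size at
 * power-of-two counts.  The record's index (or SPANDEP_INDEX_HUGE if it does
 * not fit) replaces the 16-bit immediate at pc2 until offsets are finalized.
 */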
484 static JSBool
485 AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
486            ptrdiff_t off)
487 {
488     uintN index;
489     JSSpanDep *sdbase, *sd;
490     size_t size;
491 
492     index = cg->numSpanDeps;
493     if (index + 1 == 0) {
494         ReportStatementTooLarge(cx, cg);
495         return JS_FALSE;
496     }
497 
498     if ((index & (index - 1)) == 0 &&
499         (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
500         if (!sdbase) {
501             size = SPANDEPS_SIZE_MIN;
502             JS_ARENA_ALLOCATE_CAST(sdbase, JSSpanDep *, &cx->tempPool, size);
503         } else {
504             size = SPANDEPS_SIZE(index);
505             JS_ARENA_GROW_CAST(sdbase, JSSpanDep *, &cx->tempPool, size, size);
506         }
507         if (!sdbase)
508             return JS_FALSE;
509         cg->spanDeps = sdbase;
510     }
511 
512     cg->numSpanDeps = index + 1;
513     sd = cg->spanDeps + index;
514     sd->top = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
515     sd->offset = sd->before = PTRDIFF(pc2, CG_BASE(cg), jsbytecode);
516 
517     if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
518         /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
519         if (off != 0) {
520             JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
521             if (off > BPDELTA_MAX) {
522                 ReportStatementTooLarge(cx, cg);
523                 return JS_FALSE;
524             }
525         }
526         SD_SET_BPDELTA(sd, off);
527     } else if (off == 0) {
528         /* Jump offset will be patched directly, without backpatch chaining. */
529         SD_SET_TARGET(sd, NULL);
530     } else {
531         /* The jump offset in off is non-zero, therefore it's already known. */
532         if (!SetSpanDepTarget(cx, cg, sd, off))
533             return JS_FALSE;
534     }
535 
536     if (index > SPANDEP_INDEX_MAX)
537         index = SPANDEP_INDEX_HUGE;
538     SET_SPANDEP_INDEX(pc2, index);
539     return JS_TRUE;
540 }
541 
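/*
 * Scan bytecode from cg->spanDepTodo up to CG_NEXT(cg), adding a span
 * dependency for every jump offset immediate, including each case offset of
 * a table or lookup switch.
 */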
542 static JSBool
543 BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
544 {
545     jsbytecode *pc, *end;
546     JSOp op;
547     const JSCodeSpec *cs;
548     ptrdiff_t len, off;
549 
550     pc = CG_BASE(cg) + cg->spanDepTodo;
551     end = CG_NEXT(cg);
552     while (pc < end) {
553         op = (JSOp)*pc;
554         cs = &js_CodeSpec[op];
555         len = (ptrdiff_t)cs->length;
556 
557         switch (cs->format & JOF_TYPEMASK) {
558           case JOF_JUMP:
559             off = GET_JUMP_OFFSET(pc);
560             if (!AddSpanDep(cx, cg, pc, pc, off))
561                 return JS_FALSE;
562             break;
563 
564           case JOF_TABLESWITCH:
565           {
566             jsbytecode *pc2;
567             jsint i, low, high;
568 
569             pc2 = pc;
570             off = GET_JUMP_OFFSET(pc2);
571             if (!AddSpanDep(cx, cg, pc, pc2, off))
572                 return JS_FALSE;
573             pc2 += JUMP_OFFSET_LEN;
574             low = GET_JUMP_OFFSET(pc2);
575             pc2 += JUMP_OFFSET_LEN;
576             high = GET_JUMP_OFFSET(pc2);
577             pc2 += JUMP_OFFSET_LEN;
578             for (i = low; i <= high; i++) {
579                 off = GET_JUMP_OFFSET(pc2);
580                 if (!AddSpanDep(cx, cg, pc, pc2, off))
581                     return JS_FALSE;
582                 pc2 += JUMP_OFFSET_LEN;
583             }
584             len = 1 + pc2 - pc;
585             break;
586           }
587 
588           case JOF_LOOKUPSWITCH:
589           {
590             jsbytecode *pc2;
591             jsint npairs;
592 
593             pc2 = pc;
594             off = GET_JUMP_OFFSET(pc2);
595             if (!AddSpanDep(cx, cg, pc, pc2, off))
596                 return JS_FALSE;
597             pc2 += JUMP_OFFSET_LEN;
598             npairs = (jsint) GET_ATOM_INDEX(pc2);
599             pc2 += ATOM_INDEX_LEN;
600             while (npairs) {
601                 pc2 += ATOM_INDEX_LEN;
602                 off = GET_JUMP_OFFSET(pc2);
603                 if (!AddSpanDep(cx, cg, pc, pc2, off))
604                     return JS_FALSE;
605                 pc2 += JUMP_OFFSET_LEN;
606                 npairs--;
607             }
608             len = 1 + pc2 - pc;
609             break;
610           }
611         }
612 
613         JS_ASSERT(len > 0);
614         pc += len;
615     }
616 
617     return JS_TRUE;
618 }
619 
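/*
 * Recover the span dependency for the jump offset immediate at pc, either
 * directly from the index stored in its 16-bit immediate or, if that index
 * is SPANDEP_INDEX_HUGE, by binary search on sd->before.
 */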
620 static JSSpanDep *
621 GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
622 {
623     uintN index;
624     ptrdiff_t offset;
625     int lo, hi, mid;
626     JSSpanDep *sd;
627 
628     index = GET_SPANDEP_INDEX(pc);
629     if (index != SPANDEP_INDEX_HUGE)
630         return cg->spanDeps + index;
631 
632     offset = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
633     lo = 0;
634     hi = cg->numSpanDeps - 1;
635     while (lo <= hi) {
636         mid = (lo + hi) / 2;
637         sd = cg->spanDeps + mid;
638         if (sd->before == offset)
639             return sd;
640         if (sd->before < offset)
641             lo = mid + 1;
642         else
643             hi = mid - 1;
644     }
645 
646     JS_ASSERT(0);
647     return NULL;
648 }
649 
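/*
 * Record a backpatch chain delta for the jump at pc: directly in the 16-bit
 * immediate while no span dependencies exist and the delta fits, otherwise
 * in the jump's span dependency record (building the table on demand).
 */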
650 static JSBool
651 SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
652                   ptrdiff_t delta)
653 {
654     JSSpanDep *sd;
655 
656     JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
657     if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
658         SET_JUMP_OFFSET(pc, delta);
659         return JS_TRUE;
660     }
661 
662     if (delta > BPDELTA_MAX) {
663         ReportStatementTooLarge(cx, cg);
664         return JS_FALSE;
665     }
666 
667     if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
668         return JS_FALSE;
669 
670     sd = GetSpanDep(cg, pc);
671     JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
672     SD_SET_BPDELTA(sd, delta);
673     return JS_TRUE;
674 }
675 
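/*
 * Add delta to the offset of every jump target above pivot.  Targets at or
 * below pivot are untouched, and their left subtrees (which hold only
 * smaller offsets) need not be visited.
 */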
676 static void
677 UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
678 {
679     if (jt->offset > pivot) {
680         jt->offset += delta;
681         if (jt->kids[JT_LEFT])
682             UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
683     }
684     if (jt->kids[JT_RIGHT])
685         UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
686 }
687 
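/*
 * Binary-search cg->spanDeps, starting at index lo, for the record whose
 * "before" offset is the smallest one at or above offset.  Returns guard as
 * a fencepost if no such record exists.
 */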
688 static JSSpanDep *
689 FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
690                    JSSpanDep *guard)
691 {
692     int num, hi, mid;
693     JSSpanDep *sdbase, *sd;
694 
695     num = cg->numSpanDeps;
696     JS_ASSERT(num > 0);
697     hi = num - 1;
698     sdbase = cg->spanDeps;
699     while (lo <= hi) {
700         mid = (lo + hi) / 2;
701         sd = sdbase + mid;
702         if (sd->before == offset)
703             return sd;
704         if (sd->before < offset)
705             lo = mid + 1;
706         else
707             hi = mid - 1;
708     }
709     if (lo == num)
710         return guard;
711     sd = sdbase + lo;
712     JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
713     return sd;
714 }
715 
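/* Return the whole jump target tree to cg->jtFreeList for reuse. */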
716 static void
717 FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
718 {
719     if (jt->kids[JT_LEFT])
720         FreeJumpTargets(cg, jt->kids[JT_LEFT]);
721     if (jt->kids[JT_RIGHT])
722         FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
723     jt->kids[JT_LEFT] = cg->jtFreeList;
724     cg->jtFreeList = jt;
725 }
726 
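/*
 * Finalize span-dependent jumps: repeatedly extend any short jump whose span
 * no longer fits in 16 bits (extending one jump can push others out of
 * range), then grow the bytecode vector, move code toward its final offsets
 * while restoring real jump immediates, and adjust source note deltas and
 * try note ranges to match the grown code.
 */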
727 static JSBool
728 OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
729 {
730     jsbytecode *pc, *oldpc, *base, *limit, *next;
731     JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
732     ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
733     JSBool done;
734     JSOp op;
735     uint32 type;
736     size_t size, incr;
737     jssrcnote *sn, *snlimit;
738     JSSrcNoteSpec *spec;
739     uintN i, n, noteIndex;
740     JSTryNote *tn, *tnlimit;
741 #ifdef DEBUG_brendan
742     int passes = 0;
743 #endif
744 
745     base = CG_BASE(cg);
746     sdbase = cg->spanDeps;
747     sdlimit = sdbase + cg->numSpanDeps;
748     offset = CG_OFFSET(cg);
749     growth = 0;
750 
751     do {
752         done = JS_TRUE;
753         delta = 0;
754         top = pivot = -1;
755         sdtop = NULL;
756         pc = NULL;
757         op = JSOP_NOP;
758         type = 0;
759 #ifdef DEBUG_brendan
760         passes++;
761 #endif
762 
763         for (sd = sdbase; sd < sdlimit; sd++) {
764             JS_ASSERT(JT_HAS_TAG(sd->target));
765             sd->offset += delta;
766 
767             if (sd->top != top) {
768                 sdtop = sd;
769                 top = sd->top;
770                 JS_ASSERT(top == sd->before);
771                 pivot = sd->offset;
772                 pc = base + top;
773                 op = (JSOp) *pc;
774                 type = (js_CodeSpec[op].format & JOF_TYPEMASK);
775                 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
776                     /*
777                      * We already extended all the jump offset operands for
778                      * the opcode at sd->top.  Jumps and branches have only
779                      * one jump offset operand, but switches have many, all
780                      * of which are adjacent in cg->spanDeps.
781                      */
782                     continue;
783                 }
784 
785                 JS_ASSERT(type == JOF_JUMP ||
786                           type == JOF_TABLESWITCH ||
787                           type == JOF_LOOKUPSWITCH);
788             }
789 
790             if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
791                 span = SD_SPAN(sd, pivot);
792                 if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
793                     ptrdiff_t deltaFromTop = 0;
794 
795                     done = JS_FALSE;
796 
797                     switch (op) {
798                       case JSOP_GOTO:         op = JSOP_GOTOX; break;
799                       case JSOP_IFEQ:         op = JSOP_IFEQX; break;
800                       case JSOP_IFNE:         op = JSOP_IFNEX; break;
801                       case JSOP_OR:           op = JSOP_ORX; break;
802                       case JSOP_AND:          op = JSOP_ANDX; break;
803                       case JSOP_GOSUB:        op = JSOP_GOSUBX; break;
804                       case JSOP_CASE:         op = JSOP_CASEX; break;
805                       case JSOP_DEFAULT:      op = JSOP_DEFAULTX; break;
806                       case JSOP_TABLESWITCH:  op = JSOP_TABLESWITCHX; break;
807                       case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
808                       default:
809                         ReportStatementTooLarge(cx, cg);
810                         return JS_FALSE;
811                     }
812                     *pc = (jsbytecode) op;
813 
814                     for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
815                         if (sd2 <= sd) {
816                             /*
817                              * sd2->offset already includes delta as it stood
818                              * before we entered this loop, but it must also
819                              * include the delta relative to top due to all the
820                              * extended jump offset immediates for the opcode
821                              * starting at top, which we extend in this loop.
822                              *
823                              * If there is only one extended jump offset, then
824                              * sd2->offset won't change and this for loop will
825                              * iterate once only.
826                              */
827                             sd2->offset += deltaFromTop;
828                             deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
829                         } else {
830                             /*
831                              * sd2 comes after sd, and won't be revisited by
832                              * the outer for loop, so we have to increase its
833                              * offset by delta, not merely by deltaFromTop.
834                              */
835                             sd2->offset += delta;
836                         }
837 
838                         delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
839                         UpdateJumpTargets(cg->jumpTargets, sd2->offset,
840                                           JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
841                     }
842                     sd = sd2 - 1;
843                 }
844             }
845         }
846 
847         growth += delta;
848     } while (!done);
849 
850     if (growth) {
851 #ifdef DEBUG_brendan
852         printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
853                cg->filename ? cg->filename : "stdin", cg->firstLine,
854                growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
855                passes, offset + growth, offset, growth);
856 #endif
857 
858         /*
859          * Ensure that we have room for the extended jumps, but don't round up
860          * to a power of two -- we're done generating code, so we cut to fit.
861          */
862         limit = CG_LIMIT(cg);
863         length = offset + growth;
864         next = base + length;
865         if (next > limit) {
866             JS_ASSERT(length > BYTECODE_CHUNK);
867             size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
868             incr = BYTECODE_SIZE(length) - size;
869             JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
870             if (!base) {
871                 JS_ReportOutOfMemory(cx);
872                 return JS_FALSE;
873             }
874             CG_BASE(cg) = base;
875             CG_LIMIT(cg) = next = base + length;
876         }
877         CG_NEXT(cg) = next;
878 
879         /*
880          * Set up a fake span dependency record to guard the end of the code
881          * being generated.  This guard record is returned as a fencepost by
882          * FindNearestSpanDep if there is no real spandep at or above a given
883          * unextended code offset.
884          */
885         guard.top = -1;
886         guard.offset = offset + growth;
887         guard.before = offset;
888         guard.target = NULL;
889     }
890 
891     /*
892      * Now work backwards through the span dependencies, copying chunks of
893      * bytecode between each extended jump toward the end of the grown code
894      * space, and restoring immediate offset operands for all jump bytecodes.
895      * The first chunk of bytecodes, starting at base and ending at the first
896      * extended jump offset (NB: this chunk includes the operation bytecode
897      * just before that immediate jump offset), doesn't need to be copied.
898      */
899     JS_ASSERT(sd == sdlimit);
900     top = -1;
901     while (--sd >= sdbase) {
902         if (sd->top != top) {
903             top = sd->top;
904             op = (JSOp) base[top];
905             type = (js_CodeSpec[op].format & JOF_TYPEMASK);
906 
907             for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
908                 continue;
909             sd2++;
910             pivot = sd2->offset;
911             JS_ASSERT(top == sd2->before);
912         }
913 
914         oldpc = base + sd->before;
915         span = SD_SPAN(sd, pivot);
916 
917         /*
918          * If this jump didn't need to be extended, restore its span immediate
919          * offset operand now, overwriting the index of sd within cg->spanDeps
920          * that was stored temporarily after *pc when BuildSpanDepTable ran.
921          *
922          * Note that span might fit in 16 bits even for an extended jump op,
923          * if the op has multiple span operands, not all of which overflowed
924          * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
925          * range for a short jump, but others are not).
926          */
927         if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
928             JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
929             SET_JUMP_OFFSET(oldpc, span);
930             continue;
931         }
932 
933         /*
934          * Set up parameters needed to copy the next run of bytecode starting
935          * at offset (which is a cursor into the unextended, original bytecode
936          * vector), down to sd->before (a cursor of the same scale as offset,
937          * it's the index of the original jump pc).  Reuse delta to count the
938          * nominal number of bytes to copy.
939          */
940         pc = base + sd->offset;
941         delta = offset - sd->before;
942         JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
943 
944         /*
945          * Don't bother copying the jump offset we're about to reset, but do
946          * copy the bytecode at oldpc (which comes just before its immediate
947          * jump offset operand), on the next iteration through the loop, by
948          * including it in offset's new value.
949          */
950         offset = sd->before + 1;
951         size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
952         if (size) {
953             memmove(pc + 1 + JUMPX_OFFSET_LEN,
954                     oldpc + 1 + JUMP_OFFSET_LEN,
955                     size);
956         }
957 
958         SET_JUMPX_OFFSET(pc, span);
959     }
960 
961     if (growth) {
962         /*
963          * Fix source note deltas.  Don't hardwire the delta fixup adjustment,
964          * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
965          * at each sd that moved.  The future may bring different offset sizes
966          * for span-dependent instruction operands.  However, we fix only main
967          * notes here, not prolog notes -- we know that prolog opcodes are not
968          * span-dependent, and aren't likely ever to be.
969          */
970         offset = growth = 0;
971         sd = sdbase;
972         for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
973              sn < snlimit;
974              sn = SN_NEXT(sn)) {
975             /*
976              * Recall that the offset of a given note includes its delta, and
977              * tells the offset of the annotated bytecode from the main entry
978              * point of the script.
979              */
980             offset += SN_DELTA(sn);
981             while (sd < sdlimit && sd->before < offset) {
982                 /*
983                  * To compute the delta to add to sn, we need to look at the
984                  * spandep after sd, whose offset - (before + growth) tells by
985                  * how many bytes sd's instruction grew.
986                  */
987                 sd2 = sd + 1;
988                 if (sd2 == sdlimit)
989                     sd2 = &guard;
990                 delta = sd2->offset - (sd2->before + growth);
991                 if (delta > 0) {
992                     JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
993                     sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
994                     if (!sn)
995                         return JS_FALSE;
996                     snlimit = cg->main.notes + cg->main.noteCount;
997                     growth += delta;
998                 }
999                 sd++;
1000             }
1001 
1002             /*
1003              * If sn has span-dependent offset operands, check whether each
1004              * covers further span-dependencies, and increase those operands
1005              * accordingly.  Some source notes measure offset not from the
1006              * annotated pc, but from that pc plus some small bias.  NB: we
1007              * assume that spec->offsetBias can't itself span span-dependent
1008              * instructions!
1009              */
1010             spec = &js_SrcNoteSpec[SN_TYPE(sn)];
1011             if (spec->isSpanDep) {
1012                 pivot = offset + spec->offsetBias;
1013                 n = spec->arity;
1014                 for (i = 0; i < n; i++) {
1015                     span = js_GetSrcNoteOffset(sn, i);
1016                     if (span == 0)
1017                         continue;
1018                     target = pivot + span * spec->isSpanDep;
1019                     sd2 = FindNearestSpanDep(cg, target,
1020                                              (target >= pivot)
1021                                              ? sd - sdbase
1022                                              : 0,
1023                                              &guard);
1024 
1025                     /*
1026                      * Increase target by sd2's before-vs-after offset delta,
1027                      * which is absolute (i.e., relative to start of script,
1028                      * as is target).  Recompute the span by subtracting its
1029                      * adjusted pivot from target.
1030                      */
1031                     target += sd2->offset - sd2->before;
1032                     span = target - (pivot + growth);
1033                     span *= spec->isSpanDep;
1034                     noteIndex = sn - cg->main.notes;
1035                     if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
1036                         return JS_FALSE;
1037                     sn = cg->main.notes + noteIndex;
1038                     snlimit = cg->main.notes + cg->main.noteCount;
1039                 }
1040             }
1041         }
1042         cg->main.lastNoteOffset += growth;
1043 
1044         /*
1045          * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
1046          * not clear how we can beat that).
1047          */
1048         for (tn = cg->tryBase, tnlimit = cg->tryNext; tn < tnlimit; tn++) {
1049             /*
1050              * First, look for the nearest span dependency at/above tn->start.
1051              * There may not be any such spandep, in which case the guard will
1052              * be returned.
1053              */
1054             offset = tn->start;
1055             sd = FindNearestSpanDep(cg, offset, 0, &guard);
1056             delta = sd->offset - sd->before;
1057             tn->start = offset + delta;
1058 
1059             /*
1060              * Next, find the nearest spandep at/above tn->start + tn->length.
1061              * Use its delta minus tn->start's delta to increase tn->length.
1062              */
1063             length = tn->length;
1064             sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
1065             if (sd2 != sd)
1066                 tn->length = length + sd2->offset - sd2->before - delta;
1067 
1068             /*
1069              * Finally, adjust tn->catchStart upward only if it is non-zero,
1070              * and provided there are spandeps below it that grew.
1071              */
1072             offset = tn->catchStart;
1073             if (offset != 0) {
1074                 sd = FindNearestSpanDep(cg, offset, sd2 - sdbase, &guard);
1075                 tn->catchStart = offset + sd->offset - sd->before;
1076             }
1077         }
1078     }
1079 
1080 #ifdef DEBUG_brendan
1081   {
1082     uintN bigspans = 0;
1083     top = -1;
1084     for (sd = sdbase; sd < sdlimit; sd++) {
1085         offset = sd->offset;
1086 
1087         /* NB: sd->top cursors into the original, unextended bytecode vector. */
1088         if (sd->top != top) {
1089             JS_ASSERT(top == -1 ||
1090                       !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
1091                       bigspans != 0);
1092             bigspans = 0;
1093             top = sd->top;
1094             JS_ASSERT(top == sd->before);
1095             op = (JSOp) base[offset];
1096             type = (js_CodeSpec[op].format & JOF_TYPEMASK);
1097             JS_ASSERT(type == JOF_JUMP ||
1098                       type == JOF_JUMPX ||
1099                       type == JOF_TABLESWITCH ||
1100                       type == JOF_TABLESWITCHX ||
1101                       type == JOF_LOOKUPSWITCH ||
1102                       type == JOF_LOOKUPSWITCHX);
1103             pivot = offset;
1104         }
1105 
1106         pc = base + offset;
1107         if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
1108             span = GET_JUMPX_OFFSET(pc);
1109             if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
1110                 bigspans++;
1111             } else {
1112                 JS_ASSERT(type == JOF_TABLESWITCHX ||
1113                           type == JOF_LOOKUPSWITCHX);
1114             }
1115         } else {
1116             span = GET_JUMP_OFFSET(pc);
1117         }
1118         JS_ASSERT(SD_SPAN(sd, pivot) == span);
1119     }
1120     JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
1121   }
1122 #endif
1123 
1124     /*
1125      * Reset so we optimize at most once -- cg may be used for further code
1126      * generation of successive, independent, top-level statements.  No jump
1127      * can span top-level statements, because JS lacks goto.
1128      */
1129     size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
1130     JS_ArenaFreeAllocation(&cx->tempPool, cg->spanDeps,
1131                            JS_MAX(size, SPANDEPS_SIZE_MIN));
1132     cg->spanDeps = NULL;
1133     FreeJumpTargets(cg, cg->jumpTargets);
1134     cg->jumpTargets = NULL;
1135     cg->numSpanDeps = cg->numJumpTargets = 0;
1136     cg->spanDepTodo = CG_OFFSET(cg);
1137     return JS_TRUE;
1138 }
1139 
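/*
 * Emit op with a 16-bit jump offset immediate.  If off is out of short
 * range, or span dependencies are already being tracked, record a span
 * dependency for the new jump instead of relying on the immediate alone.
 */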
1140 static JSBool
1141 EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
1142 {
1143     JSBool extend;
1144     ptrdiff_t jmp;
1145     jsbytecode *pc;
1146 
1147     extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
1148     if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
1149         return JS_FALSE;
1150 
1151     jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
1152     if (jmp >= 0 && (extend || cg->spanDeps)) {
1153         pc = CG_CODE(cg, jmp);
1154         if (!AddSpanDep(cx, cg, pc, pc, off))
1155             return JS_FALSE;
1156     }
1157     return jmp;
1158 }
1159 
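/*
 * Read back the logical jump offset at pc: the raw 16-bit immediate when no
 * span dependencies exist, the backpatch delta for a still-unpatched jump,
 * or the span from the op's first span dependency to its AVL-tree target.
 */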
1160 static ptrdiff_t
1161 GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
1162 {
1163     JSSpanDep *sd;
1164     JSJumpTarget *jt;
1165     ptrdiff_t top;
1166 
1167     if (!cg->spanDeps)
1168         return GET_JUMP_OFFSET(pc);
1169 
1170     sd = GetSpanDep(cg, pc);
1171     jt = sd->target;
1172     if (!JT_HAS_TAG(jt))
1173         return JT_TO_BPDELTA(jt);
1174 
1175     top = sd->top;
1176     while (--sd >= cg->spanDeps && sd->top == top)
1177         continue;
1178     sd++;
1179     return JT_CLR_TAG(jt)->offset - sd->offset;
1180 }
1181 
1182 JSBool
1183 js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
1184                  ptrdiff_t off)
1185 {
1186     if (!cg->spanDeps) {
1187         if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
1188             SET_JUMP_OFFSET(pc, off);
1189             return JS_TRUE;
1190         }
1191 
1192         if (!BuildSpanDepTable(cx, cg))
1193             return JS_FALSE;
1194     }
1195 
1196     return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
1197 }
1198 
1199 JSBool
1200 js_InStatement(JSTreeContext *tc, JSStmtType type)
1201 {
1202     JSStmtInfo *stmt;
1203 
1204     for (stmt = tc->topStmt; stmt; stmt = stmt->down) {
1205         if (stmt->type == type)
1206             return JS_TRUE;
1207     }
1208     return JS_FALSE;
1209 }
1210 
1211 JSBool
1212 js_IsGlobalReference(JSTreeContext *tc, JSAtom *atom, JSBool *loopyp)
1213 {
1214     JSStmtInfo *stmt;
1215     JSObject *obj;
1216     JSScope *scope;
1217 
1218     *loopyp = JS_FALSE;
1219     for (stmt = tc->topStmt; stmt; stmt = stmt->down) {
1220         if (stmt->type == STMT_WITH)
1221             return JS_FALSE;
1222         if (STMT_IS_LOOP(stmt)) {
1223             *loopyp = JS_TRUE;
1224             continue;
1225         }
1226         if (stmt->flags & SIF_SCOPE) {
1227             obj = ATOM_TO_OBJECT(stmt->atom);
1228             JS_ASSERT(LOCKED_OBJ_GET_CLASS(obj) == &js_BlockClass);
1229             scope = OBJ_SCOPE(obj);
1230             if (SCOPE_GET_PROPERTY(scope, ATOM_TO_JSID(atom)))
1231                 return JS_FALSE;
1232         }
1233     }
1234     return JS_TRUE;
1235 }
1236 
1237 void
1238 js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
1239                  ptrdiff_t top)
1240 {
1241     stmt->type = type;
1242     stmt->flags = 0;
1243     SET_STATEMENT_TOP(stmt, top);
1244     stmt->atom = NULL;
1245     stmt->down = tc->topStmt;
1246     tc->topStmt = stmt;
1247     if (STMT_LINKS_SCOPE(stmt)) {
1248         stmt->downScope = tc->topScopeStmt;
1249         tc->topScopeStmt = stmt;
1250     } else {
1251         stmt->downScope = NULL;
1252     }
1253 }
1254 
1255 void
1256 js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSAtom *blockAtom,
1257                   ptrdiff_t top)
1258 {
1259     JSObject *blockObj;
1260 
1261     js_PushStatement(tc, stmt, STMT_BLOCK, top);
1262     stmt->flags |= SIF_SCOPE;
1263     blockObj = ATOM_TO_OBJECT(blockAtom);
1264     blockObj->slots[JSSLOT_PARENT] = OBJECT_TO_JSVAL(tc->blockChain);
1265     stmt->downScope = tc->topScopeStmt;
1266     tc->topScopeStmt = stmt;
1267     tc->blockChain = blockObj;
1268     stmt->atom = blockAtom;
1269 }
1270 
1271 /*
1272  * Emit a backpatch op with offset pointing to the previous jump of this type,
1273  * so that we can walk back up the chain fixing up the op and jump offset.
1274  */
1275 static ptrdiff_t
1276 EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
1277 {
1278     ptrdiff_t offset, delta;
1279 
1280     offset = CG_OFFSET(cg);
1281     delta = offset - *lastp;
1282     *lastp = offset;
1283     JS_ASSERT(delta > 0);
1284     return EmitJump(cx, cg, op, delta);
1285 }
1286 
1287 /*
1288  * Macro to emit a bytecode followed by a uint16 immediate operand stored in
1289  * big-endian order, used for arg and var numbers as well as for atomIndexes.
1290  * NB: We use cx and cg from our caller's lexical environment, and return
1291  * false on error.
1292  */
1293 #define EMIT_UINT16_IMM_OP(op, i)                                             \
1294     JS_BEGIN_MACRO                                                            \
1295         if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0)             \
1296             return JS_FALSE;                                                  \
1297     JS_END_MACRO
1298 
1299 /* Emit additional bytecode(s) for non-local jumps. */
1300 static JSBool
1301 EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
1302                       JSOp *returnop)
1303 {
1304     intN depth;
1305     JSStmtInfo *stmt;
1306     ptrdiff_t jmp;
1307 
1308     /*
1309      * Return from within a try block that has a finally clause must be split
1310      * into two ops: JSOP_SETRVAL, to pop the r.v. and store it in fp->rval;
1311      * and JSOP_RETRVAL, which makes control flow go back to the caller, who
1312      * picks up fp->rval as usual.  Otherwise, the stack will be unbalanced
1313      * when executing the finally clause.
1314      *
1315      * We mutate *returnop once only if we find an enclosing try-block (viz,
1316      * STMT_FINALLY) to ensure that we emit just one JSOP_SETRVAL before one
1317      * or more JSOP_GOSUBs and other fixup opcodes emitted by this function.
1318      * Our caller (the TOK_RETURN case of js_EmitTree) then emits *returnop.
1319      * The fixup opcodes and gosubs must interleave in the proper order, from
1320      * inner statement to outer, so that finally clauses run at the correct
1321      * stack depth.
1322      */
1323     if (returnop) {
1324         JS_ASSERT(*returnop == JSOP_RETURN);
1325         for (stmt = cg->treeContext.topStmt; stmt != toStmt;
1326              stmt = stmt->down) {
1327             if (stmt->type == STMT_FINALLY ||
1328                 ((cg->treeContext.flags & TCF_FUN_HEAVYWEIGHT) &&
1329                  STMT_MAYBE_SCOPE(stmt))) {
1330                 if (js_Emit1(cx, cg, JSOP_SETRVAL) < 0)
1331                     return JS_FALSE;
1332                 *returnop = JSOP_RETRVAL;
1333                 break;
1334             }
1335         }
1336 
1337         /*
1338          * If there are no try-with-finally blocks open around this return
1339          * statement, we can generate a return forthwith and skip generating
1340          * any fixup code.
1341          */
1342         if (*returnop == JSOP_RETURN)
1343             return JS_TRUE;
1344     }
1345 
1346     /*
1347      * The non-local jump fixup we emit will unbalance cg->stackDepth, because
1348      * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
1349      * end of a with statement, so we save cg->stackDepth here and restore it
1350      * just before a successful return.
1351      */
1352     depth = cg->stackDepth;
1353     for (stmt = cg->treeContext.topStmt; stmt != toStmt; stmt = stmt->down) {
1354         switch (stmt->type) {
1355           case STMT_FINALLY:
1356             if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1357                 return JS_FALSE;
1358             jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt));
1359             if (jmp < 0)
1360                 return JS_FALSE;
1361             break;
1362 
1363           case STMT_WITH:
1364             /* There's a With object on the stack that we need to pop. */
1365             if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1366                 return JS_FALSE;
1367             if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
1368                 return JS_FALSE;
1369             break;
1370 
1371           case STMT_FOR_IN_LOOP:
1372             /*
1373              * The iterator and the object being iterated need to be popped.
1374              */
1375             if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1376                 return JS_FALSE;
1377             if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
1378                 return JS_FALSE;
1379             break;
1380 
1381           case STMT_SUBROUTINE:
1382             /*
1383              * There's a [exception or hole, retsub pc-index] pair on the
1384              * stack that we need to pop.
1385              */
1386             if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1387                 return JS_FALSE;
1388             if (js_Emit1(cx, cg, JSOP_POP2) < 0)
1389                 return JS_FALSE;
1390             break;
1391 
1392           default:;
1393         }
1394 
1395         if (stmt->flags & SIF_SCOPE) {
1396             uintN i;
1397 
1398             /* There is a Block object with locals on the stack to pop. */
1399             if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1400                 return JS_FALSE;
1401             i = OBJ_BLOCK_COUNT(cx, ATOM_TO_OBJECT(stmt->atom));
1402             EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i);
1403         }
1404     }
1405 
1406     cg->stackDepth = depth;
1407     return JS_TRUE;
1408 }
1409 
1410 static ptrdiff_t
1411 EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
1412          ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
1413 {
1414     intN index;
1415 
1416     if (!EmitNonLocalJumpFixup(cx, cg, toStmt, NULL))
1417         return -1;
1418 
1419     if (label)
1420         index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
1421     else if (noteType != SRC_NULL)
1422         index = js_NewSrcNote(cx, cg, noteType);
1423     else
1424         index = 0;
1425     if (index < 0)
1426         return -1;
1427 
1428     return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
1429 }
1430 
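/*
 * Walk the backpatch chain whose newest link is at offset last, rewriting
 * each JSOP_BACKPATCH* placeholder as op and pointing its jump offset at
 * target.  The chain is linked by the positive deltas stored in the
 * placeholders' jump immediates.
 */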
1431 static JSBool
1432 BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
1433           jsbytecode *target, jsbytecode op)
1434 {
1435     jsbytecode *pc, *stop;
1436     ptrdiff_t delta, span;
1437 
1438     pc = CG_CODE(cg, last);
1439     stop = CG_CODE(cg, -1);
1440     while (pc != stop) {
1441         delta = GetJumpOffset(cg, pc);
1442         span = PTRDIFF(target, pc, jsbytecode);
1443         CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);
1444 
1445         /*
1446          * Set *pc after jump offset in case bpdelta didn't overflow, but span
1447          * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
1448          * and need to see the JSOP_BACKPATCH* op at *pc).
1449          */
1450         *pc = op;
1451         pc -= delta;
1452     }
1453     return JS_TRUE;
1454 }
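
/*
 * Minimal self-contained sketch (hypothetical names, not engine code) of the
 * delta-chained backpatch idea BackPatch implements above: each pending
 * forward jump stores the distance back to the previously emitted pending
 * jump, so the whole chain lives in the code array itself and needs no side
 * table.  In this sketch a stored delta of 0 marks the oldest pending jump;
 * rewriting *pc to the final opcode is omitted for brevity.
 */
static void
ExampleBackPatchChain(int *code, int last, int target)
{
    int pc, delta;

    pc = last;                      /* offset of the newest pending jump */
    for (;;) {
        delta = code[pc];           /* saved link to the previous pending jump */
        code[pc] = target - pc;     /* resolve this jump's real span */
        if (delta == 0)
            break;                  /* reached the head of the chain */
        pc -= delta;
    }
}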
1455 
1456 void
1457 js_PopStatement(JSTreeContext *tc)
1458 {
1459     JSStmtInfo *stmt;
1460     JSObject *blockObj;
1461 
1462     stmt = tc->topStmt;
1463     tc->topStmt = stmt->down;
1464     if (STMT_LINKS_SCOPE(stmt)) {
1465         tc->topScopeStmt = stmt->downScope;
1466         if (stmt->flags & SIF_SCOPE) {
1467             blockObj = ATOM_TO_OBJECT(stmt->atom);
1468             tc->blockChain = JSVAL_TO_OBJECT(blockObj->slots[JSSLOT_PARENT]);
1469         }
1470     }
1471 }
1472 
1473 JSBool
1474 js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
1475 {
1476     JSStmtInfo *stmt;
1477 
1478     stmt = cg->treeContext.topStmt;
1479     if (!STMT_IS_TRYING(stmt) &&
1480         (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
1481          !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
1482                     JSOP_GOTO))) {
1483         return JS_FALSE;
1484     }
1485     js_PopStatement(&cg->treeContext);
1486     return JS_TRUE;
1487 }
1488 
1489 JSBool
1490 js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1491                              JSParseNode *pn)
1492 {
1493     jsdouble dval;
1494     jsint ival;
1495     JSAtom *valueAtom;
1496     JSAtomListElement *ale;
1497 
1498     /* XXX just do numbers for now */
1499     if (pn->pn_type == TOK_NUMBER) {
1500         dval = pn->pn_dval;
1501         valueAtom = (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival))
1502                     ? js_AtomizeInt(cx, ival, 0)
1503                     : js_AtomizeDouble(cx, dval, 0);
1504         if (!valueAtom)
1505             return JS_FALSE;
1506         ale = js_IndexAtom(cx, atom, &cg->constList);
1507         if (!ale)
1508             return JS_FALSE;
1509         ALE_SET_VALUE(ale, ATOM_KEY(valueAtom));
1510     }
1511     return JS_TRUE;
1512 }
1513 
1514 JSStmtInfo *
1515 js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSBool letdecl)
1516 {
1517     JSStmtInfo *stmt;
1518     JSObject *obj;
1519     JSScope *scope;
1520     JSScopeProperty *sprop;
1521     jsval v;
1522 
1523     for (stmt = tc->topScopeStmt; stmt; stmt = stmt->downScope) {
1524         if (stmt->type == STMT_WITH) {
1525             /* Ignore with statements enclosing a single let declaration. */
1526             if (letdecl)
1527                 continue;
1528             break;
1529         }
1530 
1531         /* Skip "maybe scope" statements that don't contain let bindings. */
1532         if (!(stmt->flags & SIF_SCOPE))
1533             continue;
1534 
1535         obj = ATOM_TO_OBJECT(stmt->atom);
1536         JS_ASSERT(LOCKED_OBJ_GET_CLASS(obj) == &js_BlockClass);
1537         scope = OBJ_SCOPE(obj);
1538         sprop = SCOPE_GET_PROPERTY(scope, ATOM_TO_JSID(atom));
1539         if (sprop) {
1540             JS_ASSERT(sprop->flags & SPROP_HAS_SHORTID);
1541 
1542             if (slotp) {
1543                 /*
1544                  * Use LOCKED_OBJ_GET_SLOT since we know obj is single-
1545                  * threaded and owned by this compiler activation.
1546                  */
1547                 v = LOCKED_OBJ_GET_SLOT(obj, JSSLOT_BLOCK_DEPTH);
1548                 JS_ASSERT(JSVAL_IS_INT(v) && JSVAL_TO_INT(v) >= 0);
1549                 *slotp = JSVAL_TO_INT(v) + sprop->shortid;
1550             }
1551             return stmt;
1552         }
1553     }
1554 
1555     if (slotp)
1556         *slotp = -1;
1557     return stmt;
1558 }
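
/*
 * Rough example of the slot computation above (illustrative numbers only;
 * the base depth depends on whatever else is on the stack): given nested
 * blocks
 *
 *     { let a, b; { let c; use(c); } }
 *
 * the inner block's JSSLOT_BLOCK_DEPTH accounts for the two outer locals, so
 * a reference to c maps to *slotp == 2 + c's shortid (0), i.e. local slot 2.
 */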
1559 
1560 JSBool
1561 js_LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1562                              jsval *vp)
1563 {
1564     JSBool ok;
1565     JSStackFrame *fp;
1566     JSStmtInfo *stmt;
1567     jsint slot;
1568     JSAtomListElement *ale;
1569     JSObject *obj, *pobj;
1570     JSProperty *prop;
1571     uintN attrs;
1572 
1573     /*
1574      * fp chases cg down the stack, but only until we reach the outermost cg.
1575      * This enables propagating consts from top-level into switch cases in a
1576      * function compiled along with the top-level script.  All stack frames
1577      * with matching code generators should be flagged with JSFRAME_COMPILING;
1578      * we check sanity here.
1579      */
1580     *vp = JSVAL_VOID;
1581     ok = JS_TRUE;
1582     fp = cx->fp;
1583     do {
1584         JS_ASSERT(fp->flags & JSFRAME_COMPILING);
1585 
1586         obj = fp->varobj;
1587         if (obj == fp->scopeChain) {
1588             /* XXX this will need revising when 'let const' is added. */
1589             stmt = js_LexicalLookup(&cg->treeContext, atom, &slot, JS_FALSE);
1590             if (stmt)
1591                 return JS_TRUE;
1592 
1593             ATOM_LIST_SEARCH(ale, &cg->constList, atom);
1594             if (ale) {
1595                 *vp = ALE_VALUE(ale);
1596                 return JS_TRUE;
1597             }
1598 
1599             /*
1600              * Try looking in the variable object for a direct property that
1601              * is readonly and permanent.  We know such a property can't be
1602              * shadowed by another property on obj's prototype chain, or a
1603              * with object or catch variable; nor can prop's value be changed,
1604              * nor can prop be deleted.
1605              */
1606             prop = NULL;
1607             if (OBJ_GET_CLASS(cx, obj) == &js_FunctionClass) {
1608                 ok = js_LookupHiddenProperty(cx, obj, ATOM_TO_JSID(atom),
1609                                              &pobj, &prop);
1610                 if (!ok)
1611                     break;
1612                 if (prop) {
1613 #ifdef DEBUG
1614                     JSScopeProperty *sprop = (JSScopeProperty *)prop;
1615 
1616                     /*
1617                      * Any hidden property must be a formal arg or local var,
1618                      * which will shadow a global const of the same name.
1619                      */
1620                     JS_ASSERT(sprop->getter == js_GetArgument ||
1621                               sprop->getter == js_GetLocalVariable);
1622 #endif
1623                     OBJ_DROP_PROPERTY(cx, pobj, prop);
1624                     break;
1625                 }
1626             }
1627 
1628             ok = OBJ_LOOKUP_PROPERTY(cx, obj, ATOM_TO_JSID(atom), &pobj, &prop);
1629             if (ok) {
1630                 if (pobj == obj &&
1631                     (fp->flags & (JSFRAME_EVAL | JSFRAME_COMPILE_N_GO))) {
1632                     /*
1633                      * We're compiling code that will be executed immediately,
1634                      * not re-executed against a different scope chain and/or
1635                      * variable object.  Therefore we can get constant values
1636                      * from our variable object here.
1637                      */
1638                     ok = OBJ_GET_ATTRIBUTES(cx, obj, ATOM_TO_JSID(atom), prop,
1639                                             &attrs);
1640                     if (ok && !(~attrs & (JSPROP_READONLY | JSPROP_PERMANENT)))
1641                         ok = OBJ_GET_PROPERTY(cx, obj, ATOM_TO_JSID(atom), vp);
1642                 }
1643                 if (prop)
1644                     OBJ_DROP_PROPERTY(cx, pobj, prop);
1645             }
1646             if (!ok || prop)
1647                 break;
1648         }
1649         fp = fp->down;
1650     } while ((cg = cg->parent) != NULL);
1651     return ok;
1652 }
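
/*
 * Illustrative example of the lookup above (not engine output): in
 *
 *     const TWO = 2;
 *     function f(x) { switch (x) { case TWO: return 1; } return 0; }
 *
 * when f is compiled along with the top-level script, the case label TWO can
 * be resolved at compile time to the value 2, letting EmitSwitch keep the
 * faster JSOP_TABLESWITCH/JSOP_LOOKUPSWITCH dispatch instead of falling back
 * to JSOP_CONDSWITCH.
 */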
1653 
1654 /*
1655  * Allocate an index invariant for all activations of the code being compiled
1656  * in cg, that can be used to store and fetch a reference to a cloned RegExp
1657  * object that shares the same JSRegExp private data created for the object
1658  * literal in pn->pn_atom.  We need clones to hold lastIndex and other direct
1659  * properties that should not be shared among threads sharing a precompiled
1660  * function or script.
1661  *
1662  * If the code being compiled is function code, allocate a reserved slot in
1663  * the cloned function object that shares its precompiled script with other
1664  * cloned function objects and with the compiler-created clone-parent.  There
1665  * are fun->nregexps such reserved slots in each function object cloned from
1666  * fun->object.  NB: during compilation, funobj slots must never be allocated,
1667  * because js_AllocSlot could hand out one of the slots that should be given
1668  * to a regexp clone.
1669  *
1670  * If the code being compiled is global code, reserve the fp->vars slot at
1671  * ALE_INDEX(ale), by ensuring that cg->treeContext.numGlobalVars is at least
1672  * one more than this index.  For global code, fp->vars is parallel to the
1673  * global script->atomMap.vector array, but possibly shorter for the common
1674  * case (where var declarations and regexp literals cluster toward the front
1675  * of the script or function body).
1676  *
1677  * Global variable name literals in script->atomMap have fast-global slot
1678  * numbers (stored as int-tagged jsvals) in the corresponding fp->vars array
1679  * element.  The atomIndex for a regexp object literal thus also addresses an
1680  * fp->vars element that is not used by any optimized global variable, so we
1681  * use that GC-scanned element to keep the regexp object clone alive, as well
1682  * as to lazily create and find it at run-time for the JSOP_REGEXP bytecode.
1683  *
1684  * In no case can cx->fp->varobj be a Call object here, because that implies
1685  * we are compiling eval code, in which case (cx->fp->flags & JSFRAME_EVAL)
1686  * is true, and js_GetToken will have already selected JSOP_OBJECT instead of
1687  * JSOP_REGEXP, to avoid all this RegExp object cloning business.
1688  *
1689  * Why clone regexp objects?  ECMA specifies that when a regular expression
1690  * literal is scanned, a RegExp object is created.  In the spec, compilation
1691  * and execution happen indivisibly, but in this implementation and many of
1692  * its embeddings, code is precompiled early and re-executed in multiple
1693  * threads, or using multiple global objects, or both, for efficiency.
1694  *
1695  * In such cases, naively following ECMA leads to wrongful sharing of RegExp
1696  * objects, which makes for collisions on the lastIndex property (especially
1697  * for global regexps) and on any ad-hoc properties.  Also, __proto__ and
1698  * __parent__ refer to the pre-compilation prototype and global objects, a
1699  * pigeon-hole problem for instanceof tests.
1700  */
1701 static JSBool
1702 IndexRegExpClone(JSContext *cx, JSParseNode *pn, JSAtomListElement *ale,
1703                  JSCodeGenerator *cg)
1704 {
1705     JSObject *varobj, *reobj;
1706     JSClass *clasp;
1707     JSFunction *fun;
1708     JSRegExp *re;
1709     uint16 *countPtr;
1710     uintN cloneIndex;
1711 
1712     JS_ASSERT(!(cx->fp->flags & (JSFRAME_EVAL | JSFRAME_COMPILE_N_GO)));
1713 
1714     varobj = cx->fp->varobj;
1715     clasp = OBJ_GET_CLASS(cx, varobj);
1716     if (clasp == &js_FunctionClass) {
1717         fun = (JSFunction *) JS_GetPrivate(cx, varobj);
1718         countPtr = &fun->u.i.nregexps;
1719         cloneIndex = *countPtr;
1720     } else {
1721         JS_ASSERT(clasp != &js_CallClass);
1722         countPtr = &cg->treeContext.numGlobalVars;
1723         cloneIndex = ALE_INDEX(ale);
1724     }
1725 
1726     if ((cloneIndex + 1) >> 16) {
1727         JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
1728                              JSMSG_NEED_DIET, js_script_str);
1729         return JS_FALSE;
1730     }
1731     if (cloneIndex >= *countPtr)
1732         *countPtr = cloneIndex + 1;
1733 
1734     reobj = ATOM_TO_OBJECT(pn->pn_atom);
1735     JS_ASSERT(OBJ_GET_CLASS(cx, reobj) == &js_RegExpClass);
1736     re = (JSRegExp *) JS_GetPrivate(cx, reobj);
1737     re->cloneIndex = cloneIndex;
1738     return JS_TRUE;
1739 }
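
/*
 * A concrete (illustrative) case of the sharing hazard described above: if
 * two threads, or two global objects, re-execute a precompiled script
 * containing
 *
 *     var re = /x/g;
 *     re.exec(s);
 *
 * then without cloning both would advance lastIndex on the single RegExp
 * object created at compile time.  The cloneIndex reserved here lets
 * JSOP_REGEXP find or lazily create a per-activation clone instead.
 */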
1740 
1741 /*
1742  * Emit a bytecode and its 2-byte constant (atom) index immediate operand.
1743  * If the atomIndex requires more than 2 bytes, emit a prefix op whose 24-bit
1744  * immediate operand indexes the atom in script->atomMap.
1745  *
1746  * If op has JOF_NAME mode, emit JSOP_FINDNAME to find and push the object in
1747  * the scope chain in which the literal name was found, followed by the name
1748  * as a string.  This enables us to use the JOF_ELEM counterpart to op.
1749  *
1750  * Otherwise, if op has JOF_PROP mode, emit JSOP_LITERAL before op, to push
1751  * the atom's value key.  For JOF_PROP ops, the object being operated on has
1752  * already been pushed, and JSOP_LITERAL will push the id, leaving the stack
1753  * in the proper state for a JOF_ELEM counterpart.
1754  *
1755  * Otherwise, emit JSOP_LITOPX to push the atom index, then perform a special
1756  * dispatch on op, but getting op's atom index from the stack instead of from
1757  * an unsigned 16-bit immediate operand.
1758  */
1759 static JSBool
1760 EmitAtomIndexOp(JSContext *cx, JSOp op, jsatomid atomIndex, JSCodeGenerator *cg)
1761 {
1762     uint32 mode;
1763     JSOp prefixOp;
1764     ptrdiff_t off;
1765     jsbytecode *pc;
1766 
1767     if (atomIndex >= JS_BIT(16)) {
1768         mode = (js_CodeSpec[op].format & JOF_MODEMASK);
1769         if (op != JSOP_SETNAME) {
1770             prefixOp = ((mode != JOF_NAME && mode != JOF_PROP) ||
1771 #if JS_HAS_XML_SUPPORT
1772                         op == JSOP_GETMETHOD ||
1773                         op == JSOP_SETMETHOD ||
1774 #endif
1775                         op == JSOP_SETCONST)
1776                        ? JSOP_LITOPX
1777                        : (mode == JOF_NAME)
1778                        ? JSOP_FINDNAME
1779                        : JSOP_LITERAL;
1780             off = js_EmitN(cx, cg, prefixOp, 3);
1781             if (off < 0)
1782                 return JS_FALSE;
1783             pc = CG_CODE(cg, off);
1784             SET_LITERAL_INDEX(pc, atomIndex);
1785         }
1786 
1787         switch (op) {
1788           case JSOP_DECNAME:    op = JSOP_DECELEM; break;
1789           case JSOP_DECPROP:    op = JSOP_DECELEM; break;
1790           case JSOP_DELNAME:    op = JSOP_DELELEM; break;
1791           case JSOP_DELPROP:    op = JSOP_DELELEM; break;
1792           case JSOP_FORNAME:    op = JSOP_FORELEM; break;
1793           case JSOP_FORPROP:    op = JSOP_FORELEM; break;
1794           case JSOP_GETPROP:    op = JSOP_GETELEM; break;
1795           case JSOP_GETXPROP:   op = JSOP_GETXELEM; break;
1796           case JSOP_IMPORTPROP: op = JSOP_IMPORTELEM; break;
1797           case JSOP_INCNAME:    op = JSOP_INCELEM; break;
1798           case JSOP_INCPROP:    op = JSOP_INCELEM; break;
1799           case JSOP_INITPROP:   op = JSOP_INITELEM; break;
1800           case JSOP_NAME:       op = JSOP_GETELEM; break;
1801           case JSOP_NAMEDEC:    op = JSOP_ELEMDEC; break;
1802           case JSOP_NAMEINC:    op = JSOP_ELEMINC; break;
1803           case JSOP_PROPDEC:    op = JSOP_ELEMDEC; break;
1804           case JSOP_PROPINC:    op = JSOP_ELEMINC; break;
1805           case JSOP_BINDNAME:   return JS_TRUE;
1806           case JSOP_SETNAME:    op = JSOP_SETELEM; break;
1807           case JSOP_SETPROP:    op = JSOP_SETELEM; break;
1808 #if JS_HAS_EXPORT_IMPORT
1809           case JSOP_EXPORTNAME:
1810             ReportStatementTooLarge(cx, cg);
1811             return JS_FALSE;
1812 #endif
1813           default:
1814 #if JS_HAS_XML_SUPPORT
1815             JS_ASSERT(mode == 0 || op == JSOP_SETCONST ||
1816                       op == JSOP_GETMETHOD || op == JSOP_SETMETHOD);
1817 #else
1818             JS_ASSERT(mode == 0 || op == JSOP_SETCONST);
1819 #endif
1820             break;
1821         }
1822 
1823         return js_Emit1(cx, cg, op) >= 0;
1824     }
1825 
1826     EMIT_UINT16_IMM_OP(op, atomIndex);
1827     return JS_TRUE;
1828 }
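
/*
 * Rough sketch of the big-index expansion above (not literal disassembly):
 * with 64K or more atoms, a property get cannot encode its atom index in the
 * usual 16-bit immediate, so instead of
 *
 *     JSOP_GETPROP <atomIndex>
 *
 * we emit
 *
 *     JSOP_LITERAL <atomIndex24>   (pushes the property name)
 *     JSOP_GETELEM                 (indexes using the pushed name)
 *
 * JOF_NAME ops use JSOP_FINDNAME similarly, pushing the scope object and the
 * name before the JOF_ELEM counterpart runs.
 */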
1829 
1830 /*
1831  * Slight sugar for EmitAtomIndexOp, again accessing cx and cg from the macro
1832  * caller's lexical environment, and embedding a false return on error.
1833  * XXXbe hey, who checks for fun->nvars and fun->nargs overflow?!
1834  */
1835 #define EMIT_ATOM_INDEX_OP(op, atomIndex)                                     \
1836     JS_BEGIN_MACRO                                                            \
1837         if (!EmitAtomIndexOp(cx, op, atomIndex, cg))                          \
1838             return JS_FALSE;                                                  \
1839     JS_END_MACRO
1840 
1841 static JSBool
1842 EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
1843 {
1844     JSAtomListElement *ale;
1845 
1846     ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
1847     if (!ale)
1848         return JS_FALSE;
1849     if (op == JSOP_REGEXP && !IndexRegExpClone(cx, pn, ale, cg))
1850         return JS_FALSE;
1851     return EmitAtomIndexOp(cx, op, ALE_INDEX(ale), cg);
1852 }
1853 
1854 /*
1855  * This routine tries to optimize name gets and sets to stack slot loads and
1856  * stores, given the variables object and scope chain in cx's top frame, the
1857  * compile-time context in tc, and a TOK_NAME node pn.  It returns false on
1858  * error, true on success.
1859  *
1860  * The caller can inspect pn->pn_slot for a non-negative slot number to tell
1861  * whether optimization occurred, in which case BindNameToSlot also updated
1862  * pn->pn_op.  If pn->pn_slot is still -1 on return, pn->pn_op nevertheless
1863  * may have been optimized, e.g., from JSOP_NAME to JSOP_ARGUMENTS.  Whether
1864  * or not pn->pn_op was modified, if this function finds an argument or local
1865  * variable name, pn->pn_attrs will contain the property's attributes after a
1866  * successful return.
1867  *
1868  * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1869  * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
1870  * in js_EmitTree.
1871  */
1872 static JSBool
1873 BindNameToSlot(JSContext *cx, JSTreeContext *tc, JSParseNode *pn,
1874                JSBool letdecl)
1875 {
1876     JSAtom *atom;
1877     JSStmtInfo *stmt;
1878     jsint slot;
1879     JSOp op;
1880     JSStackFrame *fp;
1881     JSObject *obj, *pobj;
1882     JSClass *clasp;
1883     JSBool optimizeGlobals;
1884     JSPropertyOp getter;
1885     uintN attrs;
1886     JSAtomListElement *ale;
1887     JSProperty *prop;
1888     JSScopeProperty *sprop;
1889 
1890     JS_ASSERT(pn->pn_type == TOK_NAME);
1891     if (pn->pn_slot >= 0 || pn->pn_op == JSOP_ARGUMENTS)
1892         return JS_TRUE;
1893 
1894     /* QNAME references can never be optimized to use arg/var storage. */
1895     if (pn->pn_op == JSOP_QNAMEPART)
1896         return JS_TRUE;
1897 
1898     /*
1899      * We can't optimize if we are compiling a with statement and its body,
1900      * or we're in a catch block whose exception variable has the same name
1901      * as this node.  FIXME: we should be able to optimize catch vars to be
1902      * block-locals.
1903      */
1904     atom = pn->pn_atom;
1905     stmt = js_LexicalLookup(tc, atom, &slot, letdecl);
1906     if (stmt) {
1907         if (stmt->type == STMT_WITH)
1908             return JS_TRUE;
1909 
1910         JS_ASSERT(stmt->flags & SIF_SCOPE);
1911         JS_ASSERT(slot >= 0);
1912         op = pn->pn_op;
1913         switch (op) {
1914           case JSOP_NAME:     op = JSOP_GETLOCAL; break;
1915           case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
1916           case JSOP_INCNAME:  op = JSOP_INCLOCAL; break;
1917           case JSOP_NAMEINC:  op = JSOP_LOCALINC; break;
1918           case JSOP_DECNAME:  op = JSOP_DECLOCAL; break;
1919           case JSOP_NAMEDEC:  op = JSOP_LOCALDEC; break;
1920           case JSOP_FORNAME:  op = JSOP_FORLOCAL; break;
1921           case JSOP_DELNAME:  op = JSOP_FALSE; break;
1922           default: JS_ASSERT(0);
1923         }
1924         if (op != pn->pn_op) {
1925             pn->pn_op = op;
1926             pn->pn_slot = slot;
1927         }
1928         return JS_TRUE;
1929     }
1930 
1931     /*
1932      * A Script object can be used to split an eval into a compile step done
1933      * at construction time, and an execute step done separately, possibly in
1934      * a different scope altogether.  We therefore cannot do any name-to-slot
1935      * optimizations, but must lookup names at runtime.  Note that script_exec
1936      * ensures that its caller's frame has a Call object, so arg and var name
1937      * lookups will succeed.
1938      */
1939     fp = cx->fp;
1940     if (fp->flags & JSFRAME_SCRIPT_OBJECT)
1941         return JS_TRUE;
1942 
1943     /*
1944      * We can't optimize if var and closure (a local function not in a larger
1945      * expression and not at top-level within another's body) collide.
1946      * XXX suboptimal: keep track of colliding names and deoptimize only those
1947      */
1948     if (tc->flags & TCF_FUN_CLOSURE_VS_VAR)
1949         return JS_TRUE;
1950 
1951     /*
1952      * We can't optimize if we're not compiling a function body, whether via
1953      * eval, or directly when compiling a function statement or expression.
1954      */
1955     obj = fp->varobj;
1956     clasp = OBJ_GET_CLASS(cx, obj);
1957     if (clasp != &js_FunctionClass && clasp != &js_CallClass) {
1958         /* Check for an eval or debugger frame. */
1959         if (fp->flags & JSFRAME_SPECIAL)
1960             return JS_TRUE;
1961 
1962         /*
1963          * Optimize global variable accesses if there are at least 100 uses
1964      * in unambiguous contexts, or failing that, if at least half of all the
1965          * uses of global vars/consts/functions are in loops.
1966          */
1967         optimizeGlobals = (tc->globalUses >= 100 ||
1968                            (tc->loopyGlobalUses &&
1969                             tc->loopyGlobalUses >= tc->globalUses / 2));
1970         if (!optimizeGlobals)
1971             return JS_TRUE;
1972     } else {
1973         optimizeGlobals = JS_FALSE;
1974     }
1975 
1976     /*
1977      * We can't optimize if we are in an eval called inside a with statement.
1978      */
1979     if (fp->scopeChain != obj)
1980         return JS_TRUE;
1981 
1982     op = pn->pn_op;
1983     getter = NULL;
1984 #ifdef __GNUC__
1985     attrs = slot = 0;   /* quell GCC overwarning */
1986 #endif
1987     if (optimizeGlobals) {
1988         /*
1989          * We are optimizing global variables, and there is no pre-existing
1990          * global property named atom.  If atom was declared via const or var,
1991          * optimize pn to access fp->vars using the appropriate JOF_QVAR op.
1992          */
1993         ATOM_LIST_SEARCH(ale, &tc->decls, atom);
1994         if (!ale) {
1995             /* Use precedes declaration, or name is never declared. */
1996             return JS_TRUE;
1997         }
1998 
1999         attrs = (ALE_JSOP(ale) == JSOP_DEFCONST)
2000                 ? JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT
2001                 : JSPROP_ENUMERATE | JSPROP_PERMANENT;
2002 
2003         /* Index atom so we can map fast global number to name. */
2004         JS_ASSERT(tc->flags & TCF_COMPILING);
2005         ale = js_IndexAtom(cx, atom, &((JSCodeGenerator *) tc)->atomList);
2006         if (!ale)
2007             return JS_FALSE;
2008 
2009         /* Defend against tc->numGlobalVars 16-bit overflow. */
2010         slot = ALE_INDEX(ale);
2011         if ((slot + 1) >> 16)
2012             return JS_TRUE;
2013 
2014         if ((uint16)(slot + 1) > tc->numGlobalVars)
2015             tc->numGlobalVars = (uint16)(slot + 1);
2016     } else {
2017         /*
2018          * We may be able to optimize name to stack slot. Look for an argument
2019          * or variable property in the function, or its call object, not found
2020          * in any prototype object.  Rewrite pn_op and update pn accordingly.
2021          * NB: We know that JSOP_DELNAME on an argument or variable evaluates
2022          * to false, due to JSPROP_PERMANENT.
2023          */
2024         if (!js_LookupHiddenProperty(cx, obj, ATOM_TO_JSID(atom), &pobj, &prop))
2025             return JS_FALSE;
2026         sprop = (JSScopeProperty *) prop;
2027         if (sprop) {
2028             if (pobj == obj) {
2029                 getter = sprop->getter;
2030                 attrs = sprop->attrs;
2031                 slot = (sprop->flags & SPROP_HAS_SHORTID) ? sprop->shortid : -1;
2032             }
2033             OBJ_DROP_PROPERTY(cx, pobj, prop);
2034         }
2035     }
2036 
2037     if (optimizeGlobals || getter) {
2038         if (optimizeGlobals) {
2039             switch (op) {
2040               case JSOP_NAME:     op = JSOP_GETGVAR; break;
2041               case JSOP_SETNAME:  op = JSOP_SETGVAR; break;
2042               case JSOP_SETCONST: /* NB: no change */ break;
2043               case JSOP_INCNAME:  op = JSOP_INCGVAR; break;
2044               case JSOP_NAMEINC:  op = JSOP_GVARINC; break;
2045               case JSOP_DECNAME:  op = JSOP_DECGVAR; break;
2046               case JSOP_NAMEDEC:  op = JSOP_GVARDEC; break;
2047               case JSOP_FORNAME:  /* NB: no change */ break;
2048               case JSOP_DELNAME:  /* NB: no change */ break;
2049               default: JS_ASSERT(0);
2050             }
2051         } else if (getter == js_GetLocalVariable ||
2052                    getter == js_GetCallVariable) {
2053             switch (op) {
2054               case JSOP_NAME:     op = JSOP_GETVAR; break;
2055               case JSOP_SETNAME:  op = JSOP_SETVAR; break;
2056               case JSOP_SETCONST: op = JSOP_SETVAR; break;
2057               case JSOP_INCNAME:  op = JSOP_INCVAR; break;
2058               case JSOP_NAMEINC:  op = JSOP_VARINC; break;
2059               case JSOP_DECNAME:  op = JSOP_DECVAR; break;
2060               case JSOP_NAMEDEC:  op = JSOP_VARDEC; break;
2061               case JSOP_FORNAME:  op = JSOP_FORVAR; break;
2062               case JSOP_DELNAME:  op = JSOP_FALSE; break;
2063               default: JS_ASSERT(0);
2064             }
2065         } else if (getter == js_GetArgument ||
2066                    (getter == js_CallClass.getProperty &&
2067                     fp->fun && (uintN) slot < fp->fun->nargs)) {
2068             switch (op) {
2069               case JSOP_NAME:     op = JSOP_GETARG; break;
2070               case JSOP_SETNAME:  op = JSOP_SETARG; break;
2071               case JSOP_INCNAME:  op = JSOP_INCARG; break;
2072               case JSOP_NAMEINC:  op = JSOP_ARGINC; break;
2073               case JSOP_DECNAME:  op = JSOP_DECARG; break;
2074               case JSOP_NAMEDEC:  op = JSOP_ARGDEC; break;
2075               case JSOP_FORNAME:  op = JSOP_FORARG; break;
2076               case JSOP_DELNAME:  op = JSOP_FALSE; break;
2077               default: JS_ASSERT(0);
2078             }
2079         }
2080         if (op != pn->pn_op) {
2081             pn->pn_op = op;
2082             pn->pn_slot = slot;
2083         }
2084         pn->pn_attrs = attrs;
2085     }
2086 
2087     if (pn->pn_slot < 0) {
2088         /*
2089          * We couldn't optimize pn, so it's not a global or local slot name.
2090          * Now we must check for the predefined arguments variable.  It may be
2091          * overridden by assignment, in which case the function is heavyweight
2092          * and the interpreter will look up 'arguments' in the function's call
2093          * object.
2094          */
2095         if (pn->pn_op == JSOP_NAME &&
2096             atom == cx->runtime->atomState.argumentsAtom) {
2097             pn->pn_op = JSOP_ARGUMENTS;
2098             return JS_TRUE;
2099         }
2100 
2101         tc->flags |= TCF_FUN_USES_NONLOCALS;
2102     }
2103     return JS_TRUE;
2104 }
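
/*
 * Illustrative summary of the rewrites above (not exhaustive; the opcode
 * chosen depends on context):
 *
 *   function f(a) { var b; return a + b; }
 *     - 'a' becomes JSOP_GETARG and 'b' becomes JSOP_GETVAR, each with a slot.
 *
 *   function g() { return arguments.length; }
 *     - 'arguments' is rewritten to JSOP_ARGUMENTS; no slot is assigned.
 *
 *   function h(o) { with (o) { return x; } }
 *     - 'x' cannot be optimized (it may resolve inside o), so JSOP_NAME stays.
 *
 * Global names are rewritten to JOF_QVAR (JSOP_*GVAR) ops only when the
 * heuristic above (many uses, or at least half of them in loops) says the
 * optimization is worthwhile.
 */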
2105 
2106 /*
2107  * If pn contains a useful expression, return true with *answer set to true.
2108  * If pn contains a useless expression, return true with *answer set to false.
2109  * Return false on error.
2110  *
2111  * The caller should initialize *answer to false and invoke this function on
2112  * an expression statement or similar subtree to decide whether the tree could
2113  * produce code that has any side effects.  For an expression statement, we
2114  * define useless code as code with no side effects, because the main effect,
2115  * the value left on the stack after the code executes, will be discarded by a
2116  * pop bytecode.
2117  */
2118 static JSBool
2119 CheckSideEffects(JSContext *cx, JSTreeContext *tc, JSParseNode *pn,
2120                  JSBool *answer)
2121 {
2122     JSBool ok;
2123     JSFunction *fun;
2124     JSParseNode *pn2;
2125 
2126     ok = JS_TRUE;
2127     if (!pn || *answer)
2128         return ok;
2129 
2130     switch (pn->pn_arity) {
2131       case PN_FUNC:
2132         /*
2133          * A named function is presumed useful: we can't yet know that it is
2134          * not called.  The side effects are the creation of a scope object
2135          * to parent this function object, and the binding of the function's
2136          * name in that scope object.  See comments at case JSOP_NAMEDFUNOBJ:
2137          * in jsinterp.c.
2138          */
2139         fun = (JSFunction *) JS_GetPrivate(cx, ATOM_TO_OBJECT(pn->pn_funAtom));
2140         if (fun->atom)
2141             *answer = JS_TRUE;
2142         break;
2143 
2144       case PN_LIST:
2145         if (pn->pn_type == TOK_NEW ||
2146             pn->pn_type == TOK_LP ||
2147             pn->pn_type == TOK_LB ||
2148             pn->pn_type == TOK_RB ||
2149             pn->pn_type == TOK_RC) {
2150             /*
2151              * All invocation operations (construct: TOK_NEW, call: TOK_LP)
2152              * are presumed to be useful, because they may have side effects
2153              * even if their main effect (their return value) is discarded.
2154              *
2155              * TOK_LB binary trees of 3 or more nodes are flattened into lists
2156              * to avoid too much recursion.  All such lists must be presumed
2157              * to be useful because each index operation could invoke a getter
2158              * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
2159              * does not apply here: arguments[i][j] might invoke a getter).
2160              *
2161              * Array and object initializers (TOK_RB and TOK_RC lists) must be
2162              * considered useful, because they are sugar for constructor calls
2163              * (to Array and Object, respectively).
2164              */
2165             *answer = JS_TRUE;
2166         } else {
2167             for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
2168                 ok &= CheckSideEffects(cx, tc, pn2, answer);
2169         }
2170         break;
2171 
2172       case PN_TERNARY:
2173         ok = CheckSideEffects(cx, tc, pn->pn_kid1, answer) &&
2174              CheckSideEffects(cx, tc, pn->pn_kid2, answer) &&
2175              CheckSideEffects(cx, tc, pn->pn_kid3, answer);
2176         break;
2177 
2178       case PN_BINARY:
2179         if (pn->pn_type == TOK_ASSIGN) {
2180             /*
2181              * Assignment is presumed to be useful, even if the next operation
2182              * is another assignment overwriting this one's ostensible effect,
2183              * because the left operand may be a property with a setter that
2184              * has side effects.
2185              *
2186              * The only exception is assignment of a useless value to a const
2187              * declared in the function currently being compiled.
2188              */
2189             pn2 = pn->pn_left;
2190             if (pn2->pn_type != TOK_NAME) {
2191                 *answer = JS_TRUE;
2192             } else {
2193                 if (!BindNameToSlot(cx, tc, pn2, JS_FALSE))
2194                     return JS_FALSE;
2195                 if (!CheckSideEffects(cx, tc, pn->pn_right, answer))
2196                     return JS_FALSE;
2197                 if (!*answer &&
2198                     (pn2->pn_slot < 0 || !(pn2->pn_attrs & JSPROP_READONLY))) {
2199                     *answer = JS_TRUE;
2200                 }
2201             }
2202         } else {
2203             if (pn->pn_type == TOK_LB) {
2204                 pn2 = pn->pn_left;
2205                 if (pn2->pn_type == TOK_NAME &&
2206                     !BindNameToSlot(cx, tc, pn2, JS_FALSE)) {
2207                     return JS_FALSE;
2208                 }
2209                 if (pn2->pn_op != JSOP_ARGUMENTS) {
2210                     /*
2211                      * Any indexed property reference could call a getter with
2212                      * side effects, except for arguments[i] where arguments is
2213                      * unambiguous.
2214                      */
2215                     *answer = JS_TRUE;
2216                 }
2217             }
2218             ok = CheckSideEffects(cx, tc, pn->pn_left, answer) &&
2219                  CheckSideEffects(cx, tc, pn->pn_right, answer);
2220         }
2221         break;
2222 
2223       case PN_UNARY:
2224         if (pn->pn_type == TOK_INC || pn->pn_type == TOK_DEC ||
2225             pn->pn_type == TOK_THROW ||
2226 #if JS_HAS_GENERATORS
2227             pn->pn_type == TOK_YIELD ||
2228 #endif
2229             pn->pn_type == TOK_DEFSHARP) {
2230             /* All these operations have effects that we must commit. */
2231             *answer = JS_TRUE;
2232         } else if (pn->pn_type == TOK_DELETE) {
2233             pn2 = pn->pn_kid;
2234             switch (pn2->pn_type) {
2235               case TOK_NAME:
2236               case TOK_DOT:
2237 #if JS_HAS_XML_SUPPORT
2238               case TOK_DBLDOT:
2239 #endif
2240 #if JS_HAS_LVALUE_RETURN
2241               case TOK_LP:
2242 #endif
2243               case TOK_LB:
2244                 /* All these delete addressing modes have effects too. */
2245                 *answer = JS_TRUE;
2246                 break;
2247               default:
2248                 ok = CheckSideEffects(cx, tc, pn2, answer);
2249                 break;
2250             }
2251         } else {
2252             ok = CheckSideEffects(cx, tc, pn->pn_kid, answer);
2253         }
2254         break;
2255 
2256       case PN_NAME:
2257         /*
2258          * Take care to avoid trying to bind a label name (labels, both for
2259          * statements and property values in object initialisers, have pn_op
2260          * defaulted to JSOP_NOP).
2261          */
2262         if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
2263             if (!BindNameToSlot(cx, tc, pn, JS_FALSE))
2264                 return JS_FALSE;
2265             if (pn->pn_slot < 0 && pn->pn_op != JSOP_ARGUMENTS) {
2266                 /*
2267                  * Not an argument or local variable use, so this expression
2268                  * could invoke a getter that has side effects.
2269                  */
2270                 *answer = JS_TRUE;
2271             }
2272         }
2273         pn2 = pn->pn_expr;
2274         if (pn->pn_type == TOK_DOT) {
2275             if (pn2->pn_type == TOK_NAME &&
2276                 !BindNameToSlot(cx, tc, pn2, JS_FALSE)) {
2277                 return JS_FALSE;
2278             }
2279             if (!(pn2->pn_op == JSOP_ARGUMENTS &&
2280                   pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
2281                 /*
2282                  * Any dotted property reference could call a getter, except
2283                  * for arguments.length where arguments is unambiguous.
2284                  */
2285                 *answer = JS_TRUE;
2286             }
2287         }
2288         ok = CheckSideEffects(cx, tc, pn2, answer);
2289         break;
2290 
2291       case PN_NULLARY:
2292         if (pn->pn_type == TOK_DEBUGGER)
2293             *answer = JS_TRUE;
2294         break;
2295     }
2296     return ok;
2297 }
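
/*
 * Examples of the classification above (illustrative): the expression
 * statement 'a.b + f();' is useful because both the property get and the
 * call may have side effects, whereas 'arguments.length;' or a lone 'x;'
 * naming an argument or local variable is useless, since the only thing it
 * produces is a stack value that the trailing pop discards.
 */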
2298 
2299 /*
2300  * Secret handshake with js_EmitTree's TOK_LP/TOK_NEW case logic, to flag all
2301  * uses of JSOP_GETMETHOD that implicitly qualify the method property's name
2302  * with a function:: prefix.  All other JSOP_GETMETHOD and JSOP_SETMETHOD uses
2303  * must be explicit, so we need a distinct source note (SRC_METHODBASE rather
2304  * than SRC_PCBASE) for round-tripping through the beloved decompiler.
2305  */
2306 #define JSPROP_IMPLICIT_FUNCTION_NAMESPACE      0x100
2307 
2308 static jssrcnote
2309 SrcNoteForPropOp(JSParseNode *pn, JSOp op)
2310 {
2311     return ((op == JSOP_GETMETHOD &&
2312              !(pn->pn_attrs & JSPROP_IMPLICIT_FUNCTION_NAMESPACE)) ||
2313             op == JSOP_SETMETHOD)
2314            ? SRC_METHODBASE
2315            : SRC_PCBASE;
2316 }
2317 
2318 static JSBool
2319 EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2320 {
2321     JSParseNode *pn2, *pndot, *pnup, *pndown;
2322     ptrdiff_t top;
2323 
2324     pn2 = pn->pn_expr;
2325     if (op == JSOP_GETPROP &&
2326         pn->pn_type == TOK_DOT &&
2327         pn2->pn_type == TOK_NAME) {
2328         /* Try to optimize arguments.length into JSOP_ARGCNT. */
2329         if (!BindNameToSlot(cx, &cg->treeContext, pn2, JS_FALSE))
2330             return JS_FALSE;
2331         if (pn2->pn_op == JSOP_ARGUMENTS &&
2332             pn->pn_atom == cx->runtime->atomState.lengthAtom) {
2333             return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
2334         }
2335     }
2336 
2337     /*
2338      * If the object operand is also a dotted property reference, reverse the
2339      * list linked via pn_expr temporarily so we can iterate over it from the
2340      * bottom up (reversing again as we go), to avoid excessive recursion.
2341      */
2342     if (pn2->pn_type == TOK_DOT) {
2343         pndot = pn2;
2344         pnup = NULL;
2345         top = CG_OFFSET(cg);
2346         for (;;) {
2347             /* Reverse pndot->pn_expr to point up, not down. */
2348             pndot->pn_offset = top;
2349             pndown = pndot->pn_expr;
2350             pndot->pn_expr = pnup;
2351             if (pndown->pn_type != TOK_DOT)
2352                 break;
2353             pnup = pndot;
2354             pndot = pndown;
2355         }
2356 
2357         /* pndown is a primary expression, not a dotted property reference. */
2358         if (!js_EmitTree(cx, cg, pndown))
2359             return JS_FALSE;
2360 
2361         do {
2362             /* Walk back up the list, emitting annotated name ops. */
2363             if (js_NewSrcNote2(cx, cg, SrcNoteForPropOp(pndot, pndot->pn_op),
2364                                CG_OFFSET(cg) - pndown->pn_offset) < 0) {
2365                 return JS_FALSE;
2366             }
2367             if (!EmitAtomOp(cx, pndot, pndot->pn_op, cg))
2368                 return JS_FALSE;
2369 
2370             /* Reverse the pn_expr link again. */
2371             pnup = pndot->pn_expr;
2372             pndot->pn_expr = pndown;
2373             pndown = pndot;
2374         } while ((pndot = pnup) != NULL);
2375     } else {
2376         if (!js_EmitTree(cx, cg, pn2))
2377             return JS_FALSE;
2378     }
2379 
2380     if (js_NewSrcNote2(cx, cg, SrcNoteForPropOp(pn, op),
2381                        CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2382         return JS_FALSE;
2383     }
2384     if (!pn->pn_atom) {
2385         JS_ASSERT(op == JSOP_IMPORTALL);
2386         if (js_Emit1(cx, cg, op) < 0)
2387             return JS_FALSE;
2388     } else {
2389         if (!EmitAtomOp(cx, pn, op, cg))
2390             return JS_FALSE;
2391     }
2392     return JS_TRUE;
2393 }
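
/*
 * Minimal self-contained sketch (hypothetical type and names, not engine
 * code) of the pointer-reversal technique EmitPropOp uses above: reverse the
 * down-links of a chain in place so the innermost node can be visited first,
 * then restore the links while walking back up, all without recursion.
 */
struct ExampleNode {
    struct ExampleNode *link;       /* points "down" toward the chain's base */
    int datum;
};

static void
ExampleBottomUpWalk(struct ExampleNode *top,
                    void (*visit)(struct ExampleNode *))
{
    struct ExampleNode *node, *up, *down;

    /* First pass: make each link point "up" to its parent node. */
    node = top;
    up = NULL;
    while (node->link) {            /* a null link marks the base node */
        down = node->link;
        node->link = up;
        up = node;
        node = down;
    }

    visit(node);                    /* base node first */

    /* Second pass: walk back up, restoring the original links as we go. */
    down = node;
    node = up;
    while (node) {
        up = node->link;            /* parent saved by the first pass */
        node->link = down;          /* restore the original down-link */
        visit(node);
        down = node;
        node = up;
    }
}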
2394 
2395 static JSBool
2396 EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2397 {
2398     ptrdiff_t top;
2399     JSParseNode *left, *right, *next, ltmp, rtmp;
2400     jsint slot;
2401 
2402     top = CG_OFFSET(cg);
2403     if (pn->pn_arity == PN_LIST) {
2404         /* Left-associative operator chain to avoid too much recursion. */
2405         JS_ASSERT(pn->pn_op == JSOP_GETELEM || pn->pn_op == JSOP_IMPORTELEM);
2406         JS_ASSERT(pn->pn_count >= 3);
2407         left = pn->pn_head;
2408         right = PN_LAST(pn);
2409         next = left->pn_next;
2410         JS_ASSERT(next != right);
2411 
2412         /*
2413          * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2414          * one or more index expression and JSOP_GETELEM op pairs.
2415          */
2416         if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
2417             if (!BindNameToSlot(cx, &cg->treeContext, left, JS_FALSE))
2418                 return JS_FALSE;
2419             if (left->pn_op == JSOP_ARGUMENTS &&
2420                 JSDOUBLE_IS_INT(next->pn_dval, slot) &&
2421                 (jsuint)slot < JS_BIT(16)) {
2422                 left->pn_offset = next->pn_offset = top;
2423                 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2424                 left = next;
2425                 next = left->pn_next;
2426             }
2427         }
2428 
2429         /*
2430          * Check whether we generated JSOP_ARGSUB, just above, and have only
2431          * one more index expression to emit.  Given arguments[0][j], we must
2432          * skip the while loop altogether, falling through to emit code for j
2433          * (in the subtree referenced by right), followed by the annotated op,
2434          * at the bottom of this function.
2435          */
2436         JS_ASSERT(next != right || pn->pn_count == 3);
2437         if (left == pn->pn_head) {
2438             if (!js_EmitTree(cx, cg, left))
2439                 return JS_FALSE;
2440         }
2441         while (next != right) {
2442             if (!js_EmitTree(cx, cg, next))
2443                 return JS_FALSE;
2444             if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2445                 return JS_FALSE;
2446             if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
2447                 return JS_FALSE;
2448             next = next->pn_next;
2449         }
2450     } else {
2451         if (pn->pn_arity == PN_NAME) {
2452             /*
2453              * Set left and right so pn appears to be a TOK_LB node, instead
2454              * of a TOK_DOT node.  See the TOK_FOR/IN case in js_EmitTree, and
2455              * EmitDestructuringOps nearer below.  In the destructuring case,
2456              * the base expression (pn_expr) of the name may be null, which
2457              * means we have to emit a JSOP_BINDNAME.
2458              */
2459             left = pn->pn_expr;
2460             if (!left) {
2461                 left = &ltmp;
2462                 left->pn_type = TOK_OBJECT;
2463                 left->pn_op = JSOP_BINDNAME;
2464                 left->pn_arity = PN_NULLARY;
2465                 left->pn_pos = pn->pn_pos;
2466                 left->pn_atom = pn->pn_atom;
2467             }
2468             right = &rtmp;
2469             right->pn_type = TOK_STRING;
2470             JS_ASSERT(ATOM_IS_STRING(pn->pn_atom));
2471             right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom))
2472                            ? JSOP_QNAMEPART
2473                            : JSOP_STRING;
2474             right->pn_arity = PN_NULLARY;
2475             right->pn_pos = pn->pn_pos;
2476             right->pn_atom = pn->pn_atom;
2477         } else {
2478             JS_ASSERT(pn->pn_arity == PN_BINARY);
2479             left = pn->pn_left;
2480             right = pn->pn_right;
2481         }
2482 
2483         /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2484         if (op == JSOP_GETELEM &&
2485             left->pn_type == TOK_NAME &&
2486             right->pn_type == TOK_NUMBER) {
2487             if (!BindNameToSlot(cx, &cg->treeContext, left, JS_FALSE))
2488                 return JS_FALSE;
2489             if (left->pn_op == JSOP_ARGUMENTS &&
2490                 JSDOUBLE_IS_INT(right->pn_dval, slot) &&
2491                 (jsuint)slot < JS_BIT(16)) {
2492                 left->pn_offset = right->pn_offset = top;
2493                 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2494                 return JS_TRUE;
2495             }
2496         }
2497 
2498         if (!js_EmitTree(cx, cg, left))
2499             return JS_FALSE;
2500     }
2501 
2502     /* The right side of the descendant operator is implicitly quoted. */
2503     JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING ||
2504               right->pn_op == JSOP_QNAMEPART);
2505     if (!js_EmitTree(cx, cg, right))
2506         return JS_FALSE;
2507     if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2508         return JS_FALSE;
2509     return js_Emit1(cx, cg, op) >= 0;
2510 }
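
/*
 * Illustrative example of the optimization above: inside a lightweight
 * function, 'arguments[0]' compiles to the single op JSOP_ARGSUB<0> rather
 * than a JSOP_ARGUMENTS push, an index, and JSOP_GETELEM; for
 * 'arguments[0][j]' only the outer [j] indexing still needs JSOP_GETELEM.
 */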
2511 
2512 static JSBool
2513 EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
2514 {
2515     jsint ival;
2516     jsatomid atomIndex;
2517     ptrdiff_t off;
2518     jsbytecode *pc;
2519     JSAtom *atom;
2520     JSAtomListElement *ale;
2521 
2522     if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
2523         if (ival == 0)
2524             return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
2525         if (ival == 1)
2526             return js_Emit1(cx, cg, JSOP_ONE) >= 0;
2527 
2528         atomIndex = (jsatomid)ival;
2529         if (atomIndex < JS_BIT(16)) {
2530             EMIT_UINT16_IMM_OP(JSOP_UINT16, atomIndex);
2531             return JS_TRUE;
2532         }
2533 
2534         if (atomIndex < JS_BIT(24)) {
2535             off = js_EmitN(cx, cg, JSOP_UINT24, 3);
2536             if (off < 0)
2537                 return JS_FALSE;
2538             pc = CG_CODE(cg, off);
2539             SET_LITERAL_INDEX(pc, atomIndex);
2540             return JS_TRUE;
2541         }
2542 
2543         atom = js_AtomizeInt(cx, ival, 0);
2544     } else {
2545         atom = js_AtomizeDouble(cx, dval, 0);
2546     }
2547     if (!atom)
2548         return JS_FALSE;
2549 
2550     ale = js_IndexAtom(cx, atom, &cg->atomList);
2551     if (!ale)
2552         return JS_FALSE;
2553     return EmitAtomIndexOp(cx, JSOP_NUMBER, ALE_INDEX(ale), cg);
2554 }
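
/*
 * Illustrative encodings chosen above: 0 and 1 use the dedicated JSOP_ZERO
 * and JSOP_ONE ops; 2..65535 fit JSOP_UINT16's 16-bit immediate;
 * 65536..2^24-1 use JSOP_UINT24's 3-byte immediate; anything else (negative
 * values, larger integers, non-integer doubles such as 3.14) is atomized and
 * referenced through JSOP_NUMBER and the script's atom map.
 */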
2555 
2556 static JSBool
2557 EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2558            JSStmtInfo *stmtInfo)
2559 {
2560     JSOp switchOp;
2561     JSBool ok, hasDefault, constPropagated;
2562     ptrdiff_t top, off, defaultOffset;
2563     JSParseNode *pn2, *pn3, *pn4;
2564     uint32 caseCount, tableLength;
2565     JSParseNode **table;
2566     jsdouble d;
2567     jsint i, low, high;
2568     jsval v;
2569     JSAtom *atom;
2570     JSAtomListElement *ale;
2571     intN noteIndex;
2572     size_t switchSize, tableSize;
2573     jsbytecode *pc, *savepc;
2574 #if JS_HAS_BLOCK_SCOPE
2575     JSObject *obj;
2576     jsint count;
2577 #endif
2578 
2579     /* Try for most optimal, fall back if not dense ints, and per ECMAv2. */
2580     switchOp = JSOP_TABLESWITCH;
2581     ok = JS_TRUE;
2582     hasDefault = constPropagated = JS_FALSE;
2583     defaultOffset = -1;
2584 
2585     /*
2586      * If the switch contains let variables scoped by its body, model the
2587      * resulting block on the stack first, before emitting the discriminant's
2588      * bytecode (in case the discriminant contains a stack-model dependency
2589      * such as a let expression).
2590      */
2591     pn2 = pn->pn_right;
2592 #if JS_HAS_BLOCK_SCOPE
2593     if (pn2->pn_type == TOK_LEXICALSCOPE) {
2594         atom = pn2->pn_atom;
2595         obj = ATOM_TO_OBJECT(atom);
2596         OBJ_SET_BLOCK_DEPTH(cx, obj, cg->stackDepth);
2597 
2598         /*
2599          * Push the body's block scope before discriminant code-gen for proper
2600          * static block scope linkage in case the discriminant contains a let
2601          * expression.  The block's locals must lie under the discriminant on
2602          * the stack so that case-dispatch bytecodes can find the discriminant
2603          * on top of stack.
2604          */
2605         js_PushBlockScope(&cg->treeContext, stmtInfo, atom, -1);
2606         stmtInfo->type = STMT_SWITCH;
2607 
2608         count = OBJ_BLOCK_COUNT(cx, obj);
2609         cg->stackDepth += count;
2610         if ((uintN)cg->stackDepth > cg->maxStackDepth)
2611             cg->maxStackDepth = cg->stackDepth;
2612 
2613         /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
2614         ale = js_IndexAtom(cx, atom, &cg->atomList);
2615         if (!ale)
2616             return JS_FALSE;
2617         EMIT_ATOM_INDEX_OP(JSOP_ENTERBLOCK, ALE_INDEX(ale));
2618 
2619         /*
2620          * Pop the switch's statement info around discriminant code-gen.  Note
2621          * how this leaves cg->treeContext.blockChain referencing the switch's
2622          * block scope object, which is necessary for correct block parenting
2623          * in the case where the discriminant contains a let expression.
2624          */
2625         cg->treeContext.topStmt = stmtInfo->down;
2626         cg->treeContext.topScopeStmt = stmtInfo->downScope;
2627     }
2628 #ifdef __GNUC__
2629     else {
2630         atom = NULL;
2631         count = -1;
2632     }
2633 #endif
2634 #endif
2635 
2636     /*
2637      * Emit code for the discriminant first (or nearly first, in the case of a
2638      * switch whose body is a block scope).
2639      */
2640     if (!js_EmitTree(cx, cg, pn->pn_left))
2641         return JS_FALSE;
2642 
2643     /* Switch bytecodes run from here till end of final case. */
2644     top = CG_OFFSET(cg);
2645 #if !JS_HAS_BLOCK_SCOPE
2646     js_PushStatement(&cg->treeContext, stmtInfo, STMT_SWITCH, top);
2647 #else
2648     if (pn2->pn_type == TOK_LC) {
2649         js_PushStatement(&cg->treeContext, stmtInfo, STMT_SWITCH, top);
2650     } else {
2651         /* Re-push the switch's statement info record. */
2652         cg->treeContext.topStmt = cg->treeContext.topScopeStmt = stmtInfo;
2653 
2654         /* Set the statement info record's idea of top. */
2655         stmtInfo->update = top;
2656 
2657         /* Advance pn2 to refer to the switch case list. */
2658         pn2 = pn2->pn_expr;
2659     }
2660 #endif
2661 
2662     caseCount = pn2->pn_count;
2663     tableLength = 0;
2664     table = NULL;
2665 
2666     if (caseCount == 0 ||
2667         (caseCount == 1 &&
2668          (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
2669         caseCount = 0;
2670         low = 0;
2671         high = -1;
2672     } else {
2673 #define INTMAP_LENGTH   256
2674         jsbitmap intmap_space[INTMAP_LENGTH];
2675         jsbitmap *intmap = NULL;
2676         int32 intmap_bitlen = 0;
2677 
2678         low  = JSVAL_INT_MAX;
2679         high = JSVAL_INT_MIN;
2680 
2681         for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2682             if (pn3->pn_type == TOK_DEFAULT) {
2683                 hasDefault = JS_TRUE;
2684                 caseCount--;    /* one of the "cases" was the default */
2685                 continue;
2686             }
2687 
2688             JS_ASSERT(pn3->pn_type == TOK_CASE);
2689             if (switchOp == JSOP_CONDSWITCH)
2690                 continue;
2691 
2692             pn4 = pn3->pn_left;
2693             switch (pn4->pn_type) {
2694               case TOK_NUMBER:
2695                 d = pn4->pn_dval;
2696                 if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) {
2697                     pn3->pn_val = INT_TO_JSVAL(i);
2698                 } else {
2699                     atom = js_AtomizeDouble(cx, d, 0);
2700                     if (!atom) {
2701                         ok = JS_FALSE;
2702                         goto release;
2703                     }
2704                     pn3->pn_val = ATOM_KEY(atom);
2705                 }
2706                 break;
2707               case TOK_STRING:
2708                 pn3->pn_val = ATOM_KEY(pn4->pn_atom);
2709                 break;
2710               case TOK_NAME:
2711                 if (!pn4->pn_expr) {
2712                     ok = js_LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &v);
2713                     if (!ok)
2714                         goto release;
2715                     if (!JSVAL_IS_VOID(v)) {
2716                         pn3->pn_val = v;
2717                         constPropagated = JS_TRUE;
2718                         break;
2719                     }
2720                 }
2721                 /* FALL THROUGH */
2722               case TOK_PRIMARY:
2723                 if (pn4->pn_op == JSOP_TRUE) {
2724                     pn3->pn_val = JSVAL_TRUE;
2725                     break;
2726                 }
2727                 if (pn4->pn_op == JSOP_FALSE) {
2728                     pn3->pn_val = JSVAL_FALSE;
2729                     break;
2730                 }
2731                 /* FALL THROUGH */
2732               default:
2733                 switchOp = JSOP_CONDSWITCH;
2734                 continue;
2735             }
2736 
2737             JS_ASSERT(JSVAL_IS_NUMBER(pn3->pn_val) ||
2738                       JSVAL_IS_STRING(pn3->pn_val) ||
2739                       JSVAL_IS_BOOLEAN(pn3->pn_val));
2740 
2741             if (switchOp != JSOP_TABLESWITCH)
2742                 continue;
2743             if (!JSVAL_IS_INT(pn3->pn_val)) {
2744                 switchOp = JSOP_LOOKUPSWITCH;
2745                 continue;
2746             }
2747             i = JSVAL_TO_INT(pn3->pn_val);
2748             if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
2749                 switchOp = JSOP_LOOKUPSWITCH;
2750                 continue;
2751             }
2752             if (i < low)
2753                 low = i;
2754             if (high < i)
2755                 high = i;
2756 
2757             /*
2758              * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
2759              * We bias i by 65536 if it's negative, and hope that's a rare
2760              * case (because it requires a malloc'd bitmap).
2761              */
2762             if (i < 0)
2763                 i += JS_BIT(16);
2764             if (i >= intmap_bitlen) {
2765                 if (!intmap &&
2766                     i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
2767                     intmap = intmap_space;
2768                     intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
2769                 } else {
2770                     /* Just grab 8K for the worst-case bitmap. */
2771                     intmap_bitlen = JS_BIT(16);
2772                     intmap = (jsbitmap *)
2773                         JS_malloc(cx,
2774                                   (JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
2775                                   * sizeof(jsbitmap));
2776                     if (!intmap) {
2777                         JS_ReportOutOfMemory(cx);
2778                         return JS_FALSE;
2779                     }
2780                 }
2781                 memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
2782             }
2783             if (JS_TEST_BIT(intmap, i)) {
2784                 switchOp = JSOP_LOOKUPSWITCH;
2785                 continue;
2786             }
2787             JS_SET_BIT(intmap, i);
2788         }
2789 
2790       release:
2791         if (intmap && intmap != intmap_space)
2792             JS_free(cx, intmap);
2793         if (!ok)
2794             return JS_FALSE;
2795 
2796         /*
2797          * Compute table length and select lookup instead if overlarge or
2798          * more than half-sparse.
2799          */
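        /*
         * Worked example (added annotation): case values {1, 2, 3} give
         * tableLength == 3 with caseCount == 3, dense enough to keep
         * JSOP_TABLESWITCH; case values {1, 100} give tableLength == 100,
         * which exceeds 2 * caseCount == 4, so we fall back to
         * JSOP_LOOKUPSWITCH.
         */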
2800         if (switchOp == JSOP_TABLESWITCH) {
2801             tableLength = (uint32)(high - low + 1);
2802             if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
2803                 switchOp = JSOP_LOOKUPSWITCH;
2804         } else if (switchOp == JSOP_LOOKUPSWITCH) {
2805             /*
2806              * A lookup switch supports only atom indexes below the 64K limit.
2807              * Conservatively estimate the maximum possible index during
2808              * switch generation and use a conditional switch if it exceeds
2809              * the limit.
2810              */
2811             if (caseCount + cg->atomList.count > JS_BIT(16))
2812                 switchOp = JSOP_CONDSWITCH;
2813         }
2814     }
2815 
2816     /*
2817      * Emit a note with two offsets: first tells total switch code length,
2818      * second tells offset to first JSOP_CASE if condswitch.
2819      */
2820     noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
2821     if (noteIndex < 0)
2822         return JS_FALSE;
2823 
2824     if (switchOp == JSOP_CONDSWITCH) {
2825         /*
2826          * 0 bytes of immediate for unoptimized ECMAv2 switch.
2827          */
2828         switchSize = 0;
2829     } else if (switchOp == JSOP_TABLESWITCH) {
2830         /*
2831          * 3 offsets (len, low, high) before the table, 1 per entry.
2832          */
2833         switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
2834     } else {
2835         /*
2836          * JSOP_LOOKUPSWITCH:
2837          * 1 offset (len) and 1 atom index (npairs) before the table,
2838          * 1 atom index and 1 jump offset per entry.
2839          */
2840         switchSize = (size_t)(JUMP_OFFSET_LEN + ATOM_INDEX_LEN +
2841                               (ATOM_INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
2842     }
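    /*
     * Sizing example (added annotation; assumes JUMP_OFFSET_LEN and
     * ATOM_INDEX_LEN are both 2 bytes, per their definitions elsewhere):
     * a table switch over 3 entries reserves 2 * (3 + 3) == 12 immediate
     * bytes, while a lookup switch with 3 cases reserves
     * 2 + 2 + (2 + 2) * 3 == 16 immediate bytes.
     */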
2843 
2844     /*
2845      * Emit switchOp followed by switchSize bytes of jump or lookup table.
2846      *
2847      * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
2848      * to emit the immediate operand(s) by which bytecode readers such as
2849      * BuildSpanDepTable discover the length of the switch opcode *before*
2850      * calling js_SetJumpOffset (which may call BuildSpanDepTable).  It's
2851      * also important to zero all unknown jump offset immediate operands,
2852      * so they can be converted to span dependencies with null targets to
2853      * be computed later (js_EmitN zeros switchSize bytes after switchOp).
2854      */
2855     if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
2856         return JS_FALSE;
2857 
2858     off = -1;
2859     if (switchOp == JSOP_CONDSWITCH) {
2860         intN caseNoteIndex = -1;
2861         JSBool beforeCases = JS_TRUE;
2862 
2863         /* Emit code for evaluating cases and jumping to case statements. */
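        /*
         * Added annotation: each JSOP_CASE emitted below gets a SRC_PCDELTA
         * note whose offset is filled in on a later iteration (or just after
         * the loop), telling the decompiler how far it is from that
         * JSOP_CASE to the start of the next case.
         */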
2864         for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2865             pn4 = pn3->pn_left;
2866             if (pn4 && !js_EmitTree(cx, cg, pn4))
2867                 return JS_FALSE;
2868             if (caseNoteIndex >= 0) {
2869                 /* off is the previous JSOP_CASE's bytecode offset. */
2870                 if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
2871                                          CG_OFFSET(cg) - off)) {
2872                     return JS_FALSE;
2873                 }
2874             }
2875             if (!pn4) {
2876                 JS_ASSERT(pn3->pn_type == TOK_DEFAULT);
2877                 continue;
2878             }
2879             caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
2880             if (caseNoteIndex < 0)
2881                 return JS_FALSE;
2882             off = EmitJump(cx, cg, JSOP_CASE, 0);
2883             if (off < 0)
2884                 return JS_FALSE;
2885             pn3->pn_offset = off;
2886             if (beforeCases) {
2887                 uintN noteCount, noteCountDelta;
2888 
2889                 /* Switch note's second offset is to first JSOP_CASE. */
2890                 noteCount = CG_NOTE_COUNT(cg);
2891                 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
2892                                          off - top)) {
2893                     return JS_FALSE;
2894                 }
2895                 noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
2896                 if (noteCountDelta != 0)
2897                     caseNoteIndex += noteCountDelta;
2898                 beforeCases = JS_FALSE;
2899             }
2900         }
2901 
2902         /*
2903          * If we didn't have an explicit default (which could fall in between
2904          * cases, preventing us from fusing this js_SetSrcNoteOffset with the
2905          * call in the loop above), link the last case to the implicit default
2906          * for the decompiler.
2907          */
2908         if (!hasDefault &&
2909             caseNoteIndex >= 0 &&
2910             !js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
2911                                  CG_OFFSET(cg) - off)) {
2912             return JS_FALSE;
2913         }
2914 
2915         /* Emit default even if no explicit default statement. */
2916         defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
2917         if (defaultOffset < 0)
2918             return JS_FALSE;
2919     } else {
2920         pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);
2921 
2922         if (switchOp == JSOP_TABLESWITCH) {
2923             /* Fill in switch bounds, which we know fit in 16-bit offsets. */
2924             SET_JUMP_OFFSET(pc, low);
2925             pc += JUMP_OFFSET_LEN;
2926             SET_JUMP_OFFSET(pc, high);
2927             pc += JUMP_OFFSET_LEN;
2928 
2929             /*
2930              * Use malloc to avoid arena bloat for programs with many switches.
2931              * We free table if non-null at label out, so all control flow must
2932              * exit this function through goto out or goto bad.
2933              */
2934             if (tableLength != 0) {
2935                 tableSize = (size_t)tableLength * sizeof *table;
2936                 table = (JSParseNode **) JS_malloc(cx, tableSize);
2937                 if (!table)
2938                     return JS_FALSE;
2939                 memset(table, 0, tableSize);
2940                 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2941                     if (pn3->pn_type == TOK_DEFAULT)
2942                         continue;
2943                     i = JSVAL_TO_INT(pn3->pn_val);
2944                     i -= low;
2945                     JS_ASSERT((uint32)i < tableLength);
2946                     table[i] = pn3;
2947                 }
2948             }
2949         } else {
2950             JS_ASSERT(switchOp == JSOP_LOOKUPSWITCH);
2951 
2952             /* Fill in the number of cases. */
2953             SET_ATOM_INDEX(pc, caseCount);
2954             pc += ATOM_INDEX_LEN;
2955         }
2956 
2957         /*
2958          * After this point, all control flow involving JSOP_TABLESWITCH
2959          * must set ok and goto out to exit this function.  To keep things
2960          * simple, all switchOp cases exit that way.
2961          */
2962         if (constPropagated) {
2963             /*
2964              * Skip switchOp, as we are not setting jump offsets in the two
2965              * for loops below.  We'll restore CG_NEXT(cg) from savepc after,
2966              * unless there was an error.
2967              */
2968             savepc = CG_NEXT(cg);
2969             CG_NEXT(cg) = pc + 1;
2970             if (switchOp == JSOP_TABLESWITCH) {
2971                 for (i = 0; i < (jsint)tableLength; i++) {
2972                     pn3 = table[i];
2973                     if (pn3 &&
2974                         (pn4 = pn3->pn_left) != NULL &&
2975                         pn4->pn_type == TOK_NAME) {
2976                         /* Note a propagated constant with the const's name. */
2977                         JS_ASSERT(!pn4->pn_expr);
2978                         ale = js_IndexAtom(cx, pn4->pn_atom, &cg->atomList);
2979                         if (!ale)
2980                             goto bad;
2981                         CG_NEXT(cg) = pc;
2982                         if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
2983                                            ALE_INDEX(ale)) < 0) {
2984                             goto bad;
2985                         }
2986                     }
2987                     pc += JUMP_OFFSET_LEN;
2988                 }
2989             } else {
2990                 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2991                     pn4 = pn3->pn_left;
2992                     if (pn4 && pn4->pn_type == TOK_NAME) {
2993                         /* Note a propagated constant with the const's name. */
2994                         JS_ASSERT(!pn4->pn_expr);
2995                         ale = js_IndexAtom(cx, pn4->pn_atom, &cg->atomList);
2996                         if (!ale)
2997                             goto bad;
2998                         CG_NEXT(cg) = pc;
2999                         if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3000                                            ALE_INDEX(ale)) < 0) {
3001                             goto bad;
3002                         }
3003                     }
3004                     pc += ATOM_INDEX_LEN + JUMP_OFFSET_LEN;
3005                 }
3006             }
3007             CG_NEXT(cg) = savepc;
3008         }
3009     }
3010 
3011     /* Emit code for each case's statements, copying pn_offset up to pn3. */
3012     for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3013         if (switchOp == JSOP_CONDSWITCH && pn3->pn_type != TOK_DEFAULT)
3014             CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, pn3->pn_offset);
3015         pn4 = pn3->pn_right;
3016         ok = js_EmitTree(cx, cg, pn4);
3017         if (!ok)
3018             goto out;
3019         pn3->pn_offset = pn4->pn_offset;
3020         if (pn3->pn_type == TOK_DEFAULT)
3021             off = pn3->pn_offset - top;
3022     }
3023 
3024     if (!hasDefault) {
3025         /* If no default case, offset for default is to end of switch. */
3026         off = CG_OFFSET(cg) - top;
3027     }
3028 
3029     /* We better have set "off" by now. */
3030     JS_ASSERT(off != -1);
3031 
3032     /* Set the default offset (to end of switch if no default). */
3033     if (switchOp == JSOP_CONDSWITCH) {
3034         pc = NULL;
3035         JS_ASSERT(defaultOffset != -1);
3036         ok = js_SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset),
3037                               off - (defaultOffset - top));
3038         if (!ok)
3039             goto out;
3040     } else {
3041         pc = CG_CODE(cg, top);
3042         ok = js_SetJumpOffset(cx, cg, pc, off);
3043         if (!ok)
3044             goto out;
3045         pc += JUMP_OFFSET_LEN;
3046     }
3047 
3048     /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3049     off = CG_OFFSET(cg) - top;
3050     ok = js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off);
3051     if (!ok)
3052         goto out;
3053 
3054     if (switchOp == JSOP_TABLESWITCH) {
3055         /* Skip over the already-initialized switch bounds. */
3056         pc += 2 * JUMP_OFFSET_LEN;
3057 
3058         /* Fill in the jump table, if there is one. */
3059         for (i = 0; i < (jsint)tableLength; i++) {
3060             pn3 = table[i];
3061             off = pn3 ? pn3->pn_offset - top : 0;
3062             ok = js_SetJumpOffset(cx, cg, pc, off);
3063             if (!ok)
3064                 goto out;
3065             pc += JUMP_OFFSET_LEN;
3066         }
3067     } else if (switchOp == JSOP_LOOKUPSWITCH) {
3068         /* Skip over the already-initialized number of cases. */
3069         pc += ATOM_INDEX_LEN;
3070 
3071         for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3072             if (pn3->pn_type == TOK_DEFAULT)
3073                 continue;
3074             atom = js_AtomizeValue(cx, pn3->pn_val, 0);
3075             if (!atom)
3076                 goto bad;
3077             ale = js_IndexAtom(cx, atom, &cg->atomList);
3078             if (!ale)
3079                 goto bad;
3080             SET_ATOM_INDEX(pc, ALE_INDEX(ale));
3081             pc += ATOM_INDEX_LEN;
3082 
3083             off = pn3->pn_offset - top;
3084             ok = js_SetJumpOffset(cx, cg, pc, off);
3085             if (!ok)
3086                 goto out;
3087             pc += JUMP_OFFSET_LEN;
3088         }
3089     }
3090 
3091 out:
3092     if (table)
3093         JS_free(cx, table);
3094     if (ok) {
3095         ok = js_PopStatementCG(cx, cg);
3096 
3097 #if JS_HAS_BLOCK_SCOPE
3098         if (ok && pn->pn_right->pn_type == TOK_LEXICALSCOPE) {
3099             EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
3100             cg->stackDepth -= count;
3101         }
3102 #endif
3103     }
3104     return ok;
3105 
3106 bad:
3107     ok = JS_FALSE;
3108     goto out;
3109 }
3110 
3111 JSBool
3112 js_EmitFunctionBytecode(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
3113 {
3114     if (!js_AllocTryNotes(cx, cg))
3115         return JS_FALSE;
3116 
3117     if (cg->treeContext.flags & TCF_FUN_IS_GENERATOR) {
3118         /* JSOP_GENERATOR must be the first instruction. */
3119         CG_SWITCH_TO_PROLOG(cg);
3120         JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
3121         if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
3122             return JS_FALSE;
3123         CG_SWITCH_TO_MAIN(cg);
3124     }
3125 
3126     return js_EmitTree(cx, cg, body) &&
3127            js_Emit1(cx, cg, JSOP_STOP) >= 0;
3128 }
3129 
3130 JSBool
3131 js_EmitFunctionBody(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body,
3132                     JSFunction *fun)
3133 {
3134     JSStackFrame *fp, frame;
3135     JSObject *funobj;
3136     JSBool ok;
3137 
3138     fp = cx->fp;
3139     funobj = fun->object;
3140     JS_ASSERT(!fp || (fp->fun != fun && fp->varobj != funobj &&
3141                       fp->scopeChain != funobj));
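    /*
     * Added annotation: set up a dummy compiling frame so that bytecode
     * emission for the body sees fun as the active function and funobj as
     * both the variables object and the scope chain.
     */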
3142     memset(&frame, 0, sizeof frame);
3143     frame.fun = fun;
3144     frame.varobj = frame.scopeChain = funobj;
3145     frame.down = fp;
3146     frame.flags = JS_HAS_COMPILE_N_GO_OPTION(cx)
3147                   ? JSFRAME_COMPILING | JSFRAME_COMPILE_N_GO
3148                   : JSFRAME_COMPILING;
3149     cx->fp = &frame;
3150     ok = js_EmitFunctionBytecode(cx, cg, body);
3151     cx->fp = fp;
3152     if (!ok)
3153         return JS_FALSE;
3154 
3155     if (!js_NewScriptFromCG(cx, cg, fun))
3156         return JS_FALSE;
3157 
3158     JS_ASSERT(FUN_INTERPRETED(fun));
3159     return JS_TRUE;
3160 }
3161 
3162 /* A macro for inlining at the top of js_EmitTree (whence it came). */
3163 #define UPDATE_LINE_NUMBER_NOTES(cx, cg, pn)                                  \
3164     JS_BEGIN_MACRO                                                            \
3165         uintN line_ = (pn)->pn_pos.begin.lineno;                              \
3166         uintN delta_ = line_ - CG_CURRENT_LINE(cg);                           \
3167         if (delta_ != 0) {                                                    \
3168             /*                                                                \
3169              * Encode any change in the current source line number by using   \
3170              * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
3171              * whichever consumes less space.                                 \
3172              *                                                                \
3173              * NB: We handle backward line number deltas (possible with for   \
3174              * loops where the update part is emitted after the body, but its \
3175              * line number is <= any line number in the body) here by letting \
3176              * unsigned delta_ wrap to a very large number, which triggers a  \
3177              * SRC_SETLINE.                                                   \
3178              */                                                               \
3179             CG_CURRENT_LINE(cg) = line_;                                      \
3180             if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
3181                 if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
3182                     return JS_FALSE;                                          \
3183             } else {                                                          \
3184                 do {                                                          \
3185                     if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0)               \
3186                         return JS_FALSE;                                      \
3187                 } while (--delta_ != 0);                                      \
3188             }                                                                 \
3189         }                                                                     \
3190     JS_END_MACRO
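/*
 * Example (added annotation): a forward delta of one line emits a single
 * SRC_NEWLINE note.  A delta of two or more (four or more once line_ exceeds
 * SN_3BYTE_OFFSET_MASK) emits one SRC_SETLINE instead, which by then costs
 * no more note space than the equivalent run of SRC_NEWLINE notes.  A
 * backward delta wraps around as unsigned, so it always takes the
 * SRC_SETLINE path.
 */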
3191 
3192 /* A function, so that we avoid macro-bloating all the other callsites. */
3193 static JSBool
3194 UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3195 {
3196     UPDATE_LINE_NUMBER_NOTES(cx, cg, pn);
3197     return JS_TRUE;
3198 }
3199 
3200 static JSBool
3201 MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3202                  JSParseNode *pn, jsatomid *result)
3203 {
3204     jsatomid atomIndex;
3205     JSAtomListElement *ale;
3206 
3207     if (pn->pn_slot >= 0) {
3208         atomIndex = (jsatomid) pn->pn_slot;
3209     } else {
3210         ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
3211         if (!ale)
3212             return JS_FALSE;
3213         atomIndex = ALE_INDEX(ale);
3214     }
3215 
3216     if ((js_CodeSpec[pn->pn_op].format & JOF_TYPEMASK) == JOF_CONST &&
3217         (!(cg->treeContext.flags & TCF_IN_FUNCTION) ||
3218          (cg->treeContext.flags & TCF_FUN_HEAVYWEIGHT))) {
3219         /* Emit a prolog bytecode to predefine the variable. */
3220         CG_SWITCH_TO_PROLOG(cg);
3221         if (!UpdateLineNumberNotes(cx, cg, pn))
3222             return JS_FALSE;
3223         EMIT_ATOM_INDEX_OP(prologOp, atomIndex);
3224         CG_SWITCH_TO_MAIN(cg);
3225     }
3226 
3227     if (result)
3228         *result = atomIndex;
3229     return JS_TRUE;
3230 }
3231 
3232 #if JS_HAS_DESTRUCTURING
3233 
3234 typedef JSBool
3235 (*DestructuringDeclEmitter)(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3236                             JSParseNode *pn);
3237 
3238 static JSBool
3239 EmitDestructuringDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3240                       JSParseNode *pn)
3241 {
3242     JS_ASSERT(pn->pn_type == TOK_NAME);
3243     if (!BindNameToSlot(cx, &cg->treeContext, pn, prologOp == JSOP_NOP))
3244         return JS_FALSE;
3245 
3246     JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS);
3247     return MaybeEmitVarDecl(cx, cg, prologOp, pn, NULL);
3248 }
3249 
3250 static JSBool
3251 EmitDestructuringDecls(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3252                        JSParseNode *pn)
3253 {
3254     JSParseNode *pn2, *pn3;
3255     DestructuringDeclEmitter emitter;
3256 
3257     if (pn->pn_type == TOK_RB) {
3258         for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3259             if (pn2->pn_type == TOK_COMMA)
3260                 continue;
3261             emitter = (pn2->pn_type == TOK_NAME)
3262                       ? EmitDestructuringDecl
3263                       : EmitDestructuringDecls;
3264             if (!emitter(cx, cg, prologOp, pn2))
3265                 return JS_FALSE;
3266         }
3267     } else {
3268         JS_ASSERT(pn->pn_type == TOK_RC);
3269         for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3270             pn3 = pn2->pn_right;
3271             emitter = (pn3->pn_type == TOK_NAME)
3272                       ? EmitDestructuringDecl
3273                       : EmitDestructuringDecls;
3274             if (!emitter(cx, cg, prologOp, pn3))
3275                 return JS_FALSE;
3276         }
3277     }
3278     return JS_TRUE;
3279 }
3280 
3281 static JSBool
3282 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn);
3283 
3284 static JSBool
3285 EmitDestructuringLHS(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
3286                      JSBool wantpop)
3287 {
3288     jsuint slot;
3289 
3290     /* Skip any parenthesization. */
3291     while (pn->pn_type == TOK_RP)
3292         pn = pn->pn_kid;
3293 
3294     /*
3295      * Now emit the lvalue opcode sequence.  If the lvalue is a nested
3296      * destructuring initialiser-form, call ourselves to handle it, then
3297      * pop the matched value.  Otherwise emit an lvalue bytecode sequence
3298      * ending with a JSOP_ENUMELEM or equivalent op.
3299      */
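    /*
     * Example (added annotation): a slot-addressable binding whose op is
     * JSOP_SETLOCAL stores and consumes the matched value with a single
     * JSOP_SETLOCALPOP when wantpop is true, while an unoptimized name
     * (JSOP_SETNAME) goes through EmitElemOp to emit a JSOP_ENUMELEM
     * sequence.
     */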
3300     if (pn->pn_type == TOK_RB || pn->pn_type == TOK_RC) {
3301         if (!EmitDestructuringOpsHelper(cx, cg, pn))
3302             return JS_FALSE;
3303         if (wantpop && js_Emit1(cx, cg, JSOP_POP) < 0)
3304             return JS_FALSE;
3305     } else {
3306         if (pn->pn_type == TOK_NAME &&
3307             !BindNameToSlot(cx, &cg->treeContext, pn, JS_FALSE)) {
3308             return JS_FALSE;
3309         }
3310 
3311         switch (pn->pn_op) {
3312           case JSOP_SETNAME:
3313             /*
3314              * NB: pn is a PN_NAME node, not a PN_BINARY.  Nevertheless,
3315              * we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
3316              * So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
3317              */
3318             if (!EmitElemOp(cx, pn, JSOP_ENUMELEM, cg))
3319                 return JS_FALSE;
3320             break;
3321 
3322           case JSOP_SETCONST:
3323             if (!EmitElemOp(cx, pn, JSOP_ENUMCONSTELEM, cg))
3324                 return JS_FALSE;
3325             break;
3326 
3327           case JSOP_SETLOCAL:
3328             if (wantpop) {
3329                 slot = (jsuint) pn->pn_slot;
3330                 EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, slot);
3331                 break;
3332             }
3333             /* FALL THROUGH */
3334 
3335           case JSOP_SETARG:
3336           case JSOP_SETVAR:
3337           case JSOP_SETGVAR:
3338             slot = (jsuint) pn->pn_slot;
3339             EMIT_UINT16_IMM_OP(pn->pn_op, slot);
3340             if (wantpop && js_Emit1(cx, cg, JSOP_POP) < 0)
3341                 return JS_FALSE;
3342             break;
3343 
3344           default:
3345 #if JS_HAS_LVALUE_RETURN || JS_HAS_XML_SUPPORT
3346           {
3347             ptrdiff_t top;
3348 
3349             top = CG_OFFSET(cg);
3350             if (!js_EmitTree(cx, cg, pn))
3351                 return JS_FALSE;
3352             if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
3353                 return JS_FALSE;
3354             if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
3355                 return JS_FALSE;
3356             break;
3357           }
3358 #endif
3359           case JSOP_ENUMELEM:
3360             JS_ASSERT(0);
3361         }
3362     }
3363 
3364     return JS_TRUE;
3365 }
3366 
3367 /*
3368  * Recursive helper for EmitDestructuringOps.
3369  *
3370  * Given a value to destructure on the stack, walk over an object or array
3371  * initialiser at pn, emitting bytecodes to match property values and store
3372  * them in the lvalues identified by the matched property names.
3373  */
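/*
 * Example (added annotation): for the array pattern [x, y], each iteration
 * of the loop below emits JSOP_DUP to copy the value being destructured,
 * pushes the element index (0, then 1), emits JSOP_GETELEM, and then lets
 * EmitDestructuringLHS store and pop the fetched element.  Object patterns
 * instead push the property key, or fetch it directly with JSOP_GETPROP when
 * the key is a literal name.
 */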
3374 static JSBool
3375 EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3376 {
3377     jsuint index;
3378     JSParseNode *pn2, *pn3;
3379     JSBool doElemOp;
3380 
3381 #ifdef DEBUG
3382     intN stackDepth = cg->stackDepth;
3383     JS_ASSERT(stackDepth != 0);
3384     JS_ASSERT(pn->pn_arity == PN_LIST);
3385     JS_ASSERT(pn->pn_type == TOK_RB || pn->pn_type == TOK_RC);
3386 #endif
3387 
3388     if (pn->pn_count == 0) {
3389         /* Emit a DUP;POP sequence for the decompiler. */
3390         return js_Emit1(cx, cg, JSOP_DUP) >= 0 &&
3391                js_Emit1(cx, cg, JSOP_POP) >= 0;
3392     }
3393 
3394     index = 0;
3395     for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3396         /*
3397          * Duplicate the value being destructured to use as a reference base.
3398          */
3399         if (js_Emit1(cx, cg, JSOP_DUP) < 0)
3400             return JS_FALSE;
3401 
3402         /*
3403          * Now push the property name currently being matched, which is either
3404          * the array initialiser's current index, or the current property name
3405          * "label" on the left of a colon in the object initialiser.  Set pn3
3406          * to the lvalue node, which is in the value-initializing position.
3407          */
3408         doElemOp = JS_TRUE;
3409         if (pn->pn_type == TOK_RB) {
3410             if (!EmitNumberOp(cx, index, cg))
3411                 return JS_FALSE;
3412             pn3 = pn2;
3413         } else {
3414             JS_ASSERT(pn->pn_type == TOK_RC);
3415             JS_ASSERT(pn2->pn_type == TOK_COLON);
3416             pn3 = pn2->pn_left;
3417             if (pn3->pn_type == TOK_NUMBER) {
3418                 /*
3419                  * If we are emitting an object destructuring initialiser,
3420                  * annotate the index op with SRC_INITPROP so we know we are
3421                  * not decompiling an array initialiser.
3422                  */
3423                 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
3424                     return JS_FALSE;
3425                 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
3426                     return JS_FALSE;
3427             } else {
3428                 JS_ASSERT(pn3->pn_type == TOK_STRING ||
3429                           pn3->pn_type == TOK_NAME);
3430                 if (!EmitAtomOp(cx, pn3, JSOP_GETPROP, cg))
3431                     return JS_FALSE;
3432                 doElemOp = JS_FALSE;
3433             }
3434             pn3 = pn2->pn_right;
3435         }
3436 
3437         if (doElemOp) {
3438             /*
3439              * Ok, get the value of the matching property name.  This leaves
3440              * that value on top of the value being destructured, so the stack
3441              * is one deeper than when we started.
3442              */
3443             if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
3444                 return JS_FALSE;
3445             JS_ASSERT(cg->stackDepth == stackDepth + 1);
3446         }
3447 
3448         /* Nullary comma node makes a hole in the array destructurer. */
3449         if (pn3->pn_type == TOK_COMMA && pn3->pn_arity == PN_NULLARY) {
3450             JS_ASSERT(pn->pn_type == TOK_RB);
3451             JS_ASSERT(pn2 == pn3);
3452             if (js_Emit1(cx, cg, JSOP_POP) < 0)
3453                 return JS_FALSE;
3454         } else {
3455             if (!EmitDestructuringLHS(cx, cg, pn3, JS_TRUE))
3456                 return JS_FALSE;
3457         }
3458 
3459         JS_ASSERT(cg->stackDepth == stackDepth);
3460         ++index;
3461     }
3462 
3463     return JS_TRUE;
3464 }
3465 
3466 static ptrdiff_t
3467 OpToDeclType(JSOp op)
3468 {
3469     switch (op) {
3470       case JSOP_NOP:
3471         return SRC_DECL_LET;
3472       case JSOP_DEFCONST:
3473         return SRC_DECL_CONST;
3474       case JSOP_DEFVAR:
3475         return SRC_DECL_VAR;
3476       default:
3477         return SRC_DECL_NONE;
3478     }
3479 }
3480 
3481 static JSBool
3482 EmitDestructuringOps(JSContext *cx, JSCodeGenerator *cg, JSOp declOp,
3483                      JSParseNode *pn)
3484 {
3485     /*
3486      * If we're called from a variable declaration, help the decompiler by
3487      * annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
3488      * If the destructuring initialiser is empty, our helper will emit a
3489      * JSOP_DUP followed by a JSOP_POP for the decompiler.
3490      */
3491     if (js_NewSrcNote2(cx, cg, SRC_DESTRUCT, OpToDeclType(declOp)) < 0)
3492         return JS_FALSE;
3493 
3494     /*
3495      * Call our recursive helper to emit the destructuring assignments and
3496      * related stack manipulations.
3497      */
3498     return EmitDestructuringOpsHelper(cx, cg, pn);
3499 }
3500 
3501 static JSBool
3502 EmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp declOp,
3503                     JSParseNode *lhs, JSParseNode *rhs)
3504 {
3505     jsuint depth, limit, slot;
3506     JSParseNode *pn;
3507 
3508     depth = limit = (uintN) cg->stackDepth;
3509     for (pn = rhs->pn_head; pn; pn = pn->pn_next) {
3510         if (limit == JS_BIT(16)) {
3511             js_ReportCompileErrorNumber(cx, rhs,
3512                                         JSREPORT_PN | JSREPORT_ERROR,
3513                                         JSMSG_ARRAY_INIT_TOO_BIG);
3514             return JS_FALSE;
3515         }
3516 
3517         if (pn->pn_type == TOK_COMMA) {
3518             if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
3519                 return JS_FALSE;
3520         } else {
3521             JS_ASSERT(pn->pn_type != TOK_DEFSHARP);
3522             if (!js_EmitTree(cx, cg, pn))
3523                 return JS_FALSE;
3524         }
3525         ++limit;
3526     }
3527 
3528     if (js_NewSrcNote2(cx, cg, SRC_GROUPASSIGN, OpToDeclType(declOp)) < 0)
3529         return JS_FALSE;
3530 
3531     slot = depth;
3532     for (pn = lhs->pn_head; pn; pn = pn->pn_next) {
3533         if (slot < limit) {
3534             EMIT_UINT16_IMM_OP(JSOP_GETLOCAL, slot);
3535         } else {
3536             if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
3537                 return JS_FALSE;
3538         }
3539         if (pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY) {
3540             if (js_Emit1(cx, cg, JSOP_POP) < 0)
3541                 return JS_FALSE;
3542         } else {
3543             if (!EmitDestructuringLHS(cx, cg, pn, pn->pn_next != NULL))
3544                 return JS_FALSE;
3545         }
3546         ++slot;
3547     }
3548 
3549     EMIT_UINT16_IMM_OP(JSOP_SETSP, (jsatomid)depth);
3550     cg->stackDepth = (uintN) depth;
3551     return JS_TRUE;
3552 }
3553 
3554 /*
3555  * Helper called with pop out param initialized to a JSOP_POP* opcode.  If we
3556  * can emit a group assignment sequence, which results in 0 stack depth delta,
3557  * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
3558  */
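/*
 * Example (added annotation): '[a, b] = [b, a]' qualifies -- both sides are
 * array initialisers and the left side is no longer than the right -- so the
 * swap is emitted as a group assignment through temporary stack slots rather
 * than a full destructuring sequence.  '[a, b] = c' does not qualify, and
 * *pop is left untouched so the caller emits the ordinary form.
 */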
3559 static JSBool
3560 MaybeEmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp declOp,
3561                          JSParseNode *pn, JSOp *pop)
3562 {
3563     JSParseNode *lhs, *rhs;
3564 
3565     JS_ASSERT(pn->pn_type == TOK_ASSIGN);
3566     JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_POPV);
3567     lhs = pn->pn_left;
3568     rhs = pn->pn_right;
3569     if (lhs->pn_type == TOK_RB && rhs->pn_type == TOK_RB &&
3570         lhs->pn_count <= rhs->pn_count &&
3571         (rhs->pn_count == 0 ||
3572          rhs->pn_head->pn_type != TOK_DEFSHARP)) {
3573         if (!EmitGroupAssignment(cx, cg, declOp, lhs, rhs))
3574             return JS_FALSE;
3575         *pop = JSOP_NOP;
3576     }
3577     return JS_TRUE;
3578 }
3579 
3580 #endif /* JS_HAS_DESTRUCTURING */
3581 
3582 static JSBool
3583 EmitVariables(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
3584               JSBool inLetHead, ptrdiff_t *headNoteIndex)
3585 {
3586     JSTreeContext *tc;
3587     JSBool let, forInVar;
3588 #if JS_HAS_BLOCK_SCOPE
3589     JSBool forInLet, popScope;
3590     JSStmtInfo *stmt, *scopeStmt;
3591 #endif
3592     ptrdiff_t off, noteIndex, tmp;
3593     JSParseNode *pn2, *pn3;
3594     JSOp op;
3595     jsatomid atomIndex;
3596     uintN oldflags;
3597 
3598     /* Default in case of JS_HAS_BLOCK_SCOPE early return, below. */
3599     *headNoteIndex = -1;
3600 
3601     /*
3602      * Let blocks and expressions have a parenthesized head in which the new
3603      * scope is not yet open. Initializer evaluation uses the parent node's
3604      * lexical scope. If popScope is true below, then we hide the top lexical
3605      * block from any calls to BindNameToSlot hiding in pn2->pn_expr so that
3606      * it won't find any names in the new let block.
3607      *
3608      * The same goes for let declarations in the head of any kind of for loop.
3609      * Unlike a let declaration 'let x = i' within a block, where x is hoisted
3610      * to the start of the block, a 'for (let x = i...) ...' loop evaluates i
3611      * in the containing scope, and puts x in the loop body's scope.
3612      */
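    /*
     * Example (added annotation): in 'for (let x = x; ...) ...' the x on the
     * right of '=' is resolved in the containing scope, because popScope
     * temporarily hides the new lexical block from BindNameToSlot while the
     * initializer is emitted.
     */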
3613     tc = &cg->treeContext;
3614     let = (pn->pn_op == JSOP_NOP);
3615     forInVar = (pn->pn_extra & PNX_FORINVAR) != 0;
3616 #if JS_HAS_BLOCK_SCOPE
3617     forInLet = let && forInVar;
3618     popScope = (inLetHead || (let && (tc->flags & TCF_IN_FOR_INIT)));
3619     JS_ASSERT(!popScope || let);
3620 #endif
3621 
3622     off = noteIndex = -1;
3623     for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
3624 #if JS_HAS_DESTRUCTURING
3625         if (pn2->pn_type != TOK_NAME) {
3626             if (pn2->pn_type == TOK_RB || pn2->pn_type == TOK_RC) {
3627                 /*
3628                  * Emit variable binding ops, but not destructuring ops.
3629                  * The parser (see Variables, jsparse.c) has ensured that
3630                  * our caller will be the TOK_FOR/TOK_IN case in js_EmitTree,
3631                  * and that case will emit the destructuring code only after
3632                  * emitting an enumerating opcode and a branch that tests
3633                  * whether the enumeration ended.
3634                  */
3635                 JS_ASSERT(forInVar);
3636                 JS_ASSERT(pn->pn_count == 1);
3637                 if (!EmitDestructuringDecls(cx, cg, pn->pn_op, pn2))
3638                     return JS_FALSE;
3639                 break;
3640             }
3641 
3642             /*
3643              * A destructuring initialiser assignment preceded by var is
3644              * always evaluated promptly, even if it is to the left of 'in'
3645              * in a for-in loop.  As with 'for (var x = i in o)...', this
3646              * will cause the entire 'var [a, b] = i' to be hoisted out of
3647              * the head of the loop.
3648              */
3649             JS_ASSERT(pn2->pn_type == TOK_ASSIGN);
3650             if (pn->pn_count == 1 && !forInLet) {
3651                 /*
3652                  * If this is the only destructuring assignment in the list,
3653                  * try to optimize to a group assignment.  If we're in a let
3654                  * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
3655                  * in pn->pn_op, to suppress a second (and misplaced) 'let'.
3656                  */
3657                 JS_ASSERT(noteIndex < 0 && !pn2->pn_next);
3658                 op = JSOP_POP;
3659                 if (!MaybeEmitGroupAssignment(cx, cg,
3660                                               inLetHead ? JSOP_POP : pn->pn_op,
3661                                               pn2, &op)) {
3662                     return JS_FALSE;
3663                 }
3664                 if (op == JSOP_NOP) {
3665                     pn->pn_extra = (pn->pn_extra & ~PNX_POPVAR) | PNX_GROUPINIT;
3666                     break;
3667                 }
3668             }
3669 
3670             pn3 = pn2->pn_left;
3671             if (!EmitDestructuringDecls(cx, cg, pn->pn_op, pn3))
3672                 return JS_FALSE;
3673 
3674 #if JS_HAS_BLOCK_SCOPE
3675             /*
3676              * If this is a 'for (let [x, y] = i in o) ...' let declaration,
3677              * throw away i if it is a useless expression.
3678              */
3679             if (forInLet) {
3680                 JSBool useful = JS_FALSE;
3681 
3682                 JS_ASSERT(pn->pn_count == 1);
3683                 if (!CheckSideEffects(cx, tc, pn2->pn_right, &useful))
3684                     return JS_FALSE;
3685                 if (!useful)
3686                     return JS_TRUE;
3687             }
3688 #endif
3689 
3690             if (!js_EmitTree(cx, cg, pn2->pn_right))
3691                 return JS_FALSE;
3692 
3693 #if JS_HAS_BLOCK_SCOPE
3694             /*
3695              * The expression i in 'for (let [x, y] = i in o) ...', which is
3696              * pn2->pn_right above, appears to have side effects.  We've just
3697              * emitted code to evaluate i, but we must not destructure i yet.
3698              * Let the TOK_FOR: code in js_EmitTree do the destructuring to
3699              * emit the right combination of source notes and bytecode for the
3700              * decompiler.
3701              *
3702              * This has the effect of hoisting the evaluation of i out of the
3703              * for-in loop, without hoisting the let variables, which must of
3704              * course be scoped by the loop.  Set PNX_POPVAR to cause JSOP_POP
3705              * to be emitted, just before returning from this function.
3706              */
3707             if (forInVar) {
3708                 pn->pn_extra |= PNX_POPVAR;
3709                 if (forInLet)
3710                     break;
3711             }
3712 #endif
3713 
3714             /*
3715              * Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
3716              * that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
3717              * we will emit at the bottom of this function.
3718              */
3719             if (!EmitDestructuringOps(cx, cg,
3720                                       inLetHead ? JSOP_POP : pn->pn_op,
3721                                       pn3)) {
3722                 return JS_FALSE;
3723             }
3724             goto emit_note_pop;
3725         }
3726 #else
3727         JS_ASSERT(pn2->pn_type == TOK_NAME);
3728 #endif
3729 
3730         if (!BindNameToSlot(cx, &cg->treeContext, pn2, let))
3731             return JS_FALSE;
3732         JS_ASSERT(pn2->pn_slot >= 0 || !let);
3733 
3734         op = pn2->pn_op;
3735         if (op == JSOP_ARGUMENTS) {
3736             /* JSOP_ARGUMENTS => no initializer */
3737             JS_ASSERT(!pn2->pn_expr && !let);
3738             pn3 = NULL;
3739 #ifdef __GNUC__
3740             atomIndex = 0;            /* quell GCC overwarning */
3741 #endif
3742         } else {
3743             if (!MaybeEmitVarDecl(cx, cg, pn->pn_op, pn2, &atomIndex))
3744                 return JS_FALSE;
3745 
3746             pn3 = pn2->pn_expr;
3747             if (pn3) {
3748 #if JS_HAS_BLOCK_SCOPE
3749                 /*
3750                  * If this is a 'for (let x = i in o) ...' let declaration,
3751                  * throw away i if it is a useless expression.
3752                  */
3753                 if (forInLet) {
3754                     JSBool useful = JS_FALSE;
3755 
3756                     JS_ASSERT(pn->pn_count == 1);
3757                     if (!CheckSideEffects(cx, tc, pn3, &useful))
3758                         return JS_FALSE;
3759                     if (!useful)
3760                         return JS_TRUE;
3761                 }
3762 #endif
3763 
3764                 if (op == JSOP_SETNAME) {
3765                     JS_ASSERT(!let);
3766                     EMIT_ATOM_INDEX_OP(JSOP_BINDNAME, atomIndex);
3767                 }
3768                 if (pn->pn_op == JSOP_DEFCONST &&
3769                     !js_DefineCompileTimeConstant(cx, cg, pn2->pn_atom,
3770                                                   pn3)) {
3771                     return JS_FALSE;
3772                 }
3773 
3774 #if JS_HAS_BLOCK_SCOPE
3775                 /* Evaluate expr in the outer lexical scope if requested. */
3776                 if (popScope) {
3777                     stmt = tc->topStmt;
3778                     scopeStmt = tc->topScopeStmt;
3779 
3780                     tc->topStmt = stmt->down;
3781                     tc->topScopeStmt = scopeStmt->downScope;
3782                 }
3783 #ifdef __GNUC__
3784                 else {
3785                     stmt = scopeStmt = NULL;    /* quell GCC overwarning */
3786                 }
3787 #endif
3788 #endif
3789 
3790                 oldflags = cg->treeContext.flags;
3791                 cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
3792                 if (!js_EmitTree(cx, cg, pn3))
3793                     return JS_FALSE;
3794                 cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
3795 
3796 #if JS_HAS_BLOCK_SCOPE
3797                 if (popScope) {
3798                     tc->topStmt = stmt;
3799                     tc->topScopeStmt = scopeStmt;
3800                 }
3801 #endif
3802             }
3803         }
3804 
3805         /*
3806          * 'for (var x in o) ...' and 'for (var x = i in o) ...' call the
3807          * TOK_VAR case, but only the initialized case (a strange one that
3808          * falls out of ECMA-262's grammar) wants to run past this point.
3809          * Both cases must conditionally emit a JSOP_DEFVAR, above.  Note
3810          * that the parser error-checks to ensure that pn->pn_count is 1.
3811          *
3812          * 'for (let x = i in o) ...' must evaluate i before the loop, and
3813          * subject it to useless expression elimination.  The variable list
3814          * in pn is a single let declaration if pn_op == JSOP_NOP.  We test
3815          * the let local in order to break early in this case, as well as in
3816          * the 'for (var x in o)' case.
3817          *
3818          * XXX Narcissus keeps track of variable declarations in the node
3819          * for the script being compiled, so there's no need to share any
3820          * conditional prolog code generation there.  We could do likewise,
3821          * but it's a big change, requiring extra allocation, so probably
3822          * not worth the trouble for SpiderMonkey.
3823          */
3824         JS_ASSERT(pn3 == pn2->pn_expr);
3825         if (forInVar && (!pn3 || let)) {
3826             JS_ASSERT(pn->pn_count == 1);
3827             break;
3828         }
3829 
3830         if (pn2 == pn->pn_head &&
3831             !inLetHead &&
3832             js_NewSrcNote2(cx, cg, SRC_DECL,
3833                            (pn->pn_op == JSOP_DEFCONST)
3834                            ? SRC_DECL_CONST
3835                            : (pn->pn_op == JSOP_DEFVAR)
3836                            ? SRC_DECL_VAR
3837                            : SRC_DECL_LET) < 0) {
3838             return JS_FALSE;
3839         }
3840         if (op == JSOP_ARGUMENTS) {
3841             if (js_Emit1(cx, cg, op) < 0)
3842                 return JS_FALSE;
3843         } else if (pn2->pn_slot >= 0) {
3844             EMIT_UINT16_IMM_OP(op, atomIndex);
3845         } else {
3846             EMIT_ATOM_INDEX_OP(op, atomIndex);
3847         }
3848 
3849 #if JS_HAS_DESTRUCTURING
3850     emit_note_pop:
3851 #endif
3852         tmp = CG_OFFSET(cg);
3853         if (noteIndex >= 0) {
3854             if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
3855                 return JS_FALSE;
3856         }
3857         if (!pn2->pn_next)
3858             break;
3859         off = tmp;
3860         noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
3861         if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
3862             return JS_FALSE;
3863     }
3864 
3865     /* If this is a let head, emit and return a srcnote on the pop. */
3866     if (inLetHead) {
3867         *headNoteIndex = js_NewSrcNote(cx, cg, SRC_DECL);
3868         if (*headNoteIndex < 0)
3869             return JS_FALSE;
3870         if (!(pn->pn_extra & PNX_POPVAR))
3871             return js_Emit1(cx, cg, JSOP_NOP) >= 0;
3872     }
3873 
3874     return !(pn->pn_extra & PNX_POPVAR) || js_Emit1(cx, cg, JSOP_POP) >= 0;
3875 }
3876 
3877 #if defined DEBUG_brendan || defined DEBUG_mrbkap
3878 static JSBool
3879 GettableNoteForNextOp(JSCodeGenerator *cg)
3880 {
3881     ptrdiff_t offset, target;
3882     jssrcnote *sn, *end;
3883 
3884     offset = 0;
3885     target = CG_OFFSET(cg);
3886     for (sn = CG_NOTES(cg), end = sn + CG_NOTE_COUNT(cg); sn < end;
3887          sn = SN_NEXT(sn)) {
3888         if (offset == target && SN_IS_GETTABLE(sn))
3889             return JS_TRUE;
3890         offset += SN_DELTA(sn);
3891     }
3892     return JS_FALSE;
3893 }
3894 #endif
3895 
3896 JSBool
3897 js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3898 {
3899     JSBool ok, useful, wantval;
3900     JSStmtInfo *stmt, stmtInfo;
3901     ptrdiff_t top, off, tmp, beq, jmp;
3902     JSParseNode *pn2, *pn3;
3903     JSAtom *atom;
3904     JSAtomListElement *ale;
3905     jsatomid atomIndex;
3906     ptrdiff_t noteIndex;
3907     JSSrcNoteType noteType;
3908     jsbytecode *pc;
3909     JSOp op;
3910     JSTokenType type;
3911     uint32 argc;
3912     int stackDummy;
3913 
3914     if (!JS_CHECK_STACK_SIZE(cx, stackDummy)) {
3915         JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_OVER_RECURSED);
3916         return JS_FALSE;
3917     }
3918 
3919     ok = JS_TRUE;
3920     cg->emitLevel++;
3921     pn->pn_offset = top = CG_OFFSET(cg);
3922 
3923     /* Emit notes to tell the current bytecode's source line number. */
3924     UPDATE_LINE_NUMBER_NOTES(cx, cg, pn);
3925 
3926     switch (pn->pn_type) {
3927       case TOK_FUNCTION:
3928       {
3929         void *cg2mark;
3930         JSCodeGenerator *cg2;
3931         JSFunction *fun;
3932 
3933 #if JS_HAS_XML_SUPPORT
3934         if (pn->pn_arity == PN_NULLARY) {
3935             if (js_Emit1(cx, cg, JSOP_GETFUNNS) < 0)
3936                 return JS_FALSE;
3937             break;
3938         }
3939 #endif
3940 
3941         /* Generate code for the function's body. */
3942         cg2mark = JS_ARENA_MARK(&cx->tempPool);
3943         JS_ARENA_ALLOCATE_TYPE(cg2, JSCodeGenerator, &cx->tempPool);
3944         if (!cg2) {
3945             JS_ReportOutOfMemory(cx);
3946             return JS_FALSE;
3947         }
3948         if (!js_InitCodeGenerator(cx, cg2, cg->codePool, cg->notePool,
3949                                   cg->filename, pn->pn_pos.begin.lineno,
3950                                   cg->principals)) {
3951             return JS_FALSE;
3952         }
3953         cg2->treeContext.flags = (uint16) (pn->pn_flags | TCF_IN_FUNCTION);
3954         cg2->treeContext.tryCount = pn->pn_tryCount;
3955         cg2->parent = cg;
3956         fun = (JSFunction *) JS_GetPrivate(cx, ATOM_TO_OBJECT(pn->pn_funAtom));
3957         if (!js_EmitFunctionBody(cx, cg2, pn->pn_body, fun))
3958             return JS_FALSE;
3959 
3960         /*
3961          * We need an activation object if an inner peeks out, or if such
3962          * inner-peeking caused one of our inners to become heavyweight.
3963          */
3964         if (cg2->treeContext.flags &
3965             (TCF_FUN_USES_NONLOCALS | TCF_FUN_HEAVYWEIGHT)) {
3966             cg->treeContext.flags |= TCF_FUN_HEAVYWEIGHT;
3967         }
3968         js_FinishCodeGenerator(cx, cg2);
3969         JS_ARENA_RELEASE(&cx->tempPool, cg2mark);
3970 
3971         /* Make the function object a literal in the outer script's pool. */
3972         ale = js_IndexAtom(cx, pn->pn_funAtom, &cg->atomList);
3973         if (!ale)
3974             return JS_FALSE;
3975         atomIndex = ALE_INDEX(ale);
3976 
3977         /* Emit a bytecode pointing to the closure object in its immediate. */
3978         if (pn->pn_op != JSOP_NOP) {
3979             EMIT_ATOM_INDEX_OP(pn->pn_op, atomIndex);
3980             break;
3981         }
3982 
3983         /* Top-level named functions need a nop for decompilation. */
3984         noteIndex = js_NewSrcNote2(cx, cg, SRC_FUNCDEF, (ptrdiff_t)atomIndex);
3985         if (noteIndex < 0 ||
3986             js_Emit1(cx, cg, JSOP_NOP) < 0) {
3987             return JS_FALSE;
3988         }
3989 
3990         /*
3991          * Top-levels also need a prolog op to predefine their names in the
3992          * variable object, or if local, to fill their stack slots.
3993          */
3994         CG_SWITCH_TO_PROLOG(cg);
3995 
3996         if (cg->treeContext.flags & TCF_IN_FUNCTION) {
3997             JSObject *obj, *pobj;
3998             JSProperty *prop;
3999             JSScopeProperty *sprop;
4000             uintN slot;
4001 
4002             obj = OBJ_GET_PARENT(cx, fun->object);
4003             if (!js_LookupHiddenProperty(cx, obj, ATOM_TO_JSID(fun->atom),
4004                                          &pobj, &prop)) {
4005                 return JS_FALSE;
4006             }
4007 
4008             JS_ASSERT(prop && pobj == obj);
4009             sprop = (JSScopeProperty *) prop;
4010             JS_ASSERT(sprop->getter == js_GetLocalVariable);
4011             slot = sprop->shortid;
4012             OBJ_DROP_PROPERTY(cx, pobj, prop);
4013 
4014             /*
4015              * If this local function is declared in a body block induced by
4016              * let declarations, reparent fun->object to the compiler-created
4017              * body block object so that JSOP_DEFLOCALFUN can clone that block
4018              * into the runtime scope chain.
4019              */
4020             stmt = cg->treeContext.topStmt;
4021             if (stmt && stmt->type == STMT_BLOCK &&
4022                 stmt->down && stmt->down->type == STMT_BLOCK &&
4023                 (stmt->down->flags & SIF_SCOPE)) {
4024                 obj = ATOM_TO_OBJECT(stmt->down->atom);
4025                 JS_ASSERT(LOCKED_OBJ_GET_CLASS(obj) == &js_BlockClass);
4026                 OBJ_SET_PARENT(cx, fun->object, obj);
4027             }
4028 
4029             if (atomIndex >= JS_BIT(16)) {
4030                 /*
4031                  * Lots of literals in the outer function, so we have to emit
4032                  * [JSOP_LITOPX, atomIndex, JSOP_DEFLOCALFUN, var slot].
4033                  */
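                /*
                 * Added annotation: js_EmitN(..., 3) below reserves a 3-byte
                 * immediate for SET_LITERAL_INDEX, accommodating an atom
                 * index that no longer fits the 16-bit operand checked by
                 * the JS_BIT(16) test above.
                 */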
4034                 off = js_EmitN(cx, cg, JSOP_LITOPX, 3);
4035                 if (off < 0)
4036                     return JS_FALSE;
4037                 pc = CG_CODE(cg, off);
4038                 SET_LITERAL_INDEX(pc, atomIndex);
4039                 EMIT_UINT16_IMM_OP(JSOP_DEFLOCALFUN, slot);
4040             } else {
4041                 /* Emit [JSOP_DEFLOCALFUN, var slot, atomIndex]. */
4042                 off = js_EmitN(cx, cg, JSOP_DEFLOCALFUN,
4043                                VARNO_LEN + ATOM_INDEX_LEN);
4044                 if (off < 0)
4045                     return JS_FALSE;
4046                 pc = CG_CODE(cg, off);
4047                 SET_VARNO(pc, slot);
4048                 pc += VARNO_LEN;
4049                 SET_ATOM_INDEX(pc, atomIndex);
4050             }
4051         } else {
4052             JS_ASSERT(!cg->treeContext.topStmt);
4053             EMIT_ATOM_INDEX_OP(JSOP_DEFFUN, atomIndex);
4054         }
4055 
4056         CG_SWITCH_TO_MAIN(cg);
4057         break;
4058       }
4059 
4060 #if JS_HAS_EXPORT_IMPORT
4061       case TOK_EXPORT:
4062         pn2 = pn->pn_head;
4063         if (pn2->pn_type == TOK_STAR) {
4064             /*
4065              * 'export *' must have no other elements in the list (what would
4066              * be the point?).
4067              */
4068             if (js_Emit1(cx, cg, JSOP_EXPORTALL) < 0)
4069                 return JS_FALSE;
4070         } else {
4071             /*
4072              * If not 'export *', the list consists of NAME nodes identifying
4073              * properties of the variables object to flag as exported.
4074              */
4075             do {
4076                 ale = js_IndexAtom(cx, pn2->pn_atom, &cg->atomList);
4077                 if (!ale)
4078                     return JS_FALSE;
4079                 EMIT_ATOM_INDEX_OP(JSOP_EXPORTNAME, ALE_INDEX(ale));
4080             } while ((pn2 = pn2->pn_next) != NULL);
4081         }
4082         break;
4083 
4084       case TOK_IMPORT:
4085         for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
4086             /*
4087              * Each subtree on an import list is rooted by a DOT or LB node.
4088              * A DOT may have a null pn_atom member, in which case pn_op must
4089              * be JSOP_IMPORTALL -- see EmitPropOp above.
4090              */
4091             if (!js_EmitTree(cx, cg, pn2))
4092                 return JS_FALSE;
4093         }
4094         break;
4095 #endif /* JS_HAS_EXPORT_IMPORT */
4096 
4097       case TOK_IF:
4098         /* Initialize so we can detect else-if chains and avoid recursion. */
4099         stmtInfo.type = STMT_IF;
4100         beq = jmp = -1;
4101         noteIndex = -1;
4102 
4103       if_again:
4104         /* Emit code for the condition before pushing stmtInfo. */
4105         if (!js_EmitTree(cx, cg, pn->pn_kid1))
4106             return JS_FALSE;
4107         top = CG_OFFSET(cg);
4108         if (stmtInfo.type == STMT_IF) {
4109             js_PushStatement(&cg->treeContext, &stmtInfo, STMT_IF, top);
4110         } else {
4111             /*
4112              * We came here from the goto further below that detects else-if
4113              * chains, so we must mutate stmtInfo back into a STMT_IF record.
4114              * Also (see below for why) we need a note offset for SRC_IF_ELSE
4115              * to help the decompiler.  Actually, we need two offsets, one for
4116              * decompiling any else clause and the second for decompiling an
4117              * else-if chain without bracing, overindenting, or incorrectly
4118              * scoping let declarations.
4119              */
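            /*
             * Example (added annotation): 'if (a) s1; else if (b) s2; else
             * s3;' is emitted iteratively -- each else-if arm loops back to
             * if_again above (via goto) instead of recursing into
             * js_EmitTree, and each pass lands here to patch the previous
             * SRC_IF_ELSE note's two offsets.
             */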
4120             JS_ASSERT(stmtInfo.type == STMT_ELSE);
4121             stmtInfo.type = STMT_IF;
4122             stmtInfo.update = top;
4123             if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4124                 return JS_FALSE;
4125             if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 1, top - jmp))
4126                 return JS_FALSE;
4127         }
4128 
4129         /* Emit an annotated branch-if-false around the then part. */
4130         pn3 = pn->pn_kid3;
4131         noteIndex = js_NewSrcNote(cx, cg, pn3 ? SRC_IF_ELSE : SRC_IF);
4132         if (noteIndex < 0)
4133             return JS_FALSE;
4134         beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4135         if (beq < 0)
4136             return JS_FALSE;
4137 
4138         /* Emit code for the then and optional else parts. */
4139         if (!js_EmitTree(cx, cg, pn->pn_kid2))
4140             return JS_FALSE;
4141         if (pn3) {
4142             /* Modify stmtInfo so we know we're in the else part. */
4143             stmtInfo.type = STMT_ELSE;
4144 
4145             /*
4146              * Emit a JSOP_BACKPATCH op to jump from the end of our then part
4147              * around the else part.  The js_PopStatementCG call at the bottom
4148              * of this switch case will fix up the backpatch chain linked from
4149              * stmtInfo.breaks.
4150              */
4151             jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks, NULL, SRC_NULL);
4152             if (jmp < 0)
4153                 return JS_FALSE;
4154 
4155             /* Ensure the branch-if-false comes here, then emit the else. */
4156             CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4157             if (pn3->pn_type == TOK_IF) {
4158                 pn = pn3;
4159                 goto if_again;
4160             }
4161 
4162             if (!js_EmitTree(cx, cg, pn3))
4163                 return JS_FALSE;
4164 
4165             /*
4166              * Annotate SRC_IF_ELSE with the offset from branch to jump, for
4167              * the decompiler's benefit.  We can't just "back up" from the pc
4168              * of the else clause, because we don't know whether an extended
4169              * jump was required to leap from the end of the then clause over
4170              * the else clause.
4171              */
4172             if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4173                 return JS_FALSE;
4174         } else {
4175             /* No else part, fixup the branch-if-false to come here. */
4176             CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4177         }
4178         ok = js_PopStatementCG(cx, cg);
4179         break;
4180 
4181       case TOK_SWITCH:
4182         /* Out of line to avoid bloating js_EmitTree's stack frame size. */
4183         ok = EmitSwitch(cx, cg, pn, &stmtInfo);
4184         break;
4185 
4186       case TOK_WHILE:
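        /*
         * Roughly, 'while (<cond>) <body>' emits (short jumps assumed):
         *
         * top:
         *   <cond>
         *   ifeq end           annotated SRC_WHILE, offset = jmp - beq
         *   <body>
         *   goto top
         * end:
         */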
4187         js_PushStatement(&cg->treeContext, &stmtInfo, STMT_WHILE_LOOP, top);
4188         if (!js_EmitTree(cx, cg, pn->pn_left))
4189             return JS_FALSE;
4190         noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4191         if (noteIndex < 0)
4192             return JS_FALSE;
4193         beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4194         if (beq < 0)
4195             return JS_FALSE;
4196         if (!js_EmitTree(cx, cg, pn->pn_right))
4197             return JS_FALSE;
4198         jmp = EmitJump(cx, cg, JSOP_GOTO, top - CG_OFFSET(cg));
4199         if (jmp < 0)
4200             return JS_FALSE;
4201         CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4202         if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4203             return JS_FALSE;
4204         ok = js_PopStatementCG(cx, cg);
4205         break;
4206 
4207       case TOK_DO:
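        /*
         * Roughly, 'do <body> while (<cond>);' emits:
         *
         *   nop                annotated SRC_WHILE, so 'do' decompiles
         * top:
         *   <body>
         *   <cond>
         *   ifne top
         */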
4208         /* Emit an annotated nop so we know to decompile a 'do' keyword. */
4209         if (js_NewSrcNote(cx, cg, SRC_WHILE) < 0 ||
4210             js_Emit1(cx, cg, JSOP_NOP) < 0) {
4211             return JS_FALSE;
4212         }
4213 
4214         /* Compile the loop body. */
4215         top = CG_OFFSET(cg);
4216         js_PushStatement(&cg->treeContext, &stmtInfo, STMT_DO_LOOP, top);
4217         if (!js_EmitTree(cx, cg, pn->pn_left))
4218             return JS_FALSE;
4219 
4220         /* Set loop and enclosing label update offsets, for continue. */
4221         stmt = &stmtInfo;
4222         do {
4223             stmt->update = CG_OFFSET(cg);
4224         } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4225 
4226         /* Compile the loop condition, now that continues know where to go. */
4227         if (!js_EmitTree(cx, cg, pn->pn_right))
4228             return JS_FALSE;
4229 
4230         /*
4231          * No source note needed, because JSOP_IFNE is used only for do-while.
4232          * If we ever use JSOP_IFNE for other purposes, we can still avoid yet
4233          * another note here, by storing (jmp - top) in the SRC_WHILE note's
4234          * offset, and fetching that delta in order to decompile recursively.
4235          */
4236         if (EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg)) < 0)
4237             return JS_FALSE;
4238         ok = js_PopStatementCG(cx, cg);
4239         break;
4240 
4241       case TOK_FOR:
4242         beq = 0;                /* suppress gcc warnings */
4243         pn2 = pn->pn_left;
4244         js_PushStatement(&cg->treeContext, &stmtInfo, STMT_FOR_LOOP, top);
4245 
4246         if (pn2->pn_type == TOK_IN) {
4247             JSBool emitIFEQ;
4248 
4249             /* Set stmtInfo type for later testing. */
4250             stmtInfo.type = STMT_FOR_IN_LOOP;
4251             noteIndex = -1;
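            /*
             * Roughly, a simple 'for (x in o) <body>' emits the following
             * (a sketch; see the comment below for how var/let declarations
             * and initializers are handled):
             *
             *   startiter
             *   <o>
             *   forin              (or foreach / foreachkeyval)
             * top:
             *   forname "x"        (or forarg/forvar/forlocal/forprop/forelem)
             *   ifeq end           annotated SRC_WHILE, offset = jmp - beq
             *   <body>
             *   goto top
             * end:
             *   enditer
             */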
4252 
4253             /*
4254              * If the left part is 'var x', emit code to define x if necessary
4255              * using a prolog opcode, but do not emit a pop.  If the left part
4256              * is 'var x = i', emit prolog code to define x if necessary; then
4257              * emit code to evaluate i, assign the result to x, and pop the
4258              * result off the stack.
4259              *
4260              * All the logic to do this is implemented in the outer switch's
4261              * TOK_VAR case, conditioned on pn_extra flags set by the parser.
4262              *
4263              * In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
4264              * called here will generate the proper note for the assignment
4265              * op that sets x = i, hoisting the initialized var declaration
4266              * out of the loop: 'var x = i; for (x in o) ...'.
4267              *
4268              * In the 'for (var x in o) ...' case, nothing but the prolog op
4269              * (if needed) should be generated here; we must emit the note
4270              * just before the JSOP_FOR* opcode in the switch on pn3->pn_type
4271              * a bit below, so nothing is hoisted: 'for (var x in o) ...'.
4272              *
4273              * A 'for (let x = i in o)' loop must not be hoisted, since in
4274              * this form the let variable is scoped by the loop body (but not
4275              * the head).  The initializer expression i must be evaluated for
4276              * any side effects.  So we hoist only i in the let case.
4277              */
4278             pn3 = pn2->pn_left;
4279             type = pn3->pn_type;
4280             cg->treeContext.flags |= TCF_IN_FOR_INIT;
4281             if (TOKEN_TYPE_IS_DECL(type) && !js_EmitTree(cx, cg, pn3))
4282                 return JS_FALSE;
4283             cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
4284 
4285             /* Emit a push to allocate the iterator. */
4286             if (js_Emit1(cx, cg, JSOP_STARTITER) < 0)
4287                 return JS_FALSE;
4288 
4289             /* Compile the object expression to the right of 'in'. */
4290             if (!js_EmitTree(cx, cg, pn2->pn_right))
4291                 return JS_FALSE;
4292 
4293             /*
4294              * Emit a bytecode to convert top of stack value to the iterator
4295              * object depending on the loop variant (for-in, for-each-in, or
4296              * destructuring for-in).
4297              */
4298 #if JS_HAS_DESTRUCTURING
4299             JS_ASSERT(pn->pn_op == JSOP_FORIN ||
4300                       pn->pn_op == JSOP_FOREACHKEYVAL ||
4301                       pn->pn_op == JSOP_FOREACH);
4302 #else
4303             JS_ASSERT(pn->pn_op == JSOP_FORIN || pn->pn_op == JSOP_FOREACH);
4304 #endif
4305             if (js_Emit1(cx, cg, pn->pn_op) < 0)
4306                 return JS_FALSE;
4307 
4308             top = CG_OFFSET(cg);
4309             SET_STATEMENT_TOP(&stmtInfo, top);
4310 
4311             /*
4312              * Compile a JSOP_FOR* bytecode based on the left hand side.
4313              *
4314              * Initialize op to JSOP_SETNAME in case of |for ([a, b] in o)...|
4315              * or similar, to signify assignment, rather than declaration, to
4316              * the decompiler.  EmitDestructuringOps takes a prolog bytecode
4317              * parameter and emits the appropriate source note, defaulting to
4318              * assignment, so JSOP_SETNAME is not critical here; many similar
4319              * ops could be used -- just not JSOP_NOP (which means 'let').
4320              */
4321             emitIFEQ = JS_TRUE;
4322             op = JSOP_SETNAME;
4323             switch (type) {
4324 #if JS_HAS_BLOCK_SCOPE
4325               case TOK_LET:
4326 #endif
4327               case TOK_VAR:
4328                 JS_ASSERT(pn3->pn_arity == PN_LIST && pn3->pn_count == 1);
4329                 pn3 = pn3->pn_head;
4330 #if JS_HAS_DESTRUCTURING
4331                 if (pn3->pn_type == TOK_ASSIGN) {
4332                     pn3 = pn3->pn_left;
4333                     JS_ASSERT(pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC);
4334                 }
4335                 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4336                     op = pn2->pn_left->pn_op;
4337                     goto destructuring_for;
4338                 }
4339 #else
4340                 JS_ASSERT(pn3->pn_type == TOK_NAME);
4341 #endif
4342                 /*
4343                  * Always annotate JSOP_FORLOCAL if given input of the form
4344                  * 'for (let x in o)' -- the decompiler must not hoist the
4345                  * 'let x' out of the loop head, or x will be bound in the
4346                  * wrong scope.  Likewise, but in this case only for the sake
4347                  * of higher decompilation fidelity, do not hoist 'var x'
4348                  * when given 'for (var x in o)'.  But 'for (var x = i in o)'
4349                  * requires hoisting in order to preserve the initializer i.
4350                  * The decompiler can only handle so much!
4351                  */
4352                 if ((
4353 #if JS_HAS_BLOCK_SCOPE
4354                      type == TOK_LET ||
4355 #endif
4356                      !pn3->pn_expr) &&
4357                     js_NewSrcNote2(cx, cg, SRC_DECL,
4358                                    type == TOK_VAR
4359                                    ? SRC_DECL_VAR
4360                                    : SRC_DECL_LET) < 0) {
4361                     return JS_FALSE;
4362                 }
4363                 /* FALL THROUGH */
4364               case TOK_NAME:
4365                 if (pn3->pn_slot >= 0) {
4366                     op = pn3->pn_op;
4367                     switch (op) {
4368                       case JSOP_GETARG:   /* FALL THROUGH */
4369                       case JSOP_SETARG:   op = JSOP_FORARG; break;
4370                       case JSOP_GETVAR:   /* FALL THROUGH */
4371                       case JSOP_SETVAR:   op = JSOP_FORVAR; break;
4372                       case JSOP_GETGVAR:  /* FALL THROUGH */
4373                       case JSOP_SETGVAR:  op = JSOP_FORNAME; break;
4374                       case JSOP_GETLOCAL: /* FALL THROUGH */
4375                       case JSOP_SETLOCAL: op = JSOP_FORLOCAL; break;
4376                       default:            JS_ASSERT(0);
4377                     }
4378                 } else {
4379                     pn3->pn_op = JSOP_FORNAME;
4380                     if (!BindNameToSlot(cx, &cg->treeContext, pn3, JS_FALSE))
4381                         return JS_FALSE;
4382                     op = pn3->pn_op;
4383                 }
4384                 if (pn3->pn_slot >= 0) {
4385                     if (pn3->pn_attrs & JSPROP_READONLY) {
4386                         JS_ASSERT(op == JSOP_FORVAR);
4387                         op = JSOP_GETVAR;
4388                     }
4389                     atomIndex = (jsatomid) pn3->pn_slot;
4390                     EMIT_UINT16_IMM_OP(op, atomIndex);
4391                 } else {
4392                     if (!EmitAtomOp(cx, pn3, op, cg))
4393                         return JS_FALSE;
4394                 }
4395                 break;
4396 
4397               case TOK_DOT:
4398                 useful = JS_FALSE;
4399                 if (!CheckSideEffects(cx, &cg->treeContext, pn3->pn_expr,
4400                                       &useful)) {
4401                     return JS_FALSE;
4402                 }
4403                 if (!useful) {
4404                     if (!EmitPropOp(cx, pn3, JSOP_FORPROP, cg))
4405                         return JS_FALSE;
4406                     break;
4407                 }
4408                 /* FALL THROUGH */
4409 
4410 #if JS_HAS_DESTRUCTURING
4411               case TOK_RB:
4412               case TOK_RC:
4413               destructuring_for:
4414 #endif
4415 #if JS_HAS_XML_SUPPORT
4416               case TOK_UNARYOP:
4417 #endif
4418 #if JS_HAS_LVALUE_RETURN
4419               case TOK_LP:
4420 #endif
4421               case TOK_LB:
4422                 /*
4423                  * We separate the first/next bytecode from the enumerator
4424                  * variable binding to avoid any side-effects in the index
4425                  * expression (e.g., for (x[i++] in {}) should not bind x[i]
4426                  * or increment i at all).
4427                  */
4428                 emitIFEQ = JS_FALSE;
4429                 if (js_Emit1(cx, cg, JSOP_FORELEM) < 0)
4430                     return JS_FALSE;
4431 
4432                 /*
4433                  * Emit a SRC_WHILE note with offset telling the distance to
4434                  * the loop-closing jump (we can't reckon from the branch at
4435                  * the top of the loop, because the loop-closing jump might
4436                  * need to be an extended jump, independent of whether the
4437                  * branch is short or long).
4438                  */
4439                 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4440                 if (noteIndex < 0)
4441                     return JS_FALSE;
4442                 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4443                 if (beq < 0)
4444                     return JS_FALSE;
4445 
4446 #if JS_HAS_DESTRUCTURING
4447                 if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4448                     if (!EmitDestructuringOps(cx, cg, op, pn3))
4449                         return JS_FALSE;
4450                     if (js_Emit1(cx, cg, JSOP_POP) < 0)
4451                         return JS_FALSE;
4452                     break;
4453                 }
4454 #endif
4455 #if JS_HAS_LVALUE_RETURN
4456                 if (pn3->pn_type == TOK_LP) {
4457                     JS_ASSERT(pn3->pn_op == JSOP_SETCALL);
4458                     if (!js_EmitTree(cx, cg, pn3))
4459                         return JS_FALSE;
4460                     if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
4461                         return JS_FALSE;
4462                     break;
4463                 }
4464 #endif
4465 #if JS_HAS_XML_SUPPORT
4466                 if (pn3->pn_type == TOK_UNARYOP) {
4467                     JS_ASSERT(pn3->pn_op == JSOP_BINDXMLNAME);
4468                     if (!js_EmitTree(cx, cg, pn3))
4469                         return JS_FALSE;
4470                     if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
4471                         return JS_FALSE;
4472                     break;
4473                 }
4474 #endif
4475 
4476                 /* Now that we're safely past the IFEQ, commit side effects. */
4477                 if (!EmitElemOp(cx, pn3, JSOP_ENUMELEM, cg))
4478                     return JS_FALSE;
4479                 break;
4480 
4481               default:
4482                 JS_ASSERT(0);
4483             }
4484 
4485             if (emitIFEQ) {
4486                 /* Annotate so the decompiler can find the loop-closing jump. */
4487                 noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4488                 if (noteIndex < 0)
4489                     return JS_FALSE;
4490 
4491                 /* Pop and test the loop condition generated by JSOP_FOR*. */
4492                 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4493                 if (beq < 0)
4494                     return JS_FALSE;
4495             }
4496         } else {
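            /*
             * Roughly, 'for (<init>; <cond>; <update>) <body>' emits:
             *
             *   <init>
             *   pop                annotated SRC_FOR (nop if no init)
             * top:
             *   [<cond>]
             *   [ifeq end]
             *   <body>
             *   [<update>]
             *   [pop]
             *   goto top
             * end:
             *
             * The three SRC_FOR offsets, all measured from top, locate the
             * end of <cond>, the start of <update>, and the loop-closing
             * jump, respectively.
             */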
4497             op = JSOP_POP;
4498             if (!pn2->pn_kid1) {
4499                 /* No initializer: emit an annotated nop for the decompiler. */
4500                 op = JSOP_NOP;
4501             } else {
4502                 cg->treeContext.flags |= TCF_IN_FOR_INIT;
4503 #if JS_HAS_DESTRUCTURING
4504                 pn3 = pn2->pn_kid1;
4505                 if (pn3->pn_type == TOK_ASSIGN &&
4506                     !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4507                     return JS_FALSE;
4508                 }
4509 #endif
4510                 if (op == JSOP_POP) {
4511                     if (!js_EmitTree(cx, cg, pn3))
4512                         return JS_FALSE;
4513                     if (TOKEN_TYPE_IS_DECL(pn3->pn_type)) {
4514                         /*
4515                          * Check whether a destructuring-initialized var decl
4516                          * was optimized to a group assignment.  If so, we do
4517                          * not need to emit a pop below, so switch to a nop,
4518                          * just for the decompiler.
4519                          */
4520                         JS_ASSERT(pn3->pn_arity == PN_LIST);
4521                         if (pn3->pn_extra & PNX_GROUPINIT)
4522                             op = JSOP_NOP;
4523                     }
4524                 }
4525                 cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
4526             }
4527             noteIndex = js_NewSrcNote(cx, cg, SRC_FOR);
4528             if (noteIndex < 0 ||
4529                 js_Emit1(cx, cg, op) < 0) {
4530                 return JS_FALSE;
4531             }
4532 
4533             top = CG_OFFSET(cg);
4534             SET_STATEMENT_TOP(&stmtInfo, top);
4535             if (!pn2->pn_kid2) {
4536                 /* No loop condition: flag this fact in the source notes. */
4537                 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, 0))
4538                     return JS_FALSE;
4539             } else {
4540                 if (!js_EmitTree(cx, cg, pn2->pn_kid2))
4541                     return JS_FALSE;
4542                 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
4543                                          CG_OFFSET(cg) - top)) {
4544                     return JS_FALSE;
4545                 }
4546                 beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4547                 if (beq < 0)
4548                     return JS_FALSE;
4549             }
4550 
4551             /* Set pn3 (used below) here to avoid spurious gcc warnings. */
4552             pn3 = pn2->pn_kid3;
4553         }
4554 
4555         /* Emit code for the loop body. */
4556         if (!js_EmitTree(cx, cg, pn->pn_right))
4557             return JS_FALSE;
4558 
4559         if (pn2->pn_type != TOK_IN) {
4560             /* Set the second note offset so we can find the update part. */
4561             JS_ASSERT(noteIndex != -1);
4562             if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
4563                                      CG_OFFSET(cg) - top)) {
4564                 return JS_FALSE;
4565             }
4566 
4567             if (pn3) {
4568                 /* Set loop and enclosing "update" offsets, for continue. */
4569                 stmt = &stmtInfo;
4570                 do {
4571                     stmt->update = CG_OFFSET(cg);
4572                 } while ((stmt = stmt->down) != NULL &&
4573                          stmt->type == STMT_LABEL);
4574 
4575                 op = JSOP_POP;
4576 #if JS_HAS_DESTRUCTURING
4577                 if (pn3->pn_type == TOK_ASSIGN &&
4578                     !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4579                     return JS_FALSE;
4580                 }
4581 #endif
4582                 if (op == JSOP_POP) {
4583                     if (!js_EmitTree(cx, cg, pn3))
4584                         return JS_FALSE;
4585                     if (js_Emit1(cx, cg, op) < 0)
4586                         return JS_FALSE;
4587                 }
4588 
4589                 /* Restore the absolute line number for source note readers. */
4590                 off = (ptrdiff_t) pn->pn_pos.end.lineno;
4591                 if (CG_CURRENT_LINE(cg) != (uintN) off) {
4592                     if (js_NewSrcNote2(cx, cg, SRC_SETLINE, off) < 0)
4593                         return JS_FALSE;
4594                     CG_CURRENT_LINE(cg) = (uintN) off;
4595                 }
4596             }
4597 
4598             /* The third note offset helps us find the loop-closing jump. */
4599             if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 2,
4600                                      CG_OFFSET(cg) - top)) {
4601                 return JS_FALSE;
4602             }
4603         }
4604 
4605         /* Emit the loop-closing jump and fixup all jump offsets. */
4606         jmp = EmitJump(cx, cg, JSOP_GOTO, top - CG_OFFSET(cg));
4607         if (jmp < 0)
4608             return JS_FALSE;
4609         if (beq > 0)
4610             CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4611         if (pn2->pn_type == TOK_IN) {
4612             /* Set the SRC_WHILE note offset so we can find the closing jump. */
4613             JS_ASSERT(noteIndex != -1);
4614             if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, jmp - beq))
4615                 return JS_FALSE;
4616         }
4617 
4618         /* Now fixup all breaks and continues (before for/in's JSOP_ENDITER). */
4619         if (!js_PopStatementCG(cx, cg))
4620             return JS_FALSE;
4621 
4622         if (pn2->pn_type == TOK_IN) {
4623             if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
4624                 return JS_FALSE;
4625         }
4626         break;
4627 
4628       case TOK_BREAK:
4629         stmt = cg->treeContext.topStmt;
4630         atom = pn->pn_atom;
4631         if (atom) {
4632             ale = js_IndexAtom(cx, atom, &cg->atomList);
4633             if (!ale)
4634                 return JS_FALSE;
4635             while (stmt->type != STMT_LABEL || stmt->atom != atom)
4636                 stmt = stmt->down;
4637             noteType = SRC_BREAK2LABEL;
4638         } else {
4639             ale = NULL;
4640             while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
4641                 stmt = stmt->down;
4642             noteType = SRC_NULL;
4643         }
4644 
4645         if (EmitGoto(cx, cg, stmt, &stmt->breaks, ale, noteType) < 0)
4646             return JS_FALSE;
4647         break;
4648 
4649       case TOK_CONTINUE:
4650         stmt = cg->treeContext.topStmt;
4651         atom = pn->pn_atom;
4652         if (atom) {
4653             /* Find the loop statement enclosed by the matching label. */
4654             JSStmtInfo *loop = NULL;
4655             ale = js_IndexAtom(cx, atom, &cg->atomList);
4656             if (!ale)
4657                 return JS_FALSE;
4658             while (stmt->type != STMT_LABEL || stmt->atom != atom) {
4659                 if (STMT_IS_LOOP(stmt))
4660                     loop = stmt;
4661                 stmt = stmt->down;
4662             }
4663             stmt = loop;
4664             noteType = SRC_CONT2LABEL;
4665         } else {
4666             ale = NULL;
4667             while (!STMT_IS_LOOP(stmt))
4668                 stmt = stmt->down;
4669             noteType = SRC_CONTINUE;
4670         }
4671 
4672         if (EmitGoto(cx, cg, stmt, &stmt->continues, ale, noteType) < 0)
4673             return JS_FALSE;
4674         break;
4675 
4676       case TOK_WITH:
4677         if (!js_EmitTree(cx, cg, pn->pn_left))
4678             return JS_FALSE;
4679         js_PushStatement(&cg->treeContext, &stmtInfo, STMT_WITH, CG_OFFSET(cg));
4680         if (js_Emit1(cx, cg, JSOP_ENTERWITH) < 0)
4681             return JS_FALSE;
4682         if (!js_EmitTree(cx, cg, pn->pn_right))
4683             return JS_FALSE;
4684         if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
4685             return JS_FALSE;
4686         ok = js_PopStatementCG(cx, cg);
4687         break;
4688 
4689       case TOK_TRY:
4690       {
4691         ptrdiff_t start, end, catchJump, catchStart, finallyCatch;
4692         intN depth;
4693         JSParseNode *lastCatch;
4694 
4695         catchJump = catchStart = finallyCatch = -1;
4696 
4697         /*
4698          * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
4699          * for later fixup.
4700          *
4701          * When a finally block is 'active' (STMT_FINALLY on the treeContext),
4702          * non-local jumps (including jumps-over-catches) result in a GOSUB
4703          * being written into the bytecode stream and fixed up later (cf.
4704          * EmitBackPatchOp and BackPatch).
4705          */
4706         js_PushStatement(&cg->treeContext, &stmtInfo,
4707                          pn->pn_kid3 ? STMT_FINALLY : STMT_TRY,
4708                          CG_OFFSET(cg));
4709 
4710         /*
4711          * About JSOP_SETSP: an exception can be thrown while the stack is in
4712          * an unbalanced state, and this imbalance causes problems with things
4713          * like function invocation later on.
4714          *
4715          * To fix this, we compute the 'balanced' stack depth upon try entry,
4716          * and then restore the stack to this depth when we hit the first catch
4717          * or finally block.  We can't just zero the stack, because things like
4718          * for/in and with that are active upon entry to the block keep state
4719          * variables on the stack.
4720          */
4721         depth = cg->stackDepth;
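        /*
         * Roughly, the overall emitted shape for try/catch/finally is as
         * sketched below (the gosubs and the goto start life as
         * JSOP_BACKPATCH ops and are fixed up by BackPatch):
         *
         *   try
         *   <try block>
         *   [gosub <finally>]         hidden; only if there is a finally
         *   goto end                  hidden; linked through catchJump
         *   <catch blocks>            per-catch layout shown further below
         *   [<rethrow code>]          target of the last catch guard's ifeq
         *   [setsp; gosub <finally>]  finallyCatch: entered via trynote when
         *                             an exception escapes try or a catch
         *   [finally; <finally block>; retsub]
         *   nop                       annotated SRC_ENDBRACE
         * end:                        target of the backpatched gotos
         */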
4722 
4723         /* Mark try location for decompilation, then emit try block. */
4724         if (js_Emit1(cx, cg, JSOP_TRY) < 0)
4725             return JS_FALSE;
4726         start = CG_OFFSET(cg);
4727         if (!js_EmitTree(cx, cg, pn->pn_kid1))
4728             return JS_FALSE;
4729 
4730         /* GOSUB to finally, if present. */
4731         if (pn->pn_kid3) {
4732             if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
4733                 return JS_FALSE;
4734             jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(stmtInfo));
4735             if (jmp < 0)
4736                 return JS_FALSE;
4737 
4738             /* JSOP_RETSUB pops the return pc-index, balancing the stack. */
4739             cg->stackDepth = depth;
4740         }
4741 
4742         /* Emit (hidden) jump over catch and/or finally. */
4743         if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
4744             return JS_FALSE;
4745         jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
4746         if (jmp < 0)
4747             return JS_FALSE;
4748 
4749         end = CG_OFFSET(cg);
4750 
4751         /* If this try has a catch block, emit it. */
4752         pn2 = pn->pn_kid2;
4753         lastCatch = NULL;
4754         if (pn2) {
4755             jsint count = 0;    /* previous catch block's population */
4756 
4757             catchStart = end;
4758 
4759             /*
4760              * The emitted code for a catch block looks like:
4761              *
4762              * [throwing]                          only if 2nd+ catch block
4763              * [leaveblock]                        only if 2nd+ catch block
4764              * enterblock                          with SRC_CATCH
4765              * exception
4766              * [dup]                               only if catchguard
4767              * setlocalpop <slot>                  or destructuring code
4768              * [< catchguard code >]               if there's a catchguard
4769              * [ifeq <offset to next catch block>]         " "
4770              * [pop]                               only if catchguard
4771              * < catch block contents >
4772              * leaveblock
4773              * goto <end of catch blocks>          non-local; finally applies
4774              *
4775              * If there's no catch block without a catchguard, the last
4776              * <offset to next catch block> points to rethrow code.  This
4777              * code will [gosub] to the finally code if appropriate, and is
4778              * also used for the catch-all trynote for capturing exceptions
4779              * thrown from catch{} blocks.
4780              */
4781             for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
4782                 ptrdiff_t guardJump, catchNote;
4783 
4784                 guardJump = GUARDJUMP(stmtInfo);
4785                 if (guardJump == -1) {
4786                     /* Set stack to original depth (see SETSP comment above). */
4787                     EMIT_UINT16_IMM_OP(JSOP_SETSP, (jsatomid)depth);
4788                     cg->stackDepth = depth;
4789                 } else {
4790                     /* Fix up and clean up previous catch block. */
4791                     CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, guardJump);
4792 
4793                     /*
4794                      * Account for the pushed exception object that we still
4795                      * have after jumping here from the previous guard.
4796                      */
4797                     JS_ASSERT(cg->stackDepth == depth);
4798                     cg->stackDepth = depth + 1;
4799 
4800                     /*
4801                      * Move exception back to cx->exception to prepare for
4802                      * the next catch. We hide [throwing] from the decompiler
4803                      * since it compensates for the hidden JSOP_DUP at the
4804                      * start of the previous guarded catch.
4805                      */
4806                     if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
4807                         js_Emit1(cx, cg, JSOP_THROWING) < 0) {
4808                         return JS_FALSE;
4809                     }
4810 
4811                     /*
4812                      * Emit an unbalanced [leaveblock] for the previous catch,
4813                      * whose block object count is saved below.
4814                      */
4815                     if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
4816                         return JS_FALSE;
4817                     JS_ASSERT(count >= 0);
4818                     EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
4819                 }
4820 
4821                 /*
4822                  * Annotate the JSOP_ENTERBLOCK that's about to be generated
4823                  * by the call to js_EmitTree immediately below.  Save this
4824                  * source note's index in stmtInfo for use by the TOK_CATCH:
4825                  * case, where the length of the catch guard is set as the
4826                  * note's offset.
4827                  */
4828                 catchNote = js_NewSrcNote2(cx, cg, SRC_CATCH, 0);
4829                 if (catchNote < 0)
4830                     return JS_FALSE;
4831                 CATCHNOTE(stmtInfo) = catchNote;
4832 
4833                 /*
4834                  * Emit the lexical scope and catch body.  Save the catch's
4835                  * block object population via count, for use when targeting
4836                  * guardJump at the next catch (the guard mismatch case).
4837                  */
4838                 JS_ASSERT(pn3->pn_type == TOK_LEXICALSCOPE);
4839                 count = OBJ_BLOCK_COUNT(cx, ATOM_TO_OBJECT(pn3->pn_atom));
4840                 if (!js_EmitTree(cx, cg, pn3))
4841                     return JS_FALSE;
4842 
4843                 /* gosub <finally>, if required */
4844                 if (pn->pn_kid3) {
4845                     jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH,
4846                                           &GOSUBS(stmtInfo));
4847                     if (jmp < 0)
4848                         return JS_FALSE;
4849                     JS_ASSERT(cg->stackDepth == depth);
4850                 }
4851 
4852                 /*
4853                  * Jump over the remaining catch blocks.  This will get fixed
4854                  * up to jump to after catch/finally.
4855                  */
4856                 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
4857                     return JS_FALSE;
4858                 jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
4859                 if (jmp < 0)
4860                     return JS_FALSE;
4861 
4862                 /*
4863                  * Save a pointer to the last catch node to handle try-finally
4864                  * and try-catch(guard)-finally special cases.
4865                  */
4866                 lastCatch = pn3->pn_expr;
4867             }
4868         }
4869 
4870         /*
4871          * Last catch guard jumps to the rethrow code sequence if none of the
4872          * guards match. Target guardJump at the beginning of the rethrow
4873          * sequence, just in case a guard expression throws and leaves the
4874          * stack unbalanced.
4875          */
4876         if (lastCatch && lastCatch->pn_kid2) {
4877             CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, GUARDJUMP(stmtInfo));
4878 
4879             /* Sync the stack to take into account pushed exception. */
4880             JS_ASSERT(cg->stackDepth == depth);
4881             cg->stackDepth = depth + 1;
4882 
4883             /*
4884              * Rethrow the exception, delegating execution of any finally
4885              * block to the exception handler.
4886              */
4887             if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
4888                 js_Emit1(cx, cg, JSOP_THROW) < 0) {
4889                 return JS_FALSE;
4890             }
4891         }
4892 
4893         JS_ASSERT(cg->stackDepth == depth);
4894 
4895         /* Emit finally handler if any. */
4896         if (pn->pn_kid3) {
4897             /*
4898              * We emit [setsp][gosub] to call try-finally when an exception is
4899              * thrown from try or try-catch blocks. The [gosub] and [retsub]
4900              * opcodes will take care of stacking and rethrowing any exception
4901              * pending across the finally.
4902              */
4903             finallyCatch = CG_OFFSET(cg);
4904             EMIT_UINT16_IMM_OP(JSOP_SETSP, (jsatomid)depth);
4905 
4906             jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH,
4907                                   &GOSUBS(stmtInfo));
4908             if (jmp < 0)
4909                 return JS_FALSE;
4910 
4911             JS_ASSERT(cg->stackDepth == depth);
4912             JS_ASSERT((uintN)depth <= cg->maxStackDepth);
4913 
4914             /*
4915              * Fix up the gosubs that might have been emitted before non-local
4916              * jumps to the finally code.
4917              */
4918             if (!BackPatch(cx, cg, GOSUBS(stmtInfo), CG_NEXT(cg), JSOP_GOSUB))
4919                 return JS_FALSE;
4920 
4921             /*
4922              * The stack budget must be balanced at this point.  All [gosub]
4923              * calls emitted before this point will push two stack slots, one
4924              * for the pending exception (or JSVAL_HOLE if there is no pending
4925              * exception) and one for the [retsub] pc-index.
4926              */
4927             JS_ASSERT(cg->stackDepth == depth);
4928             cg->stackDepth += 2;
4929             if ((uintN)cg->stackDepth > cg->maxStackDepth)
4930                 cg->maxStackDepth = cg->stackDepth;
4931 
4932             /* Now indicate that we're emitting a subroutine body. */
4933             stmtInfo.type = STMT_SUBROUTINE;
4934             if (!UpdateLineNumberNotes(cx, cg, pn->pn_kid3))
4935                 return JS_FALSE;
4936             if (js_Emit1(cx, cg, JSOP_FINALLY) < 0 ||
4937                 !js_EmitTree(cx, cg, pn->pn_kid3) ||
4938                 js_Emit1(cx, cg, JSOP_RETSUB) < 0) {
4939                 return JS_FALSE;
4940             }
4941 
4942             /* Restore stack depth budget to its balanced state. */
4943             JS_ASSERT(cg->stackDepth == depth + 2);
4944             cg->stackDepth = depth;
4945         }
4946         if (!js_PopStatementCG(cx, cg))
4947             return JS_FALSE;
4948 
4949         if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
4950             js_Emit1(cx, cg, JSOP_NOP) < 0) {
4951             return JS_FALSE;
4952         }
4953 
4954         /* Fix up the end-of-try/catch jumps to come here. */
4955         if (!BackPatch(cx, cg, catchJump, CG_NEXT(cg), JSOP_GOTO))
4956             return JS_FALSE;
4957 
4958         /*
4959          * Add the try note last, to let post-order give us the right ordering
4960          * (first to last for a given nesting level, inner to outer by level).
4961          */
4962         if (pn->pn_kid2) {
4963             JS_ASSERT(end != -1 && catchStart != -1);
4964             if (!js_NewTryNote(cx, cg, start, end, catchStart))
4965                 return JS_FALSE;
4966         }
4967 
4968         /*
4969          * If we've got a finally, mark the try+catch region with an
4970          * additional trynote to catch exceptions (re)thrown from a catch
4971          * block or, in the try{}finally{} case, from the try block.
4972          */
4973         if (pn->pn_kid3) {
4974             JS_ASSERT(finallyCatch != -1);
4975             if (!js_NewTryNote(cx, cg, start, finallyCatch, finallyCatch))
4976                 return JS_FALSE;
4977         }
4978         break;
4979       }
4980 
4981       case TOK_CATCH:
4982       {
4983         ptrdiff_t catchStart, guardJump;
4984 
4985         /*
4986          * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
4987          * and save the block object atom.
4988          */
4989         stmt = cg->treeContext.topStmt;
4990         JS_ASSERT(stmt->type == STMT_BLOCK && (stmt->flags & SIF_SCOPE));
4991         stmt->type = STMT_CATCH;
4992         catchStart = stmt->update;
4993         atom = stmt->atom;
4994 
4995         /* Go up one statement info record to the TRY or FINALLY record. */
4996         stmt = stmt->down;
4997         JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);
4998 
4999         /* Pick up the pending exception and bind it to the catch variable. */
5000         if (js_Emit1(cx, cg, JSOP_EXCEPTION) < 0)
5001             return JS_FALSE;
5002 
5003         /*
5004          * If there is a guard expression, dup the exception object so it
5005          * remains available for rethrowing or for later catch blocks.
5006          */
5007         if (pn->pn_kid2) {
5008             if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5009                 js_Emit1(cx, cg, JSOP_DUP) < 0) {
5010                 return JS_FALSE;
5011             }
5012         }
5013 
5014         pn2 = pn->pn_kid1;
5015         switch (pn2->pn_type) {
5016 #if JS_HAS_DESTRUCTURING
5017           case TOK_RB:
5018           case TOK_RC:
5019             if (!EmitDestructuringOps(cx, cg, JSOP_NOP, pn2))
5020                 return JS_FALSE;
5021             if (js_Emit1(cx, cg, JSOP_POP) < 0)
5022                 return JS_FALSE;
5023             break;
5024 #endif
5025 
5026           case TOK_NAME:
5027             /* Inline BindNameToSlot, adding block depth to pn2->pn_slot. */
5028             pn2->pn_slot += OBJ_BLOCK_DEPTH(cx, ATOM_TO_OBJECT(atom));
5029             EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, pn2->pn_slot);
5030             break;
5031 
5032           default:
5033             JS_ASSERT(0);
5034         }
5035 
5036         /* Emit the guard expression, if there is one. */
5037         if (pn->pn_kid2) {
5038             if (!js_EmitTree(cx, cg, pn->pn_kid2))
5039                 return JS_FALSE;
5040             if (!js_SetSrcNoteOffset(cx, cg, CATCHNOTE(*stmt), 0,
5041                                      CG_OFFSET(cg) - catchStart)) {
5042                 return JS_FALSE;
5043             }
5044             /* ifeq <next block> */
5045             guardJump = EmitJump(cx, cg, JSOP_IFEQ, 0);
5046             if (guardJump < 0)
5047                 return JS_FALSE;
5048             GUARDJUMP(*stmt) = guardJump;
5049 
5050             /* Pop duplicated exception object as we no longer need it. */
5051             if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5052                 js_Emit1(cx, cg, JSOP_POP) < 0) {
5053                 return JS_FALSE;
5054             }
5055         }
5056 
5057         /* Emit the catch body. */
5058         if (!js_EmitTree(cx, cg, pn->pn_kid3))
5059             return JS_FALSE;
5060 
5061         /*
5062          * Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
5063          * our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
5064          */
5065         off = cg->stackDepth;
5066         if (js_NewSrcNote2(cx, cg, SRC_CATCH, off) < 0)
5067             return JS_FALSE;
5068         break;
5069       }
5070 
5071       case TOK_VAR:
5072         if (!EmitVariables(cx, cg, pn, JS_FALSE, &noteIndex))
5073             return JS_FALSE;
5074         break;
5075 
5076       case TOK_RETURN:
5077         /* Push a return value */
5078         pn2 = pn->pn_kid;
5079         if (pn2) {
5080             if (!js_EmitTree(cx, cg, pn2))
5081                 return JS_FALSE;
5082         } else {
5083             if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5084                 return JS_FALSE;
5085         }
5086 
5087         /*
5088          * EmitNonLocalJumpFixup mutates op to JSOP_RETRVAL after emitting a
5089          * JSOP_SETRVAL if there are open try blocks having finally clauses.
5090          * We can't simply transfer control flow to our caller in that case,
5091          * because we must gosub to those clauses from inner to outer, with
5092          * the correct stack pointer (i.e., after popping any with, for/in,
5093          * etc., slots nested inside the finally's try).
5094          */
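        /*
         * For example, 'return e;' inside a try having a finally emits
         * roughly:
         *
         *   <e>
         *   setrval
         *   gosub <finally>
         *   retrval
         *
         * whereas with no enclosing finally it is simply <e>; return.
         */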
5095         op = JSOP_RETURN;
5096         if (!EmitNonLocalJumpFixup(cx, cg, NULL, &op))
5097             return JS_FALSE;
5098         if (js_Emit1(cx, cg, op) < 0)
5099             return JS_FALSE;
5100         break;
5101 
5102 #if JS_HAS_GENERATORS
5103       case TOK_YIELD:
5104         if (pn->pn_kid) {
5105             if (!js_EmitTree(cx, cg, pn->pn_kid))
5106                 return JS_FALSE;
5107         } else {
5108             if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5109                 return JS_FALSE;
5110         }
5111         if (js_Emit1(cx, cg, JSOP_YIELD) < 0)
5112             return JS_FALSE;
5113         break;
5114 #endif
5115 
5116       case TOK_LC:
5117 #if JS_HAS_XML_SUPPORT
5118         if (pn->pn_arity == PN_UNARY) {
5119             if (!js_EmitTree(cx, cg, pn->pn_kid))
5120                 return JS_FALSE;
5121             if (js_Emit1(cx, cg, pn->pn_op) < 0)
5122                 return JS_FALSE;
5123             break;
5124         }
5125 #endif
5126 
5127         JS_ASSERT(pn->pn_arity == PN_LIST);
5128 
5129         noteIndex = -1;
5130         tmp = CG_OFFSET(cg);
5131         if (pn->pn_extra & PNX_NEEDBRACES) {
5132             noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
5133             if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
5134                 return JS_FALSE;
5135         }
5136 
5137         js_PushStatement(&cg->treeContext, &stmtInfo, STMT_BLOCK, top);
5138         for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
5139             if (!js_EmitTree(cx, cg, pn2))
5140                 return JS_FALSE;
5141         }
5142 
5143         if (noteIndex >= 0 &&
5144             !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5145                                  CG_OFFSET(cg) - tmp)) {
5146             return JS_FALSE;
5147         }
5148 
5149         ok = js_PopStatementCG(cx, cg);
5150         break;
5151 
5152       case TOK_BODY:
5153         JS_ASSERT(pn->pn_arity == PN_LIST);
5154         js_PushStatement(&cg->treeContext, &stmtInfo, STMT_BODY, top);
5155         for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
5156             if (!js_EmitTree(cx, cg, pn2))
5157                 return JS_FALSE;
5158         }
5159         ok = js_PopStatementCG(cx, cg);
5160         break;
5161 
5162       case TOK_SEMI:
5163         pn2 = pn->pn_kid;
5164         if (pn2) {
5165             /*
5166              * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5167              * debugger, and eval frames may need the value of the ultimate
5168              * expression statement as the script's result, despite the fact
5169              * that it appears useless to the compiler.
5170              */
5171             useful = wantval = !cx->fp->fun ||
5172                                !FUN_INTERPRETED(cx->fp->fun) ||
5173                                (cx->fp->flags & JSFRAME_SPECIAL);
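            /*
             * For example, at the top level of a script '1 + 2;' is deemed
             * useful here and is emitted with JSOP_POPV, making 3 the
             * script's result; in an ordinary function body the same
             * statement would instead draw the useless-expression warning
             * below.
             */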
5174             if (!useful) {
5175                 if (!CheckSideEffects(cx, &cg->treeContext, pn2, &useful))
5176                     return JS_FALSE;
5177             }
5178 
5179             /*
5180              * Don't eliminate apparently useless expressions if they are
5181              * labeled expression statements.  The tc->topStmt->update test
5182              * catches the case where we are nesting in js_EmitTree for a
5183              * labeled compound statement.
5184              */
5185             if (!useful &&
5186                 (!cg->treeContext.topStmt ||
5187                  cg->treeContext.topStmt->type != STMT_LABEL ||
5188                  cg->treeContext.topStmt->update < CG_OFFSET(cg))) {
5189                 CG_CURRENT_LINE(cg) = pn2->pn_pos.begin.lineno;
5190                 if (!js_ReportCompileErrorNumber(cx, cg,
5191                                                  JSREPORT_CG |
5192                                                  JSREPORT_WARNING |
5193                                                  JSREPORT_STRICT,
5194                                                  JSMSG_USELESS_EXPR)) {
5195                     return JS_FALSE;
5196                 }
5197             } else {
5198                 op = wantval ? JSOP_POPV : JSOP_POP;
5199 #if JS_HAS_DESTRUCTURING
5200                 if (!wantval &&
5201                     pn2->pn_type == TOK_ASSIGN &&
5202                     !MaybeEmitGroupAssignment(cx, cg, op, pn2, &op)) {
5203                     return JS_FALSE;
5204                 }
5205 #endif
5206                 if (op != JSOP_NOP) {
5207                     if (!js_EmitTree(cx, cg, pn2))
5208                         return JS_FALSE;
5209                     if (js_Emit1(cx, cg, op) < 0)
5210                         return JS_FALSE;
5211                 }
5212             }
5213         }
5214         break;
5215 
5216       case TOK_COLON:
5217         /* Emit an annotated nop so we know to decompile a label. */
5218         atom = pn->pn_atom;
5219         ale = js_IndexAtom(cx, atom, &cg->atomList);
5220         if (!ale)
5221             return JS_FALSE;
5222         pn2 = pn->pn_expr;
5223         noteType = (pn2->pn_type == TOK_LC ||
5224                     (pn2->pn_type == TOK_LEXICALSCOPE &&
5225                      pn2->pn_expr->pn_type == TOK_LC))
5226                    ? SRC_LABELBRACE
5227                    : SRC_LABEL;
5228         noteIndex = js_NewSrcNote2(cx, cg, noteType,
5229                                    (ptrdiff_t) ALE_INDEX(ale));
5230         if (noteIndex < 0 ||
5231             js_Emit1(cx, cg, JSOP_NOP) < 0) {
5232             return JS_FALSE;
5233         }
5234 
5235         /* Emit code for the labeled statement. */
5236         js_PushStatement(&cg->treeContext, &stmtInfo, STMT_LABEL,
5237                          CG_OFFSET(cg));
5238         stmtInfo.atom = atom;
5239         if (!js_EmitTree(cx, cg, pn2))
5240             return JS_FALSE;
5241         if (!js_PopStatementCG(cx, cg))
5242             return JS_FALSE;
5243 
5244         /* If the statement was compound, emit a note for the end brace. */
5245         if (noteType == SRC_LABELBRACE) {
5246             if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
5247                 js_Emit1(cx, cg, JSOP_NOP) < 0) {
5248                 return JS_FALSE;
5249             }
5250         }
5251         break;
5252 
5253       case TOK_COMMA:
5254         /*
5255          * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
5256          * These notes help the decompiler bracket the bytecodes generated
5257          * from each sub-expression that follows a comma.
5258          */
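        /*
         * For example, 'a(), b(), c()' emits roughly:
         *
         *   <a()>
         *   pop                annotated SRC_PCDELTA
         *   <b()>
         *   pop                annotated SRC_PCDELTA
         *   <c()>
         */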
5259         off = noteIndex = -1;
5260         for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
5261             if (!js_EmitTree(cx, cg, pn2))
5262                 return JS_FALSE;
5263             tmp = CG_OFFSET(cg);
5264             if (noteIndex >= 0) {
5265                 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
5266                     return JS_FALSE;
5267             }
5268             if (!pn2->pn_next)
5269                 break;
5270             off = tmp;
5271             noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
5272             if (noteIndex < 0 ||
5273                 js_Emit1(cx, cg, JSOP_POP) < 0) {
5274                 return JS_FALSE;
5275             }
5276         }
5277         break;
5278 
5279       case TOK_ASSIGN:
5280         /*
5281          * Check left operand type and generate specialized code for it.
5282          * Specialize to avoid ECMA "reference type" values on the operand
5283          * stack, which impose pervasive runtime "GetValue" costs.
5284          */
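        /*
         * For example, assuming the parser left JSOP_SETPROP in pn2->pn_op,
         * 'a.b += c' emits roughly:
         *
         *   <a>
         *   dup
         *   getprop "b"
         *   <c>
         *   add                annotated SRC_ASSIGNOP
         *   setprop "b"
         */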
5285         pn2 = pn->pn_left;
5286         JS_ASSERT(pn2->pn_type != TOK_RP);
5287         atomIndex = (jsatomid) -1;
5288         switch (pn2->pn_type) {
5289           case TOK_NAME:
5290             if (!BindNameToSlot(cx, &cg->treeContext, pn2, JS_FALSE))
5291                 return JS_FALSE;
5292             if (pn2->pn_slot >= 0) {
5293                 atomIndex = (jsatomid) pn2->pn_slot;
5294             } else {
5295                 ale = js_IndexAtom(cx, pn2->pn_atom, &cg->atomList);
5296                 if (!ale)
5297                     return JS_FALSE;
5298                 atomIndex = ALE_INDEX(ale);
5299                 EMIT_ATOM_INDEX_OP(JSOP_BINDNAME, atomIndex);
5300             }
5301             break;
5302           case TOK_DOT:
5303             if (!js_EmitTree(cx, cg, pn2->pn_expr))
5304                 return JS_FALSE;
5305             ale = js_IndexAtom(cx, pn2->pn_atom, &cg->atomList);
5306             if (!ale)
5307                 return JS_FALSE;
5308             atomIndex = ALE_INDEX(ale);
5309             break;
5310           case TOK_LB:
5311             JS_ASSERT(pn2->pn_arity == PN_BINARY);
5312             if (!js_EmitTree(cx, cg, pn2->pn_left))
5313                 return JS_FALSE;
5314             if (!js_EmitTree(cx, cg, pn2->pn_right))
5315                 return JS_FALSE;
5316             break;
5317 #if JS_HAS_DESTRUCTURING
5318           case TOK_RB:
5319           case TOK_RC:
5320             break;
5321 #endif
5322 #if JS_HAS_LVALUE_RETURN
5323           case TOK_LP:
5324             if (!js_EmitTree(cx, cg, pn2))
5325                 return JS_FALSE;
5326             break;
5327 #endif
5328 #if JS_HAS_XML_SUPPORT
5329           case TOK_UNARYOP:
5330             JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
5331             if (!js_EmitTree(cx, cg, pn2->pn_kid))
5332                 return JS_FALSE;
5333             if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
5334                 return JS_FALSE;
5335             break;
5336 #endif
5337           default:
5338             JS_ASSERT(0);
5339         }
5340 
5341         op = pn->pn_op;
5342 #if JS_HAS_GETTER_SETTER
5343         if (op == JSOP_GETTER || op == JSOP_SETTER) {
5344             /* We'll emit these prefix bytecodes after emitting the r.h.s. */
5345             if (atomIndex != (jsatomid) -1 && atomIndex >= JS_BIT(16)) {
5346                 ReportStatementTooLarge(cx, cg);
5347                 return JS_FALSE;
5348             }
5349         } else
5350 #endif
5351         /* If += or similar, dup the left operand and get its value. */
5352         if (op != JSOP_NOP) {
5353             switch (pn2->pn_type) {
5354               case TOK_NAME:
5355                 if (pn2->pn_op != JSOP_SETNAME) {
5356                     EMIT_UINT16_IMM_OP((pn2->pn_op == JSOP_SETGVAR)
5357                                        ? JSOP_GETGVAR
5358                                        : (pn2->pn_op == JSOP_SETARG)
5359                                        ? JSOP_GETARG
5360                                        : (pn2->pn_op == JSOP_SETLOCAL)
5361                                        ? JSOP_GETLOCAL
5362                                        : JSOP_GETVAR,
5363                                        atomIndex);
5364                     break;
5365                 }
5366                 /* FALL THROUGH */
5367               case TOK_DOT:
5368                 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
5369                     return JS_FALSE;
5370                 EMIT_ATOM_INDEX_OP((pn2->pn_type == TOK_NAME)
5371                                    ? JSOP_GETXPROP
5372                                    : JSOP_GETPROP,
5373                                    atomIndex);
5374                 break;
5375               case TOK_LB:
5376 #if JS_HAS_LVALUE_RETURN
5377               case TOK_LP:
5378 #endif
5379 #if JS_HAS_XML_SUPPORT
5380               case TOK_UNARYOP:
5381 #endif
5382                 if (js_Emit1(cx, cg, JSOP_DUP2) < 0)
5383                     return JS_FALSE;
5384                 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
5385                     return JS_FALSE;
5386                 break;
5387               default:;
5388             }
5389         }
5390 
5391         /* Now emit the right operand (it may affect the namespace). */
5392         if (!js_EmitTree(cx, cg, pn->pn_right))
5393             return JS_FALSE;
5394 
5395         /* If += etc., emit the binary operator with a decompiler note. */
5396         if (op != JSOP_NOP) {
5397             /*
5398              * Take care to avoid SRC_ASSIGNOP if the left-hand side is a
5399              * const declared in a function (i.e., with non-negative pn_slot
5400              * and JSPROP_READONLY in pn_attrs), as in this case (just a bit
5401              * further below) we will avoid emitting the assignment op.
5402              */
5403             if (pn2->pn_type != TOK_NAME ||
5404                 pn2->pn_slot < 0 ||
5405                 !(pn2->pn_attrs & JSPROP_READONLY)) {
5406                 if (js_NewSrcNote(cx, cg, SRC_ASSIGNOP) < 0)
5407                     return JS_FALSE;
5408             }
5409             if (js_Emit1(cx, cg, op) < 0)
5410                 return JS_FALSE;
5411         }
5412 
5413         /* Left parts such as a.b.c and a[b].c need a decompiler note. */
5414         if (pn2->pn_type != TOK_NAME &&
5415 #if JS_HAS_DESTRUCTURING
5416             pn2->pn_type != TOK_RB &&
5417             pn2->pn_type != TOK_RC &&
5418 #endif
5419             js_NewSrcNote2(cx, cg, SrcNoteForPropOp(pn2, pn2->pn_op),
5420                            CG_OFFSET(cg) - top) < 0) {
5421             return JS_FALSE;
5422         }
5423 
5424         /* Finally, emit the specialized assignment bytecode. */
5425         switch (pn2->pn_type) {
5426           case TOK_NAME:
5427             if (pn2->pn_slot < 0 || !(pn2->pn_attrs & JSPROP_READONLY)) {
5428                 if (pn2->pn_slot >= 0) {
5429                     EMIT_UINT16_IMM_OP(pn2->pn_op, atomIndex);
5430                 } else {
5431           case TOK_DOT:
5432                     EMIT_ATOM_INDEX_OP(pn2->pn_op, atomIndex);
5433                 }
5434             }
5435             break;
5436           case TOK_LB:
5437 #if JS_HAS_LVALUE_RETURN
5438           case TOK_LP:
5439 #endif
5440             if (js_Emit1(cx, cg, JSOP_SETELEM) < 0)
5441                 return JS_FALSE;
5442             break;
5443 #if JS_HAS_DESTRUCTURING
5444           case TOK_RB:
5445           case TOK_RC:
5446             if (!EmitDestructuringOps(cx, cg, JSOP_SETNAME, pn2))
5447                 return JS_FALSE;
5448             break;
5449 #endif
5450 #if JS_HAS_XML_SUPPORT
5451           case TOK_UNARYOP:
5452             if (js_Emit1(cx, cg, JSOP_SETXMLNAME) < 0)
5453                 return JS_FALSE;
5454             break;
5455 #endif
5456           default:
5457             JS_ASSERT(0);
5458         }
5459         break;
5460 
5461       case TOK_HOOK:
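        /*
         * Roughly, '<cond> ? <then> : <else>' emits (short jumps assumed):
         *
         *   <cond>
         *   ifeq L1            annotated SRC_COND
         *   <then>
         *   goto L2
         * L1:
         *   <else>
         * L2:
         */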
5462         /* Emit the condition, then branch if false to the else part. */
5463         if (!js_EmitTree(cx, cg, pn->pn_kid1))
5464             return JS_FALSE;
5465         noteIndex = js_NewSrcNote(cx, cg, SRC_COND);
5466         if (noteIndex < 0)
5467             return JS_FALSE;
5468         beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
5469         if (beq < 0 || !js_EmitTree(cx, cg, pn->pn_kid2))
5470             return JS_FALSE;
5471 
5472         /* Jump around else, fixup the branch, emit else, fixup jump. */
5473         jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
5474         if (jmp < 0)
5475             return JS_FALSE;
5476         CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
5477 
5478         /*
5479          * Because each branch pushes a single value, but our stack budgeting
5480          * analysis ignores branches, we now have to adjust cg->stackDepth to
5481          * ignore the value pushed by the first branch.  Execution will follow
5482          * only one path, so we must decrement cg->stackDepth.
5483          *
5484          * Failing to do this will foil code, such as the try/catch/finally
5485          * exception handling code generator, that samples cg->stackDepth for
5486          * use at runtime (JSOP_SETSP), or in let expression and block code
5487          * generation, which must use the stack depth to compute local stack
5488          * indexes correctly.
5489          */
5490         JS_ASSERT(cg->stackDepth > 0);
5491         cg->stackDepth--;
5492         if (!js_EmitTree(cx, cg, pn->pn_kid3))
5493             return JS_FALSE;
5494         CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
5495         if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
5496             return JS_FALSE;
5497         break;
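        /*
         * Illustrative sketch for the TOK_HOOK case above (not from the
         * original sources): `a ? b : c` emits, roughly,
         *
         *     <a>  JSOP_IFEQ L1  <b>  JSOP_GOTO L2  L1: <c>  L2:
         *
         * The SRC_COND note annotates the IFEQ and records jmp - beq, the
         * distance from the branch to the jump over the else part, while
         * cg->stackDepth is decremented once so the value pushed by <b> is
         * not counted again when budgeting the push done by <c>.
         */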
5498 
5499       case TOK_OR:
5500       case TOK_AND:
5501         /*
5502          * JSOP_OR converts the operand on the stack to boolean, and if true,
5503          * leaves the original operand value on the stack and jumps; otherwise
5504          * it pops and falls into the next bytecode, which evaluates the right
5505          * operand.  The jump goes around the right operand evaluation.
5506          *
5507          * JSOP_AND converts the operand on the stack to boolean, and if false,
5508          * leaves the original operand value on the stack and jumps; otherwise
5509          * it pops and falls into the right operand's bytecode.
5510          *
5511          * Avoid tail recursion for long ||...|| expressions and long &&...&&
5512          * expressions or long mixtures of ||'s and &&'s that can easily blow
5513          * the stack, by forward-linking and then backpatching all the JSOP_OR
5514          * and JSOP_AND bytecodes' immediate jump-offset operands.
5515          */
5516         pn3 = pn;
5517         if (!js_EmitTree(cx, cg, pn->pn_left))
5518             return JS_FALSE;
5519         top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5520         if (top < 0)
5521             return JS_FALSE;
5522         jmp = top;
5523         pn2 = pn->pn_right;
5524         while (pn2->pn_type == TOK_OR || pn2->pn_type == TOK_AND) {
5525             pn = pn2;
5526             if (!js_EmitTree(cx, cg, pn->pn_left))
5527                 return JS_FALSE;
5528             off = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5529             if (off < 0)
5530                 return JS_FALSE;
5531             if (!SetBackPatchDelta(cx, cg, CG_CODE(cg, jmp), off - jmp))
5532                 return JS_FALSE;
5533             jmp = off;
5534             pn2 = pn->pn_right;
5535         }
5536         if (!js_EmitTree(cx, cg, pn2))
5537             return JS_FALSE;
5538         off = CG_OFFSET(cg);
5539         do {
5540             pc = CG_CODE(cg, top);
5541             tmp = GetJumpOffset(cg, pc);
5542             CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
5543             *pc = pn3->pn_op;
5544             top += tmp;
5545         } while ((pn3 = pn3->pn_right) != pn2);
5546         break;
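        /*
         * Illustrative sketch for the TOK_OR/TOK_AND case above: a chain
         * such as `a || b || c` is first emitted as
         *
         *     <a>  JSOP_BACKPATCH_POP  <b>  JSOP_BACKPATCH_POP  <c>
         *
         * with each placeholder's immediate forward-linking to the next one.
         * After <c> is emitted, the do/while loop walks that chain, rewrites
         * every immediate to jump just past <c>, and overwrites the opcode
         * with JSOP_OR (or JSOP_AND), so arbitrarily long chains cost no
         * extra recursion depth here.
         */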
5547 
5548       case TOK_BITOR:
5549       case TOK_BITXOR:
5550       case TOK_BITAND:
5551       case TOK_EQOP:
5552       case TOK_RELOP:
5553       case TOK_IN:
5554       case TOK_INSTANCEOF:
5555       case TOK_SHOP:
5556       case TOK_PLUS:
5557       case TOK_MINUS:
5558       case TOK_STAR:
5559       case TOK_DIVOP:
5560         if (pn->pn_arity == PN_LIST) {
5561             /* Left-associative operator chain: avoid too much recursion. */
5562             pn2 = pn->pn_head;
5563             if (!js_EmitTree(cx, cg, pn2))
5564                 return JS_FALSE;
5565             op = pn->pn_op;
5566             while ((pn2 = pn2->pn_next) != NULL) {
5567                 if (!js_EmitTree(cx, cg, pn2))
5568                     return JS_FALSE;
5569                 if (js_Emit1(cx, cg, op) < 0)
5570                     return JS_FALSE;
5571             }
5572         } else {
5573 #if JS_HAS_XML_SUPPORT
5574             uintN oldflags;
5575 
5576       case TOK_DBLCOLON:
5577             if (pn->pn_arity == PN_NAME) {
5578                 if (!js_EmitTree(cx, cg, pn->pn_expr))
5579                     return JS_FALSE;
5580                 if (!EmitAtomOp(cx, pn, pn->pn_op, cg))
5581                     return JS_FALSE;
5582                 break;
5583             }
5584 
5585             /*
5586              * Binary :: has a right operand that brackets arbitrary code,
5587              * possibly including a let (a = b) ... expression.  We must clear
5588              * TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
5589              */
5590             oldflags = cg->treeContext.flags;
5591             cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
5592 #endif
5593 
5594             /* Binary operators that evaluate both operands unconditionally. */
5595             if (!js_EmitTree(cx, cg, pn->pn_left))
5596                 return JS_FALSE;
5597             if (!js_EmitTree(cx, cg, pn->pn_right))
5598                 return JS_FALSE;
5599 #if JS_HAS_XML_SUPPORT
5600             cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
5601 #endif
5602             if (js_Emit1(cx, cg, pn->pn_op) < 0)
5603                 return JS_FALSE;
5604         }
5605         break;
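        /*
         * Illustrative sketch for the PN_LIST path above: a left-associative
         * chain such as `a + b + c` arrives as a single list node and is
         * emitted iteratively as
         *
         *     <a>  <b>  JSOP_ADD  <c>  JSOP_ADD
         *
         * instead of by recursing down a left-leaning tree of binary nodes.
         */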
5606 
5607       case TOK_THROW:
5608 #if JS_HAS_XML_SUPPORT
5609       case TOK_AT:
5610       case TOK_DEFAULT:
5611         JS_ASSERT(pn->pn_arity == PN_UNARY);
5612         /* FALL THROUGH */
5613 #endif
5614       case TOK_UNARYOP:
5615       {
5616         uintN oldflags;
5617 
5618         /* Unary op, including unary +/-. */
5619         pn2 = pn->pn_kid;
5620         op = pn->pn_op;
5621         if (op == JSOP_TYPEOF) {
5622             for (pn3 = pn2; pn3->pn_type == TOK_RP; pn3 = pn3->pn_kid)
5623                 continue;
5624             if (pn3->pn_type != TOK_NAME)
5625                 op = JSOP_TYPEOFEXPR;
5626         }
5627         oldflags = cg->treeContext.flags;
5628         cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
5629         if (!js_EmitTree(cx, cg, pn2))
5630             return JS_FALSE;
5631         cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
5632 #if JS_HAS_XML_SUPPORT
5633         if (op == JSOP_XMLNAME &&
5634             js_NewSrcNote2(cx, cg, SRC_PCBASE,
5635                            CG_OFFSET(cg) - pn2->pn_offset) < 0) {
5636             return JS_FALSE;
5637         }
5638 #endif
5639         if (js_Emit1(cx, cg, op) < 0)
5640             return JS_FALSE;
5641         break;
5642       }
5643 
5644       case TOK_INC:
5645       case TOK_DEC:
5646       {
5647         intN depth;
5648 
5649         /* Emit lvalue-specialized code for ++/-- operators. */
5650         pn2 = pn->pn_kid;
5651         JS_ASSERT(pn2->pn_type != TOK_RP);
5652         op = pn->pn_op;
5653         depth = cg->stackDepth;
5654         switch (pn2->pn_type) {
5655           case TOK_NAME:
5656             pn2->pn_op = op;
5657             if (!BindNameToSlot(cx, &cg->treeContext, pn2, JS_FALSE))
5658                 return JS_FALSE;
5659             op = pn2->pn_op;
5660             if (pn2->pn_slot >= 0) {
5661                 if (pn2->pn_attrs & JSPROP_READONLY) {
5662                     /* Incrementing a declared const: just get its value. */
5663                     op = ((js_CodeSpec[op].format & JOF_TYPEMASK) == JOF_CONST)
5664                          ? JSOP_GETGVAR
5665                          : JSOP_GETVAR;
5666                 }
5667                 atomIndex = (jsatomid) pn2->pn_slot;
5668                 EMIT_UINT16_IMM_OP(op, atomIndex);
5669             } else {
5670                 if (!EmitAtomOp(cx, pn2, op, cg))
5671                     return JS_FALSE;
5672             }
5673             break;
5674           case TOK_DOT:
5675             if (!EmitPropOp(cx, pn2, op, cg))
5676                 return JS_FALSE;
5677             ++depth;
5678             break;
5679           case TOK_LB:
5680             if (!EmitElemOp(cx, pn2, op, cg))
5681                 return JS_FALSE;
5682             depth += 2;
5683             break;
5684 #if JS_HAS_LVALUE_RETURN
5685           case TOK_LP:
5686             if (!js_EmitTree(cx, cg, pn2))
5687                 return JS_FALSE;
5688             depth = cg->stackDepth;
5689             if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
5690                                CG_OFFSET(cg) - pn2->pn_offset) < 0) {
5691                 return JS_FALSE;
5692             }
5693             if (js_Emit1(cx, cg, op) < 0)
5694                 return JS_FALSE;
5695             break;
5696 #endif
5697 #if JS_HAS_XML_SUPPORT
5698           case TOK_UNARYOP:
5699             JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
5700             if (!js_EmitTree(cx, cg, pn2->pn_kid))
5701                 return JS_FALSE;
5702             if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
5703                 return JS_FALSE;
5704             depth = cg->stackDepth;
5705             if (js_Emit1(cx, cg, op) < 0)
5706                 return JS_FALSE;
5707             break;
5708 #endif
5709           default:
5710             JS_ASSERT(0);
5711         }
5712 
5713         /*
5714          * Allocate another stack slot for GC protection in case the initial
5715          * value being post-incremented or -decremented is not a number, but
5716          * converts to a jsdouble.  In the TOK_NAME cases, op has 0 operand
5717          * uses and 1 definition, so we don't need an extra stack slot -- we
5718          * can use the one allocated for the def.
5719          */
5720         if (pn2->pn_type != TOK_NAME &&
5721             (js_CodeSpec[op].format & JOF_POST) &&
5722             (uintN)depth == cg->maxStackDepth) {
5723             ++cg->maxStackDepth;
5724         }
5725         break;
5726       }
5727 
5728       case TOK_DELETE:
5729         /*
5730          * Under ECMA 3, deleting a non-reference returns true -- but alas we
5731          * must evaluate the operand whenever it appears to have side effects.
5732          */
5733         pn2 = pn->pn_kid;
5734         switch (pn2->pn_type) {
5735           case TOK_NAME:
5736             pn2->pn_op = JSOP_DELNAME;
5737             if (!BindNameToSlot(cx, &cg->treeContext, pn2, JS_FALSE))
5738                 return JS_FALSE;
5739             op = pn2->pn_op;
5740             if (op == JSOP_FALSE) {
5741                 if (js_Emit1(cx, cg, op) < 0)
5742                     return JS_FALSE;
5743             } else {
5744                 if (!EmitAtomOp(cx, pn2, op, cg))
5745                     return JS_FALSE;
5746             }
5747             break;
5748           case TOK_DOT:
5749             if (!EmitPropOp(cx, pn2, JSOP_DELPROP, cg))
5750                 return JS_FALSE;
5751             break;
5752 #if JS_HAS_XML_SUPPORT
5753           case TOK_DBLDOT:
5754             if (!EmitElemOp(cx, pn2, JSOP_DELDESC, cg))
5755                 return JS_FALSE;
5756             break;
5757 #endif
5758 #if JS_HAS_LVALUE_RETURN
5759           case TOK_LP:
5760             if (pn2->pn_op != JSOP_SETCALL) {
5761                 JS_ASSERT(pn2->pn_op == JSOP_CALL || pn2->pn_op == JSOP_EVAL);
5762                 pn2->pn_op = JSOP_SETCALL;
5763             }
5764             top = CG_OFFSET(cg);
5765             if (!js_EmitTree(cx, cg, pn2))
5766                 return JS_FALSE;
5767             if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
5768                 return JS_FALSE;
5769             if (js_Emit1(cx, cg, JSOP_DELELEM) < 0)
5770                 return JS_FALSE;
5771             break;
5772 #endif
5773           case TOK_LB:
5774             if (!EmitElemOp(cx, pn2, JSOP_DELELEM, cg))
5775                 return JS_FALSE;
5776             break;
5777           default:
5778             /*
5779              * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
5780              * to foo(), true (a comma expression, requiring SRC_PCDELTA).
5781              */
5782             useful = JS_FALSE;
5783             if (!CheckSideEffects(cx, &cg->treeContext, pn2, &useful))
5784                 return JS_FALSE;
5785             if (!useful) {
5786                 off = noteIndex = -1;
5787             } else {
5788                 if (!js_EmitTree(cx, cg, pn2))
5789                     return JS_FALSE;
5790                 off = CG_OFFSET(cg);
5791                 noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
5792                 if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
5793                     return JS_FALSE;
5794             }
5795             if (js_Emit1(cx, cg, JSOP_TRUE) < 0)
5796                 return JS_FALSE;
5797             if (noteIndex >= 0) {
5798                 tmp = CG_OFFSET(cg);
5799                 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
5800                     return JS_FALSE;
5801             }
5802         }
5803         break;
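        /*
         * Illustrative sketch for the default sub-case above: for a
         * non-reference operand that may have side effects, such as
         * `delete foo()` when the call is not treated as an lvalue, the
         * operand is emitted, a SRC_PCDELTA note plus JSOP_POP discard its
         * value, and JSOP_TRUE supplies the result, giving the comma
         * expression `foo(), true` the comment describes.  A useless
         * operand compiles to just JSOP_TRUE.
         */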
5804 
5805 #if JS_HAS_XML_SUPPORT
5806       case TOK_FILTER:
5807         if (!js_EmitTree(cx, cg, pn->pn_left))
5808             return JS_FALSE;
5809         jmp = js_Emit3(cx, cg, JSOP_FILTER, 0, 0);
5810         if (jmp < 0)
5811             return JS_FALSE;
5812         if (!js_EmitTree(cx, cg, pn->pn_right))
5813             return JS_FALSE;
5814         if (js_Emit1(cx, cg, JSOP_ENDFILTER) < 0)
5815             return JS_FALSE;
5816         CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
5817         break;
5818 #endif
5819 
5820       case TOK_DOT:
5821         /*
5822          * Pop a stack operand, convert it to object, get a property named by
5823          * this bytecode's immediate-indexed atom operand, and push its value
5824          * (not a reference to it).  This bytecode sets the virtual machine's
5825          * "obj" register to the left operand's ToObject conversion result,
5826          * for use by JSOP_PUSHOBJ.
5827          */
5828         ok = EmitPropOp(cx, pn, pn->pn_op, cg);
5829         break;
5830 
5831       case TOK_LB:
5832 #if JS_HAS_XML_SUPPORT
5833       case TOK_DBLDOT:
5834 #endif
5835         /*
5836          * Pop two operands, convert the left one to object and the right one
5837          * to property name (atom or tagged int), get the named property, and
5838          * push its value.  Set the "obj" register to the result of ToObject
5839          * on the left operand.
5840          */
5841         ok = EmitElemOp(cx, pn, pn->pn_op, cg);
5842         break;
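        /*
         * Illustrative sketch for the two cases above: `a.b` emits roughly
         * JSOP_NAME a; JSOP_GETPROP b (atom-indexed), while `a[b]` emits
         * JSOP_NAME a; <b>; JSOP_GETELEM.  Either way the get leaves its
         * base object in the "obj" register for a later JSOP_PUSHOBJ.
         */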
5843 
5844       case TOK_NEW:
5845       case TOK_LP:
5846       {
5847         uintN oldflags;
5848 
5849         /*
5850          * Emit function call or operator new (constructor call) code.
5851          * First, emit code for the left operand to evaluate the callable or
5852          * constructable object expression.
5853          *
5854          * For E4X, if this expression is a dotted member reference, select
5855          * JSOP_GETMETHOD instead of JSOP_GETPROP.  ECMA-357 separates XML
5856          * method lookup from the normal property id lookup done for native
5857          * objects.
5858          */
5859         pn2 = pn->pn_head;
5860 #if JS_HAS_XML_SUPPORT
5861         if (pn2->pn_type == TOK_DOT && pn2->pn_op != JSOP_GETMETHOD) {
5862             JS_ASSERT(pn2->pn_op == JSOP_GETPROP);
5863             pn2->pn_op = JSOP_GETMETHOD;
5864             pn2->pn_attrs |= JSPROP_IMPLICIT_FUNCTION_NAMESPACE;
5865         }
5866 #endif
5867         if (!js_EmitTree(cx, cg, pn2))
5868             return JS_FALSE;
5869 
5870         /*
5871          * Push the virtual machine's "obj" register, which was set by a
5872          * name, property, or element get (or set) bytecode.
5873          */
5874         if (js_Emit1(cx, cg, JSOP_PUSHOBJ) < 0)
5875             return JS_FALSE;
5876 
5877         /* Remember start of callable-object bytecode for decompilation hint. */
5878         off = top;
5879 
5880         /*
5881          * Emit code for each argument in order, then emit the JSOP_*CALL or
5882          * JSOP_NEW bytecode with a two-byte immediate telling how many args
5883          * were pushed on the operand stack.
5884          */
5885         oldflags = cg->treeContext.flags;
5886         cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
5887         for (pn2 = pn2->pn_next; pn2; pn2 = pn2->pn_next) {
5888             if (!js_EmitTree(cx, cg, pn2))
5889                 return JS_FALSE;
5890         }
5891         cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
5892         if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - off) < 0)
5893             return JS_FALSE;
5894 
5895         argc = pn->pn_count - 1;
5896         if (js_Emit3(cx, cg, pn->pn_op, ARGC_HI(argc), ARGC_LO(argc)) < 0)
5897             return JS_FALSE;
5898         break;
5899       }
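        /*
         * Illustrative sketch for the call/new case above: `f(x, y)` emits
         * roughly
         *
         *     JSOP_NAME f; JSOP_PUSHOBJ; <x>; <y>; JSOP_CALL 2
         *
         * where the two-byte immediate is the argument count, and the
         * SRC_PCBASE note on the call records the distance back to the
         * start of the callee bytecode for the decompiler.  Operator new
         * emits JSOP_NEW with the same shape.
         */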
5900 
5901       case TOK_LEXICALSCOPE:
5902       {
5903         JSObject *obj;
5904         jsint count;
5905 
5906         atom = pn->pn_atom;
5907         obj = ATOM_TO_OBJECT(atom);
5908         js_PushBlockScope(&cg->treeContext, &stmtInfo, atom, CG_OFFSET(cg));
5909 
5910         OBJ_SET_BLOCK_DEPTH(cx, obj, cg->stackDepth);
5911         count = OBJ_BLOCK_COUNT(cx, obj);
5912         cg->stackDepth += count;
5913         if ((uintN)cg->stackDepth > cg->maxStackDepth)
5914             cg->maxStackDepth = cg->stackDepth;
5915 
5916         /*
5917          * If this lexical scope is not for a catch block, let block or let
5918          * expression, or any kind of for loop (where the scope starts in the
5919          * head after the first part for for(;;) loops, or in the body for for-in);
5920          * and if our container is top-level but not a function body, or else
5921          * a block statement; then emit a SRC_BRACE note.  All other container
5922          * statements get braces by default from the decompiler.
5923          */
5924         noteIndex = -1;
5925         type = pn->pn_expr->pn_type;
5926         if (type != TOK_CATCH && type != TOK_LET && type != TOK_FOR &&
5927             (!(stmt = stmtInfo.down)
5928              ? !(cg->treeContext.flags & TCF_IN_FUNCTION)
5929              : stmt->type == STMT_BLOCK)) {
5930 #if defined DEBUG_brendan || defined DEBUG_mrbkap
5931             /* There must be no source note already output for the next op. */
5932             JS_ASSERT(CG_NOTE_COUNT(cg) == 0 ||
5933                       CG_LAST_NOTE_OFFSET(cg) != CG_OFFSET(cg) ||
5934                       !GettableNoteForNextOp(cg));
5935 #endif
5936             noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
5937             if (noteIndex < 0)
5938                 return JS_FALSE;
5939         }
5940 
5941         ale = js_IndexAtom(cx, atom, &cg->atomList);
5942         if (!ale)
5943             return JS_FALSE;
5944         JS_ASSERT(CG_OFFSET(cg) == top);
5945         EMIT_ATOM_INDEX_OP(JSOP_ENTERBLOCK, ALE_INDEX(ale));
5946 
5947         if (!js_EmitTree(cx, cg, pn->pn_expr))
5948             return JS_FALSE;
5949 
5950         op = pn->pn_op;
5951         if (op == JSOP_LEAVEBLOCKEXPR) {
5952             if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
5953                 return JS_FALSE;
5954         } else {
5955             if (noteIndex >= 0 &&
5956                 !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5957                                      CG_OFFSET(cg) - top)) {
5958                 return JS_FALSE;
5959             }
5960         }
5961 
5962         /* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
5963         EMIT_UINT16_IMM_OP(op, count);
5964         cg->stackDepth -= count;
5965 
5966         ok = js_PopStatementCG(cx, cg);
5967         break;
5968       }
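        /*
         * Illustrative sketch for the lexical-scope case above: a block such
         * as `let (x = 1, y = 2) { ... }` emits roughly
         *
         *     JSOP_ENTERBLOCK <blockObjAtom>  <body>  JSOP_LEAVEBLOCK 2
         *
         * (JSOP_LEAVEBLOCKEXPR for a let expression).  The block object's
         * depth is the stack depth at entry, its slot count (2 here) is
         * added to cg->stackDepth while the body is emitted, and the same
         * count in the LEAVEBLOCK immediate releases those slots again.
         */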
5969 
5970 #if JS_HAS_BLOCK_SCOPE
5971       case TOK_LET:
5972         /* Let statements have their variable declarations on the left. */
5973         if (pn->pn_arity == PN_BINARY) {
5974             pn2 = pn->pn_right;
5975             pn = pn->pn_left;
5976         } else {
5977             pn2 = NULL;
5978         }
5979 
5980         /* Non-null pn2 means that pn is the variable list from a let head. */
5981         JS_ASSERT(pn->pn_arity == PN_LIST);
5982         if (!EmitVariables(cx, cg, pn, pn2 != NULL, &noteIndex))
5983             return JS_FALSE;
5984 
5985         /* Thus non-null pn2 is the body of the let block or expression. */
5986         tmp = CG_OFFSET(cg);
5987         if (pn2 && !js_EmitTree(cx, cg, pn2))
5988             return JS_FALSE;
5989 
5990         if (noteIndex >= 0 &&
5991             !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5992                                  CG_OFFSET(cg) - tmp)) {
5993             return JS_FALSE;
5994         }
5995         break;
5996 #endif /* JS_HAS_BLOCK_SCOPE */
5997 
5998 #if JS_HAS_GENERATORS
5999       case TOK_ARRAYPUSH:
6000         /*
6001          * The array object's stack index is in cg->arrayCompSlot.  See below
6002          * under the array initialiser code generator for array comprehension
6003          * special casing.
6004          */
6005         if (!js_EmitTree(cx, cg, pn->pn_kid))
6006             return JS_FALSE;
6007         EMIT_UINT16_IMM_OP(pn->pn_op, cg->arrayCompSlot);
6008         break;
6009 #endif
6010 
6011       case TOK_RB:
6012 #if JS_HAS_GENERATORS
6013       case TOK_ARRAYCOMP:
6014 #endif
6015         /*
6016          * Emit code for [a, b, c] of the form:
6017          *   t = new Array; t[0] = a; t[1] = b; t[2] = c; t;
6018          * but use a stack slot for t and avoid dup'ing and popping it via
6019          * the JSOP_NEWINIT and JSOP_INITELEM bytecodes.
6020          */
6021         ale = js_IndexAtom(cx, CLASS_ATOM(cx, Array), &cg->atomList);
6022         if (!ale)
6023             return JS_FALSE;
6024         EMIT_ATOM_INDEX_OP(JSOP_NAME, ALE_INDEX(ale));
6025         if (js_Emit1(cx, cg, JSOP_PUSHOBJ) < 0)
6026             return JS_FALSE;
6027         if (js_Emit1(cx, cg, JSOP_NEWINIT) < 0)
6028             return JS_FALSE;
6029 
6030         pn2 = pn->pn_head;
6031 #if JS_HAS_SHARP_VARS
6032         if (pn2 && pn2->pn_type == TOK_DEFSHARP) {
6033             EMIT_UINT16_IMM_OP(JSOP_DEFSHARP, (jsatomid)pn2->pn_num);
6034             pn2 = pn2->pn_next;
6035         }
6036 #endif
6037 
6038 #if JS_HAS_GENERATORS
6039         if (pn->pn_type == TOK_ARRAYCOMP) {
6040             uintN saveSlot;
6041 
6042             /*
6043              * Pass the new array's stack index to the TOK_ARRAYPUSH case by
6044              * storing it in pn->pn_extra, then simply traverse the TOK_FOR
6045              * node and its kids under pn2 to generate this comprehension.
6046              */
6047             JS_ASSERT(cg->stackDepth > 0);
6048             saveSlot = cg->arrayCompSlot;
6049             cg->arrayCompSlot = (uint32) (cg->stackDepth - 1);
6050             if (!js_EmitTree(cx, cg, pn2))
6051                 return JS_FALSE;
6052             cg->arrayCompSlot = saveSlot;
6053 
6054             /* Emit the usual op needed for decompilation. */
6055             if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
6056                 return JS_FALSE;
6057             break;
6058         }
6059 #endif /* JS_HAS_GENERATORS */
6060 
6061         for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
6062             if (!EmitNumberOp(cx, atomIndex, cg))
6063                 return JS_FALSE;
6064 
6065             /* FIXME 260106: holes in a sparse initializer are void-filled. */
6066             if (pn2->pn_type == TOK_COMMA) {
6067                 if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
6068                     return JS_FALSE;
6069             } else {
6070                 if (!js_EmitTree(cx, cg, pn2))
6071                     return JS_FALSE;
6072             }
6073 
6074             if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6075                 return JS_FALSE;
6076         }
6077 
6078         if (pn->pn_extra & PNX_ENDCOMMA) {
6079             /* Emit a source note so we know to decompile an extra comma. */
6080             if (js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
6081                 return JS_FALSE;
6082         }
6083 
6084         /* Emit an op for sharp array cleanup and decompilation. */
6085         if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
6086             return JS_FALSE;
6087         break;
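        /*
         * Illustrative sketch for the array-initialiser case above: `[a, b]`
         * emits roughly
         *
         *     JSOP_NAME Array; JSOP_PUSHOBJ; JSOP_NEWINIT;
         *     <0>; <a>; JSOP_INITELEM;
         *     <1>; <b>; JSOP_INITELEM;
         *     JSOP_ENDINIT
         *
         * with each index pushed by EmitNumberOp and elided holes filled by
         * JSOP_PUSH.  An array comprehension instead records the new array's
         * stack slot in cg->arrayCompSlot and emits its TOK_FOR tree, whose
         * TOK_ARRAYPUSH leaves use that slot.
         */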
6088 
6089       case TOK_RC:
6090         /*
6091          * Emit code for {p:a, '%q':b, 2:c} of the form:
6092          *   t = new Object; t.p = a; t['%q'] = b; t[2] = c; t;
6093          * but use a stack slot for t and avoid dup'ing and popping it via
6094          * the JSOP_NEWINIT and JSOP_INITELEM bytecodes.
6095          */
6096         ale = js_IndexAtom(cx, CLASS_ATOM(cx, Object), &cg->atomList);
6097         if (!ale)
6098             return JS_FALSE;
6099         EMIT_ATOM_INDEX_OP(JSOP_NAME, ALE_INDEX(ale));
6100 
6101         if (js_Emit1(cx, cg, JSOP_PUSHOBJ) < 0)
6102             return JS_FALSE;
6103         if (js_Emit1(cx, cg, JSOP_NEWINIT) < 0)
6104             return JS_FALSE;
6105 
6106         pn2 = pn->pn_head;
6107 #if JS_HAS_SHARP_VARS
6108         if (pn2 && pn2->pn_type == TOK_DEFSHARP) {
6109             EMIT_UINT16_IMM_OP(JSOP_DEFSHARP, (jsatomid)pn2->pn_num);
6110             pn2 = pn2->pn_next;
6111         }
6112 #endif
6113 
6114         for (; pn2; pn2 = pn2->pn_next) {
6115             /* Emit an index for t[2], else map an atom for t.p or t['%q']. */
6116             pn3 = pn2->pn_left;
6117             switch (pn3->pn_type) {
6118               case TOK_NUMBER:
6119                 if (!EmitNumberOp(cx, pn3->pn_dval, cg))
6120                     return JS_FALSE;
6121                 break;
6122               case TOK_NAME:
6123               case TOK_STRING:
6124                 ale = js_IndexAtom(cx, pn3->pn_atom, &cg->atomList);
6125                 if (!ale)
6126                     return JS_FALSE;
6127                 break;
6128               default:
6129                 JS_ASSERT(0);
6130             }
6131 
6132             /* Emit code for the property initializer. */
6133             if (!js_EmitTree(cx, cg, pn2->pn_right))
6134                 return JS_FALSE;
6135 
6136 #if JS_HAS_GETTER_SETTER
6137             op = pn2->pn_op;
6138             if (op == JSOP_GETTER || op == JSOP_SETTER) {
6139                 if (pn3->pn_type != TOK_NUMBER &&
6140                     ALE_INDEX(ale) >= JS_BIT(16)) {
6141                     ReportStatementTooLarge(cx, cg);
6142                     return JS_FALSE;
6143                 }
6144                 if (js_Emit1(cx, cg, op) < 0)
6145                     return JS_FALSE;
6146             }
6147 #endif
6148             /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
6149             if (pn3->pn_type == TOK_NUMBER) {
6150                 if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
6151                     return JS_FALSE;
6152                 if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6153                     return JS_FALSE;
6154             } else {
6155                 EMIT_ATOM_INDEX_OP(JSOP_INITPROP, ALE_INDEX(ale));
6156             }
6157         }
6158 
6159         /* Emit an op for sharpArray cleanup and decompilation. */
6160         if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
6161             return JS_FALSE;
6162         break;
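        /*
         * Illustrative sketch for the object-initialiser case above:
         * `{p: a, 2: c}` emits roughly
         *
         *     JSOP_NAME Object; JSOP_PUSHOBJ; JSOP_NEWINIT;
         *     <a>; JSOP_INITPROP p;
         *     <2>; <c>; JSOP_INITELEM;    (annotated with SRC_INITPROP)
         *     JSOP_ENDINIT
         *
         * Numeric property names go through JSOP_INITELEM so the decompiler
         * can recover `2:c`; identifier and string names use the
         * atom-indexed JSOP_INITPROP.
         */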
6163 
6164 #if JS_HAS_SHARP_VARS
6165       case TOK_DEFSHARP:
6166         if (!js_EmitTree(cx, cg, pn->pn_kid))
6167             return JS_FALSE;
6168         EMIT_UINT16_IMM_OP(JSOP_DEFSHARP, (jsatomid) pn->pn_num);
6169         break;
6170 
6171       case TOK_USESHARP:
6172         EMIT_UINT16_IMM_OP(JSOP_USESHARP, (jsatomid) pn->pn_num);
6173         break;
6174 #endif /* JS_HAS_SHARP_VARS */
6175 
6176       case TOK_RP:
6177       {
6178         uintN oldflags;
6179 
6180         /*
6181          * The node for (e) has e as its kid, enabling users who want to nest
6182          * assignment expressions in conditions to avoid the error correction
6183          * done by Condition (from x = y to x == y) by double-parenthesizing.
6184          */
6185         oldflags = cg->treeContext.flags;
6186         cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
6187         if (!js_EmitTree(cx, cg, pn->pn_kid))
6188             return JS_FALSE;
6189         cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
6190         if (js_Emit1(cx, cg, JSOP_GROUP) < 0)
6191             return JS_FALSE;
6192         break;
6193       }
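        /*
         * Example of the escape hatch described above: Condition corrects
         * `if (a = b)` to an equality test, but double-parenthesizing, as in
         * `if ((a = b))`, keeps the assignment; the extra parentheses
         * survive as this TOK_RP node, which simply emits its kid followed
         * by JSOP_GROUP.
         */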
6194 
6195       case TOK_NAME:
6196         if (!BindNameToSlot(cx, &cg->treeContext, pn, JS_FALSE))
6197             return JS_FALSE;
6198         op = pn->pn_op;
6199         if (op == JSOP_ARGUMENTS) {
6200             if (js_Emit1(cx, cg, op) < 0)
6201                 return JS_FALSE;
6202             break;
6203         }
6204         if (pn->pn_slot >= 0) {
6205             atomIndex = (jsatomid) pn->pn_slot;
6206             EMIT_UINT16_IMM_OP(op, atomIndex);
6207             break;
6208         }
6209         /* FALL THROUGH */
6210 
6211 #if JS_HAS_XML_SUPPORT
6212       case TOK_XMLATTR:
6213       case TOK_XMLSPACE:
6214       case TOK_XMLTEXT:
6215       case TOK_XMLCDATA:
6216       case TOK_XMLCOMMENT:
6217 #endif
6218       case TOK_STRING:
6219       case TOK_OBJECT:
6220         /*
6221          * The scanner and parser associate JSOP_NAME with TOK_NAME, although
6222          * other bytecodes may result instead (JSOP_BINDNAME/JSOP_SETNAME,
6223          * JSOP_FORNAME, etc.).  Among JSOP_*NAME* variants, only JSOP_NAME
6224          * may generate the first operand of a call or new expression, so only
6225          * it sets the "obj" virtual machine register to the object along the
6226          * scope chain in which the name was found.
6227          *
6228          * Token types for STRING and OBJECT have corresponding bytecode ops
6229          * in pn_op and emit the same format as NAME, so they share this code.
6230          */
6231         ok = EmitAtomOp(cx, pn, pn->pn_op, cg);
6232         break;
6233 
6234       case TOK_NUMBER:
6235         ok = EmitNumberOp(cx, pn->pn_dval, cg);
6236         break;
6237 
6238 #if JS_HAS_XML_SUPPORT
6239       case TOK_ANYNAME:
6240 #endif
6241       case TOK_PRIMARY:
6242         if (js_Emit1(cx, cg, pn->pn_op) < 0)
6243             return JS_FALSE;
6244         break;
6245 
6246 #if JS_HAS_DEBUGGER_KEYWORD
6247       case TOK_DEBUGGER:
6248         if (js_Emit1(cx, cg, JSOP_DEBUGGER) < 0)
6249             return JS_FALSE;
6250         break;
6251 #endif /* JS_HAS_DEBUGGER_KEYWORD */
6252 
6253 #if JS_HAS_XML_SUPPORT
6254       case TOK_XMLELEM:
6255       case TOK_XMLLIST:
6256         if (pn->pn_op == JSOP_XMLOBJECT) {
6257             ok = EmitAtomOp(cx, pn, pn->pn_op, cg);
6258             break;
6259         }
6260 
6261         JS_ASSERT(pn->pn_type == TOK_XMLLIST || pn->pn_count != 0);
6262         switch (pn->pn_head ? pn->pn_head->pn_type : TOK_XMLLIST) {
6263           case TOK_XMLETAGO:
6264             JS_ASSERT(0);
6265             /* FALL THROUGH */
6266           case TOK_XMLPTAGC:
6267           case TOK_XMLSTAGO:
6268             break;
6269           default:
6270             if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6271                 return JS_FALSE;
6272         }
6273 
6274         for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6275             if (pn2->pn_type == TOK_LC &&
6276                 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6277                 return JS_FALSE;
6278             }
6279             if (!js_EmitTree(cx, cg, pn2))
6280                 return JS_FALSE;
6281             if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6282                 return JS_FALSE;
6283         }
6284 
6285         if (pn->pn_extra & PNX_XMLROOT) {
6286             if (pn->pn_count == 0) {
6287                 JS_ASSERT(pn->pn_type == TOK_XMLLIST);
6288                 atom = cx->runtime->atomState.emptyAtom;
6289                 ale = js_IndexAtom(cx, atom, &cg->atomList);
6290                 if (!ale)
6291                     return JS_FALSE;
6292                 EMIT_ATOM_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6293             }
6294             if (js_Emit1(cx, cg, pn->pn_op) < 0)
6295                 return JS_FALSE;
6296         }
6297 #ifdef DEBUG
6298         else
6299             JS_ASSERT(pn->pn_count != 0);
6300 #endif
6301         break;
6302 
6303       case TOK_XMLPTAGC:
6304         if (pn->pn_op == JSOP_XMLOBJECT) {
6305             ok = EmitAtomOp(cx, pn, pn->pn_op, cg);
6306             break;
6307         }
6308         /* FALL THROUGH */
6309 
6310       case TOK_XMLSTAGO:
6311       case TOK_XMLETAGO:
6312       {
6313         uint32 i;
6314 
6315         if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6316             return JS_FALSE;
6317 
6318         ale = js_IndexAtom(cx,
6319                            (pn->pn_type == TOK_XMLETAGO)
6320                            ? cx->runtime->atomState.etagoAtom
6321                            : cx->runtime->atomState.stagoAtom,
6322                            &cg->atomList);
6323         if (!ale)
6324             return JS_FALSE;
6325         EMIT_ATOM_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6326 
6327         JS_ASSERT(pn->pn_count != 0);
6328         pn2 = pn->pn_head;
6329         if (pn2->pn_type == TOK_LC && js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0)
6330             return JS_FALSE;
6331         if (!js_EmitTree(cx, cg, pn2))
6332             return JS_FALSE;
6333         if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6334             return JS_FALSE;
6335 
6336         for (pn2 = pn2->pn_next, i = 0; pn2; pn2 = pn2->pn_next, i++) {
6337             if (pn2->pn_type == TOK_LC &&
6338                 js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6339                 return JS_FALSE;
6340             }
6341             if (!js_EmitTree(cx, cg, pn2))
6342                 return JS_FALSE;
6343             if ((i & 1) && pn2->pn_type == TOK_LC) {
6344                 if (js_Emit1(cx, cg, JSOP_TOATTRVAL) < 0)
6345                     return JS_FALSE;
6346             }
6347             if (js_Emit1(cx, cg,
6348                          (i & 1) ? JSOP_ADDATTRVAL : JSOP_ADDATTRNAME) < 0) {
6349                 return JS_FALSE;
6350             }
6351         }
6352 
6353         ale = js_IndexAtom(cx,
6354                            (pn->pn_type == TOK_XMLPTAGC)
6355                            ? cx->runtime->atomState.ptagcAtom
6356                            : cx->runtime->atomState.tagcAtom,
6357                            &cg->atomList);
6358         if (!ale)
6359             return JS_FALSE;
6360         EMIT_ATOM_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6361         if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6362             return JS_FALSE;
6363 
6364         if ((pn->pn_extra & PNX_XMLROOT) && js_Emit1(cx, cg, pn->pn_op) < 0)
6365             return JS_FALSE;
6366         break;
6367       }
6368 
6369       case TOK_XMLNAME:
6370         if (pn->pn_arity == PN_LIST) {
6371             JS_ASSERT(pn->pn_count != 0);
6372             for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6373                 if (!js_EmitTree(cx, cg, pn2))
6374                     return JS_FALSE;
6375                 if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6376                     return JS_FALSE;
6377             }
6378         } else {
6379             JS_ASSERT(pn->pn_arity == PN_NULLARY);
6380             ok = EmitAtomOp(cx, pn, pn->pn_op, cg);
6381         }
6382         break;
6383 
6384       case TOK_XMLPI:
6385         ale = js_IndexAtom(cx, pn->pn_atom2, &cg->atomList);
6386         if (!ale)
6387             return JS_FALSE;
6388         if (!EmitAtomIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
6389             return JS_FALSE;
6390         if (!EmitAtomOp(cx, pn, JSOP_XMLPI, cg))
6391             return JS_FALSE;
6392         break;
6393 #endif /* JS_HAS_XML_SUPPORT */
6394 
6395       default:
6396         JS_ASSERT(0);
6397     }
6398 
6399     if (ok && --cg->emitLevel == 0 && cg->spanDeps)
6400         ok = OptimizeSpanDeps(cx, cg);
6401 
6402     return ok;
6403 }
6404 
6405 /* XXX get rid of offsetBias, it's used only by SRC_FOR and SRC_DECL */
6406 JS_FRIEND_DATA(JSSrcNoteSpec) js_SrcNoteSpec[] = {
6407     {"null",            0,      0,      0},
6408     {"if",              0,      0,      0},
6409     {"if-else",         2,      0,      1},
6410     {"while",           1,      0,      1},
6411     {"for",             3,      1,      1},
6412     {"continue",        0,      0,      0},
6413     {"decl",            1,      1,      1},
6414     {"pcdelta",         1,      0,      1},
6415     {"assignop",        0,      0,      0},
6416     {"cond",            1,      0,      1},
6417     {"brace",           1,      0,      1},
6418     {"hidden",          0,      0,      0},
6419     {"pcbase",          1,      0,     -1},
6420     {"label",           1,      0,      0},
6421     {"labelbrace",      1,      0,      0},
6422     {"endbrace",        0,      0,      0},
6423     {"break2label",     1,      0,      0},
6424     {"cont2label",      1,      0,      0},
6425     {"switch",          2,      0,      1},
6426     {"funcdef",         1,      0,      0},
6427     {"catch",           1,      0,      1},
6428     {"extended",       -1,      0,      0},
6429     {"newline",         0,      0,      0},
6430     {"setline",         1,      0,      0},
6431     {"xdelta",          0,      0,      0},
6432 };
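/*
 * Reading note (an assumption based on JSSrcNoteSpec's declaration in
 * jsemit.h): each row above is {name, arity, offsetBias, isSpanDep}.  Under
 * that reading, SRC_FOR and SRC_DECL are indeed the only note types with a
 * nonzero offsetBias, which is what the XXX comment above is getting at.
 */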
6433 
6434 static intN
6435 AllocSrcNote(JSContext *cx, JSCodeGenerator *cg)
6436 {
6437     intN index;
6438     JSArenaPool *pool;
6439     size_t size;
6440 
6441     index = CG_NOTE_COUNT(cg);
6442     if (((uintN)index & CG_NOTE_MASK(cg)) == 0) {
6443         pool = cg->notePool;
6444         size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
6445         if (!CG_NOTES(cg)) {
6446             /* Allocate the first note array lazily; leave noteMask alone. */
6447             JS_ARENA_ALLOCATE_CAST(CG_NOTES(cg), jssrcnote *, pool, size);
6448         } else {
6449             /* Grow by doubling note array size; update noteMask on success. */
6450             JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
6451             if (CG_NOTES(cg))
6452                 CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
6453         }
6454         if (!CG_NOTES(cg)) {
6455             JS_ReportOutOfMemory(cx);
6456             return -1;
6457         }
6458     }
6459 
6460     CG_NOTE_COUNT(cg) = index + 1;
6461     return index;
6462 }
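/*
 * The mask arithmetic above implements growth by doubling: noteMask is kept
 * at (capacity - 1), so "count & mask == 0" fires exactly when the current
 * chunk is full.  Below is a minimal stand-alone sketch of the same idea
 * using malloc/realloc (<stdlib.h>) instead of the arena pool;
 * EnsureNoteCapacity is a hypothetical helper, not engine API.
 */
#if 0
static int
EnsureNoteCapacity(jssrcnote **notesp, uintN count, uintN *maskp)
{
    size_t size;
    jssrcnote *notes;

    if ((count & *maskp) != 0)
        return 1;                               /* room left in this chunk */
    size = SRCNOTE_SIZE(*maskp + 1);            /* current capacity, bytes */
    if (count == 0) {
        notes = (jssrcnote *) malloc(size);     /* first chunk */
    } else {
        notes = (jssrcnote *) realloc(*notesp, size * 2);   /* double it */
        if (notes)
            *maskp = (*maskp << 1) | 1;
    }
    if (!notes)
        return 0;
    *notesp = notes;
    return 1;
}
#endif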
6463 
6464 intN
6465 js_NewSrcNote(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type)
6466 {
6467     intN index, n;
6468     jssrcnote *sn;
6469     ptrdiff_t offset, delta, xdelta;
6470 
6471     /*
6472      * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
6473      * incrementing CG_NOTE_COUNT(cg).
6474      */
6475     index = AllocSrcNote(cx, cg);
6476     if (index < 0)
6477         return -1;
6478     sn = &CG_NOTES(cg)[index];
6479 
6480     /*
6481      * Compute delta from the last annotated bytecode's offset.  If it's too
6482      * big to fit in sn, allocate one or more xdelta notes and reset sn.
6483      */
6484     offset = CG_OFFSET(cg);
6485     delta = offset - CG_LAST_NOTE_OFFSET(cg);
6486     CG_LAST_NOTE_OFFSET(cg) = offset;
6487     if (delta >= SN_DELTA_LIMIT) {
6488         do {
6489             xdelta = JS_MIN(delta, SN_XDELTA_MASK);
6490             SN_MAKE_XDELTA(sn, xdelta);
6491             delta -= xdelta;
6492             index = AllocSrcNote(cx, cg);
6493             if (index < 0)
6494                 return -1;
6495             sn = &CG_NOTES(cg)[index];
6496         } while (delta >= SN_DELTA_LIMIT);
6497     }
6498 
6499     /*
6500      * Initialize type and delta, then allocate the minimum number of notes
6501      * needed for type's arity.  Usually, we won't need more, but if an offset
6502      * does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
6503      */
6504     SN_MAKE_NOTE(sn, type, delta);
6505     for (n = (intN)js_SrcNoteSpec[type].arity; n > 0; n--) {
6506         if (js_NewSrcNote(cx, cg, SRC_NULL) < 0)
6507             return -1;
6508     }
6509     return index;
6510 }
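/*
 * Worked example, assuming the usual jsopcode.h constants (3 delta bits, so
 * SN_DELTA_LIMIT is 8, and SN_XDELTA_MASK is 63): if the next annotated
 * bytecode lies 70 bytes past the previous one, the loop above writes one
 * xdelta note absorbing 63 bytes, leaving delta = 7, which then fits in the
 * real note written by SN_MAKE_NOTE.
 */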
6511 
6512 intN
6513 js_NewSrcNote2(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
6514                ptrdiff_t offset)
6515 {
6516     intN index;
6517 
6518     index = js_NewSrcNote(cx, cg, type);
6519     if (index >= 0) {
6520         if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset))
6521             return -1;
6522     }
6523     return index;
6524 }
6525 
6526 intN
6527 js_NewSrcNote3(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
6528                ptrdiff_t offset1, ptrdiff_t offset2)
6529 {
6530     intN index;
6531 
6532     index = js_NewSrcNote(cx, cg, type);
6533     if (index >= 0) {
6534         if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset1))
6535             return -1;
6536         if (!js_SetSrcNoteOffset(cx, cg, index, 1, offset2))
6537             return -1;
6538     }
6539     return index;
6540 }
6541 
6542 static JSBool
6543 GrowSrcNotes(JSContext *cx, JSCodeGenerator *cg)
6544 {
6545     JSArenaPool *pool;
6546     size_t size;
6547 
6548     /* Grow by doubling note array size; update noteMask on success. */
6549     pool = cg->notePool;
6550     size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
6551     JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
6552     if (!CG_NOTES(cg)) {
6553         JS_ReportOutOfMemory(cx);
6554         return JS_FALSE;
6555     }
6556     CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
6557     return JS_TRUE;
6558 }
6559 
6560 jssrcnote *
6561 js_AddToSrcNoteDelta(JSContext *cx, JSCodeGenerator *cg, jssrcnote *sn,
6562                      ptrdiff_t delta)
6563 {
6564     ptrdiff_t base, limit, newdelta, diff;
6565     intN index;
6566 
6567     /*
6568      * Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
6569      * main script note deltas, and only by a small positive amount.
6570      */
6571     JS_ASSERT(cg->current == &cg->main);
6572     JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);
6573 
6574     base = SN_DELTA(sn);
6575     limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
6576     newdelta = base + delta;
6577     if (newdelta < limit) {
6578         SN_SET_DELTA(sn, newdelta);
6579     } else {
6580         index = sn - cg->main.notes;
6581         if ((cg->main.noteCount & cg->main.noteMask) == 0) {
6582             if (!GrowSrcNotes(cx, cg))
6583                 return NULL;
6584             sn = cg->main.notes + index;
6585         }
6586         diff = cg->main.noteCount - index;
6587         cg->main.noteCount++;
6588         memmove(sn + 1, sn, SRCNOTE_SIZE(diff));
6589         SN_MAKE_XDELTA(sn, delta);
6590         sn++;
6591     }
6592     return sn;
6593 }
6594 
6595 JS_FRIEND_API(uintN)
6596 js_SrcNoteLength(jssrcnote *sn)
6597 {
6598     uintN arity;
6599     jssrcnote *base;
6600 
6601     arity = (intN)js_SrcNoteSpec[SN_TYPE(sn)].arity;
6602     for (base = sn++; arity; sn++, arity--) {
6603         if (*sn & SN_3BYTE_OFFSET_FLAG)
6604             sn += 2;
6605     }
6606     return sn - base;
6607 }
6608 
6609 JS_FRIEND_API(ptrdiff_t)
6610 js_GetSrcNoteOffset(jssrcnote *sn, uintN which)
6611 {
6612     /* Find the offset numbered which (i.e., skip exactly which offsets). */
6613     JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
6614     JS_ASSERT(which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
6615     for (sn++; which; sn++, which--) {
6616         if (*sn & SN_3BYTE_OFFSET_FLAG)
6617             sn += 2;
6618     }
6619     if (*sn & SN_3BYTE_OFFSET_FLAG) {
6620         return (ptrdiff_t)(((uint32)(sn[0] & SN_3BYTE_OFFSET_MASK) << 16)
6621                            | (sn[1] << 8)
6622                            | sn[2]);
6623     }
6624     return (ptrdiff_t)*sn;
6625 }
6626 
6627 JSBool
6628 js_SetSrcNoteOffset(JSContext *cx, JSCodeGenerator *cg, uintN index,
6629                     uintN which, ptrdiff_t offset)
6630 {
6631     jssrcnote *sn;
6632     ptrdiff_t diff;
6633 
6634     if ((jsuword)offset >= (jsuword)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG << 16)) {
6635         ReportStatementTooLarge(cx, cg);
6636         return JS_FALSE;
6637     }
6638 
6639     /* Find the offset numbered which (i.e., skip exactly which offsets). */
6640     sn = &CG_NOTES(cg)[index];
6641     JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
6642     JS_ASSERT(which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
6643     for (sn++; which; sn++, which--) {
6644         if (*sn & SN_3BYTE_OFFSET_FLAG)
6645             sn += 2;
6646     }
6647 
6648     /* See if the new offset requires three bytes. */
6649     if (offset > (ptrdiff_t)SN_3BYTE_OFFSET_MASK) {
6650         /* Maybe this offset was already set to a three-byte value. */
6651         if (!(*sn & SN_3BYTE_OFFSET_FLAG)) {
6652             /* Losing, need to insert another two bytes for this offset. */
6653             index = PTRDIFF(sn, CG_NOTES(cg), jssrcnote);
6654 
6655             /*
6656              * Simultaneously test to see if the source note array must grow to
6657              * accommodate either the first or second byte of additional storage
6658              * required by this 3-byte offset.
6659              */
6660             if (((CG_NOTE_COUNT(cg) + 1) & CG_NOTE_MASK(cg)) <= 1) {
6661                 if (!GrowSrcNotes(cx, cg))
6662                     return JS_FALSE;
6663                 sn = CG_NOTES(cg) + index;
6664             }
6665             CG_NOTE_COUNT(cg) += 2;
6666 
6667             diff = CG_NOTE_COUNT(cg) - (index + 3);
6668             JS_ASSERT(diff >= 0);
6669             if (diff > 0)
6670                 memmove(sn + 3, sn + 1, SRCNOTE_SIZE(diff));
6671         }
6672         *sn++ = (jssrcnote)(SN_3BYTE_OFFSET_FLAG | (offset >> 16));
6673         *sn++ = (jssrcnote)(offset >> 8);
6674     }
6675     *sn = (jssrcnote)offset;
6676     return JS_TRUE;
6677 }
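/*
 * For reference, the offset encoding used above and read back by
 * js_GetSrcNoteOffset: a small offset occupies one note byte; anything
 * larger than SN_3BYTE_OFFSET_MASK spills into three bytes, the first
 * holding the flag bit plus bits 16..22 and the next two holding bits
 * 8..15 and 0..7.  A stand-alone sketch, assuming the jsopcode.h values
 * SN_3BYTE_OFFSET_FLAG == 0x80 and SN_3BYTE_OFFSET_MASK == 0x7f
 * (EncodeNoteOffset/DecodeNoteOffset are hypothetical helpers, not engine
 * API):
 */
#if 0
static void
EncodeNoteOffset(jssrcnote *sn, ptrdiff_t offset)
{
    if (offset <= 0x7f) {
        sn[0] = (jssrcnote) offset;                     /* one byte */
    } else {
        sn[0] = (jssrcnote) (0x80 | (offset >> 16));    /* flag + bits 16..22 */
        sn[1] = (jssrcnote) (offset >> 8);
        sn[2] = (jssrcnote) offset;
    }
}

static ptrdiff_t
DecodeNoteOffset(const jssrcnote *sn)
{
    if (!(sn[0] & 0x80))
        return (ptrdiff_t) sn[0];
    return (ptrdiff_t) (((uint32) (sn[0] & 0x7f) << 16) | (sn[1] << 8) | sn[2]);
}
#endif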
6678 
6679 #ifdef DEBUG_notme
6680 #define DEBUG_srcnotesize
6681 #endif
6682 
6683 #ifdef DEBUG_srcnotesize
6684 #define NBINS 10
6685 static uint32 hist[NBINS];
6686 
6687 void DumpSrcNoteSizeHist()
6688 {
6689     static FILE *fp;
6690     int i, n;
6691 
6692     if (!fp) {
6693         fp = fopen("/tmp/srcnotes.hist", "w");
6694         if (!fp)
6695             return;
6696         setvbuf(fp, NULL, _IONBF, 0);
6697     }
6698     fprintf(fp, "SrcNote size histogram:\n");
6699     for (i = 0; i < NBINS; i++) {
6700         fprintf(fp, "%4u %4u ", JS_BIT(i), hist[i]);
6701         for (n = (int) JS_HOWMANY(hist[i], 10); n > 0; --n)
6702             fputc('*', fp);
6703         fputc('\n', fp);
6704     }
6705     fputc('\n', fp);
6706 }
6707 #endif
6708 
6709 /*
6710  * Fill in the storage at notes with prolog and main srcnotes; the space at
6711  * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
6712  * SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
6713  * CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
6714  */
6715 JSBool
6716 js_FinishTakingSrcNotes(JSContext *cx, JSCodeGenerator *cg, jssrcnote *notes)
6717 {
6718     uintN prologCount, mainCount, totalCount;
6719     ptrdiff_t offset, delta;
6720     jssrcnote *sn;
6721 
6722     JS_ASSERT(cg->current == &cg->main);
6723 
6724     prologCount = cg->prolog.noteCount;
6725     if (prologCount && cg->prolog.currentLine != cg->firstLine) {
6726         CG_SWITCH_TO_PROLOG(cg);
6727         if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)cg->firstLine) < 0)
6728             return JS_FALSE;
6729         prologCount = cg->prolog.noteCount;
6730         CG_SWITCH_TO_MAIN(cg);
6731     } else {
6732         /*
6733          * Either no prolog srcnotes, or no line number change over prolog.
6734          * We don't need a SRC_SETLINE, but we may need to adjust the offset
6735          * of the first main note, by adding to its delta and possibly even
6736          * prepending SRC_XDELTA notes to it to account for prolog bytecodes
6737          * that came at and after the last annotated bytecode.
6738          */
6739         offset = CG_PROLOG_OFFSET(cg) - cg->prolog.lastNoteOffset;
6740         JS_ASSERT(offset >= 0);
6741         if (offset > 0 && cg->main.noteCount != 0) {
6742             /* NB: Use as much of the first main note's delta as we can. */
6743             sn = cg->main.notes;
6744             delta = SN_IS_XDELTA(sn)
6745                     ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
6746                     : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
6747             if (offset < delta)
6748                 delta = offset;
6749             for (;;) {
6750                 if (!js_AddToSrcNoteDelta(cx, cg, sn, delta))
6751                     return JS_FALSE;
6752                 offset -= delta;
6753                 if (offset == 0)
6754                     break;
6755                 delta = JS_MIN(offset, SN_XDELTA_MASK);
6756                 sn = cg->main.notes;
6757             }
6758         }
6759     }
6760 
6761     mainCount = cg->main.noteCount;
6762     totalCount = prologCount + mainCount;
6763     if (prologCount)
6764         memcpy(notes, cg->prolog.notes, SRCNOTE_SIZE(prologCount));
6765     memcpy(notes + prologCount, cg->main.notes, SRCNOTE_SIZE(mainCount));
6766     SN_MAKE_TERMINATOR(&notes[totalCount]);
6767 
6768 #ifdef DEBUG_notme
6769   { int bin = JS_CeilingLog2(totalCount);
6770     if (bin >= NBINS)
6771         bin = NBINS - 1;
6772     ++hist[bin];
6773   }
6774 #endif
6775     return JS_TRUE;
6776 }
6777 
6778 JSBool
6779 js_AllocTryNotes(JSContext *cx, JSCodeGenerator *cg)
6780 {
6781     size_t size, incr;
6782     ptrdiff_t delta;
6783 
6784     size = TRYNOTE_SIZE(cg->treeContext.tryCount);
6785     if (size <= cg->tryNoteSpace)
6786         return JS_TRUE;
6787 
6788     /*
6789      * Allocate trynotes from cx->tempPool.
6790      * XXX Too much growing and we bloat, as other tempPool allocators block
6791      * in-place growth, and we never recycle old free space in an arena.
6792      * YYY But once we consume an entire arena, we'll realloc it, letting the
6793      * malloc heap recycle old space, while still freeing _en masse_ via the
6794      * arena pool.
6795      */
6796     if (!cg->tryBase) {
6797         size = JS_ROUNDUP(size, TRYNOTE_SIZE(TRYNOTE_CHUNK));
6798         JS_ARENA_ALLOCATE_CAST(cg->tryBase, JSTryNote *, &cx->tempPool, size);
6799         if (!cg->tryBase)
6800             return JS_FALSE;
6801         cg->tryNoteSpace = size;
6802         cg->tryNext = cg->tryBase;
6803     } else {
6804         delta = PTRDIFF((char *)cg->tryNext, (char *)cg->tryBase, char);
6805         incr = size - cg->tryNoteSpace;
6806         incr = JS_ROUNDUP(incr, TRYNOTE_SIZE(TRYNOTE_CHUNK));
6807         size = cg->tryNoteSpace;
6808         JS_ARENA_GROW_CAST(cg->tryBase, JSTryNote *, &cx->tempPool, size, incr);
6809         if (!cg->tryBase)
6810             return JS_FALSE;
6811         cg->tryNoteSpace = size + incr;
6812         cg->tryNext = (JSTryNote *)((char *)cg->tryBase + delta);
6813     }
6814     return JS_TRUE;
6815 }
6816 
6817 JSTryNote *
6818 js_NewTryNote(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t start,
6819               ptrdiff_t end, ptrdiff_t catchStart)
6820 {
6821     JSTryNote *tn;
6822 
6823     JS_ASSERT(cg->tryBase <= cg->tryNext);
6824     JS_ASSERT(catchStart >= 0);
6825     tn = cg->tryNext++;
6826     tn->start = start;
6827     tn->length = end - start;
6828     tn->catchStart = catchStart;
6829     return tn;
6830 }
6831 
6832 void
6833 js_FinishTakingTryNotes(JSContext *cx, JSCodeGenerator *cg, JSTryNote *notes)
6834 {
6835     uintN count;
6836 
6837     count = PTRDIFF(cg->tryNext, cg->tryBase, JSTryNote);
6838     if (!count)
6839         return;
6840 
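    /*
     * The record stored at notes[count] below is a terminator rather than a
     * real try note: its zero catchStart marks the end of the array for the
     * runtime's trynote walk (an assumption about the consumer, not enforced
     * here), while its length field simply records the final main-section
     * bytecode offset.
     */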
6841     memcpy(notes, cg->tryBase, TRYNOTE_SIZE(count));
6842     notes[count].start = 0;
6843     notes[count].length = CG_OFFSET(cg);
6844     notes[count].catchStart = 0;
6845 }
6846