1
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2
* vim: set ts=8 sw=4 et tw=78:
4
* ***** BEGIN LICENSE BLOCK *****
5
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
7
* The contents of this file are subject to the Mozilla Public License Version
8
* 1.1 (the "License"); you may not use this file except in compliance with
9
* the License. You may obtain a copy of the License at
10
* http://www.mozilla.org/MPL/
12
* Software distributed under the License is distributed on an "AS IS" basis,
13
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14
* for the specific language governing rights and limitations under the
17
* The Original Code is Mozilla Communicator client code, released
20
* The Initial Developer of the Original Code is
21
* Netscape Communications Corporation.
22
* Portions created by the Initial Developer are Copyright (C) 1998
23
* the Initial Developer. All Rights Reserved.
27
* Alternatively, the contents of this file may be used under the terms of
28
* either of the GNU General Public License Version 2 or later (the "GPL"),
29
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30
* in which case the provisions of the GPL or the LGPL are applicable instead
31
* of those above. If you wish to allow use of your version of this file only
32
* under the terms of either the GPL or the LGPL, and not to allow others to
33
* use your version of this file under the terms of the MPL, indicate your
34
* decision by deleting the provisions above and replace them with the notice
35
* and other provisions required by the GPL or the LGPL. If you do not delete
36
* the provisions above, a recipient may use your version of this file under
37
* the terms of any one of the MPL, the GPL or the LGPL.
39
* ***** END LICENSE BLOCK ***** */
42
* JS bytecode generation.
50
#include "jsarena.h" /* Added by JSIFY */
51
#include "jsutil.h" /* Added by JSIFY */
68
/* Allocation chunk counts, must be powers of two in general. */
69
#define BYTECODE_CHUNK 256 /* code allocation increment */
70
#define SRCNOTE_CHUNK 64 /* initial srcnote allocation increment */
71
#define TRYNOTE_CHUNK 64 /* trynote allocation increment */
73
/* Macros to compute byte sizes from typed element counts. */
74
#define BYTECODE_SIZE(n) ((n) * sizeof(jsbytecode))
75
#define SRCNOTE_SIZE(n) ((n) * sizeof(jssrcnote))
76
#define TRYNOTE_SIZE(n) ((n) * sizeof(JSTryNote))
79
js_InitCodeGenerator(JSContext *cx, JSCodeGenerator *cg,
80
JSArenaPool *codePool, JSArenaPool *notePool,
81
const char *filename, uintN lineno,
82
JSPrincipals *principals)
84
memset(cg, 0, sizeof *cg);
85
TREE_CONTEXT_INIT(&cg->treeContext);
86
cg->treeContext.flags |= TCF_COMPILING;
87
cg->codePool = codePool;
88
cg->notePool = notePool;
89
cg->codeMark = JS_ARENA_MARK(codePool);
90
cg->noteMark = JS_ARENA_MARK(notePool);
91
cg->tempMark = JS_ARENA_MARK(&cx->tempPool);
92
cg->current = &cg->main;
93
cg->filename = filename;
94
cg->firstLine = cg->prolog.currentLine = cg->main.currentLine = lineno;
95
cg->principals = principals;
96
ATOM_LIST_INIT(&cg->atomList);
97
cg->prolog.noteMask = cg->main.noteMask = SRCNOTE_CHUNK - 1;
98
ATOM_LIST_INIT(&cg->constList);
103
js_FinishCodeGenerator(JSContext *cx, JSCodeGenerator *cg)
105
TREE_CONTEXT_FINISH(&cg->treeContext);
106
JS_ARENA_RELEASE(cg->codePool, cg->codeMark);
107
JS_ARENA_RELEASE(cg->notePool, cg->noteMark);
108
JS_ARENA_RELEASE(&cx->tempPool, cg->tempMark);
112
EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
114
jsbytecode *base, *limit, *next;
115
ptrdiff_t offset, length;
120
limit = CG_LIMIT(cg);
121
offset = PTRDIFF(next, base, jsbytecode);
122
if (next + delta > limit) {
123
length = offset + delta;
124
length = (length <= BYTECODE_CHUNK)
126
: JS_BIT(JS_CeilingLog2(length));
127
incr = BYTECODE_SIZE(length);
129
JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
131
size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
133
JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
136
JS_ReportOutOfMemory(cx);
140
CG_LIMIT(cg) = base + length;
141
CG_NEXT(cg) = base + offset;
147
UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
150
const JSCodeSpec *cs;
153
pc = CG_CODE(cg, target);
154
cs = &js_CodeSpec[pc[0]];
157
nuses = 2 + GET_ARGC(pc); /* stack: fun, this, [argc arguments] */
158
cg->stackDepth -= nuses;
159
JS_ASSERT(cg->stackDepth >= 0);
160
if (cg->stackDepth < 0) {
162
JS_snprintf(numBuf, sizeof numBuf, "%d", target);
163
JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
164
js_GetErrorMessage, NULL,
165
JSMSG_STACK_UNDERFLOW,
166
cg->filename ? cg->filename : "stdin",
169
cg->stackDepth += cs->ndefs;
170
if ((uintN)cg->stackDepth > cg->maxStackDepth)
171
cg->maxStackDepth = cg->stackDepth;
175
js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
177
ptrdiff_t offset = EmitCheck(cx, cg, op, 1);
180
*CG_NEXT(cg)++ = (jsbytecode)op;
181
UpdateDepth(cx, cg, offset);
187
js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
189
ptrdiff_t offset = EmitCheck(cx, cg, op, 2);
192
jsbytecode *next = CG_NEXT(cg);
193
next[0] = (jsbytecode)op;
195
CG_NEXT(cg) = next + 2;
196
UpdateDepth(cx, cg, offset);
202
js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
205
ptrdiff_t offset = EmitCheck(cx, cg, op, 3);
208
jsbytecode *next = CG_NEXT(cg);
209
next[0] = (jsbytecode)op;
212
CG_NEXT(cg) = next + 3;
213
UpdateDepth(cx, cg, offset);
219
js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
221
ptrdiff_t length = 1 + (ptrdiff_t)extra;
222
ptrdiff_t offset = EmitCheck(cx, cg, op, length);
225
jsbytecode *next = CG_NEXT(cg);
226
*next = (jsbytecode)op;
227
memset(next + 1, 0, BYTECODE_SIZE(extra));
228
CG_NEXT(cg) = next + length;
229
UpdateDepth(cx, cg, offset);
234
/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
235
const char js_with_statement_str[] = "with statement";
236
const char js_finally_block_str[] = "finally block";
237
const char js_script_str[] = "script";
239
static const char *statementName[] = {
240
"label statement", /* LABEL */
241
"if statement", /* IF */
242
"else statement", /* ELSE */
243
"switch statement", /* SWITCH */
245
js_with_statement_str, /* WITH */
246
"catch block", /* CATCH */
247
"try block", /* TRY */
248
js_finally_block_str, /* FINALLY */
249
js_finally_block_str, /* SUBROUTINE */
250
"do loop", /* DO_LOOP */
251
"for loop", /* FOR_LOOP */
252
"for/in loop", /* FOR_IN_LOOP */
253
"while loop", /* WHILE_LOOP */
257
StatementName(JSCodeGenerator *cg)
259
if (!cg->treeContext.topStmt)
260
return js_script_str;
261
return statementName[cg->treeContext.topStmt->type];
265
ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
267
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
272
Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
273
and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
274
into unconditional (gotos and gosubs), and conditional jumps or branches
275
(which pop a value, test it, and jump depending on its value). Most jumps
276
have just one immediate operand, a signed offset from the jump opcode's pc
277
to the target bytecode. The lookup and table switch opcodes may contain
280
Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
281
fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
282
suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
283
the extended form of the JSOP_OR branch opcode). The unextended or short
284
formats have 16-bit signed immediate offset operands, the extended or long
285
formats have 32-bit signed immediates. The span-dependency problem consists
286
of selecting as few long instructions as possible, or about as few -- since
287
jumps can span other jumps, extending one jump may cause another to need to
290
Most JS scripts are short, so need no extended jumps. We optimize for this
291
case by generating short jumps until we know a long jump is needed. After
292
that point, we keep generating short jumps, but each jump's 16-bit immediate
293
offset operand is actually an unsigned index into cg->spanDeps, an array of
294
JSSpanDep structs. Each struct tells the top offset in the script of the
295
opcode, the "before" offset of the jump (which will be the same as top for
296
simplex jumps, but which will index further into the bytecode array for a
297
non-initial jump offset in a lookup or table switch), the after "offset"
298
adjusted during span-dependent instruction selection (initially the same
299
value as the "before" offset), and the jump target (more below).
301
Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
302
ensure that all bytecode generated so far can be inspected to discover where
303
the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
304
that we generate span-dependency records sorted by their offsets, so we can
305
binary-search when trying to find a JSSpanDep for a given bytecode offset,
306
or the nearest JSSpanDep at or above a given pc.
308
To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
309
65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
310
tells us that we need to binary-search for the cg->spanDeps entry by the
311
jump opcode's bytecode offset (sd->before).
313
Jump targets need to be maintained in a data structure that lets us look
314
up an already-known target by its address (jumps may have a common target),
315
and that also lets us update the addresses (script-relative, a.k.a. absolute
316
offsets) of targets that come after a jump target (for when a jump below
317
that target needs to be extended). We use an AVL tree, implemented using
318
recursion, but with some tricky optimizations to its height-balancing code
319
(see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).
321
A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
322
positive sign, even though they link "backward" (i.e., toward lower bytecode
323
address). We don't want to waste space and search time in the AVL tree for
324
such temporary backpatch deltas, so we use a single-bit wildcard scheme to
325
tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
326
in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
327
target, or is still awaiting backpatching.
329
Note that backpatch chains would present a problem for BuildSpanDepTable,
330
which inspects bytecode to build cg->spanDeps on demand, when the first
331
short jump offset overflows. To solve this temporary problem, we emit a
332
proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
333
nuses/ndefs counts help keep the stack balanced, but whose opcode format
334
distinguishes its backpatch delta immediate operand from a normal jump
338
BalanceJumpTargets(JSJumpTarget **jtp)
340
JSJumpTarget *jt, *jt2, *root;
341
int dir, otherDir, heightChanged;
345
JS_ASSERT(jt->balance != 0);
347
if (jt->balance < -1) {
349
doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
350
} else if (jt->balance > 1) {
352
doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
357
otherDir = JT_OTHER_DIR(dir);
359
jt2 = jt->kids[otherDir];
360
*jtp = root = jt2->kids[dir];
362
jt->kids[otherDir] = root->kids[dir];
363
root->kids[dir] = jt;
365
jt2->kids[dir] = root->kids[otherDir];
366
root->kids[otherDir] = jt2;
369
root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
370
root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
373
*jtp = root = jt->kids[otherDir];
374
jt->kids[otherDir] = root->kids[dir];
375
root->kids[dir] = jt;
377
heightChanged = (root->balance != 0);
378
jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
381
return heightChanged;
384
typedef struct AddJumpTargetArgs {
392
AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
399
JSCodeGenerator *cg = args->cg;
403
cg->jtFreeList = jt->kids[JT_LEFT];
405
JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
408
JS_ReportOutOfMemory(args->cx);
412
jt->offset = args->offset;
414
jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
415
cg->numJumpTargets++;
421
if (jt->offset == args->offset) {
426
if (args->offset < jt->offset)
427
balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
429
balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
433
jt->balance += balanceDelta;
434
return (balanceDelta && jt->balance)
435
? 1 - BalanceJumpTargets(jtp)
440
static int AVLCheck(JSJumpTarget *jt)
445
JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
446
lh = AVLCheck(jt->kids[JT_LEFT]);
447
rh = AVLCheck(jt->kids[JT_RIGHT]);
448
JS_ASSERT(jt->balance == rh - lh);
449
return 1 + JS_MAX(lh, rh);
454
SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
457
AddJumpTargetArgs args;
459
if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
460
ReportStatementTooLarge(cx, cg);
466
args.offset = sd->top + off;
468
AddJumpTarget(&args, &cg->jumpTargets);
473
AVLCheck(cg->jumpTargets);
476
SD_SET_TARGET(sd, args.node);
480
#define SPANDEPS_MIN 256
481
#define SPANDEPS_SIZE(n) ((n) * sizeof(JSSpanDep))
482
#define SPANDEPS_SIZE_MIN SPANDEPS_SIZE(SPANDEPS_MIN)
485
AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
489
JSSpanDep *sdbase, *sd;
492
index = cg->numSpanDeps;
493
if (index + 1 == 0) {
494
ReportStatementTooLarge(cx, cg);
498
if ((index & (index - 1)) == 0 &&
499
(!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
501
size = SPANDEPS_SIZE_MIN;
502
JS_ARENA_ALLOCATE_CAST(sdbase, JSSpanDep *, &cx->tempPool, size);
504
size = SPANDEPS_SIZE(index);
505
JS_ARENA_GROW_CAST(sdbase, JSSpanDep *, &cx->tempPool, size, size);
509
cg->spanDeps = sdbase;
512
cg->numSpanDeps = index + 1;
513
sd = cg->spanDeps + index;
514
sd->top = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
515
sd->offset = sd->before = PTRDIFF(pc2, CG_BASE(cg), jsbytecode);
517
if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
518
/* Jump offset will be backpatched if off is a non-zero "bpdelta". */
520
JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
521
if (off > BPDELTA_MAX) {
522
ReportStatementTooLarge(cx, cg);
526
SD_SET_BPDELTA(sd, off);
527
} else if (off == 0) {
528
/* Jump offset will be patched directly, without backpatch chaining. */
529
SD_SET_TARGET(sd, NULL);
531
/* The jump offset in off is non-zero, therefore it's already known. */
532
if (!SetSpanDepTarget(cx, cg, sd, off))
536
if (index > SPANDEP_INDEX_MAX)
537
index = SPANDEP_INDEX_HUGE;
538
SET_SPANDEP_INDEX(pc2, index);
543
BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
545
jsbytecode *pc, *end;
547
const JSCodeSpec *cs;
550
pc = CG_BASE(cg) + cg->spanDepTodo;
554
cs = &js_CodeSpec[op];
555
len = (ptrdiff_t)cs->length;
557
switch (cs->format & JOF_TYPEMASK) {
559
off = GET_JUMP_OFFSET(pc);
560
if (!AddSpanDep(cx, cg, pc, pc, off))
564
case JOF_TABLESWITCH:
570
off = GET_JUMP_OFFSET(pc2);
571
if (!AddSpanDep(cx, cg, pc, pc2, off))
573
pc2 += JUMP_OFFSET_LEN;
574
low = GET_JUMP_OFFSET(pc2);
575
pc2 += JUMP_OFFSET_LEN;
576
high = GET_JUMP_OFFSET(pc2);
577
pc2 += JUMP_OFFSET_LEN;
578
for (i = low; i <= high; i++) {
579
off = GET_JUMP_OFFSET(pc2);
580
if (!AddSpanDep(cx, cg, pc, pc2, off))
582
pc2 += JUMP_OFFSET_LEN;
588
case JOF_LOOKUPSWITCH:
594
off = GET_JUMP_OFFSET(pc2);
595
if (!AddSpanDep(cx, cg, pc, pc2, off))
597
pc2 += JUMP_OFFSET_LEN;
598
npairs = (jsint) GET_ATOM_INDEX(pc2);
599
pc2 += ATOM_INDEX_LEN;
601
pc2 += ATOM_INDEX_LEN;
602
off = GET_JUMP_OFFSET(pc2);
603
if (!AddSpanDep(cx, cg, pc, pc2, off))
605
pc2 += JUMP_OFFSET_LEN;
621
GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
628
index = GET_SPANDEP_INDEX(pc);
629
if (index != SPANDEP_INDEX_HUGE)
630
return cg->spanDeps + index;
632
offset = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
634
hi = cg->numSpanDeps - 1;
637
sd = cg->spanDeps + mid;
638
if (sd->before == offset)
640
if (sd->before < offset)
651
SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
656
JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
657
if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
658
SET_JUMP_OFFSET(pc, delta);
662
if (delta > BPDELTA_MAX) {
663
ReportStatementTooLarge(cx, cg);
667
if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
670
sd = GetSpanDep(cg, pc);
671
JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
672
SD_SET_BPDELTA(sd, delta);
677
UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
679
if (jt->offset > pivot) {
681
if (jt->kids[JT_LEFT])
682
UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
684
if (jt->kids[JT_RIGHT])
685
UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
689
FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
693
JSSpanDep *sdbase, *sd;
695
num = cg->numSpanDeps;
698
sdbase = cg->spanDeps;
702
if (sd->before == offset)
704
if (sd->before < offset)
712
JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
717
FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
719
if (jt->kids[JT_LEFT])
720
FreeJumpTargets(cg, jt->kids[JT_LEFT]);
721
if (jt->kids[JT_RIGHT])
722
FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
723
jt->kids[JT_LEFT] = cg->jtFreeList;
728
OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
730
jsbytecode *pc, *oldpc, *base, *limit, *next;
731
JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
732
ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
737
jssrcnote *sn, *snlimit;
739
uintN i, n, noteIndex;
740
JSTryNote *tn, *tnlimit;
746
sdbase = cg->spanDeps;
747
sdlimit = sdbase + cg->numSpanDeps;
748
offset = CG_OFFSET(cg);
763
for (sd = sdbase; sd < sdlimit; sd++) {
764
JS_ASSERT(JT_HAS_TAG(sd->target));
767
if (sd->top != top) {
770
JS_ASSERT(top == sd->before);
774
type = (js_CodeSpec[op].format & JOF_TYPEMASK);
775
if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
777
* We already extended all the jump offset operands for
778
* the opcode at sd->top. Jumps and branches have only
779
* one jump offset operand, but switches have many, all
780
* of which are adjacent in cg->spanDeps.
785
JS_ASSERT(type == JOF_JUMP ||
786
type == JOF_TABLESWITCH ||
787
type == JOF_LOOKUPSWITCH);
790
if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
791
span = SD_SPAN(sd, pivot);
792
if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
793
ptrdiff_t deltaFromTop = 0;
798
case JSOP_GOTO: op = JSOP_GOTOX; break;
799
case JSOP_IFEQ: op = JSOP_IFEQX; break;
800
case JSOP_IFNE: op = JSOP_IFNEX; break;
801
case JSOP_OR: op = JSOP_ORX; break;
802
case JSOP_AND: op = JSOP_ANDX; break;
803
case JSOP_GOSUB: op = JSOP_GOSUBX; break;
804
case JSOP_CASE: op = JSOP_CASEX; break;
805
case JSOP_DEFAULT: op = JSOP_DEFAULTX; break;
806
case JSOP_TABLESWITCH: op = JSOP_TABLESWITCHX; break;
807
case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
809
ReportStatementTooLarge(cx, cg);
812
*pc = (jsbytecode) op;
814
for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
817
* sd2->offset already includes delta as it stood
818
* before we entered this loop, but it must also
819
* include the delta relative to top due to all the
820
* extended jump offset immediates for the opcode
821
* starting at top, which we extend in this loop.
823
* If there is only one extended jump offset, then
824
* sd2->offset won't change and this for loop will
827
sd2->offset += deltaFromTop;
828
deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
831
* sd2 comes after sd, and won't be revisited by
832
* the outer for loop, so we have to increase its
833
* offset by delta, not merely by deltaFromTop.
835
sd2->offset += delta;
838
delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
839
UpdateJumpTargets(cg->jumpTargets, sd2->offset,
840
JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
852
printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
853
cg->filename ? cg->filename : "stdin", cg->firstLine,
854
growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
855
passes, offset + growth, offset, growth);
859
* Ensure that we have room for the extended jumps, but don't round up
860
* to a power of two -- we're done generating code, so we cut to fit.
862
limit = CG_LIMIT(cg);
863
length = offset + growth;
864
next = base + length;
866
JS_ASSERT(length > BYTECODE_CHUNK);
867
size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
868
incr = BYTECODE_SIZE(length) - size;
869
JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
871
JS_ReportOutOfMemory(cx);
875
CG_LIMIT(cg) = next = base + length;
880
* Set up a fake span dependency record to guard the end of the code
881
* being generated. This guard record is returned as a fencepost by
882
* FindNearestSpanDep if there is no real spandep at or above a given
883
* unextended code offset.
886
guard.offset = offset + growth;
887
guard.before = offset;
892
* Now work backwards through the span dependencies, copying chunks of
893
* bytecode between each extended jump toward the end of the grown code
894
* space, and restoring immediate offset operands for all jump bytecodes.
895
* The first chunk of bytecodes, starting at base and ending at the first
896
* extended jump offset (NB: this chunk includes the operation bytecode
897
* just before that immediate jump offset), doesn't need to be copied.
899
JS_ASSERT(sd == sdlimit);
901
while (--sd >= sdbase) {
902
if (sd->top != top) {
904
op = (JSOp) base[top];
905
type = (js_CodeSpec[op].format & JOF_TYPEMASK);
907
for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
911
JS_ASSERT(top == sd2->before);
914
oldpc = base + sd->before;
915
span = SD_SPAN(sd, pivot);
918
* If this jump didn't need to be extended, restore its span immediate
919
* offset operand now, overwriting the index of sd within cg->spanDeps
920
* that was stored temporarily after *pc when BuildSpanDepTable ran.
922
* Note that span might fit in 16 bits even for an extended jump op,
923
* if the op has multiple span operands, not all of which overflowed
924
* (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
925
* range for a short jump, but others are not).
927
if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
928
JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
929
SET_JUMP_OFFSET(oldpc, span);
934
* Set up parameters needed to copy the next run of bytecode starting
935
* at offset (which is a cursor into the unextended, original bytecode
936
* vector), down to sd->before (a cursor of the same scale as offset,
937
* it's the index of the original jump pc). Reuse delta to count the
938
* nominal number of bytes to copy.
940
pc = base + sd->offset;
941
delta = offset - sd->before;
942
JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
945
* Don't bother copying the jump offset we're about to reset, but do
946
* copy the bytecode at oldpc (which comes just before its immediate
947
* jump offset operand), on the next iteration through the loop, by
948
* including it in offset's new value.
950
offset = sd->before + 1;
951
size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
953
memmove(pc + 1 + JUMPX_OFFSET_LEN,
954
oldpc + 1 + JUMP_OFFSET_LEN,
958
SET_JUMPX_OFFSET(pc, span);
963
* Fix source note deltas. Don't hardwire the delta fixup adjustment,
964
* even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
965
* at each sd that moved. The future may bring different offset sizes
966
* for span-dependent instruction operands. However, we fix only main
967
* notes here, not prolog notes -- we know that prolog opcodes are not
968
* span-dependent, and aren't likely ever to be.
972
for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
976
* Recall that the offset of a given note includes its delta, and
977
* tells the offset of the annotated bytecode from the main entry
978
* point of the script.
980
offset += SN_DELTA(sn);
981
while (sd < sdlimit && sd->before < offset) {
983
* To compute the delta to add to sn, we need to look at the
984
* spandep after sd, whose offset - (before + growth) tells by
985
* how many bytes sd's instruction grew.
990
delta = sd2->offset - (sd2->before + growth);
992
JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
993
sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
996
snlimit = cg->main.notes + cg->main.noteCount;
1003
* If sn has span-dependent offset operands, check whether each
1004
* covers further span-dependencies, and increase those operands
1005
* accordingly. Some source notes measure offset not from the
1006
* annotated pc, but from that pc plus some small bias. NB: we
1007
* assume that spec->offsetBias can't itself span span-dependent
1010
spec = &js_SrcNoteSpec[SN_TYPE(sn)];
1011
if (spec->isSpanDep) {
1012
pivot = offset + spec->offsetBias;
1014
for (i = 0; i < n; i++) {
1015
span = js_GetSrcNoteOffset(sn, i);
1018
target = pivot + span * spec->isSpanDep;
1019
sd2 = FindNearestSpanDep(cg, target,
1026
* Increase target by sd2's before-vs-after offset delta,
1027
* which is absolute (i.e., relative to start of script,
1028
* as is target). Recompute the span by subtracting its
1029
* adjusted pivot from target.
1031
target += sd2->offset - sd2->before;
1032
span = target - (pivot + growth);
1033
span *= spec->isSpanDep;
1034
noteIndex = sn - cg->main.notes;
1035
if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
1037
sn = cg->main.notes + noteIndex;
1038
snlimit = cg->main.notes + cg->main.noteCount;
1042
cg->main.lastNoteOffset += growth;
1045
* Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
1046
* not clear how we can beat that).
1048
for (tn = cg->tryBase, tnlimit = cg->tryNext; tn < tnlimit; tn++) {
1050
* First, look for the nearest span dependency at/above tn->start.
1051
* There may not be any such spandep, in which case the guard will
1055
sd = FindNearestSpanDep(cg, offset, 0, &guard);
1056
delta = sd->offset - sd->before;
1057
tn->start = offset + delta;
1060
* Next, find the nearest spandep at/above tn->start + tn->length.
1061
* Use its delta minus tn->start's delta to increase tn->length.
1063
length = tn->length;
1064
sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
1066
tn->length = length + sd2->offset - sd2->before - delta;
1069
* Finally, adjust tn->catchStart upward only if it is non-zero,
1070
* and provided there are spandeps below it that grew.
1072
offset = tn->catchStart;
1074
sd = FindNearestSpanDep(cg, offset, sd2 - sdbase, &guard);
1075
tn->catchStart = offset + sd->offset - sd->before;
1080
#ifdef DEBUG_brendan
1084
for (sd = sdbase; sd < sdlimit; sd++) {
1085
offset = sd->offset;
1087
/* NB: sd->top cursors into the original, unextended bytecode vector. */
1088
if (sd->top != top) {
1089
JS_ASSERT(top == -1 ||
1090
!JOF_TYPE_IS_EXTENDED_JUMP(type) ||
1094
JS_ASSERT(top == sd->before);
1095
op = (JSOp) base[offset];
1096
type = (js_CodeSpec[op].format & JOF_TYPEMASK);
1097
JS_ASSERT(type == JOF_JUMP ||
1098
type == JOF_JUMPX ||
1099
type == JOF_TABLESWITCH ||
1100
type == JOF_TABLESWITCHX ||
1101
type == JOF_LOOKUPSWITCH ||
1102
type == JOF_LOOKUPSWITCHX);
1107
if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
1108
span = GET_JUMPX_OFFSET(pc);
1109
if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
1112
JS_ASSERT(type == JOF_TABLESWITCHX ||
1113
type == JOF_LOOKUPSWITCHX);
1116
span = GET_JUMP_OFFSET(pc);
1118
JS_ASSERT(SD_SPAN(sd, pivot) == span);
1120
JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
1125
* Reset so we optimize at most once -- cg may be used for further code
1126
* generation of successive, independent, top-level statements. No jump
1127
* can span top-level statements, because JS lacks goto.
1129
size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
1130
JS_ArenaFreeAllocation(&cx->tempPool, cg->spanDeps,
1131
JS_MAX(size, SPANDEPS_SIZE_MIN));
1132
cg->spanDeps = NULL;
1133
FreeJumpTargets(cg, cg->jumpTargets);
1134
cg->jumpTargets = NULL;
1135
cg->numSpanDeps = cg->numJumpTargets = 0;
1136
cg->spanDepTodo = CG_OFFSET(cg);
1141
EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
1147
extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
1148
if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
1151
jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
1152
if (jmp >= 0 && (extend || cg->spanDeps)) {
1153
pc = CG_CODE(cg, jmp);
1154
if (!AddSpanDep(cx, cg, pc, pc, off))
1161
GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
1168
return GET_JUMP_OFFSET(pc);
1170
sd = GetSpanDep(cg, pc);
1172
if (!JT_HAS_TAG(jt))
1173
return JT_TO_BPDELTA(jt);
1176
while (--sd >= cg->spanDeps && sd->top == top)
1179
return JT_CLR_TAG(jt)->offset - sd->offset;
1183
js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
1186
if (!cg->spanDeps) {
1187
if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
1188
SET_JUMP_OFFSET(pc, off);
1192
if (!BuildSpanDepTable(cx, cg))
1196
return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
1200
js_InStatement(JSTreeContext *tc, JSStmtType type)
1204
for (stmt = tc->topStmt; stmt; stmt = stmt->down) {
1205
if (stmt->type == type)
1212
js_IsGlobalReference(JSTreeContext *tc, JSAtom *atom, JSBool *loopyp)
1219
for (stmt = tc->topStmt; stmt; stmt = stmt->down) {
1220
if (stmt->type == STMT_WITH)
1222
if (STMT_IS_LOOP(stmt)) {
1226
if (stmt->flags & SIF_SCOPE) {
1227
obj = ATOM_TO_OBJECT(stmt->atom);
1228
JS_ASSERT(LOCKED_OBJ_GET_CLASS(obj) == &js_BlockClass);
1229
scope = OBJ_SCOPE(obj);
1230
if (SCOPE_GET_PROPERTY(scope, ATOM_TO_JSID(atom)))
1238
js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
1243
SET_STATEMENT_TOP(stmt, top);
1245
stmt->down = tc->topStmt;
1247
if (STMT_LINKS_SCOPE(stmt)) {
1248
stmt->downScope = tc->topScopeStmt;
1249
tc->topScopeStmt = stmt;
1251
stmt->downScope = NULL;
1256
js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSAtom *blockAtom,
1261
js_PushStatement(tc, stmt, STMT_BLOCK, top);
1262
stmt->flags |= SIF_SCOPE;
1263
blockObj = ATOM_TO_OBJECT(blockAtom);
1264
blockObj->slots[JSSLOT_PARENT] = OBJECT_TO_JSVAL(tc->blockChain);
1265
stmt->downScope = tc->topScopeStmt;
1266
tc->topScopeStmt = stmt;
1267
tc->blockChain = blockObj;
1268
stmt->atom = blockAtom;
1272
* Emit a backpatch op with offset pointing to the previous jump of this type,
1273
* so that we can walk back up the chain fixing up the op and jump offset.
1276
EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
1278
ptrdiff_t offset, delta;
1280
offset = CG_OFFSET(cg);
1281
delta = offset - *lastp;
1283
JS_ASSERT(delta > 0);
1284
return EmitJump(cx, cg, op, delta);
1288
* Macro to emit a bytecode followed by a uint16 immediate operand stored in
1289
* big-endian order, used for arg and var numbers as well as for atomIndexes.
1290
* NB: We use cx and cg from our caller's lexical environment, and return
1293
#define EMIT_UINT16_IMM_OP(op, i) \
1295
if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0) \
1299
/* Emit additional bytecode(s) for non-local jumps. */
1301
EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
1309
* Return from within a try block that has a finally clause must be split
1310
* into two ops: JSOP_SETRVAL, to pop the r.v. and store it in fp->rval;
1311
* and JSOP_RETRVAL, which makes control flow go back to the caller, who
1312
* picks up fp->rval as usual. Otherwise, the stack will be unbalanced
1313
* when executing the finally clause.
1315
* We mutate *returnop once only if we find an enclosing try-block (viz,
1316
* STMT_FINALLY) to ensure that we emit just one JSOP_SETRVAL before one
1317
* or more JSOP_GOSUBs and other fixup opcodes emitted by this function.
1318
* Our caller (the TOK_RETURN case of js_EmitTree) then emits *returnop.
1319
* The fixup opcodes and gosubs must interleave in the proper order, from
1320
* inner statement to outer, so that finally clauses run at the correct
1324
JS_ASSERT(*returnop == JSOP_RETURN);
1325
for (stmt = cg->treeContext.topStmt; stmt != toStmt;
1326
stmt = stmt->down) {
1327
if (stmt->type == STMT_FINALLY ||
1328
((cg->treeContext.flags & TCF_FUN_HEAVYWEIGHT) &&
1329
STMT_MAYBE_SCOPE(stmt))) {
1330
if (js_Emit1(cx, cg, JSOP_SETRVAL) < 0)
1332
*returnop = JSOP_RETRVAL;
1338
* If there are no try-with-finally blocks open around this return
1339
* statement, we can generate a return forthwith and skip generating
1342
if (*returnop == JSOP_RETURN)
1347
* The non-local jump fixup we emit will unbalance cg->stackDepth, because
1348
* the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
1349
* end of a with statement, so we save cg->stackDepth here and restore it
1350
* just before a successful return.
1352
depth = cg->stackDepth;
1353
for (stmt = cg->treeContext.topStmt; stmt != toStmt; stmt = stmt->down) {
1354
switch (stmt->type) {
1356
if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1358
jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt));
1364
/* There's a With object on the stack that we need to pop. */
1365
if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1367
if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
1371
case STMT_FOR_IN_LOOP:
1373
* The iterator and the object being iterated need to be popped.
1375
if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1377
if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
1381
case STMT_SUBROUTINE:
1383
* There's a [exception or hole, retsub pc-index] pair on the
1384
* stack that we need to pop.
1386
if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1388
if (js_Emit1(cx, cg, JSOP_POP2) < 0)
1395
if (stmt->flags & SIF_SCOPE) {
1398
/* There is a Block object with locals on the stack to pop. */
1399
if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1401
i = OBJ_BLOCK_COUNT(cx, ATOM_TO_OBJECT(stmt->atom));
1402
EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i);
1406
cg->stackDepth = depth;
1411
EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
1412
ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
1416
if (!EmitNonLocalJumpFixup(cx, cg, toStmt, NULL))
1420
index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
1421
else if (noteType != SRC_NULL)
1422
index = js_NewSrcNote(cx, cg, noteType);
1428
return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
1432
BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
1433
jsbytecode *target, jsbytecode op)
1435
jsbytecode *pc, *stop;
1436
ptrdiff_t delta, span;
1438
pc = CG_CODE(cg, last);
1439
stop = CG_CODE(cg, -1);
1440
while (pc != stop) {
1441
delta = GetJumpOffset(cg, pc);
1442
span = PTRDIFF(target, pc, jsbytecode);
1443
CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);
1446
* Set *pc after jump offset in case bpdelta didn't overflow, but span
1447
* does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
1448
* and need to see the JSOP_BACKPATCH* op at *pc).
1457
js_PopStatement(JSTreeContext *tc)
1463
tc->topStmt = stmt->down;
1464
if (STMT_LINKS_SCOPE(stmt)) {
1465
tc->topScopeStmt = stmt->downScope;
1466
if (stmt->flags & SIF_SCOPE) {
1467
blockObj = ATOM_TO_OBJECT(stmt->atom);
1468
tc->blockChain = JSVAL_TO_OBJECT(blockObj->slots[JSSLOT_PARENT]);
1474
js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
1478
stmt = cg->treeContext.topStmt;
1479
if (!STMT_IS_TRYING(stmt) &&
1480
(!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
1481
!BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
1485
js_PopStatement(&cg->treeContext);
1490
js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1496
JSAtomListElement *ale;
1498
/* XXX just do numbers for now */
1499
if (pn->pn_type == TOK_NUMBER) {
1501
valueAtom = (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival))
1502
? js_AtomizeInt(cx, ival, 0)
1503
: js_AtomizeDouble(cx, dval, 0);
1506
ale = js_IndexAtom(cx, atom, &cg->constList);
1509
ALE_SET_VALUE(ale, ATOM_KEY(valueAtom));
1515
js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSBool letdecl)
1520
JSScopeProperty *sprop;
1523
for (stmt = tc->topScopeStmt; stmt; stmt = stmt->downScope) {
1524
if (stmt->type == STMT_WITH) {
1525
/* Ignore with statements enclosing a single let declaration. */
1531
/* Skip "maybe scope" statements that don't contain let bindings. */
1532
if (!(stmt->flags & SIF_SCOPE))
1535
obj = ATOM_TO_OBJECT(stmt->atom);
1536
JS_ASSERT(LOCKED_OBJ_GET_CLASS(obj) == &js_BlockClass);
1537
scope = OBJ_SCOPE(obj);
1538
sprop = SCOPE_GET_PROPERTY(scope, ATOM_TO_JSID(atom));
1540
JS_ASSERT(sprop->flags & SPROP_HAS_SHORTID);
1544
* Use LOCKED_OBJ_GET_SLOT since we know obj is single-
1545
* threaded and owned by this compiler activation.
1547
v = LOCKED_OBJ_GET_SLOT(obj, JSSLOT_BLOCK_DEPTH);
1548
JS_ASSERT(JSVAL_IS_INT(v) && JSVAL_TO_INT(v) >= 0);
1549
*slotp = JSVAL_TO_INT(v) + sprop->shortid;
1561
js_LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1568
JSAtomListElement *ale;
1569
JSObject *obj, *pobj;
1574
* fp chases cg down the stack, but only until we reach the outermost cg.
1575
* This enables propagating consts from top-level into switch cases in a
1576
* function compiled along with the top-level script. All stack frames
1577
* with matching code generators should be flagged with JSFRAME_COMPILING;
1578
* we check sanity here.
1584
JS_ASSERT(fp->flags & JSFRAME_COMPILING);
1587
if (obj == fp->scopeChain) {
1588
/* XXX this will need revising when 'let const' is added. */
1589
stmt = js_LexicalLookup(&cg->treeContext, atom, &slot, JS_FALSE);
1593
ATOM_LIST_SEARCH(ale, &cg->constList, atom);
1595
*vp = ALE_VALUE(ale);
1600
* Try looking in the variable object for a direct property that
1601
* is readonly and permanent. We know such a property can't be
1602
* shadowed by another property on obj's prototype chain, or a
1603
* with object or catch variable; nor can prop's value be changed,
1604
* nor can prop be deleted.
1607
if (OBJ_GET_CLASS(cx, obj) == &js_FunctionClass) {
1608
ok = js_LookupHiddenProperty(cx, obj, ATOM_TO_JSID(atom),
1614
JSScopeProperty *sprop = (JSScopeProperty *)prop;
1617
* Any hidden property must be a formal arg or local var,
1618
* which will shadow a global const of the same name.
1620
JS_ASSERT(sprop->getter == js_GetArgument ||
1621
sprop->getter == js_GetLocalVariable);
1623
OBJ_DROP_PROPERTY(cx, pobj, prop);
1628
ok = OBJ_LOOKUP_PROPERTY(cx, obj, ATOM_TO_JSID(atom), &pobj, &prop);
1631
(fp->flags & (JSFRAME_EVAL | JSFRAME_COMPILE_N_GO))) {
1633
* We're compiling code that will be executed immediately,
1634
* not re-executed against a different scope chain and/or
1635
* variable object. Therefore we can get constant values
1636
* from our variable object here.
1638
ok = OBJ_GET_ATTRIBUTES(cx, obj, ATOM_TO_JSID(atom), prop,
1640
if (ok && !(~attrs & (JSPROP_READONLY | JSPROP_PERMANENT)))
1641
ok = OBJ_GET_PROPERTY(cx, obj, ATOM_TO_JSID(atom), vp);
1644
OBJ_DROP_PROPERTY(cx, pobj, prop);
1650
} while ((cg = cg->parent) != NULL);
1655
* Allocate an index invariant for all activations of the code being compiled
1656
* in cg, that can be used to store and fetch a reference to a cloned RegExp
1657
* object that shares the same JSRegExp private data created for the object
1658
* literal in pn->pn_atom. We need clones to hold lastIndex and other direct
1659
* properties that should not be shared among threads sharing a precompiled
1660
* function or script.
1662
* If the code being compiled is function code, allocate a reserved slot in
1663
* the cloned function object that shares its precompiled script with other
1664
* cloned function objects and with the compiler-created clone-parent. There
1665
* are fun->nregexps such reserved slots in each function object cloned from
1666
* fun->object. NB: during compilation, funobj slots must never be allocated,
1667
* because js_AllocSlot could hand out one of the slots that should be given
1668
* to a regexp clone.
1670
* If the code being compiled is global code, reserve the fp->vars slot at
1671
* ALE_INDEX(ale), by ensuring that cg->treeContext.numGlobalVars is at least
1672
* one more than this index. For global code, fp->vars is parallel to the
1673
* global script->atomMap.vector array, but possibly shorter for the common
1674
* case (where var declarations and regexp literals cluster toward the front
1675
* of the script or function body).
1677
* Global variable name literals in script->atomMap have fast-global slot
1678
* numbers (stored as int-tagged jsvals) in the corresponding fp->vars array
1679
* element. The atomIndex for a regexp object literal thus also addresses an
1680
* fp->vars element that is not used by any optimized global variable, so we
1681
* use that GC-scanned element to keep the regexp object clone alive, as well
1682
* as to lazily create and find it at run-time for the JSOP_REGEXP bytecode.
1684
* In no case can cx->fp->varobj be a Call object here, because that implies
1685
* we are compiling eval code, in which case (cx->fp->flags & JSFRAME_EVAL)
1686
* is true, and js_GetToken will have already selected JSOP_OBJECT instead of
1687
* JSOP_REGEXP, to avoid all this RegExp object cloning business.
1689
* Why clone regexp objects? ECMA specifies that when a regular expression
1690
* literal is scanned, a RegExp object is created. In the spec, compilation
1691
* and execution happen indivisibly, but in this implementation and many of
1692
* its embeddings, code is precompiled early and re-executed in multiple
1693
* threads, or using multiple global objects, or both, for efficiency.
1695
* In such cases, naively following ECMA leads to wrongful sharing of RegExp
1696
* objects, which makes for collisions on the lastIndex property (especially
1697
* for global regexps) and on any ad-hoc properties. Also, __proto__ and
1698
* __parent__ refer to the pre-compilation prototype and global objects, a
1699
* pigeon-hole problem for instanceof tests.
1702
IndexRegExpClone(JSContext *cx, JSParseNode *pn, JSAtomListElement *ale,
1703
JSCodeGenerator *cg)
1705
JSObject *varobj, *reobj;
1712
JS_ASSERT(!(cx->fp->flags & (JSFRAME_EVAL | JSFRAME_COMPILE_N_GO)));
1714
varobj = cx->fp->varobj;
1715
clasp = OBJ_GET_CLASS(cx, varobj);
1716
if (clasp == &js_FunctionClass) {
1717
fun = (JSFunction *) JS_GetPrivate(cx, varobj);
1718
countPtr = &fun->u.i.nregexps;
1719
cloneIndex = *countPtr;
1721
JS_ASSERT(clasp != &js_CallClass);
1722
countPtr = &cg->treeContext.numGlobalVars;
1723
cloneIndex = ALE_INDEX(ale);
1726
if ((cloneIndex + 1) >> 16) {
1727
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
1728
JSMSG_NEED_DIET, js_script_str);
1731
if (cloneIndex >= *countPtr)
1732
*countPtr = cloneIndex + 1;
1734
reobj = ATOM_TO_OBJECT(pn->pn_atom);
1735
JS_ASSERT(OBJ_GET_CLASS(cx, reobj) == &js_RegExpClass);
1736
re = (JSRegExp *) JS_GetPrivate(cx, reobj);
1737
re->cloneIndex = cloneIndex;
1742
* Emit a bytecode and its 2-byte constant (atom) index immediate operand.
1743
* If the atomIndex requires more than 2 bytes, emit a prefix op whose 24-bit
1744
* immediate operand indexes the atom in script->atomMap.
1746
* If op has JOF_NAME mode, emit JSOP_FINDNAME to find and push the object in
1747
* the scope chain in which the literal name was found, followed by the name
1748
* as a string. This enables us to use the JOF_ELEM counterpart to op.
1750
* Otherwise, if op has JOF_PROP mode, emit JSOP_LITERAL before op, to push
1751
* the atom's value key. For JOF_PROP ops, the object being operated on has
1752
* already been pushed, and JSOP_LITERAL will push the id, leaving the stack
1753
* in the proper state for a JOF_ELEM counterpart.
1755
* Otherwise, emit JSOP_LITOPX to push the atom index, then perform a special
1756
* dispatch on op, but getting op's atom index from the stack instead of from
1757
* an unsigned 16-bit immediate operand.
1760
EmitAtomIndexOp(JSContext *cx, JSOp op, jsatomid atomIndex, JSCodeGenerator *cg)
1767
if (atomIndex >= JS_BIT(16)) {
1768
mode = (js_CodeSpec[op].format & JOF_MODEMASK);
1769
if (op != JSOP_SETNAME) {
1770
prefixOp = ((mode != JOF_NAME && mode != JOF_PROP) ||
1771
#if JS_HAS_XML_SUPPORT
1772
op == JSOP_GETMETHOD ||
1773
op == JSOP_SETMETHOD ||
1775
op == JSOP_SETCONST)
1777
: (mode == JOF_NAME)
1780
off = js_EmitN(cx, cg, prefixOp, 3);
1783
pc = CG_CODE(cg, off);
1784
SET_LITERAL_INDEX(pc, atomIndex);
1788
case JSOP_DECNAME: op = JSOP_DECELEM; break;
1789
case JSOP_DECPROP: op = JSOP_DECELEM; break;
1790
case JSOP_DELNAME: op = JSOP_DELELEM; break;
1791
case JSOP_DELPROP: op = JSOP_DELELEM; break;
1792
case JSOP_FORNAME: op = JSOP_FORELEM; break;
1793
case JSOP_FORPROP: op = JSOP_FORELEM; break;
1794
case JSOP_GETPROP: op = JSOP_GETELEM; break;
1795
case JSOP_GETXPROP: op = JSOP_GETXELEM; break;
1796
case JSOP_IMPORTPROP: op = JSOP_IMPORTELEM; break;
1797
case JSOP_INCNAME: op = JSOP_INCELEM; break;
1798
case JSOP_INCPROP: op = JSOP_INCELEM; break;
1799
case JSOP_INITPROP: op = JSOP_INITELEM; break;
1800
case JSOP_NAME: op = JSOP_GETELEM; break;
1801
case JSOP_NAMEDEC: op = JSOP_ELEMDEC; break;
1802
case JSOP_NAMEINC: op = JSOP_ELEMINC; break;
1803
case JSOP_PROPDEC: op = JSOP_ELEMDEC; break;
1804
case JSOP_PROPINC: op = JSOP_ELEMINC; break;
1805
case JSOP_BINDNAME: return JS_TRUE;
1806
case JSOP_SETNAME: op = JSOP_SETELEM; break;
1807
case JSOP_SETPROP: op = JSOP_SETELEM; break;
1808
#if JS_HAS_EXPORT_IMPORT
1809
case JSOP_EXPORTNAME:
1810
ReportStatementTooLarge(cx, cg);
1814
#if JS_HAS_XML_SUPPORT
1815
JS_ASSERT(mode == 0 || op == JSOP_SETCONST ||
1816
op == JSOP_GETMETHOD || op == JSOP_SETMETHOD);
1818
JS_ASSERT(mode == 0 || op == JSOP_SETCONST);
1823
return js_Emit1(cx, cg, op) >= 0;
1826
EMIT_UINT16_IMM_OP(op, atomIndex);
1831
* Slight sugar for EmitAtomIndexOp, again accessing cx and cg from the macro
1832
* caller's lexical environment, and embedding a false return on error.
1833
* XXXbe hey, who checks for fun->nvars and fun->nargs overflow?!
1835
#define EMIT_ATOM_INDEX_OP(op, atomIndex) \
1837
if (!EmitAtomIndexOp(cx, op, atomIndex, cg)) \
1842
EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
1844
JSAtomListElement *ale;
1846
ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
1849
if (op == JSOP_REGEXP && !IndexRegExpClone(cx, pn, ale, cg))
1851
return EmitAtomIndexOp(cx, op, ALE_INDEX(ale), cg);
1855
* This routine tries to optimize name gets and sets to stack slot loads and
1856
* stores, given the variables object and scope chain in cx's top frame, the
1857
* compile-time context in tc, and a TOK_NAME node pn. It returns false on
1858
* error, true on success.
1860
* The caller can inspect pn->pn_slot for a non-negative slot number to tell
1861
* whether optimization occurred, in which case BindNameToSlot also updated
1862
* pn->pn_op. If pn->pn_slot is still -1 on return, pn->pn_op nevertheless
1863
* may have been optimized, e.g., from JSOP_NAME to JSOP_ARGUMENTS. Whether
1864
* or not pn->pn_op was modified, if this function finds an argument or local
1865
* variable name, pn->pn_attrs will contain the property's attributes after a
1866
* successful return.
1868
* NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1869
* to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
1873
BindNameToSlot(JSContext *cx, JSTreeContext *tc, JSParseNode *pn,
1881
JSObject *obj, *pobj;
1883
JSBool optimizeGlobals;
1884
JSPropertyOp getter;
1886
JSAtomListElement *ale;
1888
JSScopeProperty *sprop;
1890
JS_ASSERT(pn->pn_type == TOK_NAME);
1891
if (pn->pn_slot >= 0 || pn->pn_op == JSOP_ARGUMENTS)
1894
/* QNAME references can never be optimized to use arg/var storage. */
1895
if (pn->pn_op == JSOP_QNAMEPART)
1899
* We can't optimize if we are compiling a with statement and its body,
1900
* or we're in a catch block whose exception variable has the same name
1901
* as this node. FIXME: we should be able to optimize catch vars to be
1905
stmt = js_LexicalLookup(tc, atom, &slot, letdecl);
1907
if (stmt->type == STMT_WITH)
1910
JS_ASSERT(stmt->flags & SIF_SCOPE);
1911
JS_ASSERT(slot >= 0);
1914
case JSOP_NAME: op = JSOP_GETLOCAL; break;
1915
case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
1916
case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
1917
case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
1918
case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
1919
case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
1920
case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
1921
case JSOP_DELNAME: op = JSOP_FALSE; break;
1922
default: JS_ASSERT(0);
1924
if (op != pn->pn_op) {
1932
* A Script object can be used to split an eval into a compile step done
1933
* at construction time, and an execute step done separately, possibly in
1934
* a different scope altogether. We therefore cannot do any name-to-slot
1935
* optimizations, but must lookup names at runtime. Note that script_exec
1936
* ensures that its caller's frame has a Call object, so arg and var name
1937
* lookups will succeed.
1940
if (fp->flags & JSFRAME_SCRIPT_OBJECT)
1944
* We can't optimize if var and closure (a local function not in a larger
1945
* expression and not at top-level within another's body) collide.
1946
* XXX suboptimal: keep track of colliding names and deoptimize only those
1948
if (tc->flags & TCF_FUN_CLOSURE_VS_VAR)
1952
* We can't optimize if we're not compiling a function body, whether via
1953
* eval, or directly when compiling a function statement or expression.
1956
clasp = OBJ_GET_CLASS(cx, obj);
1957
if (clasp != &js_FunctionClass && clasp != &js_CallClass) {
1958
/* Check for an eval or debugger frame. */
1959
if (fp->flags & JSFRAME_SPECIAL)
1963
* Optimize global variable accesses if there are at least 100 uses
1964
* in unambiguous contexts, or failing that, if least half of all the
1965
* uses of global vars/consts/functions are in loops.
1967
optimizeGlobals = (tc->globalUses >= 100 ||
1968
(tc->loopyGlobalUses &&
1969
tc->loopyGlobalUses >= tc->globalUses / 2));
1970
if (!optimizeGlobals)
1973
optimizeGlobals = JS_FALSE;
1977
* We can't optimize if we are in an eval called inside a with statement.
1979
if (fp->scopeChain != obj)
1985
attrs = slot = 0; /* quell GCC overwarning */
1987
if (optimizeGlobals) {
1989
* We are optimizing global variables, and there is no pre-existing
1990
* global property named atom. If atom was declared via const or var,
1991
* optimize pn to access fp->vars using the appropriate JOF_QVAR op.
1993
ATOM_LIST_SEARCH(ale, &tc->decls, atom);
1995
/* Use precedes declaration, or name is never declared. */
1999
attrs = (ALE_JSOP(ale) == JSOP_DEFCONST)
2000
? JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT
2001
: JSPROP_ENUMERATE | JSPROP_PERMANENT;
2003
/* Index atom so we can map fast global number to name. */
2004
JS_ASSERT(tc->flags & TCF_COMPILING);
2005
ale = js_IndexAtom(cx, atom, &((JSCodeGenerator *) tc)->atomList);
2009
/* Defend against tc->numGlobalVars 16-bit overflow. */
2010
slot = ALE_INDEX(ale);
2011
if ((slot + 1) >> 16)
2014
if ((uint16)(slot + 1) > tc->numGlobalVars)
2015
tc->numGlobalVars = (uint16)(slot + 1);
2018
* We may be able to optimize name to stack slot. Look for an argument
2019
* or variable property in the function, or its call object, not found
2020
* in any prototype object. Rewrite pn_op and update pn accordingly.
2021
* NB: We know that JSOP_DELNAME on an argument or variable evaluates
2022
* to false, due to JSPROP_PERMANENT.
2024
if (!js_LookupHiddenProperty(cx, obj, ATOM_TO_JSID(atom), &pobj, &prop))
2026
sprop = (JSScopeProperty *) prop;
2029
getter = sprop->getter;
2030
attrs = sprop->attrs;
2031
slot = (sprop->flags & SPROP_HAS_SHORTID) ? sprop->shortid : -1;
2033
OBJ_DROP_PROPERTY(cx, pobj, prop);
2037
if (optimizeGlobals || getter) {
2038
if (optimizeGlobals) {
2040
case JSOP_NAME: op = JSOP_GETGVAR; break;
2041
case JSOP_SETNAME: op = JSOP_SETGVAR; break;
2042
case JSOP_SETCONST: /* NB: no change */ break;
2043
case JSOP_INCNAME: op = JSOP_INCGVAR; break;
2044
case JSOP_NAMEINC: op = JSOP_GVARINC; break;
2045
case JSOP_DECNAME: op = JSOP_DECGVAR; break;
2046
case JSOP_NAMEDEC: op = JSOP_GVARDEC; break;
2047
case JSOP_FORNAME: /* NB: no change */ break;
2048
case JSOP_DELNAME: /* NB: no change */ break;
2049
default: JS_ASSERT(0);
2051
} else if (getter == js_GetLocalVariable ||
2052
getter == js_GetCallVariable) {
2054
case JSOP_NAME: op = JSOP_GETVAR; break;
2055
case JSOP_SETNAME: op = JSOP_SETVAR; break;
2056
case JSOP_SETCONST: op = JSOP_SETVAR; break;
2057
case JSOP_INCNAME: op = JSOP_INCVAR; break;
2058
case JSOP_NAMEINC: op = JSOP_VARINC; break;
2059
case JSOP_DECNAME: op = JSOP_DECVAR; break;
2060
case JSOP_NAMEDEC: op = JSOP_VARDEC; break;
2061
case JSOP_FORNAME: op = JSOP_FORVAR; break;
2062
case JSOP_DELNAME: op = JSOP_FALSE; break;
2063
default: JS_ASSERT(0);
2065
} else if (getter == js_GetArgument ||
2066
(getter == js_CallClass.getProperty &&
2067
fp->fun && (uintN) slot < fp->fun->nargs)) {
2069
case JSOP_NAME: op = JSOP_GETARG; break;
2070
case JSOP_SETNAME: op = JSOP_SETARG; break;
2071
case JSOP_INCNAME: op = JSOP_INCARG; break;
2072
case JSOP_NAMEINC: op = JSOP_ARGINC; break;
2073
case JSOP_DECNAME: op = JSOP_DECARG; break;
2074
case JSOP_NAMEDEC: op = JSOP_ARGDEC; break;
2075
case JSOP_FORNAME: op = JSOP_FORARG; break;
2076
case JSOP_DELNAME: op = JSOP_FALSE; break;
2077
default: JS_ASSERT(0);
2080
if (op != pn->pn_op) {
2084
pn->pn_attrs = attrs;
2087
if (pn->pn_slot < 0) {
2089
* We couldn't optimize pn, so it's not a global or local slot name.
2090
* Now we must check for the predefined arguments variable. It may be
2091
* overridden by assignment, in which case the function is heavyweight
2092
* and the interpreter will look up 'arguments' in the function's call
2095
if (pn->pn_op == JSOP_NAME &&
2096
atom == cx->runtime->atomState.argumentsAtom) {
2097
pn->pn_op = JSOP_ARGUMENTS;
2101
tc->flags |= TCF_FUN_USES_NONLOCALS;
2107
* If pn contains a useful expression, return true with *answer set to true.
2108
* If pn contains a useless expression, return true with *answer set to false.
2109
* Return false on error.
2111
* The caller should initialize *answer to false and invoke this function on
2112
* an expression statement or similar subtree to decide whether the tree could
2113
* produce code that has any side effects. For an expression statement, we
2114
* define useless code as code with no side effects, because the main effect,
2115
* the value left on the stack after the code executes, will be discarded by a
2119
CheckSideEffects(JSContext *cx, JSTreeContext *tc, JSParseNode *pn,
2130
switch (pn->pn_arity) {
2133
* A named function is presumed useful: we can't yet know that it is
2134
* not called. The side effects are the creation of a scope object
2135
* to parent this function object, and the binding of the function's
2136
* name in that scope object. See comments at case JSOP_NAMEDFUNOBJ:
2139
fun = (JSFunction *) JS_GetPrivate(cx, ATOM_TO_OBJECT(pn->pn_funAtom));
2145
if (pn->pn_type == TOK_NEW ||
2146
pn->pn_type == TOK_LP ||
2147
pn->pn_type == TOK_LB ||
2148
pn->pn_type == TOK_RB ||
2149
pn->pn_type == TOK_RC) {
2151
* All invocation operations (construct: TOK_NEW, call: TOK_LP)
2152
* are presumed to be useful, because they may have side effects
2153
* even if their main effect (their return value) is discarded.
2155
* TOK_LB binary trees of 3 or more nodes are flattened into lists
2156
* to avoid too much recursion. All such lists must be presumed
2157
* to be useful because each index operation could invoke a getter
2158
* (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
2159
* does not apply here: arguments[i][j] might invoke a getter).
2161
* Array and object initializers (TOK_RB and TOK_RC lists) must be
2162
* considered useful, because they are sugar for constructor calls
2163
* (to Array and Object, respectively).
2167
for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
2168
ok &= CheckSideEffects(cx, tc, pn2, answer);
2173
ok = CheckSideEffects(cx, tc, pn->pn_kid1, answer) &&
2174
CheckSideEffects(cx, tc, pn->pn_kid2, answer) &&
2175
CheckSideEffects(cx, tc, pn->pn_kid3, answer);
2179
if (pn->pn_type == TOK_ASSIGN) {
2181
* Assignment is presumed to be useful, even if the next operation
2182
* is another assignment overwriting this one's ostensible effect,
2183
* because the left operand may be a property with a setter that
2186
* The only exception is assignment of a useless value to a const
2187
* declared in the function currently being compiled.
2190
if (pn2->pn_type != TOK_NAME) {
2193
if (!BindNameToSlot(cx, tc, pn2, JS_FALSE))
2195
if (!CheckSideEffects(cx, tc, pn->pn_right, answer))
2198
(pn2->pn_slot < 0 || !(pn2->pn_attrs & JSPROP_READONLY))) {
2203
if (pn->pn_type == TOK_LB) {
2205
if (pn2->pn_type == TOK_NAME &&
2206
!BindNameToSlot(cx, tc, pn2, JS_FALSE)) {
2209
if (pn2->pn_op != JSOP_ARGUMENTS) {
2211
* Any indexed property reference could call a getter with
2212
* side effects, except for arguments[i] where arguments is
2218
ok = CheckSideEffects(cx, tc, pn->pn_left, answer) &&
2219
CheckSideEffects(cx, tc, pn->pn_right, answer);
2224
if (pn->pn_type == TOK_INC || pn->pn_type == TOK_DEC ||
2225
pn->pn_type == TOK_THROW ||
2226
#if JS_HAS_GENERATORS
2227
pn->pn_type == TOK_YIELD ||
2229
pn->pn_type == TOK_DEFSHARP) {
2230
/* All these operations have effects that we must commit. */
2232
} else if (pn->pn_type == TOK_DELETE) {
2234
switch (pn2->pn_type) {
2237
#if JS_HAS_XML_SUPPORT
2240
#if JS_HAS_LVALUE_RETURN
2244
/* All these delete addressing modes have effects too. */
2248
ok = CheckSideEffects(cx, tc, pn2, answer);
2252
ok = CheckSideEffects(cx, tc, pn->pn_kid, answer);
2258
* Take care to avoid trying to bind a label name (labels, both for
2259
* statements and property values in object initialisers, have pn_op
2260
* defaulted to JSOP_NOP).
2262
if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
2263
if (!BindNameToSlot(cx, tc, pn, JS_FALSE))
2265
if (pn->pn_slot < 0 && pn->pn_op != JSOP_ARGUMENTS) {
2267
* Not an argument or local variable use, so this expression
2268
* could invoke a getter that has side effects.
2274
if (pn->pn_type == TOK_DOT) {
2275
if (pn2->pn_type == TOK_NAME &&
2276
!BindNameToSlot(cx, tc, pn2, JS_FALSE)) {
2279
if (!(pn2->pn_op == JSOP_ARGUMENTS &&
2280
pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
2282
* Any dotted property reference could call a getter, except
2283
* for arguments.length where arguments is unambiguous.
2288
ok = CheckSideEffects(cx, tc, pn2, answer);
2292
if (pn->pn_type == TOK_DEBUGGER)
2300
* Secret handshake with js_EmitTree's TOK_LP/TOK_NEW case logic, to flag all
2301
* uses of JSOP_GETMETHOD that implicitly qualify the method property's name
2302
* with a function:: prefix. All other JSOP_GETMETHOD and JSOP_SETMETHOD uses
2303
* must be explicit, so we need a distinct source note (SRC_METHODBASE rather
2304
* than SRC_PCBASE) for round-tripping through the beloved decompiler.
2306
#define JSPROP_IMPLICIT_FUNCTION_NAMESPACE 0x100
2309
SrcNoteForPropOp(JSParseNode *pn, JSOp op)
2311
return ((op == JSOP_GETMETHOD &&
2312
!(pn->pn_attrs & JSPROP_IMPLICIT_FUNCTION_NAMESPACE)) ||
2313
op == JSOP_SETMETHOD)
2319
EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2321
JSParseNode *pn2, *pndot, *pnup, *pndown;
2325
if (op == JSOP_GETPROP &&
2326
pn->pn_type == TOK_DOT &&
2327
pn2->pn_type == TOK_NAME) {
2328
/* Try to optimize arguments.length into JSOP_ARGCNT. */
2329
if (!BindNameToSlot(cx, &cg->treeContext, pn2, JS_FALSE))
2331
if (pn2->pn_op == JSOP_ARGUMENTS &&
2332
pn->pn_atom == cx->runtime->atomState.lengthAtom) {
2333
return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
2338
* If the object operand is also a dotted property reference, reverse the
2339
* list linked via pn_expr temporarily so we can iterate over it from the
2340
* bottom up (reversing again as we go), to avoid excessive recursion.
2342
if (pn2->pn_type == TOK_DOT) {
2345
top = CG_OFFSET(cg);
2347
/* Reverse pndot->pn_expr to point up, not down. */
2348
pndot->pn_offset = top;
2349
pndown = pndot->pn_expr;
2350
pndot->pn_expr = pnup;
2351
if (pndown->pn_type != TOK_DOT)
2357
/* pndown is a primary expression, not a dotted property reference. */
2358
if (!js_EmitTree(cx, cg, pndown))
2362
/* Walk back up the list, emitting annotated name ops. */
2363
if (js_NewSrcNote2(cx, cg, SrcNoteForPropOp(pndot, pndot->pn_op),
2364
CG_OFFSET(cg) - pndown->pn_offset) < 0) {
2367
if (!EmitAtomOp(cx, pndot, pndot->pn_op, cg))
2370
/* Reverse the pn_expr link again. */
2371
pnup = pndot->pn_expr;
2372
pndot->pn_expr = pndown;
2374
} while ((pndot = pnup) != NULL);
2376
if (!js_EmitTree(cx, cg, pn2))
2380
if (js_NewSrcNote2(cx, cg, SrcNoteForPropOp(pn, op),
2381
CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2385
JS_ASSERT(op == JSOP_IMPORTALL);
2386
if (js_Emit1(cx, cg, op) < 0)
2389
if (!EmitAtomOp(cx, pn, op, cg))
2396
EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2399
JSParseNode *left, *right, *next, ltmp, rtmp;
2402
top = CG_OFFSET(cg);
2403
if (pn->pn_arity == PN_LIST) {
2404
/* Left-associative operator chain to avoid too much recursion. */
2405
JS_ASSERT(pn->pn_op == JSOP_GETELEM || pn->pn_op == JSOP_IMPORTELEM);
2406
JS_ASSERT(pn->pn_count >= 3);
2408
right = PN_LAST(pn);
2409
next = left->pn_next;
2410
JS_ASSERT(next != right);
2413
* Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2414
* one or more index expression and JSOP_GETELEM op pairs.
2416
if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
2417
if (!BindNameToSlot(cx, &cg->treeContext, left, JS_FALSE))
2419
if (left->pn_op == JSOP_ARGUMENTS &&
2420
JSDOUBLE_IS_INT(next->pn_dval, slot) &&
2421
(jsuint)slot < JS_BIT(16)) {
2422
left->pn_offset = next->pn_offset = top;
2423
EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2425
next = left->pn_next;
2430
* Check whether we generated JSOP_ARGSUB, just above, and have only
2431
* one more index expression to emit. Given arguments[0][j], we must
2432
* skip the while loop altogether, falling through to emit code for j
2433
* (in the subtree referenced by right), followed by the annotated op,
2434
* at the bottom of this function.
2436
JS_ASSERT(next != right || pn->pn_count == 3);
2437
if (left == pn->pn_head) {
2438
if (!js_EmitTree(cx, cg, left))
2441
while (next != right) {
2442
if (!js_EmitTree(cx, cg, next))
2444
if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2446
if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
2448
next = next->pn_next;
2451
if (pn->pn_arity == PN_NAME) {
2453
* Set left and right so pn appears to be a TOK_LB node, instead
2454
* of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
2455
* EmitDestructuringOps nearer below. In the destructuring case,
2456
* the base expression (pn_expr) of the name may be null, which
2457
* means we have to emit a JSOP_BINDNAME.
2462
left->pn_type = TOK_OBJECT;
2463
left->pn_op = JSOP_BINDNAME;
2464
left->pn_arity = PN_NULLARY;
2465
left->pn_pos = pn->pn_pos;
2466
left->pn_atom = pn->pn_atom;
2469
right->pn_type = TOK_STRING;
2470
JS_ASSERT(ATOM_IS_STRING(pn->pn_atom));
2471
right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom))
2474
right->pn_arity = PN_NULLARY;
2475
right->pn_pos = pn->pn_pos;
2476
right->pn_atom = pn->pn_atom;
2478
JS_ASSERT(pn->pn_arity == PN_BINARY);
2480
right = pn->pn_right;
2483
/* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2484
if (op == JSOP_GETELEM &&
2485
left->pn_type == TOK_NAME &&
2486
right->pn_type == TOK_NUMBER) {
2487
if (!BindNameToSlot(cx, &cg->treeContext, left, JS_FALSE))
2489
if (left->pn_op == JSOP_ARGUMENTS &&
2490
JSDOUBLE_IS_INT(right->pn_dval, slot) &&
2491
(jsuint)slot < JS_BIT(16)) {
2492
left->pn_offset = right->pn_offset = top;
2493
EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2498
if (!js_EmitTree(cx, cg, left))
2502
/* The right side of the descendant operator is implicitly quoted. */
2503
JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING ||
2504
right->pn_op == JSOP_QNAMEPART);
2505
if (!js_EmitTree(cx, cg, right))
2507
if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2509
return js_Emit1(cx, cg, op) >= 0;
2513
EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
2520
JSAtomListElement *ale;
2522
if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
2524
return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
2526
return js_Emit1(cx, cg, JSOP_ONE) >= 0;
2528
atomIndex = (jsatomid)ival;
2529
if (atomIndex < JS_BIT(16)) {
2530
EMIT_UINT16_IMM_OP(JSOP_UINT16, atomIndex);
2534
if (atomIndex < JS_BIT(24)) {
2535
off = js_EmitN(cx, cg, JSOP_UINT24, 3);
2538
pc = CG_CODE(cg, off);
2539
SET_LITERAL_INDEX(pc, atomIndex);
2543
atom = js_AtomizeInt(cx, ival, 0);
2545
atom = js_AtomizeDouble(cx, dval, 0);
2550
ale = js_IndexAtom(cx, atom, &cg->atomList);
2553
return EmitAtomIndexOp(cx, JSOP_NUMBER, ALE_INDEX(ale), cg);
2557
EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2558
JSStmtInfo *stmtInfo)
2561
JSBool ok, hasDefault, constPropagated;
2562
ptrdiff_t top, off, defaultOffset;
2563
JSParseNode *pn2, *pn3, *pn4;
2564
uint32 caseCount, tableLength;
2565
JSParseNode **table;
2570
JSAtomListElement *ale;
2572
size_t switchSize, tableSize;
2573
jsbytecode *pc, *savepc;
2574
#if JS_HAS_BLOCK_SCOPE
2579
/* Try for most optimal, fall back if not dense ints, and per ECMAv2. */
2580
switchOp = JSOP_TABLESWITCH;
2582
hasDefault = constPropagated = JS_FALSE;
2586
* If the switch contains let variables scoped by its body, model the
2587
* resulting block on the stack first, before emitting the discriminant's
2588
* bytecode (in case the discriminant contains a stack-model dependency
2589
* such as a let expression).
2592
#if JS_HAS_BLOCK_SCOPE
2593
if (pn2->pn_type == TOK_LEXICALSCOPE) {
2594
atom = pn2->pn_atom;
2595
obj = ATOM_TO_OBJECT(atom);
2596
OBJ_SET_BLOCK_DEPTH(cx, obj, cg->stackDepth);
2599
* Push the body's block scope before discriminant code-gen for proper
2600
* static block scope linkage in case the discriminant contains a let
2601
* expression. The block's locals must lie under the discriminant on
2602
* the stack so that case-dispatch bytecodes can find the discriminant
2605
js_PushBlockScope(&cg->treeContext, stmtInfo, atom, -1);
2606
stmtInfo->type = STMT_SWITCH;
2608
count = OBJ_BLOCK_COUNT(cx, obj);
2609
cg->stackDepth += count;
2610
if ((uintN)cg->stackDepth > cg->maxStackDepth)
2611
cg->maxStackDepth = cg->stackDepth;
2613
/* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
2614
ale = js_IndexAtom(cx, atom, &cg->atomList);
2617
EMIT_ATOM_INDEX_OP(JSOP_ENTERBLOCK, ALE_INDEX(ale));
2620
* Pop the switch's statement info around discriminant code-gen. Note
2621
* how this leaves cg->treeContext.blockChain referencing the switch's
2622
* block scope object, which is necessary for correct block parenting
2623
* in the case where the discriminant contains a let expression.
2625
cg->treeContext.topStmt = stmtInfo->down;
2626
cg->treeContext.topScopeStmt = stmtInfo->downScope;
2637
* Emit code for the discriminant first (or nearly first, in the case of a
2638
* switch whose body is a block scope).
2640
if (!js_EmitTree(cx, cg, pn->pn_left))
2643
/* Switch bytecodes run from here till end of final case. */
2644
top = CG_OFFSET(cg);
2645
#if !JS_HAS_BLOCK_SCOPE
2646
js_PushStatement(&cg->treeContext, stmtInfo, STMT_SWITCH, top);
2648
if (pn2->pn_type == TOK_LC) {
2649
js_PushStatement(&cg->treeContext, stmtInfo, STMT_SWITCH, top);
2651
/* Re-push the switch's statement info record. */
2652
cg->treeContext.topStmt = cg->treeContext.topScopeStmt = stmtInfo;
2654
/* Set the statement info record's idea of top. */
2655
stmtInfo->update = top;
2657
/* Advance pn2 to refer to the switch case list. */
2662
caseCount = pn2->pn_count;
2666
if (caseCount == 0 ||
2668
(hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
2673
#define INTMAP_LENGTH 256
2674
jsbitmap intmap_space[INTMAP_LENGTH];
2675
jsbitmap *intmap = NULL;
2676
int32 intmap_bitlen = 0;
2678
low = JSVAL_INT_MAX;
2679
high = JSVAL_INT_MIN;
2681
for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2682
if (pn3->pn_type == TOK_DEFAULT) {
2683
hasDefault = JS_TRUE;
2684
caseCount--; /* one of the "cases" was the default */
2688
JS_ASSERT(pn3->pn_type == TOK_CASE);
2689
if (switchOp == JSOP_CONDSWITCH)
2693
switch (pn4->pn_type) {
2696
if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) {
2697
pn3->pn_val = INT_TO_JSVAL(i);
2699
atom = js_AtomizeDouble(cx, d, 0);
2704
pn3->pn_val = ATOM_KEY(atom);
2708
pn3->pn_val = ATOM_KEY(pn4->pn_atom);
2711
if (!pn4->pn_expr) {
2712
ok = js_LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &v);
2715
if (!JSVAL_IS_VOID(v)) {
2717
constPropagated = JS_TRUE;
2723
if (pn4->pn_op == JSOP_TRUE) {
2724
pn3->pn_val = JSVAL_TRUE;
2727
if (pn4->pn_op == JSOP_FALSE) {
2728
pn3->pn_val = JSVAL_FALSE;
2733
switchOp = JSOP_CONDSWITCH;
2737
JS_ASSERT(JSVAL_IS_NUMBER(pn3->pn_val) ||
2738
JSVAL_IS_STRING(pn3->pn_val) ||
2739
JSVAL_IS_BOOLEAN(pn3->pn_val));
2741
if (switchOp != JSOP_TABLESWITCH)
2743
if (!JSVAL_IS_INT(pn3->pn_val)) {
2744
switchOp = JSOP_LOOKUPSWITCH;
2747
i = JSVAL_TO_INT(pn3->pn_val);
2748
if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
2749
switchOp = JSOP_LOOKUPSWITCH;
2758
* Check for duplicates, which require a JSOP_LOOKUPSWITCH.
2759
* We bias i by 65536 if it's negative, and hope that's a rare
2760
* case (because it requires a malloc'd bitmap).
2764
if (i >= intmap_bitlen) {
2766
i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
2767
intmap = intmap_space;
2768
intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
2770
/* Just grab 8K for the worst-case bitmap. */
2771
intmap_bitlen = JS_BIT(16);
2772
intmap = (jsbitmap *)
2774
(JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
2775
* sizeof(jsbitmap));
2777
JS_ReportOutOfMemory(cx);
2781
memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
2783
if (JS_TEST_BIT(intmap, i)) {
2784
switchOp = JSOP_LOOKUPSWITCH;
2787
JS_SET_BIT(intmap, i);
2791
if (intmap && intmap != intmap_space)
2792
JS_free(cx, intmap);
2797
* Compute table length and select lookup instead if overlarge or
2798
* more than half-sparse.
2800
if (switchOp == JSOP_TABLESWITCH) {
2801
tableLength = (uint32)(high - low + 1);
2802
if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
2803
switchOp = JSOP_LOOKUPSWITCH;
2804
} else if (switchOp == JSOP_LOOKUPSWITCH) {
2806
* Lookup switch supports only atom indexes below 64K limit.
2807
* Conservatively estimate the maximum possible index during
2808
* switch generation and use conditional switch if it exceeds
2811
if (caseCount + cg->atomList.count > JS_BIT(16))
2812
switchOp = JSOP_CONDSWITCH;
2817
* Emit a note with two offsets: first tells total switch code length,
2818
* second tells offset to first JSOP_CASE if condswitch.
2820
noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
2824
if (switchOp == JSOP_CONDSWITCH) {
2826
* 0 bytes of immediate for unoptimized ECMAv2 switch.
2829
} else if (switchOp == JSOP_TABLESWITCH) {
2831
* 3 offsets (len, low, high) before the table, 1 per entry.
2833
switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
2836
* JSOP_LOOKUPSWITCH:
2837
* 1 offset (len) and 1 atom index (npairs) before the table,
2838
* 1 atom index and 1 jump offset per entry.
2840
switchSize = (size_t)(JUMP_OFFSET_LEN + ATOM_INDEX_LEN +
2841
(ATOM_INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
2845
* Emit switchOp followed by switchSize bytes of jump or lookup table.
2847
* If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
2848
* to emit the immediate operand(s) by which bytecode readers such as
2849
* BuildSpanDepTable discover the length of the switch opcode *before*
2850
* calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
2851
* also important to zero all unknown jump offset immediate operands,
2852
* so they can be converted to span dependencies with null targets to
2853
* be computed later (js_EmitN zeros switchSize bytes after switchOp).
2855
if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
2859
if (switchOp == JSOP_CONDSWITCH) {
2860
intN caseNoteIndex = -1;
2861
JSBool beforeCases = JS_TRUE;
2863
/* Emit code for evaluating cases and jumping to case statements. */
2864
for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2866
if (pn4 && !js_EmitTree(cx, cg, pn4))
2868
if (caseNoteIndex >= 0) {
2869
/* off is the previous JSOP_CASE's bytecode offset. */
2870
if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
2871
CG_OFFSET(cg) - off)) {
2876
JS_ASSERT(pn3->pn_type == TOK_DEFAULT);
2879
caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
2880
if (caseNoteIndex < 0)
2882
off = EmitJump(cx, cg, JSOP_CASE, 0);
2885
pn3->pn_offset = off;
2887
uintN noteCount, noteCountDelta;
2889
/* Switch note's second offset is to first JSOP_CASE. */
2890
noteCount = CG_NOTE_COUNT(cg);
2891
if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
2895
noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
2896
if (noteCountDelta != 0)
2897
caseNoteIndex += noteCountDelta;
2898
beforeCases = JS_FALSE;
2903
* If we didn't have an explicit default (which could fall in between
2904
* cases, preventing us from fusing this js_SetSrcNoteOffset with the
2905
* call in the loop above), link the last case to the implicit default
2906
* for the decompiler.
2909
caseNoteIndex >= 0 &&
2910
!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
2911
CG_OFFSET(cg) - off)) {
2915
/* Emit default even if no explicit default statement. */
2916
defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
2917
if (defaultOffset < 0)
2920
pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);
2922
if (switchOp == JSOP_TABLESWITCH) {
2923
/* Fill in switch bounds, which we know fit in 16-bit offsets. */
2924
SET_JUMP_OFFSET(pc, low);
2925
pc += JUMP_OFFSET_LEN;
2926
SET_JUMP_OFFSET(pc, high);
2927
pc += JUMP_OFFSET_LEN;
2930
* Use malloc to avoid arena bloat for programs with many switches.
2931
* We free table if non-null at label out, so all control flow must
2932
* exit this function through goto out or goto bad.
2934
if (tableLength != 0) {
2935
tableSize = (size_t)tableLength * sizeof *table;
2936
table = (JSParseNode **) JS_malloc(cx, tableSize);
2939
memset(table, 0, tableSize);
2940
for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2941
if (pn3->pn_type == TOK_DEFAULT)
2943
i = JSVAL_TO_INT(pn3->pn_val);
2945
JS_ASSERT((uint32)i < tableLength);
2950
JS_ASSERT(switchOp == JSOP_LOOKUPSWITCH);
2952
/* Fill in the number of cases. */
2953
SET_ATOM_INDEX(pc, caseCount);
2954
pc += ATOM_INDEX_LEN;
2958
* After this point, all control flow involving JSOP_TABLESWITCH
2959
* must set ok and goto out to exit this function. To keep things
2960
* simple, all switchOp cases exit that way.
2962
if (constPropagated) {
2964
* Skip switchOp, as we are not setting jump offsets in the two
2965
* for loops below. We'll restore CG_NEXT(cg) from savepc after,
2966
* unless there was an error.
2968
savepc = CG_NEXT(cg);
2969
CG_NEXT(cg) = pc + 1;
2970
if (switchOp == JSOP_TABLESWITCH) {
2971
for (i = 0; i < (jsint)tableLength; i++) {
2974
(pn4 = pn3->pn_left) != NULL &&
2975
pn4->pn_type == TOK_NAME) {
2976
/* Note a propagated constant with the const's name. */
2977
JS_ASSERT(!pn4->pn_expr);
2978
ale = js_IndexAtom(cx, pn4->pn_atom, &cg->atomList);
2982
if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
2983
ALE_INDEX(ale)) < 0) {
2987
pc += JUMP_OFFSET_LEN;
2990
for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
2992
if (pn4 && pn4->pn_type == TOK_NAME) {
2993
/* Note a propagated constant with the const's name. */
2994
JS_ASSERT(!pn4->pn_expr);
2995
ale = js_IndexAtom(cx, pn4->pn_atom, &cg->atomList);
2999
if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
3000
ALE_INDEX(ale)) < 0) {
3004
pc += ATOM_INDEX_LEN + JUMP_OFFSET_LEN;
3007
CG_NEXT(cg) = savepc;
3011
/* Emit code for each case's statements, copying pn_offset up to pn3. */
3012
for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3013
if (switchOp == JSOP_CONDSWITCH && pn3->pn_type != TOK_DEFAULT)
3014
CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, pn3->pn_offset);
3015
pn4 = pn3->pn_right;
3016
ok = js_EmitTree(cx, cg, pn4);
3019
pn3->pn_offset = pn4->pn_offset;
3020
if (pn3->pn_type == TOK_DEFAULT)
3021
off = pn3->pn_offset - top;
3025
/* If no default case, offset for default is to end of switch. */
3026
off = CG_OFFSET(cg) - top;
3029
/* We better have set "off" by now. */
3030
JS_ASSERT(off != -1);
3032
/* Set the default offset (to end of switch if no default). */
3033
if (switchOp == JSOP_CONDSWITCH) {
3035
JS_ASSERT(defaultOffset != -1);
3036
ok = js_SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset),
3037
off - (defaultOffset - top));
3041
pc = CG_CODE(cg, top);
3042
ok = js_SetJumpOffset(cx, cg, pc, off);
3045
pc += JUMP_OFFSET_LEN;
3048
/* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3049
off = CG_OFFSET(cg) - top;
3050
ok = js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off);
3054
if (switchOp == JSOP_TABLESWITCH) {
3055
/* Skip over the already-initialized switch bounds. */
3056
pc += 2 * JUMP_OFFSET_LEN;
3058
/* Fill in the jump table, if there is one. */
3059
for (i = 0; i < (jsint)tableLength; i++) {
3061
off = pn3 ? pn3->pn_offset - top : 0;
3062
ok = js_SetJumpOffset(cx, cg, pc, off);
3065
pc += JUMP_OFFSET_LEN;
3067
} else if (switchOp == JSOP_LOOKUPSWITCH) {
3068
/* Skip over the already-initialized number of cases. */
3069
pc += ATOM_INDEX_LEN;
3071
for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3072
if (pn3->pn_type == TOK_DEFAULT)
3074
atom = js_AtomizeValue(cx, pn3->pn_val, 0);
3077
ale = js_IndexAtom(cx, atom, &cg->atomList);
3080
SET_ATOM_INDEX(pc, ALE_INDEX(ale));
3081
pc += ATOM_INDEX_LEN;
3083
off = pn3->pn_offset - top;
3084
ok = js_SetJumpOffset(cx, cg, pc, off);
3087
pc += JUMP_OFFSET_LEN;
3095
ok = js_PopStatementCG(cx, cg);
3097
#if JS_HAS_BLOCK_SCOPE
3098
if (ok && pn->pn_right->pn_type == TOK_LEXICALSCOPE) {
3099
EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
3100
cg->stackDepth -= count;
3112
js_EmitFunctionBytecode(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
3114
if (!js_AllocTryNotes(cx, cg))
3117
if (cg->treeContext.flags & TCF_FUN_IS_GENERATOR) {
3118
/* JSOP_GENERATOR must be the first instruction. */
3119
CG_SWITCH_TO_PROLOG(cg);
3120
JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
3121
if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
3123
CG_SWITCH_TO_MAIN(cg);
3126
return js_EmitTree(cx, cg, body) &&
3127
js_Emit1(cx, cg, JSOP_STOP) >= 0;
3131
js_EmitFunctionBody(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body,
3134
JSStackFrame *fp, frame;
3139
funobj = fun->object;
3140
JS_ASSERT(!fp || (fp->fun != fun && fp->varobj != funobj &&
3141
fp->scopeChain != funobj));
3142
memset(&frame, 0, sizeof frame);
3144
frame.varobj = frame.scopeChain = funobj;
3146
frame.flags = JS_HAS_COMPILE_N_GO_OPTION(cx)
3147
? JSFRAME_COMPILING | JSFRAME_COMPILE_N_GO
3148
: JSFRAME_COMPILING;
3150
ok = js_EmitFunctionBytecode(cx, cg, body);
3155
if (!js_NewScriptFromCG(cx, cg, fun))
3158
JS_ASSERT(FUN_INTERPRETED(fun));
3162
/* A macro for inlining at the top of js_EmitTree (whence it came). */
3163
#define UPDATE_LINE_NUMBER_NOTES(cx, cg, pn) \
3165
uintN line_ = (pn)->pn_pos.begin.lineno; \
3166
uintN delta_ = line_ - CG_CURRENT_LINE(cg); \
3167
if (delta_ != 0) { \
3169
* Encode any change in the current source line number by using \
3170
* either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
3171
* whichever consumes less space. \
3173
* NB: We handle backward line number deltas (possible with for \
3174
* loops where the update part is emitted after the body, but its \
3175
* line number is <= any line number in the body) here by letting \
3176
* unsigned delta_ wrap to a very large number, which triggers a \
3179
CG_CURRENT_LINE(cg) = line_; \
3180
if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
3181
if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
3185
if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0) \
3187
} while (--delta_ != 0); \
3192
/* A function, so that we avoid macro-bloating all the other callsites. */
3194
UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3196
UPDATE_LINE_NUMBER_NOTES(cx, cg, pn);
3201
MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3202
JSParseNode *pn, jsatomid *result)
3205
JSAtomListElement *ale;
3207
if (pn->pn_slot >= 0) {
3208
atomIndex = (jsatomid) pn->pn_slot;
3210
ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
3213
atomIndex = ALE_INDEX(ale);
3216
if ((js_CodeSpec[pn->pn_op].format & JOF_TYPEMASK) == JOF_CONST &&
3217
(!(cg->treeContext.flags & TCF_IN_FUNCTION) ||
3218
(cg->treeContext.flags & TCF_FUN_HEAVYWEIGHT))) {
3219
/* Emit a prolog bytecode to predefine the variable. */
3220
CG_SWITCH_TO_PROLOG(cg);
3221
if (!UpdateLineNumberNotes(cx, cg, pn))
3223
EMIT_ATOM_INDEX_OP(prologOp, atomIndex);
3224
CG_SWITCH_TO_MAIN(cg);
3228
*result = atomIndex;
3232
#if JS_HAS_DESTRUCTURING
3235
(*DestructuringDeclEmitter)(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3239
EmitDestructuringDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3242
JS_ASSERT(pn->pn_type == TOK_NAME);
3243
if (!BindNameToSlot(cx, &cg->treeContext, pn, prologOp == JSOP_NOP))
3246
JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS);
3247
return MaybeEmitVarDecl(cx, cg, prologOp, pn, NULL);
3251
EmitDestructuringDecls(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
3254
JSParseNode *pn2, *pn3;
3255
DestructuringDeclEmitter emitter;
3257
if (pn->pn_type == TOK_RB) {
3258
for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3259
if (pn2->pn_type == TOK_COMMA)
3261
emitter = (pn2->pn_type == TOK_NAME)
3262
? EmitDestructuringDecl
3263
: EmitDestructuringDecls;
3264
if (!emitter(cx, cg, prologOp, pn2))
3268
JS_ASSERT(pn->pn_type == TOK_RC);
3269
for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3270
pn3 = pn2->pn_right;
3271
emitter = (pn3->pn_type == TOK_NAME)
3272
? EmitDestructuringDecl
3273
: EmitDestructuringDecls;
3274
if (!emitter(cx, cg, prologOp, pn3))
3282
EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn);
3285
EmitDestructuringLHS(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
3290
/* Skip any parenthesization. */
3291
while (pn->pn_type == TOK_RP)
3295
* Now emit the lvalue opcode sequence. If the lvalue is a nested
3296
* destructuring initialiser-form, call ourselves to handle it, then
3297
* pop the matched value. Otherwise emit an lvalue bytecode sequence
3298
* ending with a JSOP_ENUMELEM or equivalent op.
3300
if (pn->pn_type == TOK_RB || pn->pn_type == TOK_RC) {
3301
if (!EmitDestructuringOpsHelper(cx, cg, pn))
3303
if (wantpop && js_Emit1(cx, cg, JSOP_POP) < 0)
3306
if (pn->pn_type == TOK_NAME &&
3307
!BindNameToSlot(cx, &cg->treeContext, pn, JS_FALSE)) {
3311
switch (pn->pn_op) {
3314
* NB: pn is a PN_NAME node, not a PN_BINARY. Nevertheless,
3315
* we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
3316
* So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
3318
if (!EmitElemOp(cx, pn, JSOP_ENUMELEM, cg))
3323
if (!EmitElemOp(cx, pn, JSOP_ENUMCONSTELEM, cg))
3329
slot = (jsuint) pn->pn_slot;
3330
EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, slot);
3338
slot = (jsuint) pn->pn_slot;
3339
EMIT_UINT16_IMM_OP(pn->pn_op, slot);
3340
if (wantpop && js_Emit1(cx, cg, JSOP_POP) < 0)
3345
#if JS_HAS_LVALUE_RETURN || JS_HAS_XML_SUPPORT
3349
top = CG_OFFSET(cg);
3350
if (!js_EmitTree(cx, cg, pn))
3352
if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
3354
if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
3368
* Recursive helper for EmitDestructuringOps.
3370
* Given a value to destructure on the stack, walk over an object or array
3371
* initialiser at pn, emitting bytecodes to match property values and store
3372
* them in the lvalues identified by the matched property names.
3375
EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3378
JSParseNode *pn2, *pn3;
3382
intN stackDepth = cg->stackDepth;
3383
JS_ASSERT(stackDepth != 0);
3384
JS_ASSERT(pn->pn_arity == PN_LIST);
3385
JS_ASSERT(pn->pn_type == TOK_RB || pn->pn_type == TOK_RC);
3388
if (pn->pn_count == 0) {
3389
/* Emit a DUP;POP sequence for the decompiler. */
3390
return js_Emit1(cx, cg, JSOP_DUP) >= 0 &&
3391
js_Emit1(cx, cg, JSOP_POP) >= 0;
3395
for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
3397
* Duplicate the value being destructured to use as a reference base.
3399
if (js_Emit1(cx, cg, JSOP_DUP) < 0)
3403
* Now push the property name currently being matched, which is either
3404
* the array initialiser's current index, or the current property name
3405
* "label" on the left of a colon in the object initialiser. Set pn3
3406
* to the lvalue node, which is in the value-initializing position.
3409
if (pn->pn_type == TOK_RB) {
3410
if (!EmitNumberOp(cx, index, cg))
3414
JS_ASSERT(pn->pn_type == TOK_RC);
3415
JS_ASSERT(pn2->pn_type == TOK_COLON);
3417
if (pn3->pn_type == TOK_NUMBER) {
3419
* If we are emitting an object destructuring initialiser,
3420
* annotate the index op with SRC_INITPROP so we know we are
3421
* not decompiling an array initialiser.
3423
if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
3425
if (!EmitNumberOp(cx, pn3->pn_dval, cg))
3428
JS_ASSERT(pn3->pn_type == TOK_STRING ||
3429
pn3->pn_type == TOK_NAME);
3430
if (!EmitAtomOp(cx, pn3, JSOP_GETPROP, cg))
3432
doElemOp = JS_FALSE;
3434
pn3 = pn2->pn_right;
3439
* Ok, get the value of the matching property name. This leaves
3440
* that value on top of the value being destructured, so the stack
3441
* is one deeper than when we started.
3443
if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
3445
JS_ASSERT(cg->stackDepth == stackDepth + 1);
3448
/* Nullary comma node makes a hole in the array destructurer. */
3449
if (pn3->pn_type == TOK_COMMA && pn3->pn_arity == PN_NULLARY) {
3450
JS_ASSERT(pn->pn_type == TOK_RB);
3451
JS_ASSERT(pn2 == pn3);
3452
if (js_Emit1(cx, cg, JSOP_POP) < 0)
3455
if (!EmitDestructuringLHS(cx, cg, pn3, JS_TRUE))
3459
JS_ASSERT(cg->stackDepth == stackDepth);
3467
OpToDeclType(JSOp op)
3471
return SRC_DECL_LET;
3473
return SRC_DECL_CONST;
3475
return SRC_DECL_VAR;
3477
return SRC_DECL_NONE;
3482
EmitDestructuringOps(JSContext *cx, JSCodeGenerator *cg, JSOp declOp,
3486
* If we're called from a variable declaration, help the decompiler by
3487
* annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
3488
* If the destructuring initialiser is empty, our helper will emit a
3489
* JSOP_DUP followed by a JSOP_POP for the decompiler.
3491
if (js_NewSrcNote2(cx, cg, SRC_DESTRUCT, OpToDeclType(declOp)) < 0)
3495
* Call our recursive helper to emit the destructuring assignments and
3496
* related stack manipulations.
3498
return EmitDestructuringOpsHelper(cx, cg, pn);
3502
EmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp declOp,
3503
JSParseNode *lhs, JSParseNode *rhs)
3505
jsuint depth, limit, slot;
3508
depth = limit = (uintN) cg->stackDepth;
3509
for (pn = rhs->pn_head; pn; pn = pn->pn_next) {
3510
if (limit == JS_BIT(16)) {
3511
js_ReportCompileErrorNumber(cx, rhs,
3512
JSREPORT_PN | JSREPORT_ERROR,
3513
JSMSG_ARRAY_INIT_TOO_BIG);
3517
if (pn->pn_type == TOK_COMMA) {
3518
if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
3521
JS_ASSERT(pn->pn_type != TOK_DEFSHARP);
3522
if (!js_EmitTree(cx, cg, pn))
3528
if (js_NewSrcNote2(cx, cg, SRC_GROUPASSIGN, OpToDeclType(declOp)) < 0)
3532
for (pn = lhs->pn_head; pn; pn = pn->pn_next) {
3534
EMIT_UINT16_IMM_OP(JSOP_GETLOCAL, slot);
3536
if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
3539
if (pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY) {
3540
if (js_Emit1(cx, cg, JSOP_POP) < 0)
3543
if (!EmitDestructuringLHS(cx, cg, pn, pn->pn_next != NULL))
3549
EMIT_UINT16_IMM_OP(JSOP_SETSP, (jsatomid)depth);
3550
cg->stackDepth = (uintN) depth;
3555
* Helper called with pop out param initialized to a JSOP_POP* opcode. If we
3556
* can emit a group assignment sequence, which results in 0 stack depth delta,
3557
* we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
3560
MaybeEmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp declOp,
3561
JSParseNode *pn, JSOp *pop)
3563
JSParseNode *lhs, *rhs;
3565
JS_ASSERT(pn->pn_type == TOK_ASSIGN);
3566
JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_POPV);
3569
if (lhs->pn_type == TOK_RB && rhs->pn_type == TOK_RB &&
3570
lhs->pn_count <= rhs->pn_count &&
3571
(rhs->pn_count == 0 ||
3572
rhs->pn_head->pn_type != TOK_DEFSHARP)) {
3573
if (!EmitGroupAssignment(cx, cg, declOp, lhs, rhs))
3580
#endif /* JS_HAS_DESTRUCTURING */
3583
EmitVariables(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
3584
JSBool inLetHead, ptrdiff_t *headNoteIndex)
3587
JSBool let, forInVar;
3588
#if JS_HAS_BLOCK_SCOPE
3589
JSBool forInLet, popScope;
3590
JSStmtInfo *stmt, *scopeStmt;
3592
ptrdiff_t off, noteIndex, tmp;
3593
JSParseNode *pn2, *pn3;
3598
/* Default in case of JS_HAS_BLOCK_SCOPE early return, below. */
3599
*headNoteIndex = -1;
3602
* Let blocks and expressions have a parenthesized head in which the new
3603
* scope is not yet open. Initializer evaluation uses the parent node's
3604
* lexical scope. If popScope is true below, then we hide the top lexical
3605
* block from any calls to BindNameToSlot hiding in pn2->pn_expr so that
3606
* it won't find any names in the new let block.
3608
* The same goes for let declarations in the head of any kind of for loop.
3609
* Unlike a let declaration 'let x = i' within a block, where x is hoisted
3610
* to the start of the block, a 'for (let x = i...) ...' loop evaluates i
3611
* in the containing scope, and puts x in the loop body's scope.
3613
tc = &cg->treeContext;
3614
let = (pn->pn_op == JSOP_NOP);
3615
forInVar = (pn->pn_extra & PNX_FORINVAR) != 0;
3616
#if JS_HAS_BLOCK_SCOPE
3617
forInLet = let && forInVar;
3618
popScope = (inLetHead || (let && (tc->flags & TCF_IN_FOR_INIT)));
3619
JS_ASSERT(!popScope || let);
3622
off = noteIndex = -1;
3623
for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
3624
#if JS_HAS_DESTRUCTURING
3625
if (pn2->pn_type != TOK_NAME) {
3626
if (pn2->pn_type == TOK_RB || pn2->pn_type == TOK_RC) {
3628
* Emit variable binding ops, but not destructuring ops.
3629
* The parser (see Variables, jsparse.c) has ensured that
3630
* our caller will be the TOK_FOR/TOK_IN case in js_EmitTree,
3631
* and that case will emit the destructuring code only after
3632
* emitting an enumerating opcode and a branch that tests
3633
* whether the enumeration ended.
3635
JS_ASSERT(forInVar);
3636
JS_ASSERT(pn->pn_count == 1);
3637
if (!EmitDestructuringDecls(cx, cg, pn->pn_op, pn2))
3643
* A destructuring initialiser assignment preceded by var is
3644
* always evaluated promptly, even if it is to the left of 'in'
3645
* in a for-in loop. As with 'for (var x = i in o)...', this
3646
* will cause the entire 'var [a, b] = i' to be hoisted out of
3647
* the head of the loop.
3649
JS_ASSERT(pn2->pn_type == TOK_ASSIGN);
3650
if (pn->pn_count == 1 && !forInLet) {
3652
* If this is the only destructuring assignment in the list,
3653
* try to optimize to a group assignment. If we're in a let
3654
* head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
3655
* in pn->pn_op, to suppress a second (and misplaced) 'let'.
3657
JS_ASSERT(noteIndex < 0 && !pn2->pn_next);
3659
if (!MaybeEmitGroupAssignment(cx, cg,
3660
inLetHead ? JSOP_POP : pn->pn_op,
3664
if (op == JSOP_NOP) {
3665
pn->pn_extra = (pn->pn_extra & ~PNX_POPVAR) | PNX_GROUPINIT;
3671
if (!EmitDestructuringDecls(cx, cg, pn->pn_op, pn3))
3674
#if JS_HAS_BLOCK_SCOPE
3676
* If this is a 'for (let [x, y] = i in o) ...' let declaration,
3677
* throw away i if it is a useless expression.
3680
JSBool useful = JS_FALSE;
3682
JS_ASSERT(pn->pn_count == 1);
3683
if (!CheckSideEffects(cx, tc, pn2->pn_right, &useful))
3690
if (!js_EmitTree(cx, cg, pn2->pn_right))
3693
#if JS_HAS_BLOCK_SCOPE
3695
* The expression i in 'for (let [x, y] = i in o) ...', which is
3696
* pn2->pn_right above, appears to have side effects. We've just
3697
* emitted code to evaluate i, but we must not destructure i yet.
3698
* Let the TOK_FOR: code in js_EmitTree do the destructuring to
3699
* emit the right combination of source notes and bytecode for the
3702
* This has the effect of hoisting the evaluation of i out of the
3703
* for-in loop, without hoisting the let variables, which must of
3704
* course be scoped by the loop. Set PNX_POPVAR to cause JSOP_POP
3705
* to be emitted, just before returning from this function.
3708
pn->pn_extra |= PNX_POPVAR;
3715
* Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
3716
* that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
3717
* we will emit at the bottom of this function.
3719
if (!EmitDestructuringOps(cx, cg,
3720
inLetHead ? JSOP_POP : pn->pn_op,
3727
JS_ASSERT(pn2->pn_type == TOK_NAME);
3730
if (!BindNameToSlot(cx, &cg->treeContext, pn2, let))
3732
JS_ASSERT(pn2->pn_slot >= 0 || !let);
3735
if (op == JSOP_ARGUMENTS) {
3736
/* JSOP_ARGUMENTS => no initializer */
3737
JS_ASSERT(!pn2->pn_expr && !let);
3740
atomIndex = 0; /* quell GCC overwarning */
3743
if (!MaybeEmitVarDecl(cx, cg, pn->pn_op, pn2, &atomIndex))
3748
#if JS_HAS_BLOCK_SCOPE
3750
* If this is a 'for (let x = i in o) ...' let declaration,
3751
* throw away i if it is a useless expression.
3754
JSBool useful = JS_FALSE;
3756
JS_ASSERT(pn->pn_count == 1);
3757
if (!CheckSideEffects(cx, tc, pn3, &useful))
3764
if (op == JSOP_SETNAME) {
3766
EMIT_ATOM_INDEX_OP(JSOP_BINDNAME, atomIndex);
3768
if (pn->pn_op == JSOP_DEFCONST &&
3769
!js_DefineCompileTimeConstant(cx, cg, pn2->pn_atom,
3774
#if JS_HAS_BLOCK_SCOPE
3775
/* Evaluate expr in the outer lexical scope if requested. */
3778
scopeStmt = tc->topScopeStmt;
3780
tc->topStmt = stmt->down;
3781
tc->topScopeStmt = scopeStmt->downScope;
3785
stmt = scopeStmt = NULL; /* quell GCC overwarning */
3790
oldflags = cg->treeContext.flags;
3791
cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
3792
if (!js_EmitTree(cx, cg, pn3))
3794
cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
3796
#if JS_HAS_BLOCK_SCOPE
3799
tc->topScopeStmt = scopeStmt;
3806
* 'for (var x in o) ...' and 'for (var x = i in o) ...' call the
3807
* TOK_VAR case, but only the initialized case (a strange one that
3808
* falls out of ECMA-262's grammar) wants to run past this point.
3809
* Both cases must conditionally emit a JSOP_DEFVAR, above. Note
3810
* that the parser error-checks to ensure that pn->pn_count is 1.
3812
* 'for (let x = i in o) ...' must evaluate i before the loop, and
3813
* subject it to useless expression elimination. The variable list
3814
* in pn is a single let declaration if pn_op == JSOP_NOP. We test
3815
* the let local in order to break early in this case, as well as in
3816
* the 'for (var x in o)' case.
3818
* XXX Narcissus keeps track of variable declarations in the node
3819
* for the script being compiled, so there's no need to share any
3820
* conditional prolog code generation there. We could do likewise,
3821
* but it's a big change, requiring extra allocation, so probably
3822
* not worth the trouble for SpiderMonkey.
3824
JS_ASSERT(pn3 == pn2->pn_expr);
3825
if (forInVar && (!pn3 || let)) {
3826
JS_ASSERT(pn->pn_count == 1);
3830
if (pn2 == pn->pn_head &&
3832
js_NewSrcNote2(cx, cg, SRC_DECL,
3833
(pn->pn_op == JSOP_DEFCONST)
3835
: (pn->pn_op == JSOP_DEFVAR)
3837
: SRC_DECL_LET) < 0) {
3840
if (op == JSOP_ARGUMENTS) {
3841
if (js_Emit1(cx, cg, op) < 0)
3843
} else if (pn2->pn_slot >= 0) {
3844
EMIT_UINT16_IMM_OP(op, atomIndex);
3846
EMIT_ATOM_INDEX_OP(op, atomIndex);
3849
#if JS_HAS_DESTRUCTURING
3852
tmp = CG_OFFSET(cg);
3853
if (noteIndex >= 0) {
3854
if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
3860
noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
3861
if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
3865
/* If this is a let head, emit and return a srcnote on the pop. */
3867
*headNoteIndex = js_NewSrcNote(cx, cg, SRC_DECL);
3868
if (*headNoteIndex < 0)
3870
if (!(pn->pn_extra & PNX_POPVAR))
3871
return js_Emit1(cx, cg, JSOP_NOP) >= 0;
3874
return !(pn->pn_extra & PNX_POPVAR) || js_Emit1(cx, cg, JSOP_POP) >= 0;
3877
#if defined DEBUG_brendan || defined DEBUG_mrbkap
3879
GettableNoteForNextOp(JSCodeGenerator *cg)
3881
ptrdiff_t offset, target;
3882
jssrcnote *sn, *end;
3885
target = CG_OFFSET(cg);
3886
for (sn = CG_NOTES(cg), end = sn + CG_NOTE_COUNT(cg); sn < end;
3888
if (offset == target && SN_IS_GETTABLE(sn))
3890
offset += SN_DELTA(sn);
3897
js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
3899
JSBool ok, useful, wantval;
3900
JSStmtInfo *stmt, stmtInfo;
3901
ptrdiff_t top, off, tmp, beq, jmp;
3902
JSParseNode *pn2, *pn3;
3904
JSAtomListElement *ale;
3906
ptrdiff_t noteIndex;
3907
JSSrcNoteType noteType;
3914
if (!JS_CHECK_STACK_SIZE(cx, stackDummy)) {
3915
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_OVER_RECURSED);
3921
pn->pn_offset = top = CG_OFFSET(cg);
3923
/* Emit notes to tell the current bytecode's source line number. */
3924
UPDATE_LINE_NUMBER_NOTES(cx, cg, pn);
3926
switch (pn->pn_type) {
3930
JSCodeGenerator *cg2;
3933
#if JS_HAS_XML_SUPPORT
3934
if (pn->pn_arity == PN_NULLARY) {
3935
if (js_Emit1(cx, cg, JSOP_GETFUNNS) < 0)
3941
/* Generate code for the function's body. */
3942
cg2mark = JS_ARENA_MARK(&cx->tempPool);
3943
JS_ARENA_ALLOCATE_TYPE(cg2, JSCodeGenerator, &cx->tempPool);
3945
JS_ReportOutOfMemory(cx);
3948
if (!js_InitCodeGenerator(cx, cg2, cg->codePool, cg->notePool,
3949
cg->filename, pn->pn_pos.begin.lineno,
3953
cg2->treeContext.flags = (uint16) (pn->pn_flags | TCF_IN_FUNCTION);
3954
cg2->treeContext.tryCount = pn->pn_tryCount;
3956
fun = (JSFunction *) JS_GetPrivate(cx, ATOM_TO_OBJECT(pn->pn_funAtom));
3957
if (!js_EmitFunctionBody(cx, cg2, pn->pn_body, fun))
3961
* We need an activation object if an inner peeks out, or if such
3962
* inner-peeking caused one of our inners to become heavyweight.
3964
if (cg2->treeContext.flags &
3965
(TCF_FUN_USES_NONLOCALS | TCF_FUN_HEAVYWEIGHT)) {
3966
cg->treeContext.flags |= TCF_FUN_HEAVYWEIGHT;
3968
js_FinishCodeGenerator(cx, cg2);
3969
JS_ARENA_RELEASE(&cx->tempPool, cg2mark);
3971
/* Make the function object a literal in the outer script's pool. */
3972
ale = js_IndexAtom(cx, pn->pn_funAtom, &cg->atomList);
3975
atomIndex = ALE_INDEX(ale);
3977
/* Emit a bytecode pointing to the closure object in its immediate. */
3978
if (pn->pn_op != JSOP_NOP) {
3979
EMIT_ATOM_INDEX_OP(pn->pn_op, atomIndex);
3983
/* Top-level named functions need a nop for decompilation. */
3984
noteIndex = js_NewSrcNote2(cx, cg, SRC_FUNCDEF, (ptrdiff_t)atomIndex);
3985
if (noteIndex < 0 ||
3986
js_Emit1(cx, cg, JSOP_NOP) < 0) {
3991
* Top-levels also need a prolog op to predefine their names in the
3992
* variable object, or if local, to fill their stack slots.
3994
CG_SWITCH_TO_PROLOG(cg);
3996
if (cg->treeContext.flags & TCF_IN_FUNCTION) {
3997
JSObject *obj, *pobj;
3999
JSScopeProperty *sprop;
4002
obj = OBJ_GET_PARENT(cx, fun->object);
4003
if (!js_LookupHiddenProperty(cx, obj, ATOM_TO_JSID(fun->atom),
4008
JS_ASSERT(prop && pobj == obj);
4009
sprop = (JSScopeProperty *) prop;
4010
JS_ASSERT(sprop->getter == js_GetLocalVariable);
4011
slot = sprop->shortid;
4012
OBJ_DROP_PROPERTY(cx, pobj, prop);
4015
* If this local function is declared in a body block induced by
4016
* let declarations, reparent fun->object to the compiler-created
4017
* body block object so that JSOP_DEFLOCALFUN can clone that block
4018
* into the runtime scope chain.
4020
stmt = cg->treeContext.topStmt;
4021
if (stmt && stmt->type == STMT_BLOCK &&
4022
stmt->down && stmt->down->type == STMT_BLOCK &&
4023
(stmt->down->flags & SIF_SCOPE)) {
4024
obj = ATOM_TO_OBJECT(stmt->down->atom);
4025
JS_ASSERT(LOCKED_OBJ_GET_CLASS(obj) == &js_BlockClass);
4026
OBJ_SET_PARENT(cx, fun->object, obj);
4029
if (atomIndex >= JS_BIT(16)) {
4031
* Lots of literals in the outer function, so we have to emit
4032
* [JSOP_LITOPX, atomIndex, JSOP_DEFLOCALFUN, var slot].
4034
off = js_EmitN(cx, cg, JSOP_LITOPX, 3);
4037
pc = CG_CODE(cg, off);
4038
SET_LITERAL_INDEX(pc, atomIndex);
4039
EMIT_UINT16_IMM_OP(JSOP_DEFLOCALFUN, slot);
4041
/* Emit [JSOP_DEFLOCALFUN, var slot, atomIndex]. */
4042
off = js_EmitN(cx, cg, JSOP_DEFLOCALFUN,
4043
VARNO_LEN + ATOM_INDEX_LEN);
4046
pc = CG_CODE(cg, off);
4047
SET_VARNO(pc, slot);
4049
SET_ATOM_INDEX(pc, atomIndex);
4052
JS_ASSERT(!cg->treeContext.topStmt);
4053
EMIT_ATOM_INDEX_OP(JSOP_DEFFUN, atomIndex);
4056
CG_SWITCH_TO_MAIN(cg);
4060
#if JS_HAS_EXPORT_IMPORT
4063
if (pn2->pn_type == TOK_STAR) {
4065
* 'export *' must have no other elements in the list (what would
4068
if (js_Emit1(cx, cg, JSOP_EXPORTALL) < 0)
4072
* If not 'export *', the list consists of NAME nodes identifying
4073
* properties of the variables object to flag as exported.
4076
ale = js_IndexAtom(cx, pn2->pn_atom, &cg->atomList);
4079
EMIT_ATOM_INDEX_OP(JSOP_EXPORTNAME, ALE_INDEX(ale));
4080
} while ((pn2 = pn2->pn_next) != NULL);
4085
for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
4087
* Each subtree on an import list is rooted by a DOT or LB node.
4088
* A DOT may have a null pn_atom member, in which case pn_op must
4089
* be JSOP_IMPORTALL -- see EmitPropOp above.
4091
if (!js_EmitTree(cx, cg, pn2))
4095
#endif /* JS_HAS_EXPORT_IMPORT */
4098
/* Initialize so we can detect else-if chains and avoid recursion. */
4099
stmtInfo.type = STMT_IF;
4104
/* Emit code for the condition before pushing stmtInfo. */
4105
if (!js_EmitTree(cx, cg, pn->pn_kid1))
4107
top = CG_OFFSET(cg);
4108
if (stmtInfo.type == STMT_IF) {
4109
js_PushStatement(&cg->treeContext, &stmtInfo, STMT_IF, top);
4112
* We came here from the goto further below that detects else-if
4113
* chains, so we must mutate stmtInfo back into a STMT_IF record.
4114
* Also (see below for why) we need a note offset for SRC_IF_ELSE
4115
* to help the decompiler. Actually, we need two offsets, one for
4116
* decompiling any else clause and the second for decompiling an
4117
* else-if chain without bracing, overindenting, or incorrectly
4118
* scoping let declarations.
4120
JS_ASSERT(stmtInfo.type == STMT_ELSE);
4121
stmtInfo.type = STMT_IF;
4122
stmtInfo.update = top;
4123
if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4125
if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 1, top - jmp))
4129
/* Emit an annotated branch-if-false around the then part. */
4131
noteIndex = js_NewSrcNote(cx, cg, pn3 ? SRC_IF_ELSE : SRC_IF);
4134
beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4138
/* Emit code for the then and optional else parts. */
4139
if (!js_EmitTree(cx, cg, pn->pn_kid2))
4142
/* Modify stmtInfo so we know we're in the else part. */
4143
stmtInfo.type = STMT_ELSE;
4146
* Emit a JSOP_BACKPATCH op to jump from the end of our then part
4147
* around the else part. The js_PopStatementCG call at the bottom
4148
* of this switch case will fix up the backpatch chain linked from
4151
jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks, NULL, SRC_NULL);
4155
/* Ensure the branch-if-false comes here, then emit the else. */
4156
CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4157
if (pn3->pn_type == TOK_IF) {
4162
if (!js_EmitTree(cx, cg, pn3))
4166
* Annotate SRC_IF_ELSE with the offset from branch to jump, for
4167
* the decompiler's benefit. We can't just "back up" from the pc
4168
* of the else clause, because we don't know whether an extended
4169
* jump was required to leap from the end of the then clause over
4172
if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4175
/* No else part, fixup the branch-if-false to come here. */
4176
CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4178
ok = js_PopStatementCG(cx, cg);
4182
/* Out of line to avoid bloating js_EmitTree's stack frame size. */
4183
ok = EmitSwitch(cx, cg, pn, &stmtInfo);
4187
js_PushStatement(&cg->treeContext, &stmtInfo, STMT_WHILE_LOOP, top);
4188
if (!js_EmitTree(cx, cg, pn->pn_left))
4190
noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4193
beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4196
if (!js_EmitTree(cx, cg, pn->pn_right))
4198
jmp = EmitJump(cx, cg, JSOP_GOTO, top - CG_OFFSET(cg));
4201
CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4202
if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
4204
ok = js_PopStatementCG(cx, cg);
4208
/* Emit an annotated nop so we know to decompile a 'do' keyword. */
4209
if (js_NewSrcNote(cx, cg, SRC_WHILE) < 0 ||
4210
js_Emit1(cx, cg, JSOP_NOP) < 0) {
4214
/* Compile the loop body. */
4215
top = CG_OFFSET(cg);
4216
js_PushStatement(&cg->treeContext, &stmtInfo, STMT_DO_LOOP, top);
4217
if (!js_EmitTree(cx, cg, pn->pn_left))
4220
/* Set loop and enclosing label update offsets, for continue. */
4223
stmt->update = CG_OFFSET(cg);
4224
} while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);
4226
/* Compile the loop condition, now that continues know where to go. */
4227
if (!js_EmitTree(cx, cg, pn->pn_right))
4231
* No source note needed, because JSOP_IFNE is used only for do-while.
4232
* If we ever use JSOP_IFNE for other purposes, we can still avoid yet
4233
* another note here, by storing (jmp - top) in the SRC_WHILE note's
4234
* offset, and fetching that delta in order to decompile recursively.
4236
if (EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg)) < 0)
4238
ok = js_PopStatementCG(cx, cg);
4242
beq = 0; /* suppress gcc warnings */
4244
js_PushStatement(&cg->treeContext, &stmtInfo, STMT_FOR_LOOP, top);
4246
if (pn2->pn_type == TOK_IN) {
4249
/* Set stmtInfo type for later testing. */
4250
stmtInfo.type = STMT_FOR_IN_LOOP;
4254
* If the left part is 'var x', emit code to define x if necessary
4255
* using a prolog opcode, but do not emit a pop. If the left part
4256
* is 'var x = i', emit prolog code to define x if necessary; then
4257
* emit code to evaluate i, assign the result to x, and pop the
4258
* result off the stack.
4260
* All the logic to do this is implemented in the outer switch's
4261
* TOK_VAR case, conditioned on pn_extra flags set by the parser.
4263
* In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
4264
* called here will generate the proper note for the assignment
4265
* op that sets x = i, hoisting the initialized var declaration
4266
* out of the loop: 'var x = i; for (x in o) ...'.
4268
* In the 'for (var x in o) ...' case, nothing but the prolog op
4269
* (if needed) should be generated here, we must emit the note
4270
* just before the JSOP_FOR* opcode in the switch on pn3->pn_type
4271
* a bit below, so nothing is hoisted: 'for (var x in o) ...'.
4273
* A 'for (let x = i in o)' loop must not be hoisted, since in
4274
* this form the let variable is scoped by the loop body (but not
4275
* the head). The initializer expression i must be evaluated for
4276
* any side effects. So we hoist only i in the let case.
4279
type = pn3->pn_type;
4280
cg->treeContext.flags |= TCF_IN_FOR_INIT;
4281
if (TOKEN_TYPE_IS_DECL(type) && !js_EmitTree(cx, cg, pn3))
4283
cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
4285
/* Emit a push to allocate the iterator. */
4286
if (js_Emit1(cx, cg, JSOP_STARTITER) < 0)
4289
/* Compile the object expression to the right of 'in'. */
4290
if (!js_EmitTree(cx, cg, pn2->pn_right))
4294
* Emit a bytecode to convert top of stack value to the iterator
4295
* object depending on the loop variant (for-in, for-each-in, or
4296
* destructuring for-in).
4298
#if JS_HAS_DESTRUCTURING
4299
JS_ASSERT(pn->pn_op == JSOP_FORIN ||
4300
pn->pn_op == JSOP_FOREACHKEYVAL ||
4301
pn->pn_op == JSOP_FOREACH);
4303
JS_ASSERT(pn->pn_op == JSOP_FORIN || pn->pn_op == JSOP_FOREACH);
4305
if (js_Emit1(cx, cg, pn->pn_op) < 0)
4308
top = CG_OFFSET(cg);
4309
SET_STATEMENT_TOP(&stmtInfo, top);
4312
* Compile a JSOP_FOR* bytecode based on the left hand side.
4314
* Initialize op to JSOP_SETNAME in case of |for ([a, b] in o)...|
4315
* or similar, to signify assignment, rather than declaration, to
4316
* the decompiler. EmitDestructuringOps takes a prolog bytecode
4317
* parameter and emits the appropriate source note, defaulting to
4318
* assignment, so JSOP_SETNAME is not critical here; many similar
4319
* ops could be used -- just not JSOP_NOP (which means 'let').
4324
#if JS_HAS_BLOCK_SCOPE
4328
JS_ASSERT(pn3->pn_arity == PN_LIST && pn3->pn_count == 1);
4330
#if JS_HAS_DESTRUCTURING
4331
if (pn3->pn_type == TOK_ASSIGN) {
4333
JS_ASSERT(pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC);
4335
if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4336
op = pn2->pn_left->pn_op;
4337
goto destructuring_for;
4340
JS_ASSERT(pn3->pn_type == TOK_NAME);
4343
* Always annotate JSOP_FORLOCAL if given input of the form
4344
* 'for (let x in * o)' -- the decompiler must not hoist the
4345
* 'let x' out of the loop head, or x will be bound in the
4346
* wrong scope. Likewise, but in this case only for the sake
4347
* of higher decompilation fidelity only, do not hoist 'var x'
4348
* when given 'for (var x in o)'. But 'for (var x = i in o)'
4349
* requires hoisting in order to preserve the initializer i.
4350
* The decompiler can only handle so much!
4353
#if JS_HAS_BLOCK_SCOPE
4357
js_NewSrcNote2(cx, cg, SRC_DECL,
4360
: SRC_DECL_LET) < 0) {
4365
if (pn3->pn_slot >= 0) {
4368
case JSOP_GETARG: /* FALL THROUGH */
4369
case JSOP_SETARG: op = JSOP_FORARG; break;
4370
case JSOP_GETVAR: /* FALL THROUGH */
4371
case JSOP_SETVAR: op = JSOP_FORVAR; break;
4372
case JSOP_GETGVAR: /* FALL THROUGH */
4373
case JSOP_SETGVAR: op = JSOP_FORNAME; break;
4374
case JSOP_GETLOCAL: /* FALL THROUGH */
4375
case JSOP_SETLOCAL: op = JSOP_FORLOCAL; break;
4376
default: JS_ASSERT(0);
4379
pn3->pn_op = JSOP_FORNAME;
4380
if (!BindNameToSlot(cx, &cg->treeContext, pn3, JS_FALSE))
4384
if (pn3->pn_slot >= 0) {
4385
if (pn3->pn_attrs & JSPROP_READONLY) {
4386
JS_ASSERT(op == JSOP_FORVAR);
4389
atomIndex = (jsatomid) pn3->pn_slot;
4390
EMIT_UINT16_IMM_OP(op, atomIndex);
4392
if (!EmitAtomOp(cx, pn3, op, cg))
4399
if (!CheckSideEffects(cx, &cg->treeContext, pn3->pn_expr,
4404
if (!EmitPropOp(cx, pn3, JSOP_FORPROP, cg))
4410
#if JS_HAS_DESTRUCTURING
4415
#if JS_HAS_XML_SUPPORT
4418
#if JS_HAS_LVALUE_RETURN
4423
* We separate the first/next bytecode from the enumerator
4424
* variable binding to avoid any side-effects in the index
4425
* expression (e.g., for (x[i++] in {}) should not bind x[i]
4426
* or increment i at all).
4428
emitIFEQ = JS_FALSE;
4429
if (!js_Emit1(cx, cg, JSOP_FORELEM))
4433
* Emit a SRC_WHILE note with offset telling the distance to
4434
* the loop-closing jump (we can't reckon from the branch at
4435
* the top of the loop, because the loop-closing jump might
4436
* need to be an extended jump, independent of whether the
4437
* branch is short or long).
4439
noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4442
beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4446
#if JS_HAS_DESTRUCTURING
4447
if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
4448
if (!EmitDestructuringOps(cx, cg, op, pn3))
4450
if (js_Emit1(cx, cg, JSOP_POP) < 0)
4455
#if JS_HAS_LVALUE_RETURN
4456
if (pn3->pn_type == TOK_LP) {
4457
JS_ASSERT(pn3->pn_op == JSOP_SETCALL);
4458
if (!js_EmitTree(cx, cg, pn3))
4460
if (!js_Emit1(cx, cg, JSOP_ENUMELEM))
4465
#if JS_HAS_XML_SUPPORT
4466
if (pn3->pn_type == TOK_UNARYOP) {
4467
JS_ASSERT(pn3->pn_op == JSOP_BINDXMLNAME);
4468
if (!js_EmitTree(cx, cg, pn3))
4470
if (!js_Emit1(cx, cg, JSOP_ENUMELEM))
4476
/* Now that we're safely past the IFEQ, commit side effects. */
4477
if (!EmitElemOp(cx, pn3, JSOP_ENUMELEM, cg))
4486
/* Annotate so the decompiler can find the loop-closing jump. */
4487
noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
4491
/* Pop and test the loop condition generated by JSOP_FOR*. */
4492
beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4498
if (!pn2->pn_kid1) {
4499
/* No initializer: emit an annotated nop for the decompiler. */
4502
cg->treeContext.flags |= TCF_IN_FOR_INIT;
4503
#if JS_HAS_DESTRUCTURING
4505
if (pn3->pn_type == TOK_ASSIGN &&
4506
!MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4510
if (op == JSOP_POP) {
4511
if (!js_EmitTree(cx, cg, pn3))
4513
if (TOKEN_TYPE_IS_DECL(pn3->pn_type)) {
4515
* Check whether a destructuring-initialized var decl
4516
* was optimized to a group assignment. If so, we do
4517
* not need to emit a pop below, so switch to a nop,
4518
* just for the decompiler.
4520
JS_ASSERT(pn3->pn_arity == PN_LIST);
4521
if (pn3->pn_extra & PNX_GROUPINIT)
4525
cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
4527
noteIndex = js_NewSrcNote(cx, cg, SRC_FOR);
4528
if (noteIndex < 0 ||
4529
js_Emit1(cx, cg, op) < 0) {
4533
top = CG_OFFSET(cg);
4534
SET_STATEMENT_TOP(&stmtInfo, top);
4535
if (!pn2->pn_kid2) {
4536
/* No loop condition: flag this fact in the source notes. */
4537
if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, 0))
4540
if (!js_EmitTree(cx, cg, pn2->pn_kid2))
4542
if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
4543
CG_OFFSET(cg) - top)) {
4546
beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
4551
/* Set pn3 (used below) here to avoid spurious gcc warnings. */
4555
/* Emit code for the loop body. */
4556
if (!js_EmitTree(cx, cg, pn->pn_right))
4559
if (pn2->pn_type != TOK_IN) {
4560
/* Set the second note offset so we can find the update part. */
4561
JS_ASSERT(noteIndex != -1);
4562
if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
4563
CG_OFFSET(cg) - top)) {
4568
/* Set loop and enclosing "update" offsets, for continue. */
4571
stmt->update = CG_OFFSET(cg);
4572
} while ((stmt = stmt->down) != NULL &&
4573
stmt->type == STMT_LABEL);
4576
#if JS_HAS_DESTRUCTURING
4577
if (pn3->pn_type == TOK_ASSIGN &&
4578
!MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
4582
if (op == JSOP_POP) {
4583
if (!js_EmitTree(cx, cg, pn3))
4585
if (js_Emit1(cx, cg, op) < 0)
4589
/* Restore the absolute line number for source note readers. */
4590
off = (ptrdiff_t) pn->pn_pos.end.lineno;
4591
if (CG_CURRENT_LINE(cg) != (uintN) off) {
4592
if (js_NewSrcNote2(cx, cg, SRC_SETLINE, off) < 0)
4594
CG_CURRENT_LINE(cg) = (uintN) off;
4598
/* The third note offset helps us find the loop-closing jump. */
4599
if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 2,
4600
CG_OFFSET(cg) - top)) {
4605
/* Emit the loop-closing jump and fixup all jump offsets. */
4606
jmp = EmitJump(cx, cg, JSOP_GOTO, top - CG_OFFSET(cg));
4610
CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
4611
if (pn2->pn_type == TOK_IN) {
4612
/* Set the SRC_WHILE note offset so we can find the closing jump. */
4613
JS_ASSERT(noteIndex != -1);
4614
if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, jmp - beq))
4618
/* Now fixup all breaks and continues (before for/in's JSOP_ENDITER). */
4619
if (!js_PopStatementCG(cx, cg))
4622
if (pn2->pn_type == TOK_IN) {
4623
if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
4629
stmt = cg->treeContext.topStmt;
4632
ale = js_IndexAtom(cx, atom, &cg->atomList);
4635
while (stmt->type != STMT_LABEL || stmt->atom != atom)
4637
noteType = SRC_BREAK2LABEL;
4640
while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
4642
noteType = SRC_NULL;
4645
if (EmitGoto(cx, cg, stmt, &stmt->breaks, ale, noteType) < 0)
4650
stmt = cg->treeContext.topStmt;
4653
/* Find the loop statement enclosed by the matching label. */
4654
JSStmtInfo *loop = NULL;
4655
ale = js_IndexAtom(cx, atom, &cg->atomList);
4658
while (stmt->type != STMT_LABEL || stmt->atom != atom) {
4659
if (STMT_IS_LOOP(stmt))
4664
noteType = SRC_CONT2LABEL;
4667
while (!STMT_IS_LOOP(stmt))
4669
noteType = SRC_CONTINUE;
4672
if (EmitGoto(cx, cg, stmt, &stmt->continues, ale, noteType) < 0)
4677
if (!js_EmitTree(cx, cg, pn->pn_left))
4679
js_PushStatement(&cg->treeContext, &stmtInfo, STMT_WITH, CG_OFFSET(cg));
4680
if (js_Emit1(cx, cg, JSOP_ENTERWITH) < 0)
4682
if (!js_EmitTree(cx, cg, pn->pn_right))
4684
if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
4686
ok = js_PopStatementCG(cx, cg);
4691
ptrdiff_t start, end, catchJump, catchStart, finallyCatch;
4693
JSParseNode *lastCatch;
4695
catchJump = catchStart = finallyCatch = -1;
4698
* Push stmtInfo to track jumps-over-catches and gosubs-to-finally
4701
* When a finally block is 'active' (STMT_FINALLY on the treeContext),
4702
* non-local jumps (including jumps-over-catches) result in a GOSUB
4703
* being written into the bytecode stream and fixed-up later (c.f.
4704
* EmitBackPatchOp and BackPatch).
4706
js_PushStatement(&cg->treeContext, &stmtInfo,
4707
pn->pn_kid3 ? STMT_FINALLY : STMT_TRY,
4711
* About JSOP_SETSP: an exception can be thrown while the stack is in
4712
* an unbalanced state, and this imbalance causes problems with things
4713
* like function invocation later on.
4715
* To fix this, we compute the 'balanced' stack depth upon try entry,
4716
* and then restore the stack to this depth when we hit the first catch
4717
* or finally block. We can't just zero the stack, because things like
4718
* for/in and with that are active upon entry to the block keep state
4719
* variables on the stack.
4721
depth = cg->stackDepth;
4723
/* Mark try location for decompilation, then emit try block. */
4724
if (js_Emit1(cx, cg, JSOP_TRY) < 0)
4726
start = CG_OFFSET(cg);
4727
if (!js_EmitTree(cx, cg, pn->pn_kid1))
4730
/* GOSUB to finally, if present. */
4732
if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
4734
jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(stmtInfo));
4738
/* JSOP_RETSUB pops the return pc-index, balancing the stack. */
4739
cg->stackDepth = depth;
4742
/* Emit (hidden) jump over catch and/or finally. */
4743
if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
4745
jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
4749
end = CG_OFFSET(cg);
4751
/* If this try has a catch block, emit it. */
4755
jsint count = 0; /* previous catch block's population */
4760
* The emitted code for a catch block looks like:
4762
* [throwing] only if 2nd+ catch block
4763
* [leaveblock] only if 2nd+ catch block
4764
* enterblock with SRC_CATCH
4766
* [dup] only if catchguard
4767
* setlocalpop <slot> or destructuring code
4768
* [< catchguard code >] if there's a catchguard
4769
* [ifeq <offset to next catch block>] " "
4770
* [pop] only if catchguard
4771
* < catch block contents >
4773
* goto <end of catch blocks> non-local; finally applies
4775
* If there's no catch block without a catchguard, the last
4776
* <offset to next catch block> points to rethrow code. This
4777
* code will [gosub] to the finally code if appropriate, and is
4778
* also used for the catch-all trynote for capturing exceptions
4779
* thrown from catch{} blocks.
4781
for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
4782
ptrdiff_t guardJump, catchNote;
4784
guardJump = GUARDJUMP(stmtInfo);
4785
if (guardJump == -1) {
4786
/* Set stack to original depth (see SETSP comment above). */
4787
EMIT_UINT16_IMM_OP(JSOP_SETSP, (jsatomid)depth);
4788
cg->stackDepth = depth;
4790
/* Fix up and clean up previous catch block. */
4791
CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, guardJump);
4794
* Account for the pushed exception object that we still
4795
* have after the jumping from the previous guard.
4797
JS_ASSERT(cg->stackDepth == depth);
4798
cg->stackDepth = depth + 1;
4801
* Move exception back to cx->exception to prepare for
4802
* the next catch. We hide [throwing] from the decompiler
4803
* since it compensates for the hidden JSOP_DUP at the
4804
* start of the previous guarded catch.
4806
if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
4807
js_Emit1(cx, cg, JSOP_THROWING) < 0) {
4812
* Emit an unbalanced [leaveblock] for the previous catch,
4813
* whose block object count is saved below.
4815
if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
4817
JS_ASSERT(count >= 0);
4818
EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
4822
* Annotate the JSOP_ENTERBLOCK that's about to be generated
4823
* by the call to js_EmitTree immediately below. Save this
4824
* source note's index in stmtInfo for use by the TOK_CATCH:
4825
* case, where the length of the catch guard is set as the
4828
catchNote = js_NewSrcNote2(cx, cg, SRC_CATCH, 0);
4831
CATCHNOTE(stmtInfo) = catchNote;
4834
* Emit the lexical scope and catch body. Save the catch's
4835
* block object population via count, for use when targeting
4836
* guardJump at the next catch (the guard mismatch case).
4838
JS_ASSERT(pn3->pn_type == TOK_LEXICALSCOPE);
4839
count = OBJ_BLOCK_COUNT(cx, ATOM_TO_OBJECT(pn3->pn_atom));
4840
if (!js_EmitTree(cx, cg, pn3))
4843
/* gosub <finally>, if required */
4845
jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH,
4849
JS_ASSERT(cg->stackDepth == depth);
4853
* Jump over the remaining catch blocks. This will get fixed
4854
* up to jump to after catch/finally.
4856
if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
4858
jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
4863
* Save a pointer to the last catch node to handle try-finally
4864
* and try-catch(guard)-finally special cases.
4866
lastCatch = pn3->pn_expr;
4871
* Last catch guard jumps to the rethrow code sequence if none of the
4872
* guards match. Target guardJump at the beginning of the rethrow
4873
* sequence, just in case a guard expression throws and leaves the
4876
if (lastCatch && lastCatch->pn_kid2) {
4877
CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, GUARDJUMP(stmtInfo));
4879
/* Sync the stack to take into account pushed exception. */
4880
JS_ASSERT(cg->stackDepth == depth);
4881
cg->stackDepth = depth + 1;
4884
* Rethrow the exception, delegating executing of finally if any
4885
* to the exception handler.
4887
if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
4888
js_Emit1(cx, cg, JSOP_THROW) < 0) {
4893
JS_ASSERT(cg->stackDepth == depth);
4895
/* Emit finally handler if any. */
4898
* We emit [setsp][gosub] to call try-finally when an exception is
4899
* thrown from try or try-catch blocks. The [gosub] and [retsub]
4900
* opcodes will take care of stacking and rethrowing any exception
4901
* pending across the finally.
4903
finallyCatch = CG_OFFSET(cg);
4904
EMIT_UINT16_IMM_OP(JSOP_SETSP, (jsatomid)depth);
4906
jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH,
4911
JS_ASSERT(cg->stackDepth == depth);
4912
JS_ASSERT((uintN)depth <= cg->maxStackDepth);
4915
* Fix up the gosubs that might have been emitted before non-local
4916
* jumps to the finally code.
4918
if (!BackPatch(cx, cg, GOSUBS(stmtInfo), CG_NEXT(cg), JSOP_GOSUB))
4922
* The stack budget must be balanced at this point. All [gosub]
4923
* calls emitted before this point will push two stack slots, one
4924
* for the pending exception (or JSVAL_HOLE if there is no pending
4925
* exception) and one for the [retsub] pc-index.
4927
JS_ASSERT(cg->stackDepth == depth);
4928
cg->stackDepth += 2;
4929
if ((uintN)cg->stackDepth > cg->maxStackDepth)
4930
cg->maxStackDepth = cg->stackDepth;
4932
/* Now indicate that we're emitting a subroutine body. */
4933
stmtInfo.type = STMT_SUBROUTINE;
4934
if (!UpdateLineNumberNotes(cx, cg, pn->pn_kid3))
4936
if (js_Emit1(cx, cg, JSOP_FINALLY) < 0 ||
4937
!js_EmitTree(cx, cg, pn->pn_kid3) ||
4938
js_Emit1(cx, cg, JSOP_RETSUB) < 0) {
4942
/* Restore stack depth budget to its balanced state. */
4943
JS_ASSERT(cg->stackDepth == depth + 2);
4944
cg->stackDepth = depth;
4946
if (!js_PopStatementCG(cx, cg))
4949
if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
4950
js_Emit1(cx, cg, JSOP_NOP) < 0) {
4954
/* Fix up the end-of-try/catch jumps to come here. */
4955
if (!BackPatch(cx, cg, catchJump, CG_NEXT(cg), JSOP_GOTO))
4959
* Add the try note last, to let post-order give us the right ordering
4960
* (first to last for a given nesting level, inner to outer by level).
4963
JS_ASSERT(end != -1 && catchStart != -1);
4964
if (!js_NewTryNote(cx, cg, start, end, catchStart))
4969
* If we've got a finally, mark try+catch region with additional
4970
* trynote to catch exceptions (re)thrown from a catch block or
4971
* for the try{}finally{} case.
4974
JS_ASSERT(finallyCatch != -1);
4975
if (!js_NewTryNote(cx, cg, start, finallyCatch, finallyCatch))
4983
ptrdiff_t catchStart, guardJump;
4986
* Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
4987
* and save the block object atom.
4989
stmt = cg->treeContext.topStmt;
4990
JS_ASSERT(stmt->type == STMT_BLOCK && (stmt->flags & SIF_SCOPE));
4991
stmt->type = STMT_CATCH;
4992
catchStart = stmt->update;
4995
/* Go up one statement info record to the TRY or FINALLY record. */
4997
JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);
4999
/* Pick up the pending exception and bind it to the catch variable. */
5000
if (js_Emit1(cx, cg, JSOP_EXCEPTION) < 0)
5004
* Dup the exception object if there is a guard for rethrowing to use
5005
* it later when rethrowing or in other catches.
5008
if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5009
js_Emit1(cx, cg, JSOP_DUP) < 0) {
5015
switch (pn2->pn_type) {
5016
#if JS_HAS_DESTRUCTURING
5019
if (!EmitDestructuringOps(cx, cg, JSOP_NOP, pn2))
5021
if (js_Emit1(cx, cg, JSOP_POP) < 0)
5027
/* Inline BindNameToSlot, adding block depth to pn2->pn_slot. */
5028
pn2->pn_slot += OBJ_BLOCK_DEPTH(cx, ATOM_TO_OBJECT(atom));
5029
EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, pn2->pn_slot);
5036
/* Emit the guard expression, if there is one. */
5038
if (!js_EmitTree(cx, cg, pn->pn_kid2))
5040
if (!js_SetSrcNoteOffset(cx, cg, CATCHNOTE(*stmt), 0,
5041
CG_OFFSET(cg) - catchStart)) {
5044
/* ifeq <next block> */
5045
guardJump = EmitJump(cx, cg, JSOP_IFEQ, 0);
5048
GUARDJUMP(*stmt) = guardJump;
5050
/* Pop duplicated exception object as we no longer need it. */
5051
if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
5052
js_Emit1(cx, cg, JSOP_POP) < 0) {
5057
/* Emit the catch body. */
5058
if (!js_EmitTree(cx, cg, pn->pn_kid3))
5062
* Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
5063
* our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
5065
off = cg->stackDepth;
5066
if (js_NewSrcNote2(cx, cg, SRC_CATCH, off) < 0)
5072
if (!EmitVariables(cx, cg, pn, JS_FALSE, ¬eIndex))
5077
/* Push a return value */
5080
if (!js_EmitTree(cx, cg, pn2))
5083
if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5088
* EmitNonLocalJumpFixup mutates op to JSOP_RETRVAL after emitting a
5089
* JSOP_SETRVAL if there are open try blocks having finally clauses.
5090
* We can't simply transfer control flow to our caller in that case,
5091
* because we must gosub to those clauses from inner to outer, with
5092
* the correct stack pointer (i.e., after popping any with, for/in,
5093
* etc., slots nested inside the finally's try).
5096
if (!EmitNonLocalJumpFixup(cx, cg, NULL, &op))
5098
if (js_Emit1(cx, cg, op) < 0)
5102
#if JS_HAS_GENERATORS
5105
if (!js_EmitTree(cx, cg, pn->pn_kid))
5108
if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
5111
if (js_Emit1(cx, cg, JSOP_YIELD) < 0)
5117
#if JS_HAS_XML_SUPPORT
5118
if (pn->pn_arity == PN_UNARY) {
5119
if (!js_EmitTree(cx, cg, pn->pn_kid))
5121
if (js_Emit1(cx, cg, pn->pn_op) < 0)
5127
JS_ASSERT(pn->pn_arity == PN_LIST);
5130
tmp = CG_OFFSET(cg);
5131
if (pn->pn_extra & PNX_NEEDBRACES) {
5132
noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
5133
if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
5137
js_PushStatement(&cg->treeContext, &stmtInfo, STMT_BLOCK, top);
5138
for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
5139
if (!js_EmitTree(cx, cg, pn2))
5143
if (noteIndex >= 0 &&
5144
!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5145
CG_OFFSET(cg) - tmp)) {
5149
ok = js_PopStatementCG(cx, cg);
5153
JS_ASSERT(pn->pn_arity == PN_LIST);
5154
js_PushStatement(&cg->treeContext, &stmtInfo, STMT_BODY, top);
5155
for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
5156
if (!js_EmitTree(cx, cg, pn2))
5159
ok = js_PopStatementCG(cx, cg);
5166
* Top-level or called-from-a-native JS_Execute/EvaluateScript,
5167
* debugger, and eval frames may need the value of the ultimate
5168
* expression statement as the script's result, despite the fact
5169
* that it appears useless to the compiler.
5171
useful = wantval = !cx->fp->fun ||
5172
!FUN_INTERPRETED(cx->fp->fun) ||
5173
(cx->fp->flags & JSFRAME_SPECIAL);
5175
if (!CheckSideEffects(cx, &cg->treeContext, pn2, &useful))
5180
* Don't eliminate apparently useless expressions if they are
5181
* labeled expression statements. The tc->topStmt->update test
5182
* catches the case where we are nesting in js_EmitTree for a
5183
* labeled compound statement.
5186
(!cg->treeContext.topStmt ||
5187
cg->treeContext.topStmt->type != STMT_LABEL ||
5188
cg->treeContext.topStmt->update < CG_OFFSET(cg))) {
5189
CG_CURRENT_LINE(cg) = pn2->pn_pos.begin.lineno;
5190
if (!js_ReportCompileErrorNumber(cx, cg,
5194
JSMSG_USELESS_EXPR)) {
5198
op = wantval ? JSOP_POPV : JSOP_POP;
5199
#if JS_HAS_DESTRUCTURING
5201
pn2->pn_type == TOK_ASSIGN &&
5202
!MaybeEmitGroupAssignment(cx, cg, op, pn2, &op)) {
5206
if (op != JSOP_NOP) {
5207
if (!js_EmitTree(cx, cg, pn2))
5209
if (js_Emit1(cx, cg, op) < 0)
5217
/* Emit an annotated nop so we know to decompile a label. */
5219
ale = js_IndexAtom(cx, atom, &cg->atomList);
5223
noteType = (pn2->pn_type == TOK_LC ||
5224
(pn2->pn_type == TOK_LEXICALSCOPE &&
5225
pn2->pn_expr->pn_type == TOK_LC))
5228
noteIndex = js_NewSrcNote2(cx, cg, noteType,
5229
(ptrdiff_t) ALE_INDEX(ale));
5230
if (noteIndex < 0 ||
5231
js_Emit1(cx, cg, JSOP_NOP) < 0) {
5235
/* Emit code for the labeled statement. */
5236
js_PushStatement(&cg->treeContext, &stmtInfo, STMT_LABEL,
5238
stmtInfo.atom = atom;
5239
if (!js_EmitTree(cx, cg, pn2))
5241
if (!js_PopStatementCG(cx, cg))
5244
/* If the statement was compound, emit a note for the end brace. */
5245
if (noteType == SRC_LABELBRACE) {
5246
if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
5247
js_Emit1(cx, cg, JSOP_NOP) < 0) {
5255
* Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
5256
* These notes help the decompiler bracket the bytecodes generated
5257
* from each sub-expression that follows a comma.
5259
off = noteIndex = -1;
5260
for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
5261
if (!js_EmitTree(cx, cg, pn2))
5263
tmp = CG_OFFSET(cg);
5264
if (noteIndex >= 0) {
5265
if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
5271
noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
5272
if (noteIndex < 0 ||
5273
js_Emit1(cx, cg, JSOP_POP) < 0) {
5281
* Check left operand type and generate specialized code for it.
5282
* Specialize to avoid ECMA "reference type" values on the operand
5283
* stack, which impose pervasive runtime "GetValue" costs.
5286
JS_ASSERT(pn2->pn_type != TOK_RP);
5287
atomIndex = (jsatomid) -1;
5288
switch (pn2->pn_type) {
5290
if (!BindNameToSlot(cx, &cg->treeContext, pn2, JS_FALSE))
5292
if (pn2->pn_slot >= 0) {
5293
atomIndex = (jsatomid) pn2->pn_slot;
5295
ale = js_IndexAtom(cx, pn2->pn_atom, &cg->atomList);
5298
atomIndex = ALE_INDEX(ale);
5299
EMIT_ATOM_INDEX_OP(JSOP_BINDNAME, atomIndex);
5303
if (!js_EmitTree(cx, cg, pn2->pn_expr))
5305
ale = js_IndexAtom(cx, pn2->pn_atom, &cg->atomList);
5308
atomIndex = ALE_INDEX(ale);
5311
JS_ASSERT(pn2->pn_arity == PN_BINARY);
5312
if (!js_EmitTree(cx, cg, pn2->pn_left))
5314
if (!js_EmitTree(cx, cg, pn2->pn_right))
5317
#if JS_HAS_DESTRUCTURING
5322
#if JS_HAS_LVALUE_RETURN
5324
if (!js_EmitTree(cx, cg, pn2))
5328
#if JS_HAS_XML_SUPPORT
5330
JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
5331
if (!js_EmitTree(cx, cg, pn2->pn_kid))
5333
if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
5342
#if JS_HAS_GETTER_SETTER
5343
if (op == JSOP_GETTER || op == JSOP_SETTER) {
5344
/* We'll emit these prefix bytecodes after emitting the r.h.s. */
5345
if (atomIndex != (jsatomid) -1 && atomIndex >= JS_BIT(16)) {
5346
ReportStatementTooLarge(cx, cg);
5351
/* If += or similar, dup the left operand and get its value. */
5352
if (op != JSOP_NOP) {
5353
switch (pn2->pn_type) {
5355
if (pn2->pn_op != JSOP_SETNAME) {
5356
EMIT_UINT16_IMM_OP((pn2->pn_op == JSOP_SETGVAR)
5358
: (pn2->pn_op == JSOP_SETARG)
5360
: (pn2->pn_op == JSOP_SETLOCAL)
5368
if (js_Emit1(cx, cg, JSOP_DUP) < 0)
5370
EMIT_ATOM_INDEX_OP((pn2->pn_type == TOK_NAME)
5376
#if JS_HAS_LVALUE_RETURN
5379
#if JS_HAS_XML_SUPPORT
5382
if (js_Emit1(cx, cg, JSOP_DUP2) < 0)
5384
if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
5391
/* Now emit the right operand (it may affect the namespace). */
5392
if (!js_EmitTree(cx, cg, pn->pn_right))
5395
/* If += etc., emit the binary operator with a decompiler note. */
5396
if (op != JSOP_NOP) {
5398
* Take care to avoid SRC_ASSIGNOP if the left-hand side is a
5399
* const declared in a function (i.e., with non-negative pn_slot
5400
* and JSPROP_READONLY in pn_attrs), as in this case (just a bit
5401
* further below) we will avoid emitting the assignment op.
5403
if (pn2->pn_type != TOK_NAME ||
5405
!(pn2->pn_attrs & JSPROP_READONLY)) {
5406
if (js_NewSrcNote(cx, cg, SRC_ASSIGNOP) < 0)
5409
if (js_Emit1(cx, cg, op) < 0)
5413
/* Left parts such as a.b.c and a[b].c need a decompiler note. */
5414
if (pn2->pn_type != TOK_NAME &&
5415
#if JS_HAS_DESTRUCTURING
5416
pn2->pn_type != TOK_RB &&
5417
pn2->pn_type != TOK_RC &&
5419
js_NewSrcNote2(cx, cg, SrcNoteForPropOp(pn2, pn2->pn_op),
5420
CG_OFFSET(cg) - top) < 0) {
5424
/* Finally, emit the specialized assignment bytecode. */
5425
switch (pn2->pn_type) {
5427
if (pn2->pn_slot < 0 || !(pn2->pn_attrs & JSPROP_READONLY)) {
5428
if (pn2->pn_slot >= 0) {
5429
EMIT_UINT16_IMM_OP(pn2->pn_op, atomIndex);
5432
EMIT_ATOM_INDEX_OP(pn2->pn_op, atomIndex);
5437
#if JS_HAS_LVALUE_RETURN
5440
if (js_Emit1(cx, cg, JSOP_SETELEM) < 0)
5443
#if JS_HAS_DESTRUCTURING
5446
if (!EmitDestructuringOps(cx, cg, JSOP_SETNAME, pn2))
5450
#if JS_HAS_XML_SUPPORT
5452
if (js_Emit1(cx, cg, JSOP_SETXMLNAME) < 0)
5462
/* Emit the condition, then branch if false to the else part. */
5463
if (!js_EmitTree(cx, cg, pn->pn_kid1))
5465
noteIndex = js_NewSrcNote(cx, cg, SRC_COND);
5468
beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
5469
if (beq < 0 || !js_EmitTree(cx, cg, pn->pn_kid2))
5472
/* Jump around else, fixup the branch, emit else, fixup jump. */
5473
jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
5476
CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
5479
* Because each branch pushes a single value, but our stack budgeting
5480
* analysis ignores branches, we now have to adjust cg->stackDepth to
5481
* ignore the value pushed by the first branch. Execution will follow
5482
* only one path, so we must decrement cg->stackDepth.
5484
* Failing to do this will foil code, such as the try/catch/finally
5485
* exception handling code generator, that samples cg->stackDepth for
5486
* use at runtime (JSOP_SETSP), or in let expression and block code
5487
* generation, which must use the stack depth to compute local stack
5488
* indexes correctly.
5490
JS_ASSERT(cg->stackDepth > 0);
5492
if (!js_EmitTree(cx, cg, pn->pn_kid3))
5494
CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
5495
if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
5502
* JSOP_OR converts the operand on the stack to boolean, and if true,
5503
* leaves the original operand value on the stack and jumps; otherwise
5504
* it pops and falls into the next bytecode, which evaluates the right
5505
* operand. The jump goes around the right operand evaluation.
5507
* JSOP_AND converts the operand on the stack to boolean, and if false,
5508
* leaves the original operand value on the stack and jumps; otherwise
5509
* it pops and falls into the right operand's bytecode.
5511
* Avoid tail recursion for long ||...|| expressions and long &&...&&
5512
* expressions or long mixtures of ||'s and &&'s that can easily blow
5513
* the stack, by forward-linking and then backpatching all the JSOP_OR
5514
* and JSOP_AND bytecodes' immediate jump-offset operands.
5517
if (!js_EmitTree(cx, cg, pn->pn_left))
5519
top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5524
while (pn2->pn_type == TOK_OR || pn2->pn_type == TOK_AND) {
5526
if (!js_EmitTree(cx, cg, pn->pn_left))
5528
off = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
5531
if (!SetBackPatchDelta(cx, cg, CG_CODE(cg, jmp), off - jmp))
5536
if (!js_EmitTree(cx, cg, pn2))
5538
off = CG_OFFSET(cg);
5540
pc = CG_CODE(cg, top);
5541
tmp = GetJumpOffset(cg, pc);
5542
CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
5545
} while ((pn3 = pn3->pn_right) != pn2);
5554
case TOK_INSTANCEOF:
5560
if (pn->pn_arity == PN_LIST) {
5561
/* Left-associative operator chain: avoid too much recursion. */
5563
if (!js_EmitTree(cx, cg, pn2))
5566
while ((pn2 = pn2->pn_next) != NULL) {
5567
if (!js_EmitTree(cx, cg, pn2))
5569
if (js_Emit1(cx, cg, op) < 0)
5573
#if JS_HAS_XML_SUPPORT
5577
if (pn->pn_arity == PN_NAME) {
5578
if (!js_EmitTree(cx, cg, pn->pn_expr))
5580
if (!EmitAtomOp(cx, pn, pn->pn_op, cg))
5586
* Binary :: has a right operand that brackets arbitrary code,
5587
* possibly including a let (a = b) ... expression. We must clear
5588
* TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
5590
oldflags = cg->treeContext.flags;
5591
cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
5594
/* Binary operators that evaluate both operands unconditionally. */
5595
if (!js_EmitTree(cx, cg, pn->pn_left))
5597
if (!js_EmitTree(cx, cg, pn->pn_right))
5599
#if JS_HAS_XML_SUPPORT
5600
cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
5602
if (js_Emit1(cx, cg, pn->pn_op) < 0)
5608
#if JS_HAS_XML_SUPPORT
5611
JS_ASSERT(pn->pn_arity == PN_UNARY);
5618
/* Unary op, including unary +/-. */
5621
if (op == JSOP_TYPEOF) {
5622
for (pn3 = pn2; pn3->pn_type == TOK_RP; pn3 = pn3->pn_kid)
5624
if (pn3->pn_type != TOK_NAME)
5625
op = JSOP_TYPEOFEXPR;
5627
oldflags = cg->treeContext.flags;
5628
cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
5629
if (!js_EmitTree(cx, cg, pn2))
5631
cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
5632
#if JS_HAS_XML_SUPPORT
5633
if (op == JSOP_XMLNAME &&
5634
js_NewSrcNote2(cx, cg, SRC_PCBASE,
5635
CG_OFFSET(cg) - pn2->pn_offset) < 0) {
5639
if (js_Emit1(cx, cg, op) < 0)
5649
/* Emit lvalue-specialized code for ++/-- operators. */
5651
JS_ASSERT(pn2->pn_type != TOK_RP);
5653
depth = cg->stackDepth;
5654
switch (pn2->pn_type) {
5657
if (!BindNameToSlot(cx, &cg->treeContext, pn2, JS_FALSE))
5660
if (pn2->pn_slot >= 0) {
5661
if (pn2->pn_attrs & JSPROP_READONLY) {
5662
/* Incrementing a declared const: just get its value. */
5663
op = ((js_CodeSpec[op].format & JOF_TYPEMASK) == JOF_CONST)
5667
atomIndex = (jsatomid) pn2->pn_slot;
5668
EMIT_UINT16_IMM_OP(op, atomIndex);
5670
if (!EmitAtomOp(cx, pn2, op, cg))
5675
if (!EmitPropOp(cx, pn2, op, cg))
5680
if (!EmitElemOp(cx, pn2, op, cg))
5684
#if JS_HAS_LVALUE_RETURN
5686
if (!js_EmitTree(cx, cg, pn2))
5688
depth = cg->stackDepth;
5689
if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
5690
CG_OFFSET(cg) - pn2->pn_offset) < 0) {
5693
if (js_Emit1(cx, cg, op) < 0)
5697
#if JS_HAS_XML_SUPPORT
5699
JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
5700
if (!js_EmitTree(cx, cg, pn2->pn_kid))
5702
if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
5704
depth = cg->stackDepth;
5705
if (js_Emit1(cx, cg, op) < 0)
5714
* Allocate another stack slot for GC protection in case the initial
5715
* value being post-incremented or -decremented is not a number, but
5716
* converts to a jsdouble. In the TOK_NAME cases, op has 0 operand
5717
* uses and 1 definition, so we don't need an extra stack slot -- we
5718
* can use the one allocated for the def.
5720
if (pn2->pn_type != TOK_NAME &&
5721
(js_CodeSpec[op].format & JOF_POST) &&
5722
(uintN)depth == cg->maxStackDepth) {
5723
++cg->maxStackDepth;
5730
* Under ECMA 3, deleting a non-reference returns true -- but alas we
5731
* must evaluate the operand if it appears it might have side effects.
5734
switch (pn2->pn_type) {
5736
pn2->pn_op = JSOP_DELNAME;
5737
if (!BindNameToSlot(cx, &cg->treeContext, pn2, JS_FALSE))
5740
if (op == JSOP_FALSE) {
5741
if (js_Emit1(cx, cg, op) < 0)
5744
if (!EmitAtomOp(cx, pn2, op, cg))
5749
if (!EmitPropOp(cx, pn2, JSOP_DELPROP, cg))
5752
#if JS_HAS_XML_SUPPORT
5754
if (!EmitElemOp(cx, pn2, JSOP_DELDESC, cg))
5758
#if JS_HAS_LVALUE_RETURN
5760
if (pn2->pn_op != JSOP_SETCALL) {
5761
JS_ASSERT(pn2->pn_op == JSOP_CALL || pn2->pn_op == JSOP_EVAL);
5762
pn2->pn_op = JSOP_SETCALL;
5764
top = CG_OFFSET(cg);
5765
if (!js_EmitTree(cx, cg, pn2))
5767
if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
5769
if (js_Emit1(cx, cg, JSOP_DELELEM) < 0)
5774
if (!EmitElemOp(cx, pn2, JSOP_DELELEM, cg))
5779
* If useless, just emit JSOP_TRUE; otherwise convert delete foo()
5780
* to foo(), true (a comma expression, requiring SRC_PCDELTA).
5783
if (!CheckSideEffects(cx, &cg->treeContext, pn2, &useful))
5786
off = noteIndex = -1;
5788
if (!js_EmitTree(cx, cg, pn2))
5790
off = CG_OFFSET(cg);
5791
noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
5792
if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
5795
if (js_Emit1(cx, cg, JSOP_TRUE) < 0)
5797
if (noteIndex >= 0) {
5798
tmp = CG_OFFSET(cg);
5799
if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
5805
#if JS_HAS_XML_SUPPORT
5807
if (!js_EmitTree(cx, cg, pn->pn_left))
5809
jmp = js_Emit3(cx, cg, JSOP_FILTER, 0, 0);
5812
if (!js_EmitTree(cx, cg, pn->pn_right))
5814
if (js_Emit1(cx, cg, JSOP_ENDFILTER) < 0)
5816
CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
5822
* Pop a stack operand, convert it to object, get a property named by
5823
* this bytecode's immediate-indexed atom operand, and push its value
5824
* (not a reference to it). This bytecode sets the virtual machine's
5825
* "obj" register to the left operand's ToObject conversion result,
5826
* for use by JSOP_PUSHOBJ.
5828
ok = EmitPropOp(cx, pn, pn->pn_op, cg);
5832
#if JS_HAS_XML_SUPPORT
5836
* Pop two operands, convert the left one to object and the right one
5837
* to property name (atom or tagged int), get the named property, and
5838
* push its value. Set the "obj" register to the result of ToObject
5839
* on the left operand.
5841
ok = EmitElemOp(cx, pn, pn->pn_op, cg);
5850
* Emit function call or operator new (constructor call) code.
5851
* First, emit code for the left operand to evaluate the callable or
5852
* constructable object expression.
5854
* For E4X, if this expression is a dotted member reference, select
5855
* JSOP_GETMETHOD instead of JSOP_GETPROP. ECMA-357 separates XML
5856
* method lookup from the normal property id lookup done for native
5860
#if JS_HAS_XML_SUPPORT
5861
if (pn2->pn_type == TOK_DOT && pn2->pn_op != JSOP_GETMETHOD) {
5862
JS_ASSERT(pn2->pn_op == JSOP_GETPROP);
5863
pn2->pn_op = JSOP_GETMETHOD;
5864
pn2->pn_attrs |= JSPROP_IMPLICIT_FUNCTION_NAMESPACE;
5867
if (!js_EmitTree(cx, cg, pn2))
5871
* Push the virtual machine's "obj" register, which was set by a
5872
* name, property, or element get (or set) bytecode.
5874
if (js_Emit1(cx, cg, JSOP_PUSHOBJ) < 0)
5877
/* Remember start of callable-object bytecode for decompilation hint. */
5881
* Emit code for each argument in order, then emit the JSOP_*CALL or
5882
* JSOP_NEW bytecode with a two-byte immediate telling how many args
5883
* were pushed on the operand stack.
5885
oldflags = cg->treeContext.flags;
5886
cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
5887
for (pn2 = pn2->pn_next; pn2; pn2 = pn2->pn_next) {
5888
if (!js_EmitTree(cx, cg, pn2))
5891
cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
5892
if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - off) < 0)
5895
argc = pn->pn_count - 1;
5896
if (js_Emit3(cx, cg, pn->pn_op, ARGC_HI(argc), ARGC_LO(argc)) < 0)
5901
case TOK_LEXICALSCOPE:
5907
obj = ATOM_TO_OBJECT(atom);
5908
js_PushBlockScope(&cg->treeContext, &stmtInfo, atom, CG_OFFSET(cg));
5910
OBJ_SET_BLOCK_DEPTH(cx, obj, cg->stackDepth);
5911
count = OBJ_BLOCK_COUNT(cx, obj);
5912
cg->stackDepth += count;
5913
if ((uintN)cg->stackDepth > cg->maxStackDepth)
5914
cg->maxStackDepth = cg->stackDepth;
5917
* If this lexical scope is not for a catch block, let block or let
5918
* expression, or any kind of for loop (where the scope starts in the
5919
* head after the first part if for (;;), else in the body if for-in);
5920
* and if our container is top-level but not a function body, or else
5921
* a block statement; then emit a SRC_BRACE note. All other container
5922
* statements get braces by default from the decompiler.
5925
type = pn->pn_expr->pn_type;
5926
if (type != TOK_CATCH && type != TOK_LET && type != TOK_FOR &&
5927
(!(stmt = stmtInfo.down)
5928
? !(cg->treeContext.flags & TCF_IN_FUNCTION)
5929
: stmt->type == STMT_BLOCK)) {
5930
#if defined DEBUG_brendan || defined DEBUG_mrbkap
5931
/* There must be no source note already output for the next op. */
5932
JS_ASSERT(CG_NOTE_COUNT(cg) == 0 ||
5933
CG_LAST_NOTE_OFFSET(cg) != CG_OFFSET(cg) ||
5934
!GettableNoteForNextOp(cg));
5936
noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
5941
ale = js_IndexAtom(cx, atom, &cg->atomList);
5944
JS_ASSERT(CG_OFFSET(cg) == top);
5945
EMIT_ATOM_INDEX_OP(JSOP_ENTERBLOCK, ALE_INDEX(ale));
5947
if (!js_EmitTree(cx, cg, pn->pn_expr))
5951
if (op == JSOP_LEAVEBLOCKEXPR) {
5952
if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
5955
if (noteIndex >= 0 &&
5956
!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5957
CG_OFFSET(cg) - top)) {
5962
/* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
5963
EMIT_UINT16_IMM_OP(op, count);
5964
cg->stackDepth -= count;
5966
ok = js_PopStatementCG(cx, cg);
5970
#if JS_HAS_BLOCK_SCOPE
5972
/* Let statements have their variable declarations on the left. */
5973
if (pn->pn_arity == PN_BINARY) {
5980
/* Non-null pn2 means that pn is the variable list from a let head. */
5981
JS_ASSERT(pn->pn_arity == PN_LIST);
5982
if (!EmitVariables(cx, cg, pn, pn2 != NULL, ¬eIndex))
5985
/* Thus non-null pn2 is the body of the let block or expression. */
5986
tmp = CG_OFFSET(cg);
5987
if (pn2 && !js_EmitTree(cx, cg, pn2))
5990
if (noteIndex >= 0 &&
5991
!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
5992
CG_OFFSET(cg) - tmp)) {
5996
#endif /* JS_HAS_BLOCK_SCOPE */
5998
#if JS_HAS_GENERATORS
6001
* The array object's stack index is in cg->arrayCompSlot. See below
6002
* under the array initialiser code generator for array comprehension
6005
if (!js_EmitTree(cx, cg, pn->pn_kid))
6007
EMIT_UINT16_IMM_OP(pn->pn_op, cg->arrayCompSlot);
6012
#if JS_HAS_GENERATORS
6016
* Emit code for [a, b, c] of the form:
6017
* t = new Array; t[0] = a; t[1] = b; t[2] = c; t;
6018
* but use a stack slot for t and avoid dup'ing and popping it via
6019
* the JSOP_NEWINIT and JSOP_INITELEM bytecodes.
6021
ale = js_IndexAtom(cx, CLASS_ATOM(cx, Array), &cg->atomList);
6024
EMIT_ATOM_INDEX_OP(JSOP_NAME, ALE_INDEX(ale));
6025
if (js_Emit1(cx, cg, JSOP_PUSHOBJ) < 0)
6027
if (js_Emit1(cx, cg, JSOP_NEWINIT) < 0)
6031
#if JS_HAS_SHARP_VARS
6032
if (pn2 && pn2->pn_type == TOK_DEFSHARP) {
6033
EMIT_UINT16_IMM_OP(JSOP_DEFSHARP, (jsatomid)pn2->pn_num);
6038
#if JS_HAS_GENERATORS
6039
if (pn->pn_type == TOK_ARRAYCOMP) {
6043
* Pass the new array's stack index to the TOK_ARRAYPUSH case by
6044
* storing it in pn->pn_extra, then simply traverse the TOK_FOR
6045
* node and its kids under pn2 to generate this comprehension.
6047
JS_ASSERT(cg->stackDepth > 0);
6048
saveSlot = cg->arrayCompSlot;
6049
cg->arrayCompSlot = (uint32) (cg->stackDepth - 1);
6050
if (!js_EmitTree(cx, cg, pn2))
6052
cg->arrayCompSlot = saveSlot;
6054
/* Emit the usual op needed for decompilation. */
6055
if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
6059
#endif /* JS_HAS_GENERATORS */
6061
for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
6062
if (!EmitNumberOp(cx, atomIndex, cg))
6065
/* FIXME 260106: holes in a sparse initializer are void-filled. */
6066
if (pn2->pn_type == TOK_COMMA) {
6067
if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
6070
if (!js_EmitTree(cx, cg, pn2))
6074
if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6078
if (pn->pn_extra & PNX_ENDCOMMA) {
6079
/* Emit a source note so we know to decompile an extra comma. */
6080
if (js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
6084
/* Emit an op for sharp array cleanup and decompilation. */
6085
if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
6091
* Emit code for {p:a, '%q':b, 2:c} of the form:
6092
* t = new Object; t.p = a; t['%q'] = b; t[2] = c; t;
6093
* but use a stack slot for t and avoid dup'ing and popping it via
6094
* the JSOP_NEWINIT and JSOP_INITELEM bytecodes.
6096
ale = js_IndexAtom(cx, CLASS_ATOM(cx, Object), &cg->atomList);
6099
EMIT_ATOM_INDEX_OP(JSOP_NAME, ALE_INDEX(ale));
6101
if (js_Emit1(cx, cg, JSOP_PUSHOBJ) < 0)
6103
if (js_Emit1(cx, cg, JSOP_NEWINIT) < 0)
6107
#if JS_HAS_SHARP_VARS
6108
if (pn2 && pn2->pn_type == TOK_DEFSHARP) {
6109
EMIT_UINT16_IMM_OP(JSOP_DEFSHARP, (jsatomid)pn2->pn_num);
6114
for (; pn2; pn2 = pn2->pn_next) {
6115
/* Emit an index for t[2], else map an atom for t.p or t['%q']. */
6117
switch (pn3->pn_type) {
6119
if (!EmitNumberOp(cx, pn3->pn_dval, cg))
6124
ale = js_IndexAtom(cx, pn3->pn_atom, &cg->atomList);
6132
/* Emit code for the property initializer. */
6133
if (!js_EmitTree(cx, cg, pn2->pn_right))
6136
#if JS_HAS_GETTER_SETTER
6138
if (op == JSOP_GETTER || op == JSOP_SETTER) {
6139
if (pn3->pn_type != TOK_NUMBER &&
6140
ALE_INDEX(ale) >= JS_BIT(16)) {
6141
ReportStatementTooLarge(cx, cg);
6144
if (js_Emit1(cx, cg, op) < 0)
6148
/* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
6149
if (pn3->pn_type == TOK_NUMBER) {
6150
if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
6152
if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
6155
EMIT_ATOM_INDEX_OP(JSOP_INITPROP, ALE_INDEX(ale));
6159
/* Emit an op for sharpArray cleanup and decompilation. */
6160
if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
6164
#if JS_HAS_SHARP_VARS
6166
if (!js_EmitTree(cx, cg, pn->pn_kid))
6168
EMIT_UINT16_IMM_OP(JSOP_DEFSHARP, (jsatomid) pn->pn_num);
6172
EMIT_UINT16_IMM_OP(JSOP_USESHARP, (jsatomid) pn->pn_num);
6174
#endif /* JS_HAS_SHARP_VARS */
6181
* The node for (e) has e as its kid, enabling users who want to nest
6182
* assignment expressions in conditions to avoid the error correction
6183
* done by Condition (from x = y to x == y) by double-parenthesizing.
6185
oldflags = cg->treeContext.flags;
6186
cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
6187
if (!js_EmitTree(cx, cg, pn->pn_kid))
6189
cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
6190
if (js_Emit1(cx, cg, JSOP_GROUP) < 0)
6196
if (!BindNameToSlot(cx, &cg->treeContext, pn, JS_FALSE))
6199
if (op == JSOP_ARGUMENTS) {
6200
if (js_Emit1(cx, cg, op) < 0)
6204
if (pn->pn_slot >= 0) {
6205
atomIndex = (jsatomid) pn->pn_slot;
6206
EMIT_UINT16_IMM_OP(op, atomIndex);
6211
#if JS_HAS_XML_SUPPORT
6216
case TOK_XMLCOMMENT:
6221
* The scanner and parser associate JSOP_NAME with TOK_NAME, although
6222
* other bytecodes may result instead (JSOP_BINDNAME/JSOP_SETNAME,
6223
* JSOP_FORNAME, etc.). Among JSOP_*NAME* variants, only JSOP_NAME
6224
* may generate the first operand of a call or new expression, so only
6225
* it sets the "obj" virtual machine register to the object along the
6226
* scope chain in which the name was found.
6228
* Token types for STRING and OBJECT have corresponding bytecode ops
6229
* in pn_op and emit the same format as NAME, so they share this code.
6231
ok = EmitAtomOp(cx, pn, pn->pn_op, cg);
6235
ok = EmitNumberOp(cx, pn->pn_dval, cg);
6238
#if JS_HAS_XML_SUPPORT
6242
if (js_Emit1(cx, cg, pn->pn_op) < 0)
6246
#if JS_HAS_DEBUGGER_KEYWORD
6248
if (js_Emit1(cx, cg, JSOP_DEBUGGER) < 0)
6251
#endif /* JS_HAS_DEBUGGER_KEYWORD */
6253
#if JS_HAS_XML_SUPPORT
6256
if (pn->pn_op == JSOP_XMLOBJECT) {
6257
ok = EmitAtomOp(cx, pn, pn->pn_op, cg);
6261
JS_ASSERT(pn->pn_type == TOK_XMLLIST || pn->pn_count != 0);
6262
switch (pn->pn_head ? pn->pn_head->pn_type : TOK_XMLLIST) {
6270
if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6274
for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6275
if (pn2->pn_type == TOK_LC &&
6276
js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6279
if (!js_EmitTree(cx, cg, pn2))
6281
if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6285
if (pn->pn_extra & PNX_XMLROOT) {
6286
if (pn->pn_count == 0) {
6287
JS_ASSERT(pn->pn_type == TOK_XMLLIST);
6288
atom = cx->runtime->atomState.emptyAtom;
6289
ale = js_IndexAtom(cx, atom, &cg->atomList);
6292
EMIT_ATOM_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6294
if (js_Emit1(cx, cg, pn->pn_op) < 0)
6299
JS_ASSERT(pn->pn_count != 0);
6304
if (pn->pn_op == JSOP_XMLOBJECT) {
6305
ok = EmitAtomOp(cx, pn, pn->pn_op, cg);
6315
if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
6318
ale = js_IndexAtom(cx,
6319
(pn->pn_type == TOK_XMLETAGO)
6320
? cx->runtime->atomState.etagoAtom
6321
: cx->runtime->atomState.stagoAtom,
6325
EMIT_ATOM_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6327
JS_ASSERT(pn->pn_count != 0);
6329
if (pn2->pn_type == TOK_LC && js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0)
6331
if (!js_EmitTree(cx, cg, pn2))
6333
if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6336
for (pn2 = pn2->pn_next, i = 0; pn2; pn2 = pn2->pn_next, i++) {
6337
if (pn2->pn_type == TOK_LC &&
6338
js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
6341
if (!js_EmitTree(cx, cg, pn2))
6343
if ((i & 1) && pn2->pn_type == TOK_LC) {
6344
if (js_Emit1(cx, cg, JSOP_TOATTRVAL) < 0)
6347
if (js_Emit1(cx, cg,
6348
(i & 1) ? JSOP_ADDATTRVAL : JSOP_ADDATTRNAME) < 0) {
6353
ale = js_IndexAtom(cx,
6354
(pn->pn_type == TOK_XMLPTAGC)
6355
? cx->runtime->atomState.ptagcAtom
6356
: cx->runtime->atomState.tagcAtom,
6360
EMIT_ATOM_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
6361
if (js_Emit1(cx, cg, JSOP_ADD) < 0)
6364
if ((pn->pn_extra & PNX_XMLROOT) && js_Emit1(cx, cg, pn->pn_op) < 0)
6370
if (pn->pn_arity == PN_LIST) {
6371
JS_ASSERT(pn->pn_count != 0);
6372
for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
6373
if (!js_EmitTree(cx, cg, pn2))
6375
if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
6379
JS_ASSERT(pn->pn_arity == PN_NULLARY);
6380
ok = EmitAtomOp(cx, pn, pn->pn_op, cg);
6385
ale = js_IndexAtom(cx, pn->pn_atom2, &cg->atomList);
6388
if (!EmitAtomIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
6390
if (!EmitAtomOp(cx, pn, JSOP_XMLPI, cg))
6393
#endif /* JS_HAS_XML_SUPPORT */
6399
if (ok && --cg->emitLevel == 0 && cg->spanDeps)
6400
ok = OptimizeSpanDeps(cx, cg);
6405
/* XXX get rid of offsetBias, it's used only by SRC_FOR and SRC_DECL */
6406
JS_FRIEND_DATA(JSSrcNoteSpec) js_SrcNoteSpec[] = {
6409
{"if-else", 2, 0, 1},
6412
{"continue", 0, 0, 0},
6414
{"pcdelta", 1, 0, 1},
6415
{"assignop", 0, 0, 0},
6418
{"hidden", 0, 0, 0},
6419
{"pcbase", 1, 0, -1},
6421
{"labelbrace", 1, 0, 0},
6422
{"endbrace", 0, 0, 0},
6423
{"break2label", 1, 0, 0},
6424
{"cont2label", 1, 0, 0},
6425
{"switch", 2, 0, 1},
6426
{"funcdef", 1, 0, 0},
6428
{"extended", -1, 0, 0},
6429
{"newline", 0, 0, 0},
6430
{"setline", 1, 0, 0},
6431
{"xdelta", 0, 0, 0},
6435
AllocSrcNote(JSContext *cx, JSCodeGenerator *cg)
6441
index = CG_NOTE_COUNT(cg);
6442
if (((uintN)index & CG_NOTE_MASK(cg)) == 0) {
6443
pool = cg->notePool;
6444
size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
6445
if (!CG_NOTES(cg)) {
6446
/* Allocate the first note array lazily; leave noteMask alone. */
6447
JS_ARENA_ALLOCATE_CAST(CG_NOTES(cg), jssrcnote *, pool, size);
6449
/* Grow by doubling note array size; update noteMask on success. */
6450
JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
6452
CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
6454
if (!CG_NOTES(cg)) {
6455
JS_ReportOutOfMemory(cx);
6460
CG_NOTE_COUNT(cg) = index + 1;
6465
js_NewSrcNote(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type)
6469
ptrdiff_t offset, delta, xdelta;
6472
* Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
6473
* incrementing CG_NOTE_COUNT(cg).
6475
index = AllocSrcNote(cx, cg);
6478
sn = &CG_NOTES(cg)[index];
6481
* Compute delta from the last annotated bytecode's offset. If it's too
6482
* big to fit in sn, allocate one or more xdelta notes and reset sn.
6484
offset = CG_OFFSET(cg);
6485
delta = offset - CG_LAST_NOTE_OFFSET(cg);
6486
CG_LAST_NOTE_OFFSET(cg) = offset;
6487
if (delta >= SN_DELTA_LIMIT) {
6489
xdelta = JS_MIN(delta, SN_XDELTA_MASK);
6490
SN_MAKE_XDELTA(sn, xdelta);
6492
index = AllocSrcNote(cx, cg);
6495
sn = &CG_NOTES(cg)[index];
6496
} while (delta >= SN_DELTA_LIMIT);
6500
* Initialize type and delta, then allocate the minimum number of notes
6501
* needed for type's arity. Usually, we won't need more, but if an offset
6502
* does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
6504
SN_MAKE_NOTE(sn, type, delta);
6505
for (n = (intN)js_SrcNoteSpec[type].arity; n > 0; n--) {
6506
if (js_NewSrcNote(cx, cg, SRC_NULL) < 0)
6513
js_NewSrcNote2(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
6518
index = js_NewSrcNote(cx, cg, type);
6520
if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset))
6527
js_NewSrcNote3(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
6528
ptrdiff_t offset1, ptrdiff_t offset2)
6532
index = js_NewSrcNote(cx, cg, type);
6534
if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset1))
6536
if (!js_SetSrcNoteOffset(cx, cg, index, 1, offset2))
6543
GrowSrcNotes(JSContext *cx, JSCodeGenerator *cg)
6548
/* Grow by doubling note array size; update noteMask on success. */
6549
pool = cg->notePool;
6550
size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
6551
JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
6552
if (!CG_NOTES(cg)) {
6553
JS_ReportOutOfMemory(cx);
6556
CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
6561
js_AddToSrcNoteDelta(JSContext *cx, JSCodeGenerator *cg, jssrcnote *sn,
6564
ptrdiff_t base, limit, newdelta, diff;
6568
* Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
6569
* main script note deltas, and only by a small positive amount.
6571
JS_ASSERT(cg->current == &cg->main);
6572
JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);
6574
base = SN_DELTA(sn);
6575
limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
6576
newdelta = base + delta;
6577
if (newdelta < limit) {
6578
SN_SET_DELTA(sn, newdelta);
6580
index = sn - cg->main.notes;
6581
if ((cg->main.noteCount & cg->main.noteMask) == 0) {
6582
if (!GrowSrcNotes(cx, cg))
6584
sn = cg->main.notes + index;
6586
diff = cg->main.noteCount - index;
6587
cg->main.noteCount++;
6588
memmove(sn + 1, sn, SRCNOTE_SIZE(diff));
6589
SN_MAKE_XDELTA(sn, delta);
6595
JS_FRIEND_API(uintN)
6596
js_SrcNoteLength(jssrcnote *sn)
6601
arity = (intN)js_SrcNoteSpec[SN_TYPE(sn)].arity;
6602
for (base = sn++; arity; sn++, arity--) {
6603
if (*sn & SN_3BYTE_OFFSET_FLAG)
6609
JS_FRIEND_API(ptrdiff_t)
6610
js_GetSrcNoteOffset(jssrcnote *sn, uintN which)
6612
/* Find the offset numbered which (i.e., skip exactly which offsets). */
6613
JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
6614
JS_ASSERT(which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
6615
for (sn++; which; sn++, which--) {
6616
if (*sn & SN_3BYTE_OFFSET_FLAG)
6619
if (*sn & SN_3BYTE_OFFSET_FLAG) {
6620
return (ptrdiff_t)(((uint32)(sn[0] & SN_3BYTE_OFFSET_MASK) << 16)
6624
return (ptrdiff_t)*sn;
6628
js_SetSrcNoteOffset(JSContext *cx, JSCodeGenerator *cg, uintN index,
6629
uintN which, ptrdiff_t offset)
6634
if ((jsuword)offset >= (jsuword)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG << 16)) {
6635
ReportStatementTooLarge(cx, cg);
6639
/* Find the offset numbered which (i.e., skip exactly which offsets). */
6640
sn = &CG_NOTES(cg)[index];
6641
JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
6642
JS_ASSERT(which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
6643
for (sn++; which; sn++, which--) {
6644
if (*sn & SN_3BYTE_OFFSET_FLAG)
6648
/* See if the new offset requires three bytes. */
6649
if (offset > (ptrdiff_t)SN_3BYTE_OFFSET_MASK) {
6650
/* Maybe this offset was already set to a three-byte value. */
6651
if (!(*sn & SN_3BYTE_OFFSET_FLAG)) {
6652
/* Losing, need to insert another two bytes for this offset. */
6653
index = PTRDIFF(sn, CG_NOTES(cg), jssrcnote);
6656
* Simultaneously test to see if the source note array must grow to
6657
* accomodate either the first or second byte of additional storage
6658
* required by this 3-byte offset.
6660
if (((CG_NOTE_COUNT(cg) + 1) & CG_NOTE_MASK(cg)) <= 1) {
6661
if (!GrowSrcNotes(cx, cg))
6663
sn = CG_NOTES(cg) + index;
6665
CG_NOTE_COUNT(cg) += 2;
6667
diff = CG_NOTE_COUNT(cg) - (index + 3);
6668
JS_ASSERT(diff >= 0);
6670
memmove(sn + 3, sn + 1, SRCNOTE_SIZE(diff));
6672
*sn++ = (jssrcnote)(SN_3BYTE_OFFSET_FLAG | (offset >> 16));
6673
*sn++ = (jssrcnote)(offset >> 8);
6675
*sn = (jssrcnote)offset;
6680
/*
 * Debug-only source-note size histogram.  js_FinishTakingSrcNotes buckets
 * each script's total note count by its ceiling log2 into hist[]; this
 * dumper writes the accumulated histogram to /tmp/srcnotes.hist.
 */
#ifdef DEBUG_notme
#define DEBUG_srcnotesize
#endif

#ifdef DEBUG_srcnotesize
#define NBINS 10
static uint32 hist[NBINS];

void DumpSrcNoteSizeHist()
{
    static FILE *fp;
    int i, n;

    if (!fp) {
        fp = fopen("/tmp/srcnotes.hist", "w");
        if (!fp)
            return;
        /* Unbuffered so the histogram survives a crash. */
        setvbuf(fp, NULL, _IONBF, 0);
    }
    fprintf(fp, "SrcNote size histogram:\n");
    for (i = 0; i < NBINS; i++) {
        fprintf(fp, "%4u %4u ", JS_BIT(i), hist[i]);
        /* One '*' per ten scripts in this bin. */
        for (n = (int) JS_HOWMANY(hist[i], 10); n > 0; --n)
            fputc('*', fp);
        fputc('\n', fp);
    }
    fputc('\n', fp);
}
#endif
* Fill in the storage at notes with prolog and main srcnotes; the space at
6711
* notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
6712
* SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
6713
* CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
6716
js_FinishTakingSrcNotes(JSContext *cx, JSCodeGenerator *cg, jssrcnote *notes)
6718
uintN prologCount, mainCount, totalCount;
6719
ptrdiff_t offset, delta;
6722
JS_ASSERT(cg->current == &cg->main);
6724
prologCount = cg->prolog.noteCount;
6725
if (prologCount && cg->prolog.currentLine != cg->firstLine) {
6726
CG_SWITCH_TO_PROLOG(cg);
6727
if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)cg->firstLine) < 0)
6729
prologCount = cg->prolog.noteCount;
6730
CG_SWITCH_TO_MAIN(cg);
6733
* Either no prolog srcnotes, or no line number change over prolog.
6734
* We don't need a SRC_SETLINE, but we may need to adjust the offset
6735
* of the first main note, by adding to its delta and possibly even
6736
* prepending SRC_XDELTA notes to it to account for prolog bytecodes
6737
* that came at and after the last annotated bytecode.
6739
offset = CG_PROLOG_OFFSET(cg) - cg->prolog.lastNoteOffset;
6740
JS_ASSERT(offset >= 0);
6741
if (offset > 0 && cg->main.noteCount != 0) {
6742
/* NB: Use as much of the first main note's delta as we can. */
6743
sn = cg->main.notes;
6744
delta = SN_IS_XDELTA(sn)
6745
? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
6746
: SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
6750
if (!js_AddToSrcNoteDelta(cx, cg, sn, delta))
6755
delta = JS_MIN(offset, SN_XDELTA_MASK);
6756
sn = cg->main.notes;
6761
mainCount = cg->main.noteCount;
6762
totalCount = prologCount + mainCount;
6764
memcpy(notes, cg->prolog.notes, SRCNOTE_SIZE(prologCount));
6765
memcpy(notes + prologCount, cg->main.notes, SRCNOTE_SIZE(mainCount));
6766
SN_MAKE_TERMINATOR(¬es[totalCount]);
6769
{ int bin = JS_CeilingLog2(totalCount);
6779
js_AllocTryNotes(JSContext *cx, JSCodeGenerator *cg)
6784
size = TRYNOTE_SIZE(cg->treeContext.tryCount);
6785
if (size <= cg->tryNoteSpace)
6789
* Allocate trynotes from cx->tempPool.
6790
* XXX Too much growing and we bloat, as other tempPool allocators block
6791
* in-place growth, and we never recycle old free space in an arena.
6792
* YYY But once we consume an entire arena, we'll realloc it, letting the
6793
* malloc heap recycle old space, while still freeing _en masse_ via the
6797
size = JS_ROUNDUP(size, TRYNOTE_SIZE(TRYNOTE_CHUNK));
6798
JS_ARENA_ALLOCATE_CAST(cg->tryBase, JSTryNote *, &cx->tempPool, size);
6801
cg->tryNoteSpace = size;
6802
cg->tryNext = cg->tryBase;
6804
delta = PTRDIFF((char *)cg->tryNext, (char *)cg->tryBase, char);
6805
incr = size - cg->tryNoteSpace;
6806
incr = JS_ROUNDUP(incr, TRYNOTE_SIZE(TRYNOTE_CHUNK));
6807
size = cg->tryNoteSpace;
6808
JS_ARENA_GROW_CAST(cg->tryBase, JSTryNote *, &cx->tempPool, size, incr);
6811
cg->tryNoteSpace = size + incr;
6812
cg->tryNext = (JSTryNote *)((char *)cg->tryBase + delta);
6818
js_NewTryNote(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t start,
6819
ptrdiff_t end, ptrdiff_t catchStart)
6823
JS_ASSERT(cg->tryBase <= cg->tryNext);
6824
JS_ASSERT(catchStart >= 0);
6827
tn->length = end - start;
6828
tn->catchStart = catchStart;
6833
/*
 * Copy cg's accumulated try notes into |notes| and append the terminating
 * sentinel note (start 0, length = script length, catchStart 0).  Does
 * nothing when no try notes were emitted.
 */
void
js_FinishTakingTryNotes(JSContext *cx, JSCodeGenerator *cg, JSTryNote *notes)
{
    uintN count;

    count = PTRDIFF(cg->tryNext, cg->tryBase, JSTryNote);
    if (!count)
        return;

    memcpy(notes, cg->tryBase, TRYNOTE_SIZE(count));
    /* Terminator: zero start/catchStart, length spanning the whole script. */
    notes[count].start = 0;
    notes[count].length = CG_OFFSET(cg);
    notes[count].catchStart = 0;
}