/*
 * Copyright (C) 2008, 2009, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 * Copyright (C) 2012 Igalia, S.L.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
32
#include "BytecodeGenerator.h"
34
#include "BatchedTransitionOptimizer.h"
36
#include "Interpreter.h"
37
#include "JSActivation.h"
38
#include "JSFunction.h"
39
#include "JSNameScope.h"
40
#include "LowLevelInterpreter.h"
42
#include "StrongInlines.h"
43
#include <wtf/text/WTFString.h>
50
The layout of a register frame looks like this:
61
assuming (x) and (y) generated temporaries t1 and t2, you would have
63
------------------------------------
64
| x | y | g | v2 | v1 | t1 | t2 | <-- value held
65
------------------------------------
66
| -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
67
------------------------------------
68
| params->|<-locals | temps->
70
Because temporary registers are allocated in a stack-like fashion, we
71
can reclaim them with a simple popping algorithm. The same goes for labels.
72
(We never reclaim parameter or local registers, because parameters and
73
locals are DontDelete.)
75
The register layout before a function call looks like this:
85
> <------------------------------
86
< > reserved: call frame | 1 | <-- value held
87
> >snip< <------------------------------
88
< > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
89
> <------------------------------
90
| params->|<-locals | temps->
92
The call instruction fills in the "call frame" registers. It also pads
93
missing arguments at the end of the call:
95
> <-----------------------------------
96
< > reserved: call frame | 1 | ? | <-- value held ("?" stands for "undefined")
97
> >snip< <-----------------------------------
98
< > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
99
> <-----------------------------------
100
| params->|<-locals | temps->
102
After filling in missing arguments, the call instruction sets up the new
103
stack frame to overlap the end of the old stack frame:
105
|----------------------------------> <
106
| reserved: call frame | 1 | ? < > <-- value held ("?" stands for "undefined")
107
|----------------------------------> >snip< <
108
| -7 | -6 | -5 | -4 | -3 | -2 | -1 < > <-- register index
109
|----------------------------------> <
110
| | params->|<-locals | temps->
112
That way, arguments are "copied" into the callee's stack frame for free.
114
If the caller supplies too many arguments, this trick doesn't work. The
115
extra arguments protrude into space reserved for locals and temporaries.
116
In that case, the call instruction makes a real copy of the call frame header,
117
along with just the arguments expected by the callee, leaving the original
118
call frame header and arguments behind. (The call instruction can't just discard
119
extra arguments, because the "arguments" object may access them later.)
120
This copying strategy ensures that all named values will be at the indices
121
expected by the callee.
124
void Label::setLocation(unsigned location)
126
m_location = location;
128
unsigned size = m_unresolvedJumps.size();
129
for (unsigned i = 0; i < size; ++i)
130
m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
134
void ResolveResult::checkValidity()
138
case ReadOnlyRegister:
145
ASSERT_NOT_REACHED();
150
// Drives bytecode generation for the AST rooted at m_scopeNode, then links
// exception handlers and transfers the instruction stream into the code block.
// Returns ParserError::ErrorNone on success, or OutOfMemory if expression
// nesting exceeded the generator's limit.
ParserError BytecodeGenerator::generate()
{
    SamplingRegion samplingRegion("Bytecode Generation");

    m_codeBlock->setThisRegister(m_thisRegister.index());

    m_scopeNode->emitBytecode(*this);

    // Resolve the recorded try ranges into concrete handler table entries now
    // that all labels have final offsets.
    for (unsigned i = 0; i < m_tryRanges.size(); ++i) {
        TryRange& range = m_tryRanges[i];
        ASSERT(range.tryData->targetScopeDepth != UINT_MAX);
        UnlinkedHandlerInfo info = {
            static_cast<uint32_t>(range.start->bind(0, 0)), static_cast<uint32_t>(range.end->bind(0, 0)),
            static_cast<uint32_t>(range.tryData->target->bind(0, 0)),
            range.tryData->targetScopeDepth
        };
        m_codeBlock->addExceptionHandler(info);
    }

    m_codeBlock->instructions() = RefCountedArray<UnlinkedInstruction>(m_instructions);

    m_codeBlock->shrinkToFit();

    if (m_expressionTooDeep)
        return ParserError::OutOfMemory;
    return ParserError::ErrorNone;
}
bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
180
int index = m_calleeRegisters.size();
181
SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
182
SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
184
if (!result.isNewEntry) {
185
r0 = ®isterFor(result.iterator->value.getIndex());
193
void BytecodeGenerator::preserveLastVar()
195
if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
196
m_lastVar = &m_calleeRegisters.last();
199
// Constructor for global (program) code. Registers top-level function and
// variable declarations on the unlinked program code block.
// NOTE(review): two initializer-list lines (m_symbolTable, m_finallyDepth) and
// the NDEBUG guard around m_lastOpcodePosition were dropped by the extraction;
// reconstructed to mirror the function-code constructor below — confirm
// against the class's member declaration order.
BytecodeGenerator::BytecodeGenerator(JSGlobalData& globalData, ProgramNode* programNode, UnlinkedProgramCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
    : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
    , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
#if ENABLE(BYTECODE_COMMENTS)
    , m_currentCommentString(0)
#endif
    , m_symbolTable(0)
    , m_scopeNode(programNode)
    , m_codeBlock(globalData, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_emptyValueRegister(0)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(true)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(&globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(wtfThreadData().stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    m_codeBlock->setNumParameters(1); // Allocate space for "this"

    prependComment("entering Program block");
    emitOpcode(op_enter);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();

    // Global function declarations become function declarations on the code block.
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        UnlinkedFunctionExecutable* unlinkedFunction = makeFunction(function);
        codeBlock->addFunctionDeclaration(*m_globalData, function->ident(), unlinkedFunction);
    }

    // Global vars are declared on the code block; the runtime hoists them.
    for (size_t i = 0; i < varStack.size(); ++i)
        codeBlock->addVariableDeclaration(*varStack[i].first, !!(varStack[i].second & DeclarationStacks::IsConstant));
}
// Constructor for function code. Lays out the callee frame: activation and
// arguments registers (when needed), captured variables and functions first
// (so activations can mark them without stepping over non-captured locals),
// then lazily-created functions, plain vars, parameters, and finally the
// callee's own name. Also emits the op_create_this / op_convert_this prologue.
BytecodeGenerator::BytecodeGenerator(JSGlobalData& globalData, FunctionBodyNode* functionBody, UnlinkedFunctionCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
    : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
    , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
    , m_symbolTable(codeBlock->symbolTable())
#if ENABLE(BYTECODE_COMMENTS)
    , m_currentCommentString(0)
#endif
    , m_scopeNode(functionBody)
    , m_codeBlock(globalData, codeBlock)
    , m_activationRegister(0)
    , m_emptyValueRegister(0)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(false)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(&globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(wtfThreadData().stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
    m_symbolTable->setParameterCountIncludingThis(functionBody->parameters()->size() + 1);

    prependComment("entering Function block");
    emitOpcode(op_enter);
    if (m_codeBlock->needsFullScopeChain()) {
        m_activationRegister = addVar();
        prependComment("activation for Full Scope Chain");
        emitInitLazyRegister(m_activationRegister);
        m_codeBlock->setActivationRegister(m_activationRegister->index());
    }

    m_symbolTable->setCaptureStart(m_codeBlock->m_numVars);

    if (functionBody->usesArguments() || codeBlock->usesEval() || m_shouldEmitDebugHooks) { // May reify arguments object.
        RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
        RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.

        // We can save a little space by hard-coding the knowledge that the two
        // 'arguments' values are stored in consecutive registers, and storing
        // only the index of the assignable one.
        codeBlock->setArgumentsRegister(argumentsRegister->index());
        ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));

        prependComment("arguments for Full Scope Chain");
        emitInitLazyRegister(argumentsRegister);
        prependComment("unmodified arguments for Full Scope Chain");
        emitInitLazyRegister(unmodifiedArgumentsRegister);

        if (m_codeBlock->isStrictMode()) {
            prependComment("create arguments for strict mode");
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }

        // The debugger currently retrieves the arguments object from an activation rather than pulling
        // it from a call frame. In the long-term it should stop doing that (<rdar://problem/6911886>),
        // but for now we force eager creation of the arguments object when debugging.
        if (m_shouldEmitDebugHooks) {
            prependComment("create arguments for debug hooks");
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }
    }

    bool shouldCaptureAllTheThings = m_shouldEmitDebugHooks || codeBlock->usesEval();

    bool capturesAnyArgumentByName = false;
    Vector<RegisterID*> capturedArguments;
    if (functionBody->hasCapturedVariables() || shouldCaptureAllTheThings) {
        FunctionParameters& parameters = *functionBody->parameters();
        capturedArguments.resize(parameters.size());
        for (size_t i = 0; i < parameters.size(); ++i) {
            capturedArguments[i] = 0;
            if (!functionBody->captures(parameters[i]) && !shouldCaptureAllTheThings)
                continue;
            capturesAnyArgumentByName = true;
            capturedArguments[i] = addVar();
        }
    }

    // Non-strict code with captured parameters needs the slow-arguments map so
    // the arguments object can alias the captured registers.
    if (capturesAnyArgumentByName && !codeBlock->isStrictMode()) {
        size_t parameterCount = m_symbolTable->parameterCount();
        OwnArrayPtr<SlowArgument> slowArguments = adoptArrayPtr(new SlowArgument[parameterCount]);
        for (size_t i = 0; i < parameterCount; ++i) {
            if (!capturedArguments[i]) {
                ASSERT(slowArguments[i].status == SlowArgument::Normal);
                slowArguments[i].index = CallFrame::argumentOffset(i);
                continue;
            }
            slowArguments[i].status = SlowArgument::Captured;
            slowArguments[i].index = capturedArguments[i]->index();
        }
        m_symbolTable->setSlowArguments(slowArguments.release());
    }

    RegisterID* calleeRegister = resolveCallee(functionBody); // May push to the scope chain and/or add a captured var.

    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    const DeclarationStacks::VarStack& varStack = functionBody->varStack();

    // Captured variables and functions go first so that activations don't have
    // to step over the non-captured locals to mark them.
    m_hasCreatedActivation = false;
    if (functionBody->hasCapturedVariables()) {
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            const Identifier& ident = function->ident();
            if (functionBody->captures(ident)) {
                if (!m_hasCreatedActivation) {
                    m_hasCreatedActivation = true;
                    prependComment("activation for captured vars");
                    emitOpcode(op_create_activation);
                    instructions().append(m_activationRegister->index());
                }
                m_functions.add(ident.impl());
                prependComment("captured function var");
                emitNewFunction(addVar(ident, false), function);
            }
        }
        for (size_t i = 0; i < varStack.size(); ++i) {
            const Identifier& ident = *varStack[i].first;
            if (functionBody->captures(ident))
                addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
        }
    }
    bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
    if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
        m_hasCreatedActivation = true;
        prependComment("cannot lazily create functions");
        emitOpcode(op_create_activation);
        instructions().append(m_activationRegister->index());
    }

    m_symbolTable->setCaptureEnd(codeBlock->m_numVars);

    m_firstLazyFunction = codeBlock->m_numVars;
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        const Identifier& ident = function->ident();
        if (!functionBody->captures(ident)) {
            m_functions.add(ident.impl());
            RefPtr<RegisterID> reg = addVar(ident, false);
            // Don't lazily create functions that override the name 'arguments'
            // as this would complicate lazy instantiation of actual arguments.
            prependComment("a function that override 'arguments'");
            if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
                emitNewFunction(reg.get(), function);
            else {
                emitInitLazyRegister(reg.get());
                m_lazyFunctions.set(reg->index(), function);
            }
        }
    }
    m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
    for (size_t i = 0; i < varStack.size(); ++i) {
        const Identifier& ident = *varStack[i].first;
        if (!functionBody->captures(ident))
            addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
    }

    // When everything is captured, the capture range extends over the
    // non-captured locals as well.
    if (shouldCaptureAllTheThings)
        m_symbolTable->setCaptureEnd(codeBlock->m_numVars);

    FunctionParameters& parameters = *functionBody->parameters();
    m_parameters.grow(parameters.size() + 1); // reserve space for "this"

    // Add "this" as a parameter
    int nextParameterIndex = CallFrame::thisArgumentOffset();
    m_thisRegister.setIndex(nextParameterIndex--);
    m_codeBlock->addParameter();

    for (size_t i = 0; i < parameters.size(); ++i, --nextParameterIndex) {
        int index = nextParameterIndex;
        if (capturedArguments.size() && capturedArguments[i]) {
            ASSERT((functionBody->hasCapturedVariables() && functionBody->captures(parameters[i])) || shouldCaptureAllTheThings);
            index = capturedArguments[i]->index();
            // Copy the argument from its call-frame slot into the captured var.
            RegisterID original(nextParameterIndex);
            emitMove(capturedArguments[i], &original);
        }
        addParameter(parameters[i], index);
    }

    // We declare the callee's name last because it should lose to a var, function, and/or parameter declaration.
    addCallee(functionBody, calleeRegister);

    if (isConstructor()) {
        prependComment("'this' because we are a Constructor function");

        RefPtr<RegisterID> func = newTemporary();

        UnlinkedValueProfile profile = emitProfiledOpcode(op_get_callee);
        instructions().append(func->index());
        instructions().append(profile);

        emitOpcode(op_create_this);
        instructions().append(m_thisRegister.index());
        instructions().append(func->index());
    } else if (!codeBlock->isStrictMode() && (functionBody->usesThis() || codeBlock->usesEval() || m_shouldEmitDebugHooks)) {
        UnlinkedValueProfile profile = emitProfiledOpcode(op_convert_this);
        instructions().append(m_thisRegister.index());
        instructions().append(profile);
    }
}
// Constructor for eval code. Eval always needs the full scope chain; its
// function declarations and variable names are recorded on the unlinked eval
// code block rather than laid out as registers.
// NOTE(review): the m_finallyDepth initializer and the NDEBUG guard around
// m_lastOpcodePosition were dropped by the extraction; reconstructed to mirror
// the other constructors — confirm against the member declaration order.
BytecodeGenerator::BytecodeGenerator(JSGlobalData& globalData, EvalNode* evalNode, UnlinkedEvalCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
    : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
    , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
    , m_symbolTable(codeBlock->symbolTable())
#if ENABLE(BYTECODE_COMMENTS)
    , m_currentCommentString(0)
#endif
    , m_scopeNode(evalNode)
    , m_codeBlock(globalData, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_emptyValueRegister(0)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(true)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(&globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(wtfThreadData().stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    m_codeBlock->setNeedsFullScopeChain(true);

    m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
    m_codeBlock->setNumParameters(1);

    prependComment("entering Eval block");
    emitOpcode(op_enter);

    const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i)
        m_codeBlock->addFunctionDecl(makeFunction(functionStack[i]));

    const DeclarationStacks::VarStack& varStack = evalNode->varStack();
    unsigned numVariables = varStack.size();
    Vector<Identifier> variables;
    variables.reserveCapacity(numVariables);
    for (size_t i = 0; i < numVariables; ++i)
        variables.append(*varStack[i].first);
    codeBlock->adoptVariables(variables);
}
// All members clean up via their own destructors; nothing to do explicitly.
BytecodeGenerator::~BytecodeGenerator()
{
}
// Emits op_init_lazy_reg for a register whose value is materialized on first
// use (activation, arguments, lazily-created function). Returns reg.
RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
{
    emitOpcode(op_init_lazy_reg);
    instructions().append(reg->index());
    return reg;
}
// Decides where the callee's own name lives for a named function expression.
// Returns 0 if the name isn't in scope (or lives in a pushed name scope),
// the Callee register when it can stay there, or a captured var it is moved to.
RegisterID* BytecodeGenerator::resolveCallee(FunctionBodyNode* functionBodyNode)
{
    if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
        return 0;

    m_calleeRegister.setIndex(JSStack::Callee);

    // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
    if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks) {
        emitOpcode(op_push_name_scope);
        instructions().append(addConstant(functionBodyNode->ident()));
        instructions().append(m_calleeRegister.index());
        instructions().append(ReadOnly | DontDelete);
        return 0;
    }

    if (!functionBodyNode->captures(functionBodyNode->ident()))
        return &m_calleeRegister;

    // Move the callee into the captured section of the stack.
    return emitMove(addVar(), &m_calleeRegister);
}
// Registers the callee's name in the symbol table (read-only), unless the name
// is out of scope or already handled via a pushed name scope in resolveCallee.
void BytecodeGenerator::addCallee(FunctionBodyNode* functionBodyNode, RegisterID* calleeRegister)
{
    if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
        return;

    // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
    if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks)
        return;

    ASSERT(calleeRegister);
    symbolTable().add(functionBodyNode->ident().impl(), SymbolTableEntry(calleeRegister->index(), ReadOnly));
}
void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
565
// Parameters overwrite var declarations, but not function declarations.
566
StringImpl* rep = ident.impl();
567
if (!m_functions.contains(rep)) {
568
symbolTable().set(rep, parameterIndex);
569
RegisterID& parameter = registerFor(parameterIndex);
570
parameter.setIndex(parameterIndex);
573
// To maintain the calling convention, we have to allocate unique space for
574
// each parameter, even if the parameter doesn't make it into the symbol table.
575
m_codeBlock->addParameter();
578
bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
580
if (ident != propertyNames().arguments)
583
if (!shouldOptimizeLocals())
586
SymbolTableEntry entry = symbolTable().get(ident.impl());
590
if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
596
// Returns the register holding the 'arguments' object. Caller must have
// verified willResolveToArguments() first. (Restores "&registerFor", which the
// mangled source had as the HTML entity "®isterFor".)
RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
{
    ASSERT(willResolveToArguments(propertyNames().arguments));

    SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
    ASSERT(!entry.isNull());
    return &registerFor(entry.getIndex());
}
// If reg is in the lazy-function range, materializes the function now.
// Returns reg either way.
RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
{
    if (m_lastLazyFunction <= reg->index() || reg->index() < m_firstLazyFunction)
        return reg;
    emitLazyNewFunction(reg, m_lazyFunctions.get(reg->index()));
    return reg;
}
// Appends a fresh callee register and tracks the high-water mark of registers
// the frame needs.
RegisterID* BytecodeGenerator::newRegister()
{
    m_calleeRegisters.append(m_calleeRegisters.size());
    m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
    return &m_calleeRegisters.last();
}
// Allocates a temporary register, first popping any unreferenced registers
// off the stack-like callee register list.
RegisterID* BytecodeGenerator::newTemporary()
{
    // Reclaim free register IDs.
    while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
        m_calleeRegisters.removeLast();

    RegisterID* result = newRegister();
    result->setTemporary();
    return result;
}
// Allocates a label scope (for break/continue targets), reclaiming dead ones
// first in the same stack-like fashion as registers.
PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    // Allocate new label scope.
    LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
    m_labelScopes.append(scope);
    return &m_labelScopes.last();
}
// Allocates a jump label, reclaiming unreferenced ones first.
PassRefPtr<Label> BytecodeGenerator::newLabel()
{
    // Reclaim free label IDs.
    while (m_labels.size() && !m_labels.last().refCount())
        m_labels.removeLast();

    // Allocate new label ID.
    m_labels.append(this);
    return &m_labels.last();
}
// Binds l0 to the current instruction offset and records it as a jump target,
// which also disables peephole optimization across the label.
PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
{
    unsigned newLabelIndex = instructions().size();
    l0->setLocation(newLabelIndex);

    if (m_codeBlock->numberOfJumpTargets()) {
        unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
        ASSERT(lastLabelIndex <= newLabelIndex);
        if (newLabelIndex == lastLabelIndex) {
            // Peephole optimizations have already been disabled by emitting the last label
            return l0;
        }
    }

    m_codeBlock->addJumpTarget(newLabelIndex);

    // This disables peephole optimizations when an instruction is a jump target
    m_lastOpcodeID = op_end;
    return l0;
}
// Appends an opcode word and remembers it for peephole optimization. In debug
// builds, verifies the previous opcode occupied exactly its declared length.
void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
#ifndef NDEBUG
    size_t opcodePosition = instructions().size();
    ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
    m_lastOpcodePosition = opcodePosition;
#endif
    emitComment();
    instructions().append(opcodeID);
    m_lastOpcodeID = opcodeID;
}
#if ENABLE(BYTECODE_COMMENTS)
// Record a comment in the CodeBlock's comments list for the current opcode
// that is about to be emitted.
void BytecodeGenerator::emitComment()
{
    if (m_currentCommentString) {
        size_t opcodePosition = instructions().size();
        Comment comment = { opcodePosition, m_currentCommentString };
        m_codeBlock->bytecodeComments().append(comment);
        m_currentCommentString = 0; // Consumed; each comment applies to one opcode.
    }
}

// Register a comment to be associated with the next opcode that will be emitted.
void BytecodeGenerator::prependComment(const char* string)
{
    m_currentCommentString = string;
}
#endif
// Allocates an array profile slot when value profiling is compiled in;
// otherwise returns a dummy 0.
UnlinkedArrayProfile BytecodeGenerator::newArrayProfile()
{
#if ENABLE(VALUE_PROFILER)
    return m_codeBlock->addArrayProfile();
#else
    return 0;
#endif
}
// Allocates an array-allocation profile slot when value profiling is compiled
// in; otherwise returns a dummy 0.
UnlinkedArrayAllocationProfile BytecodeGenerator::newArrayAllocationProfile()
{
#if ENABLE(VALUE_PROFILER)
    return m_codeBlock->addArrayAllocationProfile();
#else
    return 0;
#endif
}
// Emits an opcode and returns a value-profile slot for it (0 when profiling
// is compiled out).
UnlinkedValueProfile BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
{
#if ENABLE(VALUE_PROFILER)
    UnlinkedValueProfile result = m_codeBlock->addValueProfile();
#else
    UnlinkedValueProfile result = 0;
#endif
    emitOpcode(opcodeID);
    return result;
}
void BytecodeGenerator::emitLoopHint()
739
emitOpcode(op_loop_hint);
743
void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
745
ASSERT(instructions().size() >= 4);
746
size_t size = instructions().size();
747
dstIndex = instructions().at(size - 3).u.operand;
748
src1Index = instructions().at(size - 2).u.operand;
749
src2Index = instructions().at(size - 1).u.operand;
752
void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
754
ASSERT(instructions().size() >= 3);
755
size_t size = instructions().size();
756
dstIndex = instructions().at(size - 2).u.operand;
757
srcIndex = instructions().at(size - 1).u.operand;
760
// Removes the 4-word binary op just emitted (peephole fusion with a jump).
void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
{
    ASSERT(instructions().size() >= 4);
    instructions().shrink(instructions().size() - 4);
    m_lastOpcodeID = op_end; // No longer a valid peephole candidate.
}
// Removes the 3-word unary op just emitted (peephole fusion with a jump).
void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
{
    ASSERT(instructions().size() >= 3);
    instructions().shrink(instructions().size() - 3);
    m_lastOpcodeID = op_end; // No longer a valid peephole candidate.
}
// Emits an unconditional jump: op_jmp forward, op_loop backward (so the
// interpreter can check for timeouts on back edges).
PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
{
    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jmp : op_loop);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
// Emits a conditional jump taken when cond is truthy. Peephole-fuses with an
// immediately preceding comparison (op_less etc.) or null test when the
// comparison's dead temporary result is only consumed by this jump.
PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greater) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jgreater : op_loop_if_greater);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greatereq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jgreatereq : op_loop_if_greatereq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    // Generic path: test the register directly.
    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
// Emits a conditional jump taken when cond is falsy. Peephole-fuses with a
// preceding comparison (negated: op_jnless etc., forward jumps only), an
// op_not (inverting to a true-jump), or a null test.
PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnless);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greater && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jngreater);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jngreatereq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_not) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            // jump-if-false(not x) == jump-if-true(x)
            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    // Generic path: test the register directly.
    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
// Jumps to target unless cond holds the well-known Function.prototype.call.
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Special::CallFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
// Jumps to target unless cond holds the well-known Function.prototype.apply.
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Special::ApplyFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
unsigned BytecodeGenerator::addConstant(const Identifier& ident)
1040
StringImpl* rep = ident.impl();
1041
IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
1042
if (result.isNewEntry)
1043
m_codeBlock->addIdentifier(Identifier(m_globalData, rep));
1045
return result.iterator->value;
1048
// We can't hash JSValue(), so we use a dedicated data member to cache it.
1049
RegisterID* BytecodeGenerator::addConstantEmptyValue()
1051
if (!m_emptyValueRegister) {
1052
int index = m_nextConstantOffset;
1053
m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1054
++m_nextConstantOffset;
1055
m_codeBlock->addConstant(JSValue());
1056
m_emptyValueRegister = &m_constantPoolRegisters[index];
1059
return m_emptyValueRegister;
1062
RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
1065
return addConstantEmptyValue();
1067
int index = m_nextConstantOffset;
1068
JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
1069
if (result.isNewEntry) {
1070
m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1071
++m_nextConstantOffset;
1072
m_codeBlock->addConstant(v);
1074
index = result.iterator->value;
1075
return &m_constantPoolRegisters[index];
1078
unsigned BytecodeGenerator::addRegExp(RegExp* r)
1080
return m_codeBlock->addRegExp(r);
1083
// Emits op_mov, copying |src| into |dst|. Returns |dst|.
RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_mov);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}

// Emits a generic two-operand unary op (dst, src). Returns |dst|.
RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}

// Pre-increment: op_pre_inc mutates |srcDst| in place.
RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
{
    emitOpcode(op_pre_inc);
    instructions().append(srcDst->index());
    return srcDst;
}

// Pre-decrement: op_pre_dec mutates |srcDst| in place.
RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
{
    emitOpcode(op_pre_dec);
    instructions().append(srcDst->index());
    return srcDst;
}

// Post-increment: old value lands in |dst|, |srcDst| is incremented.
RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_inc);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}

// Post-decrement: old value lands in |dst|, |srcDst| is decremented.
RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_dec);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}
// Emits a three-operand binary op. Arithmetic/bitwise opcodes carry an extra
// OperandTypes word used by the JIT for speculation. Returns |dst|.
RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());

    if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
        opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
        instructions().append(types.toInt());

    return dst;
}
// Emits an equality comparison. Peephole: "typeof x == 'literal'" (when the
// typeof result is a dead temporary and the RHS is a constant string) is
// rewound and replaced with the dedicated op_is_* opcode. Returns |dst|.
RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
{
    if (m_lastOpcodeID == op_typeof) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (src1->index() == dstIndex
            && src1->isTemporary()
            && m_codeBlock->isConstantRegisterIndex(src2->index())
            && m_codeBlock->constantRegister(src2->index()).get().isString()) {
            const String& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
            if (value == "undefined") {
                rewindUnaryOp();
                emitOpcode(op_is_undefined);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "boolean") {
                rewindUnaryOp();
                emitOpcode(op_is_boolean);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "number") {
                rewindUnaryOp();
                emitOpcode(op_is_number);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "string") {
                rewindUnaryOp();
                emitOpcode(op_is_string);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "object") {
                rewindUnaryOp();
                emitOpcode(op_is_object);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "function") {
                rewindUnaryOp();
                emitOpcode(op_is_function);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
        }
    }

    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());
    return dst;
}
// Loads the boolean constant |b| into |dst|.
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
{
    return emitLoad(dst, jsBoolean(b));
}

// Loads a numeric constant, memoizing the boxed JSValue per double so the
// same number maps to one constant-pool slot.
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
{
    // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
    // Later we can do the extra work to handle that like the other cases. They also don't
    // work correctly with NaN as a key.
    if (isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
        return emitLoad(dst, jsNumber(number));

    JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->value;
    if (!valueInMap)
        valueInMap = jsNumber(number);
    return emitLoad(dst, valueInMap);
}

// Loads a string constant, memoizing one JSString per identifier.
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->value;
    if (!stringInMap)
        stringInMap = jsOwnedString(globalData(), identifier.string());
    return emitLoad(dst, JSValue(stringInMap));
}

// Loads an arbitrary constant. With a null |dst|, the constant-pool register
// itself is returned; otherwise the value is moved into |dst|.
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
{
    RegisterID* constantID = addConstantValue(v);
    if (dst)
        return emitMove(dst, constantID);
    return constantID;
}
// Resolves |property| at compile time: 'this' and register-allocated locals
// resolve statically; everything else falls back to dynamic resolution.
ResolveResult BytecodeGenerator::resolve(const Identifier& property)
{
    if (property == propertyNames().thisIdentifier)
        return ResolveResult::registerResolve(thisRegister(), ResolveResult::ReadOnlyFlag);

    // Check if the property should be allocated in a register.
    if (m_codeType != GlobalCode && shouldOptimizeLocals() && m_symbolTable) {
        SymbolTableEntry entry = symbolTable().get(property.impl());
        if (!entry.isNull()) {
            if (property == propertyNames().arguments)
                createArgumentsIfNecessary();
            unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
            RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
            return ResolveResult::registerResolve(local, flags);
        }
    }

    return ResolveResult::dynamicResolve();
}

// Like resolve(), but only for const declarations, which are register
// allocated in function/eval-free code.
ResolveResult BytecodeGenerator::resolveConstDecl(const Identifier& property)
{
    // Register-allocated const declarations.
    if (m_codeType != EvalCode && m_codeType != GlobalCode && m_symbolTable) {
        SymbolTableEntry entry = symbolTable().get(property.impl());
        if (!entry.isNull()) {
            unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
            RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
            return ResolveResult::registerResolve(local, flags);
        }
    }

    return ResolveResult::dynamicResolve();
}
void BytecodeGenerator::emitCheckHasInstance(RegisterID* dst, RegisterID* value, RegisterID* base, Label* target)
1278
size_t begin = instructions().size();
1279
emitOpcode(op_check_has_instance);
1280
instructions().append(dst->index());
1281
instructions().append(value->index());
1282
instructions().append(base->index());
1283
instructions().append(target->bind(begin, instructions().size()));
1286
RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* basePrototype)
1288
emitOpcode(op_instanceof);
1289
instructions().append(dst->index());
1290
instructions().append(value->index());
1291
instructions().append(basePrototype->index());
1295
bool BytecodeGenerator::shouldAvoidResolveGlobal()
1297
return !m_labelScopes.size();
1300
// Emits a (profiled) op_resolve, short-circuiting to a register read when the
// ResolveResult is statically known. Returns |dst|.
RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
{
    if (resolveResult.isRegister())
        return emitGetLocalVar(dst, resolveResult, property);

    UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(getResolveOperations(property));
    instructions().append(profile);
    return dst;
}

// Emits op_resolve_base for a read-only base lookup (never strict-mode put).
RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
{
    ASSERT_UNUSED(resolveResult, !resolveResult.isRegister());
    // We can't optimise at all :-(
    UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_base);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(false);
    instructions().append(getResolveBaseOperations(property));
    instructions().append(0);
    instructions().append(profile);
    return dst;
}
// Emits op_resolve_base for the base of an assignment; records the
// put-to-base operation index in |verifier| for the later emitPutToBase.
RegisterID* BytecodeGenerator::emitResolveBaseForPut(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property, NonlocalResolveInfo& verifier)
{
    ASSERT_UNUSED(resolveResult, !resolveResult.isRegister());
    // We can't optimise at all :-(
    UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_base);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(m_codeBlock->isStrictMode());
    uint32_t putToBaseIndex = 0;
    instructions().append(getResolveBaseForPutOperations(property, putToBaseIndex));
    verifier.resolved(putToBaseIndex);
    instructions().append(putToBaseIndex);
    instructions().append(profile);
    return dst;
}

// Emits op_resolve_with_base, producing both the base object and the value,
// for compound assignments; also records the put-to-base operation index.
RegisterID* BytecodeGenerator::emitResolveWithBaseForPut(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property, NonlocalResolveInfo& verifier)
{
    ASSERT_UNUSED(resolveResult, !resolveResult.isRegister());
    UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_with_base);
    instructions().append(baseDst->index());
    instructions().append(propDst->index());
    instructions().append(addConstant(property));
    uint32_t putToBaseIndex = 0;
    instructions().append(getResolveWithBaseForPutOperations(property, putToBaseIndex));
    verifier.resolved(putToBaseIndex);
    instructions().append(putToBaseIndex);
    instructions().append(profile);
    return baseDst;
}
// Resolves |property| producing both a 'this' base (undefined for locals) and
// the value, as needed for function-call expressions. Returns |baseDst|.
RegisterID* BytecodeGenerator::emitResolveWithThis(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
{
    if (resolveResult.isRegister()) {
        // Local variable case: the base for the call is undefined.
        emitLoad(baseDst, jsUndefined());
        emitGetLocalVar(propDst, resolveResult, property);
        return baseDst;
    }

    UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_with_this);
    instructions().append(baseDst->index());
    instructions().append(propDst->index());
    instructions().append(addConstant(property));
    instructions().append(getResolveWithThisOperations(property));
    instructions().append(profile);
    return baseDst;
}

// Reads a statically resolved local into |dst| (no bytecode when the result
// is ignored or already in place).
RegisterID* BytecodeGenerator::emitGetLocalVar(RegisterID* dst, const ResolveResult& resolveResult, const Identifier&)
{
    switch (resolveResult.type()) {
    case ResolveResult::Register:
    case ResolveResult::ReadOnlyRegister:
        if (dst == ignoredResult())
            return 0;
        return moveToDestinationIfNeeded(dst, resolveResult.local());

    default:
        ASSERT_NOT_REACHED();
        return 0;
    }
}
// Emits a placeholder (nop) global-const initializer; the real register
// offset is filled in at link time. Global code only. Returns |value|.
RegisterID* BytecodeGenerator::emitInitGlobalConst(const Identifier& identifier, RegisterID* value)
{
    ASSERT(m_codeType == GlobalCode);
    emitOpcode(op_init_global_const_nop);
    instructions().append(0);
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(addConstant(identifier));
    return value;
}

// Emits a profiled op_get_by_id with four zeroed slots reserved for the
// property-access cache (structure, offset, ...). Returns |dst|.
RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(profile);
    return dst;
}

// Fast path for 'arguments.length'; |base| must be the arguments register.
RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
{
    emitOpcode(op_get_arguments_length);
    instructions().append(dst->index());
    ASSERT(base->index() == m_codeBlock->argumentsRegister());
    instructions().append(base->index());
    instructions().append(addConstant(propertyNames().length));
    return dst;
}
// Emits op_put_by_id with cache slots zeroed; the final operand (0) means a
// normal (non-direct) put. Returns |value|.
RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    return value;
}

// Stores |value| through a previously resolved base, using the put-to-base
// operation recorded in |resolveInfo|. Returns |value|.
RegisterID* BytecodeGenerator::emitPutToBase(RegisterID* base, const Identifier& property, RegisterID* value, NonlocalResolveInfo& resolveInfo)
{
    emitOpcode(op_put_to_base);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(resolveInfo.put());
    return value;
}

// Direct (own-property) put, used for object literals. The final operand is
// a flag: true when the property is a genuine identifier (not __proto__ and
// not an array index), letting the VM take the direct fast path.
RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(
        property != m_globalData->propertyNames->underscoreProto
        && PropertyName(property).asIndex() == PropertyName::NotAnIndex);
    return value;
}
void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
1474
emitOpcode(op_put_getter_setter);
1475
instructions().append(base->index());
1476
instructions().append(addConstant(property));
1477
instructions().append(getter->index());
1478
instructions().append(setter->index());
1481
RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1483
emitOpcode(op_del_by_id);
1484
instructions().append(dst->index());
1485
instructions().append(base->index());
1486
instructions().append(addConstant(property));
1490
RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1492
UnlinkedArrayProfile arrayProfile = newArrayProfile();
1493
UnlinkedValueProfile profile = emitProfiledOpcode(op_get_argument_by_val);
1494
instructions().append(dst->index());
1495
ASSERT(base->index() == m_codeBlock->argumentsRegister());
1496
instructions().append(base->index());
1497
instructions().append(property->index());
1498
instructions().append(arrayProfile);
1499
instructions().append(profile);
1503
// Emits a subscripted read. Inside a for-in loop whose property register
// matches, uses the op_get_by_pname fast path (index/iterator known);
// otherwise emits a profiled op_get_by_val. Returns |dst|.
RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    for (size_t i = m_forInContextStack.size(); i > 0; i--) {
        ForInContext& context = m_forInContextStack[i - 1];
        if (context.propertyRegister == property) {
            emitOpcode(op_get_by_pname);
            instructions().append(dst->index());
            instructions().append(base->index());
            instructions().append(property->index());
            instructions().append(context.expectedSubscriptRegister->index());
            instructions().append(context.iterRegister->index());
            instructions().append(context.indexRegister->index());
            return dst;
        }
    }

    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(arrayProfile);
    instructions().append(profile);
    return dst;
}

// Emits op_put_by_val ('base[property] = value'). Returns |value|.
RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
{
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    emitOpcode(op_put_by_val);
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(value->index());
    instructions().append(arrayProfile);
    return value;
}

// Emits op_del_by_val ('delete base[property]'); result lands in |dst|.
RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    emitOpcode(op_del_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
    return dst;
}

// Emits op_put_by_index with a compile-time-constant integer index.
RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
{
    emitOpcode(op_put_by_index);
    instructions().append(base->index());
    instructions().append(index);
    instructions().append(value->index());
    return value;
}
RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
1559
emitOpcode(op_new_object);
1560
instructions().append(dst->index());
1564
unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
1566
return m_codeBlock->addConstantBuffer(length);
1569
JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
1571
JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->value;
1573
stringInMap = jsString(globalData(), identifier.string());
1574
addConstantValue(stringInMap);
1579
// Emits an array literal. When every element is a number/string literal (and
// there are no elisions), the values are baked into a constant buffer and a
// single op_new_array_buffer suffices; otherwise each element is evaluated
// into a fresh sequential temporary and op_new_array is emitted.
// NOTE(review): reconstructed from a mangled paste — the elision-handling
// branches were missing from the original text; verify against upstream.
RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
{
#if !ASSERT_DISABLED
    unsigned checkLength = 0;
#endif
    bool hadVariableExpression = false;
    if (length) {
        for (ElementNode* n = elements; n; n = n->next()) {
            if (!n->value()->isNumber() && !n->value()->isString()) {
                hadVariableExpression = true;
                break;
            }
            if (n->elision())
                break;
#if !ASSERT_DISABLED
            checkLength++;
#endif
        }
        if (!hadVariableExpression) {
            ASSERT(length == checkLength);
            unsigned constantBufferIndex = addConstantBuffer(length);
            JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex).data();
            unsigned index = 0;
            for (ElementNode* n = elements; index < length; n = n->next()) {
                if (n->value()->isNumber())
                    constantBuffer[index++] = jsNumber(static_cast<NumberNode*>(n->value())->value());
                else {
                    ASSERT(n->value()->isString());
                    constantBuffer[index++] = addStringConstant(static_cast<StringNode*>(n->value())->value());
                }
            }
            emitOpcode(op_new_array_buffer);
            instructions().append(dst->index());
            instructions().append(constantBufferIndex);
            instructions().append(length);
            instructions().append(newArrayAllocationProfile());
            return dst;
        }
    }

    Vector<RefPtr<RegisterID>, 16> argv;
    for (ElementNode* n = elements; n; n = n->next()) {
        if (n->elision())
            break;
        argv.append(newTemporary());
        // op_new_array requires the initial values to be a sequential range of registers
        ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n->value());
    }
    emitOpcode(op_new_array);
    instructions().append(dst->index());
    instructions().append(argv.size() ? argv[0]->index() : 0); // argv
    instructions().append(argv.size()); // argc
    instructions().append(newArrayAllocationProfile());
    return dst;
}
// Emits code materializing a declared function into |dst|.
RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
{
    return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(makeFunction(function)), false);
}

// Like emitNewFunction, but memoizes the decl index per body node and asks
// the VM to null-check before re-creating (lazy creation).
RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
{
    FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
    if (ptr.isNewEntry)
        ptr.iterator->value = m_codeBlock->addFunctionDecl(makeFunction(function));
    return emitNewFunctionInternal(dst, ptr.iterator->value, true);
}

// Shared tail for the two paths above: ensures the activation exists, then
// emits op_new_func. Returns |dst|.
RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
{
    createActivationIfNecessary();
    emitOpcode(op_new_func);
    instructions().append(dst->index());
    instructions().append(index);
    instructions().append(doNullCheck);
    return dst;
}

// Emits op_new_regexp, materializing |regExp| into |dst|.
RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
{
    emitOpcode(op_new_regexp);
    instructions().append(dst->index());
    instructions().append(addRegExp(regExp));
    return dst;
}

// Emits op_new_func_exp for a function expression node. Returns |r0|.
RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
{
    FunctionBodyNode* function = n->body();
    unsigned index = m_codeBlock->addFunctionExpr(makeFunction(function));

    createActivationIfNecessary();
    emitOpcode(op_new_func_exp);
    instructions().append(r0->index());
    instructions().append(index);
    return r0;
}
RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1681
return emitCall(op_call, dst, func, expectedFunction, callArguments, divot, startOffset, endOffset);
1684
void BytecodeGenerator::createArgumentsIfNecessary()
1686
if (m_codeType != FunctionCode)
1689
if (!m_codeBlock->usesArguments())
1692
// If we're in strict mode we tear off the arguments on function
1693
// entry, so there's no need to check if we need to create them
1695
if (m_codeBlock->isStrictMode())
1698
emitOpcode(op_create_arguments);
1699
instructions().append(m_codeBlock->argumentsRegister());
1702
void BytecodeGenerator::createActivationIfNecessary()
1704
if (m_hasCreatedActivation)
1706
if (!m_codeBlock->needsFullScopeChain())
1708
emitOpcode(op_create_activation);
1709
instructions().append(m_activationRegister->index());
1712
RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1714
return emitCall(op_call_eval, dst, func, NoExpectedFunction, callArguments, divot, startOffset, endOffset);
1717
// Maps the well-known callee names 'Object' and 'Array' to the speculation
// hints used by emitExpectedFunctionSnippet; anything else gets no hint.
ExpectedFunction BytecodeGenerator::expectedFunctionForIdentifier(const Identifier& identifier)
{
    if (identifier == m_globalData->propertyNames->Object)
        return ExpectObjectConstructor;
    if (identifier == m_globalData->propertyNames->Array)
        return ExpectArrayConstructor;
    return NoExpectedFunction;
}
// Emits an inline fast path for calls whose callee is expected to be the
// Object or Array constructor: op_jneq_ptr guards the identity, the object/
// array is allocated directly, and control jumps to |done|, with the slow
// (real) call emitted after the realCall label. Returns the hint actually
// honored, or NoExpectedFunction when no snippet was emitted.
ExpectedFunction BytecodeGenerator::emitExpectedFunctionSnippet(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, Label* done)
{
    RefPtr<Label> realCall = newLabel();
    switch (expectedFunction) {
    case ExpectObjectConstructor: {
        // If the number of arguments is non-zero, then we can't do anything interesting.
        if (callArguments.argumentCountIncludingThis() >= 2)
            return NoExpectedFunction;

        size_t begin = instructions().size();
        emitOpcode(op_jneq_ptr);
        instructions().append(func->index());
        instructions().append(Special::ObjectConstructor);
        instructions().append(realCall->bind(begin, instructions().size()));

        if (dst != ignoredResult()) {
            emitOpcode(op_new_object);
            instructions().append(dst->index());
        }
        break;
    }

    case ExpectArrayConstructor: {
        // If you're doing anything other than "new Array()" or "new Array(foo)" then we
        // don't do inline it, for now. The only reason is that call arguments are in
        // the opposite order of what op_new_array expects, so we'd either need to change
        // how op_new_array works or we'd need an op_new_array_reverse. Neither of these
        // things sounds like it's worth it.
        if (callArguments.argumentCountIncludingThis() > 2)
            return NoExpectedFunction;

        size_t begin = instructions().size();
        emitOpcode(op_jneq_ptr);
        instructions().append(func->index());
        instructions().append(Special::ArrayConstructor);
        instructions().append(realCall->bind(begin, instructions().size()));

        if (dst != ignoredResult()) {
            if (callArguments.argumentCountIncludingThis() == 2) {
                emitOpcode(op_new_array_with_size);
                instructions().append(dst->index());
                instructions().append(callArguments.argumentRegister(0)->index());
                instructions().append(newArrayAllocationProfile());
            } else {
                ASSERT(callArguments.argumentCountIncludingThis() == 1);
                emitOpcode(op_new_array);
                instructions().append(dst->index());
                instructions().append(0);
                instructions().append(0);
                instructions().append(newArrayAllocationProfile());
            }
        }
        break;
    }

    default:
        ASSERT(expectedFunction == NoExpectedFunction);
        return NoExpectedFunction;
    }

    size_t begin = instructions().size();
    emitOpcode(op_jmp);
    instructions().append(done->bind(begin, instructions().size()));
    emitLabel(realCall.get());

    return expectedFunction;
}
RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1796
ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
1797
ASSERT(func->refCount());
1799
if (m_shouldEmitProfileHooks)
1800
emitMove(callArguments.profileHookRegister(), func);
1802
// Generate code for arguments.
1803
unsigned argument = 0;
1804
for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
1805
emitNode(callArguments.argumentRegister(argument++), n);
1807
// Reserve space for call frame.
1808
Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize> callFrame;
1809
for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
1810
callFrame.append(newTemporary());
1812
if (m_shouldEmitProfileHooks) {
1813
emitOpcode(op_profile_will_call);
1814
instructions().append(callArguments.profileHookRegister()->index());
1817
emitExpressionInfo(divot, startOffset, endOffset);
1819
RefPtr<Label> done = newLabel();
1820
expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
1823
UnlinkedArrayProfile arrayProfile = newArrayProfile();
1824
emitOpcode(opcodeID);
1825
instructions().append(func->index()); // func
1826
instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1827
instructions().append(callArguments.registerOffset()); // registerOffset
1829
instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1831
instructions().append(0);
1833
instructions().append(arrayProfile);
1834
if (dst != ignoredResult()) {
1835
UnlinkedValueProfile profile = emitProfiledOpcode(op_call_put_result);
1836
instructions().append(dst->index()); // dst
1837
instructions().append(profile);
1840
if (expectedFunction != NoExpectedFunction)
1841
emitLabel(done.get());
1843
if (m_shouldEmitProfileHooks) {
1844
emitOpcode(op_profile_did_call);
1845
instructions().append(callArguments.profileHookRegister()->index());
1851
// Emits op_call_varargs (e.g. f.apply(thisArg, args)): the argument array is
// spread at runtime starting at |firstFreeRegister|. Returns |dst|.
RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    if (m_shouldEmitProfileHooks) {
        emitMove(profileHookRegister, func);
        emitOpcode(op_profile_will_call);
        instructions().append(profileHookRegister->index());
    }

    emitExpressionInfo(divot, startOffset, endOffset);

    // Emit call.
    emitOpcode(op_call_varargs);
    instructions().append(func->index());
    instructions().append(thisRegister->index());
    instructions().append(arguments->index());
    instructions().append(firstFreeRegister->index());
    if (dst != ignoredResult()) {
        UnlinkedValueProfile profile = emitProfiledOpcode(op_call_put_result);
        instructions().append(dst->index());
        instructions().append(profile);
    }
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(profileHookRegister->index());
    }
    return dst;
}
// Emits function return: tears off the activation and (non-strict) arguments
// if present, then op_ret — or op_ret_object_or_this in constructors where
// the "must return an object" check can't be elided. Returns |src|.
RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
{
    if (m_codeBlock->needsFullScopeChain()) {
        emitOpcode(op_tear_off_activation);
        instructions().append(m_activationRegister->index());
    }

    if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !m_codeBlock->isStrictMode()) {
        emitOpcode(op_tear_off_arguments);
        instructions().append(m_codeBlock->argumentsRegister());
        instructions().append(m_activationRegister ? m_activationRegister->index() : emitLoad(0, JSValue())->index());
    }

    // Constructors use op_ret_object_or_this to check the result is an
    // object, unless we can trivially determine the check is not
    // necessary (currently, if the return value is 'this').
    if (isConstructor() && (src->index() != m_thisRegister.index())) {
        emitOpcode(op_ret_object_or_this);
        instructions().append(src->index());
        instructions().append(m_thisRegister.index());
        return src;
    }
    return emitUnaryNoDstOp(op_ret, src);
}

// Emits a single-operand opcode with no destination. Returns |src|.
RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(src->index());
    return src;
}
RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1913
ASSERT(func->refCount());
1915
if (m_shouldEmitProfileHooks)
1916
emitMove(callArguments.profileHookRegister(), func);
1918
// Generate code for arguments.
1919
unsigned argument = 0;
1920
if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
1921
for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
1922
emitNode(callArguments.argumentRegister(argument++), n);
1925
if (m_shouldEmitProfileHooks) {
1926
emitOpcode(op_profile_will_call);
1927
instructions().append(callArguments.profileHookRegister()->index());
1930
// Reserve space for call frame.
1931
Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize> callFrame;
1932
for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
1933
callFrame.append(newTemporary());
1935
emitExpressionInfo(divot, startOffset, endOffset);
1937
RefPtr<Label> done = newLabel();
1938
expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
1940
emitOpcode(op_construct);
1941
instructions().append(func->index()); // func
1942
instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1943
instructions().append(callArguments.registerOffset()); // registerOffset
1945
instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1947
instructions().append(0);
1949
instructions().append(0);
1950
if (dst != ignoredResult()) {
1951
UnlinkedValueProfile profile = emitProfiledOpcode(op_call_put_result);
1952
instructions().append(dst->index()); // dst
1953
instructions().append(profile);
1956
if (expectedFunction != NoExpectedFunction)
1957
emitLabel(done.get());
1959
if (m_shouldEmitProfileHooks) {
1960
emitOpcode(op_profile_did_call);
1961
instructions().append(callArguments.profileHookRegister()->index());
1967
RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
1969
emitOpcode(op_strcat);
1970
instructions().append(dst->index());
1971
instructions().append(src->index());
1972
instructions().append(count);
1977
void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
1979
emitOpcode(op_to_primitive);
1980
instructions().append(dst->index());
1981
instructions().append(src->index());
1984
RegisterID* BytecodeGenerator::emitPushWithScope(RegisterID* scope)
1986
ControlFlowContext context;
1987
context.isFinallyBlock = false;
1988
m_scopeContextStack.append(context);
1989
m_dynamicScopeDepth++;
1991
return emitUnaryNoDstOp(op_push_with_scope, scope);
1994
void BytecodeGenerator::emitPopScope()
1996
ASSERT(m_scopeContextStack.size());
1997
ASSERT(!m_scopeContextStack.last().isFinallyBlock);
1999
emitOpcode(op_pop_scope);
2001
m_scopeContextStack.removeLast();
2002
m_dynamicScopeDepth--;
2005
// Emits an op_debug hook for the debugger with source position operands.
// When DEBUG_WITH_BREAKPOINT is enabled, only DidReachBreakpoint hooks are
// emitted; otherwise hooks are skipped entirely unless debug hooks are on.
void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine, int column)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    if (debugHookID != DidReachBreakpoint)
        return;
#else
    if (!m_shouldEmitDebugHooks)
        return;
#endif
    emitOpcode(op_debug);
    instructions().append(debugHookID);
    instructions().append(firstLine);
    instructions().append(lastLine);
    instructions().append(column);
}
// Pushes a finally-block ControlFlowContext. The FinallyContext snapshots the
// sizes of every bookkeeping stack so emitComplexJumpScopes can restore the
// generator's state to what it was at try-entry before emitting the finally body.
void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
{
    ControlFlowContext scope;
    scope.isFinallyBlock = true;
    FinallyContext context = {
        finallyBlock,
        static_cast<unsigned>(m_scopeContextStack.size()),
        static_cast<unsigned>(m_switchContextStack.size()),
        static_cast<unsigned>(m_forInContextStack.size()),
        static_cast<unsigned>(m_tryContextStack.size()),
        static_cast<unsigned>(m_labelScopes.size()),
        m_finallyDepth,
        m_dynamicScopeDepth
    };
    scope.finallyContext = context;
    m_scopeContextStack.append(scope);
    m_finallyDepth++;
}
void BytecodeGenerator::popFinallyContext()
2042
ASSERT(m_scopeContextStack.size());
2043
ASSERT(m_scopeContextStack.last().isFinallyBlock);
2044
ASSERT(m_finallyDepth > 0);
2045
m_scopeContextStack.removeLast();
2049
// Finds the LabelScope a 'break' statement should jump to. An empty name
// means an unlabeled break, which targets the nearest non-named-label scope.
// Returns 0 if no matching scope exists.
LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    //
    // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
    // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
    // size 0, leading to segfaulty badness. We are yet to identify a valid cause within our code to
    // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the
    // loop condition is a workaround.
    while (m_labelScopes.size()) {
        if (m_labelScopes.last().refCount())
            break;
        m_labelScopes.removeLast();
    }

    if (!m_labelScopes.size())
        return 0;

    // We special-case the following, which is a syntax error in Firefox:
    // label:
    //     break;
    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() != LabelScope::NamedLabel) {
                ASSERT(scope->breakTarget());
                return scope;
            }
        }
        return 0;
    }

    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->name() && *scope->name() == name) {
            ASSERT(scope->breakTarget());
            return scope;
        }
    }
    return 0;
}
// Finds the LabelScope a 'continue' statement should jump to. An empty name
// targets the nearest enclosing loop; a named continue targets the loop
// nested nearest to the matching label scope. Returns 0 if none matches.
LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    if (!m_labelScopes.size())
        return 0;

    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() == LabelScope::Loop) {
                ASSERT(scope->continueTarget());
                return scope;
            }
        }
        return 0;
    }

    // Continue to the loop nested nearest to the label scope that matches
    // 'name'.
    LabelScope* result = 0;
    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->type() == LabelScope::Loop) {
            ASSERT(scope->continueTarget());
            result = scope;
        }
        if (scope->name() && *scope->name() == name)
            return result; // may be 0
    }
    return 0;
}
// Emits the bytecode for a jump (break/continue/goto-like transfer) that must
// unwind dynamic scopes and run intervening finally blocks between topScope and
// bottomScope. For each run of plain scopes it emits op_jmp_scopes; for each
// finally block it temporarily restores the generator's bookkeeping state to
// try-entry (using the snapshot taken in pushFinallyContext), emits the finally
// body inline, then restores the current state and continues unwinding.
PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
{
    while (topScope > bottomScope) {
        // First we count the number of dynamic scopes we need to remove to get
        // to a finally block.
        int nNormalScopes = 0;
        while (topScope > bottomScope) {
            if (topScope->isFinallyBlock)
                break;
            ++nNormalScopes;
            --topScope;
        }

        if (nNormalScopes) {
            size_t begin = instructions().size();

            // We need to remove a number of dynamic scopes to get to the next
            // finally block
            emitOpcode(op_jmp_scopes);
            instructions().append(nNormalScopes);

            // If topScope == bottomScope then there isn't actually a finally block
            // left to emit, so make the jmp_scopes jump directly to the target label
            if (topScope == bottomScope) {
                instructions().append(target->bind(begin, instructions().size()));
                return target;
            }

            // Otherwise we just use jmp_scopes to pop a group of scopes and go
            // to the next instruction
            RefPtr<Label> nextInsn = newLabel();
            instructions().append(nextInsn->bind(begin, instructions().size()));
            emitLabel(nextInsn.get());
        }

        Vector<ControlFlowContext> savedScopeContextStack;
        Vector<SwitchInfo> savedSwitchContextStack;
        Vector<ForInContext> savedForInContextStack;
        Vector<TryContext> poppedTryContexts;
        SegmentedVector<LabelScope, 8> savedLabelScopes;
        while (topScope > bottomScope && topScope->isFinallyBlock) {
            RefPtr<Label> beforeFinally = emitLabel(newLabel().get());

            // Save the current state of the world while instating the state of the world
            // for the finally block.
            FinallyContext finallyContext = topScope->finallyContext;
            bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
            bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
            bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
            bool flipTries = finallyContext.tryContextStackSize != m_tryContextStack.size();
            bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
            int topScopeIndex = -1;
            int bottomScopeIndex = -1;
            if (flipScopes) {
                // topScope/bottomScope point into m_scopeContextStack, which is about
                // to be replaced; remember indices so the pointers can be rebuilt.
                topScopeIndex = topScope - m_scopeContextStack.begin();
                bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
                savedScopeContextStack = m_scopeContextStack;
                m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
            }
            if (flipSwitches) {
                savedSwitchContextStack = m_switchContextStack;
                m_switchContextStack.shrink(finallyContext.switchContextStackSize);
            }
            if (flipForIns) {
                savedForInContextStack = m_forInContextStack;
                m_forInContextStack.shrink(finallyContext.forInContextStackSize);
            }
            if (flipTries) {
                // Close every try range opened after try-entry at the point just
                // before the finally body, and remember the contexts so they can
                // be reopened after the finally body.
                while (m_tryContextStack.size() != finallyContext.tryContextStackSize) {
                    ASSERT(m_tryContextStack.size() > finallyContext.tryContextStackSize);
                    TryContext context = m_tryContextStack.last();
                    m_tryContextStack.removeLast();
                    TryRange range;
                    range.start = context.start;
                    range.end = beforeFinally;
                    range.tryData = context.tryData;
                    m_tryRanges.append(range);
                    poppedTryContexts.append(context);
                }
            }
            if (flipLabelScopes) {
                savedLabelScopes = m_labelScopes;
                while (m_labelScopes.size() > finallyContext.labelScopesSize)
                    m_labelScopes.removeLast();
            }
            int savedFinallyDepth = m_finallyDepth;
            m_finallyDepth = finallyContext.finallyDepth;
            int savedDynamicScopeDepth = m_dynamicScopeDepth;
            m_dynamicScopeDepth = finallyContext.dynamicScopeDepth;

            // Emit the finally block.
            emitNode(finallyContext.finallyBlock);

            RefPtr<Label> afterFinally = emitLabel(newLabel().get());

            // Restore the state of the world.
            if (flipScopes) {
                m_scopeContextStack = savedScopeContextStack;
                topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
                bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since it the index might be -1.
            }
            if (flipSwitches)
                m_switchContextStack = savedSwitchContextStack;
            if (flipForIns)
                m_forInContextStack = savedForInContextStack;
            if (flipTries) {
                ASSERT(m_tryContextStack.size() == finallyContext.tryContextStackSize);
                // Reopen the try contexts in their original (bottom-up) order,
                // starting just after the finally body.
                for (unsigned i = poppedTryContexts.size(); i--;) {
                    TryContext context = poppedTryContexts[i];
                    context.start = afterFinally;
                    m_tryContextStack.append(context);
                }
                poppedTryContexts.clear();
            }
            if (flipLabelScopes)
                m_labelScopes = savedLabelScopes;
            m_finallyDepth = savedFinallyDepth;
            m_dynamicScopeDepth = savedDynamicScopeDepth;

            --topScope;
        }
    }
    return emitJump(target);
}
// Emits a forward jump to 'target' that first unwinds dynamic scopes down to
// targetScopeDepth. Uses a plain jump when no scopes need popping, defers to
// emitComplexJumpScopes when finally blocks are in play, and otherwise emits a
// single op_jmp_scopes with the scope delta.
PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
{
    ASSERT(scopeDepth() - targetScopeDepth >= 0);
    ASSERT(target->isForward());

    size_t scopeDelta = scopeDepth() - targetScopeDepth;
    ASSERT(scopeDelta <= m_scopeContextStack.size());
    if (!scopeDelta)
        return emitJump(target);

    if (m_finallyDepth)
        return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);

    size_t begin = instructions().size();

    emitOpcode(op_jmp_scopes);
    instructions().append(scopeDelta);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
// Emits op_get_pnames for for-in enumeration: collects the property names of
// 'base' into 'dst' and initializes the iteration index 'i' and 'size',
// jumping to 'breakTarget' when there is nothing to enumerate.
RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
{
    size_t begin = instructions().size();

    emitOpcode(op_get_pnames);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(breakTarget->bind(begin, instructions().size()));
    return dst;
}
// Emits op_next_pname: fetches the next enumerated property name from 'iter'
// into 'dst', advancing index 'i', and jumps to 'target' while names remain.
RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_next_pname);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(iter->index());
    instructions().append(target->bind(begin, instructions().size()));
    return dst;
}
// Opens a try region starting at 'start'. Allocates a TryData (handler target
// label, scope depth filled in later by popTryAndEmitCatch) and pushes a
// TryContext so the region can be closed or temporarily flipped by finally
// handling. Returns a pointer into m_tryData, which must stay stable.
TryData* BytecodeGenerator::pushTry(Label* start)
{
    TryData tryData;
    tryData.target = newLabel();
    tryData.targetScopeDepth = UINT_MAX;
    m_tryData.append(tryData);
    TryData* result = &m_tryData.last();

    TryContext tryContext;
    tryContext.start = start;
    tryContext.tryData = result;

    m_tryContextStack.append(tryContext);

    return result;
}
// Closes the innermost try region at 'end', records its TryRange, and emits
// the catch handler: binds the handler label, records the dynamic scope depth
// at the handler, and emits op_catch storing the exception into targetRegister.
RegisterID* BytecodeGenerator::popTryAndEmitCatch(TryData* tryData, RegisterID* targetRegister, Label* end)
{
    m_usesExceptions = true;

    ASSERT_UNUSED(tryData, m_tryContextStack.last().tryData == tryData);

    TryRange tryRange;
    tryRange.start = m_tryContextStack.last().start;
    tryRange.end = end;
    tryRange.tryData = m_tryContextStack.last().tryData;
    m_tryRanges.append(tryRange);
    m_tryContextStack.removeLast();

    emitLabel(tryRange.tryData->target.get());
    tryRange.tryData->targetScopeDepth = m_dynamicScopeDepth;

    emitOpcode(op_catch);
    instructions().append(targetRegister->index());
    return targetRegister;
}
void BytecodeGenerator::emitThrowReferenceError(const String& message)
2339
emitOpcode(op_throw_static_error);
2340
instructions().append(addConstantValue(jsString(globalData(), message))->index());
2341
instructions().append(true);
2344
void BytecodeGenerator::emitPushNameScope(const Identifier& property, RegisterID* value, unsigned attributes)
2346
ControlFlowContext context;
2347
context.isFinallyBlock = false;
2348
m_scopeContextStack.append(context);
2349
m_dynamicScopeDepth++;
2351
emitOpcode(op_push_name_scope);
2352
instructions().append(addConstant(property));
2353
instructions().append(value->index());
2354
instructions().append(attributes);
2357
// Begins a jump-table switch on 'scrutineeRegister'. Emits the opcode matching
// the switch type plus two placeholder operands (table index and default
// target) that endSwitch() patches once all clause labels are known.
void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
{
    SwitchInfo info = { static_cast<uint32_t>(instructions().size()), type };
    switch (type) {
    case SwitchInfo::SwitchImmediate:
        emitOpcode(op_switch_imm);
        break;
    case SwitchInfo::SwitchCharacter:
        emitOpcode(op_switch_char);
        break;
    case SwitchInfo::SwitchString:
        emitOpcode(op_switch_string);
        break;
    default:
        ASSERT_NOT_REACHED();
    }

    instructions().append(0); // place holder for table index
    instructions().append(0); // place holder for default target
    instructions().append(scrutineeRegister->index());
    m_switchContextStack.append(info);
}
// Extracts the integer jump-table key from an immediate switch clause node.
// The clause must be a number whose value is exactly representable as int32
// and lie within [min, max].
static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(min);
    UNUSED_PARAM(max);
    ASSERT(node->isNumber());
    double value = static_cast<NumberNode*>(node)->value();
    int32_t key = static_cast<int32_t>(value);
    ASSERT(key == value);
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key;
}
// Fills an immediate-switch jump table: sizes branchOffsets for the [min, max]
// key range (0 meaning "fall through to default") and binds each clause label
// relative to the switch instruction (whose operands span 3 slots).
static void prepareJumpTableForImmediateSwitch(UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}
// Extracts the jump-table key from a character switch clause node: the code
// unit of its single-character string value, which must lie within [min, max].
static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(min);
    UNUSED_PARAM(max);
    ASSERT(node->isString());
    StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
    ASSERT(clause->length() == 1);

    int32_t key = (*clause)[0];
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key;
}
// Fills a character-switch jump table, mirroring the immediate-switch variant
// but keyed on each clause's single character code unit.
static void prepareJumpTableForCharacterSwitch(UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}
// Fills a string-switch jump table: maps each clause's string value to its
// label offset, bound relative to the switch instruction (3 operand slots).
static void prepareJumpTableForStringSwitch(UnlinkedStringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
{
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());

        ASSERT(nodes[i]->isString());
        StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
        jumpTable.offsetTable.add(clause, labels[i]->bind(switchAddress, switchAddress + 3));
    }
}
// Completes the switch begun by beginSwitch(): patches the placeholder table
// index and default-target operands at the recorded bytecode offset, allocates
// the jump table of the matching kind on the code block, and populates it from
// the clause labels and nodes.
void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
{
    SwitchInfo switchInfo = m_switchContextStack.last();
    m_switchContextStack.removeLast();
    if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
        prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
        prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else {
        ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        UnlinkedStringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
        prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
    }
}
// Flags the code block as having an over-deep expression (the caller reports
// the error) and hands back a fresh temporary so code generation can continue.
RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // It would be nice to do an even better job of identifying exactly where the expression is.
    // And we could make the caller pass the node pointer in, if there was some way of getting
    // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
    // is still good enough to get us an accurate line number.
    m_expressionTooDeep = true;
    return newTemporary();
}
void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
2482
m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
2485
bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2487
RegisterID* registerID = resolve(ident).local();
2488
if (!registerID || registerID->index() >= 0)
2490
return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2493
void BytecodeGenerator::emitReadOnlyExceptionIfNeeded()
2495
if (!isStrictMode())
2497
emitOpcode(op_throw_static_error);
2498
instructions().append(addConstantValue(jsString(globalData(), StrictModeReadonlyPropertyWriteError))->index());
2499
instructions().append(false);