/*
 * Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
27
#include "DFGRepatch.h"
31
#include "DFGCCallHelpers.h"
32
#include "DFGScratchRegisterAllocator.h"
33
#include "DFGSpeculativeJIT.h"
34
#include "DFGThunks.h"
35
#include "GCAwareJITStubRoutine.h"
36
#include "LinkBuffer.h"
37
#include "Operations.h"
38
#include "PolymorphicPutByIdList.h"
39
#include "RepatchBuffer.h"
40
#include <wtf/StringPrintStream.h>
42
namespace JSC { namespace DFG {
44
// Redirect the slow-path call planted at 'call' in 'codeblock' to a new callee.
static void dfgRepatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relink(call, newCalleeFunction);
}
50
static void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset, const FunctionPtr &slowPathFunction, bool compact)
52
RepatchBuffer repatchBuffer(codeBlock);
54
// Only optimize once!
55
repatchBuffer.relink(stubInfo.callReturnLocation, slowPathFunction);
57
// Patch the structure check & the offset of the load.
58
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), structure);
59
repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.dfg.deltaCallToStorageLoad), isOutOfLineOffset(offset));
62
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
64
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
65
#elif USE(JSVALUE32_64)
67
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
68
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
70
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
71
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
76
static void addStructureTransitionCheck(
77
JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
78
MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
80
if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
81
structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
82
#if DFG_ENABLE(JIT_ASSERT)
83
// If we execute this code, the object must have the structure we expect. Assert
84
// this in debug modes.
85
jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
86
MacroAssembler::Jump ok = jit.branchPtr(
87
MacroAssembler::Equal,
88
MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
89
MacroAssembler::TrustedImmPtr(structure));
96
jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
99
MacroAssembler::NotEqual,
100
MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
101
MacroAssembler::TrustedImmPtr(structure)));
104
static void addStructureTransitionCheck(
105
JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
106
MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
108
if (prototype.isNull())
111
ASSERT(prototype.isCell());
113
addStructureTransitionCheck(
114
prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
115
failureCases, scratchGPR);
118
// Divert execution from the patched inline cache to 'target'. On platforms
// that support it, the inline structure-check branch itself is replaced by a
// direct jump; otherwise we fall back to relinking the out-of-line
// structure-check jump.
static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
        repatchBuffer.replaceWithJump(
            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabelPtrAtOffset(
                    -(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    repatchBuffer.relink(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.dfg.deltaCallToStructCheck),
        CodeLocationLabel(target));
}
135
// Emit the stub epilogue. If a scratch register was pushed on entry it must be
// popped on both the success and failure paths, so the failure cases are
// linked here (to the pop) and a fresh 'fail' jump is produced; otherwise the
// failure cases are left for the caller to link directly (see
// linkRestoreScratch) and 'fail' is left unset.
static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
{
    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);

        success = stubJit.jump();

        // link failure cases here, so we can pop scratchGPR, and then jump back.
        failureCases.link(&stubJit);

        stubJit.pop(scratchGPR);

        fail = stubJit.jump();
    } else
        success = stubJit.jump();
}
154
// Link the jumps produced by emitRestoreScratch. With a pushed scratch, the
// failure cases were already linked inside the stub and only the combined
// 'fail' jump needs to target the slow case; without one, the failure cases
// jump to the slow case directly.
static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
{
    patchBuffer.link(success, successLabel);

    if (needToRestoreScratch) {
        // link the failure cases through the pop in the stub, then to the slow case.
        patchBuffer.link(fail, slowCaseBegin);
    } else {
        // link failure cases directly back to normal path
        patchBuffer.link(failureCases, slowCaseBegin);
    }
}
167
// Convenience overload that derives the success/slow-case labels from the
// stub info's recorded deltas.
static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}
172
// Build a stub that performs a get-by-id through a prototype chain: check the
// base's structure, check (or watchpoint) each prototype's structure along
// 'chain' for 'count' hops, then load the property at 'offset' directly from
// the final prototype object. On any check failure, jump to 'slowCaseLabel';
// on success, jump to 'successLabel'. The finished routine is returned via
// 'stubRoutine'.
static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();

    MacroAssembler stubJit;

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = false;

    if (scratchGPR == InvalidGPRReg) {
        // No free register: borrow one and save/restore it around the stub.
#if USE(JSVALUE64)
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
#else
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
        stubJit.push(scratchGPR);
        needToRestoreScratch = true;
    }

    MacroAssembler::JumpList failureCases;

    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure)));

    // Walk the prototype chain, guarding each prototype's structure.
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(exec));
        addStructureTransitionCheck(
            protoObject, protoObject->structure(), exec->codeBlock(), stubInfo, stubJit,
            failureCases, scratchGPR);
        currStructure = it->get();
    }

    // Load the property from the final prototype, inline or out-of-line.
    if (isInlineOffset(offset)) {
#if USE(JSVALUE64)
        stubJit.load64(protoObject->locationForOffset(offset), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject->locationForOffset(offset)), resultGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    } else {
        stubJit.loadPtr(protoObject->butterflyAddress(), resultGPR);
#if USE(JSVALUE64)
        stubJit.load64(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>)), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    }

    MacroAssembler::Jump success, fail;

    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());

    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);

    stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
        patchBuffer,
        ("DFG prototype chain access stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), successLabel.executableAddress()));
}
243
// Attempt to install an optimized path for a get-by-id. Handles, in order:
// (1) array 'length' accesses via a custom stub, (2) cacheable self accesses
// by patching the inline cache, and (3) prototype-chain accesses via a
// generated stub. Returns true if an optimization was installed; false means
// the caller should repatch to the generic slow path.
static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
        bool needToRestoreScratch = false;

        MacroAssembler stubJit;

        if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
#else
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
            stubJit.push(scratchGPR);
            needToRestoreScratch = true;
        }

        MacroAssembler::JumpList failureCases;

        // Guard that the cell is still an array with a contiguous-style shape.
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSCell::structureOffset()), scratchGPR);
        stubJit.load8(MacroAssembler::Address(scratchGPR, Structure::indexingTypeOffset()), scratchGPR);
        failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
        failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));

        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
        // A length that doesn't fit in int32 can't be boxed as an immediate.
        failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));

#if USE(JSVALUE64)
        stubJit.or64(GPRInfo::tagTypeNumberRegister, scratchGPR, resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(scratchGPR, resultGPR);
        stubJit.move(JITCompiler::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

        MacroAssembler::Jump success, fail;

        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);

        linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);

        stubInfo.stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
            patchBuffer,
            ("DFG GetById array length stub for %s, return point %p",
                toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                    stubInfo.patch.dfg.deltaCallToDone).executableAddress()));

        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
        repatchBuffer.relink(stubInfo.callReturnLocation, operationGetById);

        return true;
    }

    // FIXME: should support length access for String.

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    if (!slot.isCacheable())
        return false;
    if (!structure->propertyAccessesAreCacheable())
        return false;

    // Optimize self access.
    if (slot.slotBase() == baseValue) {
        if ((slot.cachedPropertyType() != PropertySlot::Value)
            || !MacroAssembler::isCompactPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset()))) {
            // Can't patch inline; go straight to building a stub list.
            dfgRepatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
            return true;
        }

        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*globalData, codeBlock->ownerExecutable(), structure);
        return true;
    }

    if (structure->isDictionary())
        return false;

    // FIXME: optimize getters and setters
    if (slot.cachedPropertyType() != PropertySlot::Value)
        return false;

    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (count == InvalidPrototypeChain)
        return false;

    StructureChain* prototypeChain = structure->prototypeChain(exec);

    ASSERT(slot.slotBase().isObject());

    generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase), stubInfo.stubRoutine);

    RepatchBuffer repatchBuffer(codeBlock);
    replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdProtoBuildList);

    stubInfo.initGetByIdChain(*globalData, codeBlock->ownerExecutable(), structure, prototypeChain, count, true);
    return true;
}
362
// Entry point from the slow path: try to cache; if that fails, permanently
// repatch the call to the generic operationGetById so we don't keep retrying.
void dfgRepatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
369
// Grow the polymorphic self-access list for a get-by-id. Each list entry is a
// stub that checks one structure and either loads the property directly or
// (for getters/custom accessors) calls out through a helper with an exception
// check. Returns true while the list still has room (the caller keeps the
// list-building slow path); false tells the caller to give up and repatch to
// the generic path.
static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isUncacheableDictionary()
        || slot.slotBase() != baseValue)
        return false;

    if (!stubInfo.patch.dfg.registersFlushed) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (slot.cachedPropertyType() != PropertySlot::Value)
            return false;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    JSGlobalData* globalData = &exec->globalData();

    ASSERT(slot.slotBase().isObject());

    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;

    if (stubInfo.accessType == access_unset) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 0);
        listIndex = 0;
    } else if (stubInfo.accessType == access_get_by_id_self) {
        // Promote the existing monomorphic self cache into list slot 0.
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), JITStubRoutine::createSelfManagedRoutine(stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase)), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 1);
        listIndex = 1;
    } else {
        polymorphicStructureList = stubInfo.u.getByIdSelfList.structureList;
        listIndex = stubInfo.u.getByIdSelfList.listSize;
    }

    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdSelfList.listSize++;

        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();

        CCallHelpers stubJit(globalData, codeBlock);

        MacroAssembler::Jump wrongStruct = stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure));

        // The strategy we use for stubs is as follows:
        // 1) Call DFG helper that calls the getter.
        // 2) Check if there was an exception, and if there was, call yet another
        //    helper.

        bool isDirect = false;
        MacroAssembler::Call operationCall;
        MacroAssembler::Call handlerCall;
        FunctionPtr operationFunction;
        MacroAssembler::Jump success;

        if (slot.cachedPropertyType() == PropertySlot::Getter
            || slot.cachedPropertyType() == PropertySlot::Custom) {
            if (slot.cachedPropertyType() == PropertySlot::Getter) {
                ASSERT(scratchGPR != InvalidGPRReg);
                ASSERT(baseGPR != scratchGPR);
                // Load the GetterSetter cell from the property slot.
                if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                    stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                } else {
                    stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
#if USE(JSVALUE64)
                    stubJit.load64(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                }
                stubJit.setupArgumentsWithExecState(baseGPR, scratchGPR);
                operationFunction = operationCallGetter;
            } else {
                stubJit.setupArgumentsWithExecState(
                    baseGPR,
                    MacroAssembler::TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()),
                    MacroAssembler::TrustedImmPtr(const_cast<Identifier*>(&ident)));
                operationFunction = operationCallCustomGetter;
            }

            // Need to make sure that whenever this call is made in the future, we remember the
            // place that we made it from. It just so happens to be the place that we are at
            // right now!
            stubJit.store32(
                MacroAssembler::TrustedImm32(exec->codeOriginIndexForDFG()),
                CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));

            operationCall = stubJit.call();
#if USE(JSVALUE64)
            stubJit.move(GPRInfo::returnValueGPR, resultGPR);
#else
            stubJit.setupResults(resultGPR, resultTagGPR);
#endif
            success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);

            stubJit.setupArgumentsWithExecState(
                MacroAssembler::TrustedImmPtr(&stubInfo));
            handlerCall = stubJit.call();
            stubJit.jump(GPRInfo::returnValueGPR2);
        } else {
            // Plain value property: load it directly.
            if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                // Order the tag/payload loads so we never clobber baseGPR
                // before the second load reads through it.
                if (baseGPR == resultTagGPR) {
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                } else {
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                }
#endif
            } else {
                stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
#if USE(JSVALUE64)
                stubJit.load64(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
            }
            success = stubJit.jump();
            isDirect = true;
        }

        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);

        // Chain this stub's failure path to the previous entry (or the slow case).
        CodeLocationLabel lastProtoBegin;
        if (listIndex)
            lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        else
            lastProtoBegin = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase);
        ASSERT(!!lastProtoBegin);

        patchBuffer.link(wrongStruct, lastProtoBegin);
        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
        if (!isDirect) {
            patchBuffer.link(operationCall, operationFunction);
            patchBuffer.link(handlerCall, lookupExceptionHandlerInStub);
        }

        PassRefPtr<JITStubRoutine> tmp =
            createJITStubRoutine(
                FINALIZE_DFG_CODE(
                    patchBuffer,
                    ("DFG GetById polymorphic list access for %s, return point %p",
                        toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                            stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
                *globalData,
                codeBlock->ownerExecutable(),
                slot.cachedPropertyType() == PropertySlot::Getter
                || slot.cachedPropertyType() == PropertySlot::Custom);
        RefPtr<JITStubRoutine> stubRoutine = tmp;

        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.dfg.deltaCallToStructCheck),
            CodeLocationLabel(stubRoutine->code().code()));

        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }

    return false;
}
553
// Entry point from the list-building slow path: keep building while
// tryBuildGetByIDList says so; otherwise repatch to the generic operation.
void dfgBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
560
// Grow the polymorphic prototype-chain list for a get-by-id whose property
// lives on a prototype. Each new entry reuses generateProtoChainAccessStub,
// with its failure path chained to the previous entry. Returns true while the
// list still has room; false tells the caller to repatch to the generic path.
static bool tryBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isDictionary()
        || baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()
        || slot.slotBase() == baseValue
        || slot.cachedPropertyType() != PropertySlot::Value)
        return false;

    ASSERT(slot.slotBase().isObject());

    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (count == InvalidPrototypeChain)
        return false;

    Structure* structure = baseValue.asCell()->structure();
    StructureChain* prototypeChain = structure->prototypeChain(exec);
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex = 1;

    if (stubInfo.accessType == access_get_by_id_chain) {
        // Promote the existing monomorphic chain cache into list slot 0.
        ASSERT(!!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), true);
        stubInfo.stubRoutine.clear();
        stubInfo.initGetByIdProtoList(polymorphicStructureList, 1);
    } else {
        ASSERT(stubInfo.accessType == access_get_by_id_proto_list);
        polymorphicStructureList = stubInfo.u.getByIdProtoList.structureList;
        listIndex = stubInfo.u.getByIdProtoList.listSize;
    }

    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdProtoList.listSize++;

        CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        ASSERT(!!lastProtoBegin);

        RefPtr<JITStubRoutine> stubRoutine;

        // New stub falls through to the previous list entry on failure.
        generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), lastProtoBegin, stubRoutine);

        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, true);

        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());

        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }

    return false;
}
618
// Entry point from the proto-list-building slow path: keep building while
// tryBuildGetByIDProtoList says so; otherwise repatch to the generic operation.
void dfgBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDProtoList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
625
// Pick the generic (non-list-building) put-by-id slow-path operation matching
// the strictness of the store and whether it is a direct put.
static V_DFGOperation_EJCI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}
637
// Pick the list-building put-by-id slow-path operation matching the
// strictness of the store and whether it is a direct put.
static V_DFGOperation_EJCI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictBuildList;
        return operationPutByIdStrictBuildList;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictBuildList;
    return operationPutByIdNonStrictBuildList;
}
649
static void emitPutReplaceStub(
653
const PutPropertySlot& slot,
654
StructureStubInfo& stubInfo,
656
Structure* structure,
657
CodeLocationLabel failureLabel,
658
RefPtr<JITStubRoutine>& stubRoutine)
660
JSGlobalData* globalData = &exec->globalData();
661
GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
662
#if USE(JSVALUE32_64)
663
GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
665
GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
666
GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
667
bool needToRestoreScratch = false;
668
#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
670
const bool writeBarrierNeeded = true;
672
const bool writeBarrierNeeded = false;
675
MacroAssembler stubJit;
677
if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || isOutOfLineOffset(slot.cachedOffset()))) {
679
scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
681
scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR);
683
needToRestoreScratch = true;
684
stubJit.push(scratchGPR);
687
MacroAssembler::Jump badStructure = stubJit.branchPtr(
688
MacroAssembler::NotEqual,
689
MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
690
MacroAssembler::TrustedImmPtr(structure));
692
#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
694
scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
696
scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR, scratchGPR);
698
stubJit.push(scratchGPR2);
699
SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratchGPR2, WriteBarrierForPropertyAccess);
700
stubJit.pop(scratchGPR2);
704
if (isInlineOffset(slot.cachedOffset()))
705
stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
707
stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
708
stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
710
#elif USE(JSVALUE32_64)
711
if (isInlineOffset(slot.cachedOffset())) {
712
stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
713
stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
715
stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
716
stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
717
stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
721
MacroAssembler::Jump success;
722
MacroAssembler::Jump failure;
724
if (needToRestoreScratch) {
725
stubJit.pop(scratchGPR);
726
success = stubJit.jump();
728
badStructure.link(&stubJit);
729
stubJit.pop(scratchGPR);
730
failure = stubJit.jump();
732
success = stubJit.jump();
733
failure = badStructure;
736
LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
737
patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
738
patchBuffer.link(failure, failureLabel);
740
stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
742
("DFG PutById replace stub for %s, return point %p",
743
toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
744
stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
747
static void emitPutTransitionStub(
751
const PutPropertySlot& slot,
752
StructureStubInfo& stubInfo,
754
Structure* structure,
755
Structure* oldStructure,
756
StructureChain* prototypeChain,
757
CodeLocationLabel failureLabel,
758
RefPtr<JITStubRoutine>& stubRoutine)
760
JSGlobalData* globalData = &exec->globalData();
762
GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
763
#if USE(JSVALUE32_64)
764
GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
766
GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
768
ScratchRegisterAllocator allocator(stubInfo.patch.dfg.usedRegisters);
769
allocator.lock(baseGPR);
770
#if USE(JSVALUE32_64)
771
allocator.lock(valueTagGPR);
773
allocator.lock(valueGPR);
775
CCallHelpers stubJit(globalData);
777
GPRReg scratchGPR1 = allocator.allocateScratchGPR();
778
ASSERT(scratchGPR1 != baseGPR);
779
ASSERT(scratchGPR1 != valueGPR);
781
bool needSecondScratch = false;
782
bool needThirdScratch = false;
783
#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
784
needSecondScratch = true;
786
if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
787
&& oldStructure->outOfLineCapacity()) {
788
needSecondScratch = true;
789
needThirdScratch = true;
793
if (needSecondScratch) {
794
scratchGPR2 = allocator.allocateScratchGPR();
795
ASSERT(scratchGPR2 != baseGPR);
796
ASSERT(scratchGPR2 != valueGPR);
797
ASSERT(scratchGPR2 != scratchGPR1);
799
scratchGPR2 = InvalidGPRReg;
801
if (needThirdScratch) {
802
scratchGPR3 = allocator.allocateScratchGPR();
803
ASSERT(scratchGPR3 != baseGPR);
804
ASSERT(scratchGPR3 != valueGPR);
805
ASSERT(scratchGPR3 != scratchGPR1);
806
ASSERT(scratchGPR3 != scratchGPR2);
808
scratchGPR3 = InvalidGPRReg;
810
allocator.preserveReusedRegistersByPushing(stubJit);
812
MacroAssembler::JumpList failureCases;
814
ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
816
failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));
818
addStructureTransitionCheck(
819
oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
822
if (putKind == NotDirect) {
823
for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
824
addStructureTransitionCheck(
825
(*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
830
#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
831
ASSERT(needSecondScratch);
832
ASSERT(scratchGPR2 != InvalidGPRReg);
833
// Must always emit this write barrier as the structure transition itself requires it
834
SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR1, scratchGPR2, WriteBarrierForPropertyAccess);
837
MacroAssembler::JumpList slowPath;
839
bool scratchGPR1HasStorage = false;
841
if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
842
size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
843
CopiedAllocator* copiedAllocator = &globalData->heap.storageAllocator();
845
if (!oldStructure->outOfLineCapacity()) {
846
stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
847
slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
848
stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
849
stubJit.negPtr(scratchGPR1);
850
stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
851
stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
853
size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
854
ASSERT(newSize > oldSize);
856
stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
857
stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
858
slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
859
stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
860
stubJit.negPtr(scratchGPR1);
861
stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
862
stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
863
// We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
864
for (ptrdiff_t offset = 0; offset < static_cast<ptrdiff_t>(oldSize); offset += sizeof(void*)) {
865
stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
866
stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -(offset + sizeof(JSValue) + sizeof(void*))));
870
stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
871
scratchGPR1HasStorage = true;
874
stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
876
if (isInlineOffset(slot.cachedOffset()))
877
stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
879
if (!scratchGPR1HasStorage)
880
stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
881
stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
883
#elif USE(JSVALUE32_64)
884
if (isInlineOffset(slot.cachedOffset())) {
885
stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
886
stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
888
if (!scratchGPR1HasStorage)
889
stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
890
stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
891
stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
895
MacroAssembler::Jump success;
896
MacroAssembler::Jump failure;
898
if (allocator.didReuseRegisters()) {
899
allocator.restoreReusedRegistersByPopping(stubJit);
900
success = stubJit.jump();
902
failureCases.link(&stubJit);
903
allocator.restoreReusedRegistersByPopping(stubJit);
904
failure = stubJit.jump();
906
success = stubJit.jump();
908
MacroAssembler::Call operationCall;
909
MacroAssembler::Jump successInSlowPath;
911
if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
912
slowPath.link(&stubJit);
914
allocator.restoreReusedRegistersByPopping(stubJit);
915
ScratchBuffer* scratchBuffer = globalData->scratchBufferForSize(allocator.desiredScratchBufferSize());
916
allocator.preserveUsedRegistersToScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
918
stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
920
stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
922
operationCall = stubJit.call();
923
allocator.restoreUsedRegistersFromScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
924
successInSlowPath = stubJit.jump();
927
LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
928
patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
929
if (allocator.didReuseRegisters())
930
patchBuffer.link(failure, failureLabel);
932
patchBuffer.link(failureCases, failureLabel);
933
if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
934
patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
935
patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
939
createJITStubRoutine(
942
("DFG PutById %stransition stub (%p -> %p) for %s, return point %p",
943
structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
944
oldStructure, structure,
945
toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
946
stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
948
exec->codeBlock()->ownerExecutable(),
949
structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
953
// Attempts to install an inline cache for a put-by-id. Returns true if a
// stub was installed (transition stub for a new property, or a self-access
// repatch for a replace); returns false when the put is not cacheable, in
// which case the caller falls back to the generic operation.
static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();
    
    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue) {
        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;
            
            // Skip optimizing the case where we need a realloc, if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return false;
            
            // Skip optimizing the case where we need realloc, and the structure has
            // indexing storage.
            if (hasIndexingHeader(oldStructure->indexingType()))
                return false;
            
            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return false;
            
            StructureChain* prototypeChain = structure->prototypeChain(exec);
            
            emitPutTransitionStub(
                exec, baseValue, ident, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase),
                stubInfo.stubRoutine);
            
            // Point the inline structure check at the new stub, and route the
            // slow call to the list-building variant so later misses can grow
            // into a polymorphic cache.
            RepatchBuffer repatchBuffer(codeBlock);
            repatchBuffer.relink(
                stubInfo.callReturnLocation.jumpAtOffset(
                    stubInfo.patch.dfg.deltaCallToStructCheck),
                CodeLocationLabel(stubInfo.stubRoutine->code().code()));
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
            
            stubInfo.initPutByIdTransition(*globalData, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
            
            return true;
        }

        // Existing property: patch the inline fast path to store directly.
        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
        stubInfo.initPutByIdReplace(*globalData, codeBlock->ownerExecutable(), structure);
        return true;
    }

    return false;
}
// Slow-path entry: try to install a put-by-id cache; if caching is not
// possible, permanently relink the slow call to the generic (non-patching)
// operation so we stop paying the optimization-attempt cost on every miss.
void dfgRepatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
// Attempts to add another case (transition or replace) to a polymorphic
// put-by-id stub list. Returns true if a new stub was generated and the
// inline jump repatched; false when the access is not cacheable.
static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();
    
    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue) {
        PolymorphicPutByIdList* list;
        RefPtr<JITStubRoutine> stubRoutine;
        
        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;
            
            // Skip optimizing the case where we need a realloc, if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return false;
            
            // Skip optimizing the case where we need realloc, and the structure has
            // indexing storage.
            if (hasIndexingHeader(oldStructure->indexingType()))
                return false;
            
            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return false;
            
            StructureChain* prototypeChain = structure->prototypeChain(exec);
            
            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
            
            emitPutTransitionStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                CodeLocationLabel(list->currentSlowPathTarget()),
                stubRoutine);
            
            list->addAccess(
                PutByIdAccess::transition(
                    *globalData, codeBlock->ownerExecutable(),
                    oldStructure, structure, prototypeChain,
                    stubRoutine));
        } else {
            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
            
            emitPutReplaceStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
            
            list->addAccess(
                PutByIdAccess::replace(
                    *globalData, codeBlock->ownerExecutable(),
                    structure, stubRoutine));
        }
        
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubRoutine->code().code()));
        
        // Once the list is full, stop trying to add cases: route the slow
        // call to the generic operation.
        if (list->isFull())
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
        
        return true;
    }
    
    return false;
}
// Slow-path entry after a first cache miss: try to grow the polymorphic
// put-by-id list; on failure, permanently fall back to the generic
// operation.
void dfgBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
// Relinks a call's slow path to the appropriate virtual-call thunk for the
// given specialization kind (call vs. construct).
static void linkSlowFor(RepatchBuffer& repatchBuffer, JSGlobalData* globalData, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
{
    if (kind == CodeForCall) {
        repatchBuffer.relink(callLinkInfo.callReturnLocation, globalData->getCTIStub(virtualCallThunkGenerator).code());
        return;
    }
    ASSERT(kind == CodeForConstruct);
    repatchBuffer.relink(callLinkInfo.callReturnLocation, globalData->getCTIStub(virtualConstructThunkGenerator).code());
}
// Links a monomorphic call site to a known callee: records the callee in the
// CallLinkInfo, patches the hot path to jump straight to codePtr, and sets
// the slow path to the closure-call link thunk (for calls) or the virtual
// construct thunk (for constructs).
void dfgLinkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind)
{
    ASSERT(!callLinkInfo.stub);
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    JSGlobalData* globalData = callerCodeBlock->globalData();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.callee.set(exec->callerFrame()->globalData(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->globalData(), callerCodeBlock->ownerExecutable(), callee);
    repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
    
    // Let the callee know about this call site so it can unlink us if it is
    // jettisoned.
    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(&callLinkInfo);
    
    if (kind == CodeForCall) {
        repatchBuffer.relink(callLinkInfo.callReturnLocation, globalData->getCTIStub(linkClosureCallThunkGenerator).code());
        return;
    }
    ASSERT(kind == CodeForConstruct);
    linkSlowFor(repatchBuffer, globalData, callLinkInfo, CodeForConstruct);
}
// Public wrapper: relinks this call site's slow path to the virtual-call
// thunk of the given kind.
void dfgLinkSlowFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    JSGlobalData* globalData = callerCodeBlock->globalData();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    linkSlowFor(repatchBuffer, globalData, callLinkInfo, kind);
}
// Generates and installs a closure-call stub: the stub checks that the
// callee is a cell with the expected structure and executable, stores the
// callee's scope chain into the call frame, and near-calls the target;
// any check failure tail-jumps to the virtual call thunk.
// NOTE(review): reconstructed from a corrupted source chunk — #if guards
// and call-wrapper lines restored from visible statements; verify against
// upstream DFGRepatch.cpp.
void dfgLinkClosureCall(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub);
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    JSGlobalData* globalData = callerCodeBlock->globalData();
    
    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
    
    CCallHelpers stubJit(globalData, callerCodeBlock);
    
    CCallHelpers::JumpList slowPath;
    
#if USE(JSVALUE64)
    slowPath.append(
        stubJit.branchTest64(
            CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
#else
    // We would have already checked that the callee is a cell.
#endif
    
    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSCell::structureOffset()),
            CCallHelpers::TrustedImmPtr(structure)));
    
    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            CCallHelpers::TrustedImmPtr(executable)));
    
    // Publish the callee's scope chain into the call frame before entering
    // the target code.
    stubJit.loadPtr(
        CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
        GPRInfo::returnValueGPR);
    
#if USE(JSVALUE64)
    stubJit.store64(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain)));
#else
    stubJit.storePtr(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
    stubJit.store32(
        CCallHelpers::TrustedImm32(JSValue::CellTag),
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
#endif
    
    JITCompiler::Call call = stubJit.nearCall();
    JITCompiler::Jump done = stubJit.jump();
    
    // Slow path: hand off to the virtual call thunk with the callee in the
    // registers it expects.
    slowPath.link(&stubJit);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::nonArgGPR2);
    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::nonArgGPR2);
    stubJit.move(calleeGPR, GPRInfo::nonArgGPR0);
#if USE(JSVALUE32_64)
    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::nonArgGPR1);
#endif
    JITCompiler::Jump slow = stubJit.jump();
    
    LinkBuffer patchBuffer(*globalData, &stubJit, callerCodeBlock);
    
    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(globalData->getCTIStub(virtualCallThunkGenerator).code()));
    
    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_DFG_CODE(
            patchBuffer,
            ("DFG closure call stub for %s, return point %p, target %p (%s)",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(), toCString(*calleeCodeBlock).data())),
        *globalData, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    linkSlowFor(repatchBuffer, globalData, callLinkInfo, CodeForCall);
    
    callLinkInfo.stub = stubRoutine.release();
    
    ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
}
void dfgResetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1255
repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
1256
CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
1257
if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
1258
repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
1259
RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
1260
MacroAssembler::Address(
1261
static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
1262
JSCell::structureOffset()),
1263
reinterpret_cast<void*>(-1));
1265
repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(-1));
1267
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
1269
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
1270
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
1272
repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
1275
void dfgResetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
1277
V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
1278
V_DFGOperation_EJCI optimizedFunction;
1279
if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
1280
optimizedFunction = operationPutByIdStrictOptimize;
1281
else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
1282
optimizedFunction = operationPutByIdNonStrictOptimize;
1283
else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
1284
optimizedFunction = operationPutByIdDirectStrictOptimize;
1286
ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
1287
optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1289
repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
1290
CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
1291
if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
1292
repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
1293
RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
1294
MacroAssembler::Address(
1295
static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
1296
JSCell::structureOffset()),
1297
reinterpret_cast<void*>(-1));
1299
repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(-1));
1301
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
1303
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
1304
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
1306
repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
1309
} } // namespace JSC::DFG