~ubuntu-branches/ubuntu/raring/qtwebkit-source/raring-proposed


Viewing changes to Source/JavaScriptCore/dfg/DFGRepatch.cpp

  • Committer: Package Import Robot
  • Author(s): Jonathan Riddell
  • Date: 2013-02-18 14:24:18 UTC
  • Revision ID: package-import@ubuntu.com-20130218142418-eon0jmjg3nj438uy
Tags: upstream-2.3
Import upstream version 2.3

 
/*
 * Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
 
#include "config.h"
#include "DFGRepatch.h"

#if ENABLE(DFG_JIT)

#include "DFGCCallHelpers.h"
#include "DFGScratchRegisterAllocator.h"
#include "DFGSpeculativeJIT.h"
#include "DFGThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "LinkBuffer.h"
#include "Operations.h"
#include "PolymorphicPutByIdList.h"
#include "RepatchBuffer.h"
#include <wtf/StringPrintStream.h>

namespace JSC { namespace DFG {
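
// This file implements runtime repatching of DFG inline caches. A cache site
// consists of a patchable structure check, a patchable load/store, and a call
// to a slow-path operation; once the runtime has observed a concrete access,
// the check and the call are rewritten in place (via RepatchBuffer) so that
// subsequent executions take a fast path specialized for the observed
// Structure.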
 
static void dfgRepatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relink(call, newCalleeFunction);
}
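
// Self accesses are patched entirely in place: the slow-path call is
// retargeted first (so the site is only optimized once), the Structure*
// immediate in the inline structure check is replaced, the convertible load
// is activated or deactivated depending on whether the property lives out of
// line, and the displacement of the patched load/store is rewritten to the
// property's offset.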
 
static void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset, const FunctionPtr &slowPathFunction, bool compact)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // Only optimize once!
    repatchBuffer.relink(stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.dfg.deltaCallToStorageLoad), isOutOfLineOffset(offset));
#if USE(JSVALUE64)
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}
 
static void addStructureTransitionCheck(
    JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
        structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
#if DFG_ENABLE(JIT_ASSERT)
        // If we execute this code, the object must have the structure we expect. Assert
        // this in debug modes.
        jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
        MacroAssembler::Jump ok = jit.branchPtr(
            MacroAssembler::Equal,
            MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
            MacroAssembler::TrustedImmPtr(structure));
        jit.breakpoint();
        ok.link(&jit);
#endif
        return;
    }
    
    jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
    failureCases.append(
        jit.branchPtr(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
            MacroAssembler::TrustedImmPtr(structure)));
}
 
static void addStructureTransitionCheck(
    JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (prototype.isNull())
        return;
    
    ASSERT(prototype.isCell());
    
    addStructureTransitionCheck(
        prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
        failureCases, scratchGPR);
}
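
// Redirecting a cache site to a stub is done one of two ways: on targets
// that support it, the patchable branchPtr of the structure check is itself
// replaced with a direct jump to the stub; otherwise the existing jump at
// the struct-check offset is relinked.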
 
static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
        repatchBuffer.replaceWithJump(
            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabelPtrAtOffset(
                    -(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }
    
    repatchBuffer.relink(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.dfg.deltaCallToStructCheck),
        CodeLocationLabel(target));
}
 
static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
{
    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);
        
        success = stubJit.jump();
        
        // link failure cases here, so we can pop scratchGPR, and then jump back.
        failureCases.link(&stubJit);
        
        stubJit.pop(scratchGPR);
        
        fail = stubJit.jump();
        return;
    }
    
    success = stubJit.jump();
}
 
static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
{
    patchBuffer.link(success, successLabel);
        
    if (needToRestoreScratch) {
        patchBuffer.link(fail, slowCaseBegin);
        return;
    }
    
    // link failure cases directly back to normal path
    patchBuffer.link(failureCases, slowCaseBegin);
}
 
static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}
 
static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();

    MacroAssembler stubJit;
        
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = false;
    
    if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
#else
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
        stubJit.push(scratchGPR);
        needToRestoreScratch = true;
    }
    
    MacroAssembler::JumpList failureCases;
    
    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure)));
    
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(exec));
        addStructureTransitionCheck(
            protoObject, protoObject->structure(), exec->codeBlock(), stubInfo, stubJit,
            failureCases, scratchGPR);
        currStructure = it->get();
    }
    
    if (isInlineOffset(offset)) {
#if USE(JSVALUE64)
        stubJit.load64(protoObject->locationForOffset(offset), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject->locationForOffset(offset)), resultGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    } else {
        stubJit.loadPtr(protoObject->butterflyAddress(), resultGPR);
#if USE(JSVALUE64)
        stubJit.load64(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>)), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    }

    MacroAssembler::Jump success, fail;
    
    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
    
    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
    
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
    
    stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
        patchBuffer,
        ("DFG prototype chain access stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), successLabel.executableAddress()));
}
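
// tryCacheGetByID attempts three fast paths in order: a hand-rolled stub for
// array length, in-place self-access patching (the property lives on the base
// object itself), and a generated prototype chain stub. If none applies, the
// caller relinks the site to the generic operationGetById.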
 
static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();
    
    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
        bool needToRestoreScratch = false;
        
        MacroAssembler stubJit;
        
        if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
#else
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
            stubJit.push(scratchGPR);
            needToRestoreScratch = true;
        }
        
        MacroAssembler::JumpList failureCases;
        
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSCell::structureOffset()), scratchGPR);
        stubJit.load8(MacroAssembler::Address(scratchGPR, Structure::indexingTypeOffset()), scratchGPR);
        failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
        failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
        
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
        failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));

#if USE(JSVALUE64)
        stubJit.or64(GPRInfo::tagTypeNumberRegister, scratchGPR, resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(scratchGPR, resultGPR);
        stubJit.move(JITCompiler::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

        MacroAssembler::Jump success, fail;
        
        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
        
        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);
        
        linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
        
        stubInfo.stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
            patchBuffer,
            ("DFG GetById array length stub for %s, return point %p",
                toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                    stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
        
        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
        repatchBuffer.relink(stubInfo.callReturnLocation, operationGetById);
        
        return true;
    }
    
    // FIXME: should support length access for String.

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    if (!slot.isCacheable())
        return false;
    if (!structure->propertyAccessesAreCacheable())
        return false;

    // Optimize self access.
    if (slot.slotBase() == baseValue) {
        if ((slot.cachedPropertyType() != PropertySlot::Value)
            || !MacroAssembler::isCompactPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset()))) {
            dfgRepatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
            return true;
        }

        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*globalData, codeBlock->ownerExecutable(), structure);
        return true;
    }
    
    if (structure->isDictionary())
        return false;
    
    // FIXME: optimize getters and setters
    if (slot.cachedPropertyType() != PropertySlot::Value)
        return false;
    
    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (count == InvalidPrototypeChain)
        return false;

    StructureChain* prototypeChain = structure->prototypeChain(exec);
    
    ASSERT(slot.slotBase().isObject());
    
    generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase), stubInfo.stubRoutine);
    
    RepatchBuffer repatchBuffer(codeBlock);
    replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdProtoBuildList);
    
    stubInfo.initGetByIdChain(*globalData, codeBlock->ownerExecutable(), structure, prototypeChain, count, true);
    return true;
}
 
void dfgRepatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
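
// Once the monomorphic cache misses, the slow path comes back through the
// *BuildList operations and we accumulate up to POLYMORPHIC_LIST_CACHE_SIZE
// stubs in a PolymorphicAccessStructureList; each new stub's structure check
// falls through to the previously generated stub (or the slow case) on a
// mismatch.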
 
static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isUncacheableDictionary()
        || slot.slotBase() != baseValue)
        return false;
    
    if (!stubInfo.patch.dfg.registersFlushed) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (slot.cachedPropertyType() != PropertySlot::Value)
            return false;
    }
    
    CodeBlock* codeBlock = exec->codeBlock();
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    JSGlobalData* globalData = &exec->globalData();
    
    ASSERT(slot.slotBase().isObject());
    
    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;
    
    if (stubInfo.accessType == access_unset) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 0);
        listIndex = 0;
    } else if (stubInfo.accessType == access_get_by_id_self) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), JITStubRoutine::createSelfManagedRoutine(stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase)), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 1);
        listIndex = 1;
    } else {
        polymorphicStructureList = stubInfo.u.getByIdSelfList.structureList;
        listIndex = stubInfo.u.getByIdSelfList.listSize;
    }
    
    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdSelfList.listSize++;
        
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
        
        CCallHelpers stubJit(globalData, codeBlock);
        
        MacroAssembler::Jump wrongStruct = stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure));
        
        // The strategy we use for stubs is as follows:
        // 1) Call DFG helper that calls the getter.
        // 2) Check if there was an exception, and if there was, call yet another
        //    helper.
        
        bool isDirect = false;
        MacroAssembler::Call operationCall;
        MacroAssembler::Call handlerCall;
        FunctionPtr operationFunction;
        MacroAssembler::Jump success;
        
        if (slot.cachedPropertyType() == PropertySlot::Getter
            || slot.cachedPropertyType() == PropertySlot::Custom) {
            if (slot.cachedPropertyType() == PropertySlot::Getter) {
                ASSERT(scratchGPR != InvalidGPRReg);
                ASSERT(baseGPR != scratchGPR);
                if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                    stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                } else {
                    stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
#if USE(JSVALUE64)
                    stubJit.load64(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                }
                stubJit.setupArgumentsWithExecState(baseGPR, scratchGPR);
                operationFunction = operationCallGetter;
            } else {
                stubJit.setupArgumentsWithExecState(
                    baseGPR,
                    MacroAssembler::TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()),
                    MacroAssembler::TrustedImmPtr(const_cast<Identifier*>(&ident)));
                operationFunction = operationCallCustomGetter;
            }
            
            // Need to make sure that whenever this call is made in the future, we remember the
            // place that we made it from. It just so happens to be the place that we are at
            // right now!
            stubJit.store32(
                MacroAssembler::TrustedImm32(exec->codeOriginIndexForDFG()),
                CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
            
            operationCall = stubJit.call();
#if USE(JSVALUE64)
            stubJit.move(GPRInfo::returnValueGPR, resultGPR);
#else
            stubJit.setupResults(resultGPR, resultTagGPR);
#endif
            success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
            
            stubJit.setupArgumentsWithExecState(
                MacroAssembler::TrustedImmPtr(&stubInfo));
            handlerCall = stubJit.call();
            stubJit.jump(GPRInfo::returnValueGPR2);
        } else {
            if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                if (baseGPR == resultTagGPR) {
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                } else {
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                }
#endif
            } else {
                stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
#if USE(JSVALUE64)
                stubJit.load64(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
            }
            success = stubJit.jump();
            isDirect = true;
        }

        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);
        
        CodeLocationLabel lastProtoBegin;
        if (listIndex)
            lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        else
            lastProtoBegin = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase);
        ASSERT(!!lastProtoBegin);
        
        patchBuffer.link(wrongStruct, lastProtoBegin);
        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
        if (!isDirect) {
            patchBuffer.link(operationCall, operationFunction);
            patchBuffer.link(handlerCall, lookupExceptionHandlerInStub);
        }
        
        RefPtr<JITStubRoutine> stubRoutine =
            createJITStubRoutine(
                FINALIZE_DFG_CODE(
                    patchBuffer,
                    ("DFG GetById polymorphic list access for %s, return point %p",
                        toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                            stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
                *globalData,
                codeBlock->ownerExecutable(),
                slot.cachedPropertyType() == PropertySlot::Getter
                || slot.cachedPropertyType() == PropertySlot::Custom);
        
        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
        
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.dfg.deltaCallToStructCheck),
            CodeLocationLabel(stubRoutine->code().code()));
        
        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }
    
    return false;
}
 
void dfgBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
 
static bool tryBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isDictionary()
        || baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()
        || slot.slotBase() == baseValue
        || slot.cachedPropertyType() != PropertySlot::Value)
        return false;
    
    ASSERT(slot.slotBase().isObject());
    
    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (count == InvalidPrototypeChain)
        return false;

    Structure* structure = baseValue.asCell()->structure();
    StructureChain* prototypeChain = structure->prototypeChain(exec);
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();
    
    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex = 1;
    
    if (stubInfo.accessType == access_get_by_id_chain) {
        ASSERT(!!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), true);
        stubInfo.stubRoutine.clear();
        stubInfo.initGetByIdProtoList(polymorphicStructureList, 1);
    } else {
        ASSERT(stubInfo.accessType == access_get_by_id_proto_list);
        polymorphicStructureList = stubInfo.u.getByIdProtoList.structureList;
        listIndex = stubInfo.u.getByIdProtoList.listSize;
    }
    
    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdProtoList.listSize++;
        
        CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        ASSERT(!!lastProtoBegin);

        RefPtr<JITStubRoutine> stubRoutine;
        
        generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), lastProtoBegin, stubRoutine);
        
        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, true);
        
        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
        
        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }
    
    return false;
}
 
void dfgBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDProtoList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
 
static V_DFGOperation_EJCI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_DFGOperation_EJCI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictBuildList;
        return operationPutByIdStrictBuildList;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictBuildList;
    return operationPutByIdNonStrictBuildList;
}
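
// PutById stubs come in two flavors: "replace" stubs, which overwrite an
// existing property behind a structure check, and "transition" stubs, which
// additionally install the new Structure and, when the out-of-line capacity
// grows, reallocate the property storage (inline when possible, otherwise
// through a slow-path call).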
 
static void emitPutReplaceStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind,
    Structure* structure,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = false;
#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    GPRReg scratchGPR2;
    const bool writeBarrierNeeded = true;
#else
    const bool writeBarrierNeeded = false;
#endif
    
    MacroAssembler stubJit;
    
    if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || isOutOfLineOffset(slot.cachedOffset()))) {
#if USE(JSVALUE64)
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
#else
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR);
#endif
        needToRestoreScratch = true;
        stubJit.push(scratchGPR);
    }

    MacroAssembler::Jump badStructure = stubJit.branchPtr(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
        MacroAssembler::TrustedImmPtr(structure));
    
#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
#if USE(JSVALUE64)
    scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
#else
    scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR, scratchGPR);
#endif
    stubJit.push(scratchGPR2);
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratchGPR2, WriteBarrierForPropertyAccess);
    stubJit.pop(scratchGPR2);
#endif
    
#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif
    
    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;
    
    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);
        success = stubJit.jump();
        
        badStructure.link(&stubJit);
        stubJit.pop(scratchGPR);
        failure = stubJit.jump();
    } else {
        success = stubJit.jump();
        failure = badStructure;
    }
    
    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    patchBuffer.link(failure, failureLabel);
            
    stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
        patchBuffer,
        ("DFG PutById replace stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
}
 
static void emitPutTransitionStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind putKind,
    Structure* structure,
    Structure* oldStructure,
    StructureChain* prototypeChain,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    
    ScratchRegisterAllocator allocator(stubInfo.patch.dfg.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);
    
    CCallHelpers stubJit(globalData);
            
    GPRReg scratchGPR1 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR1 != baseGPR);
    ASSERT(scratchGPR1 != valueGPR);
    
    bool needSecondScratch = false;
    bool needThirdScratch = false;
#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    needSecondScratch = true;
#endif
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        needSecondScratch = true;
        needThirdScratch = true;
    }

    GPRReg scratchGPR2;
    if (needSecondScratch) {
        scratchGPR2 = allocator.allocateScratchGPR();
        ASSERT(scratchGPR2 != baseGPR);
        ASSERT(scratchGPR2 != valueGPR);
        ASSERT(scratchGPR2 != scratchGPR1);
    } else
        scratchGPR2 = InvalidGPRReg;
    GPRReg scratchGPR3;
    if (needThirdScratch) {
        scratchGPR3 = allocator.allocateScratchGPR();
        ASSERT(scratchGPR3 != baseGPR);
        ASSERT(scratchGPR3 != valueGPR);
        ASSERT(scratchGPR3 != scratchGPR1);
        ASSERT(scratchGPR3 != scratchGPR2);
    } else
        scratchGPR3 = InvalidGPRReg;
            
    allocator.preserveReusedRegistersByPushing(stubJit);

    MacroAssembler::JumpList failureCases;
            
    ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
    
    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));
    
    addStructureTransitionCheck(
        oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
        scratchGPR1);
            
    if (putKind == NotDirect) {
        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
            addStructureTransitionCheck(
                (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
                scratchGPR1);
        }
    }

#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    ASSERT(needSecondScratch);
    ASSERT(scratchGPR2 != InvalidGPRReg);
    // Must always emit this write barrier as the structure transition itself requires it
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR1, scratchGPR2, WriteBarrierForPropertyAccess);
#endif
    
    MacroAssembler::JumpList slowPath;
    
    bool scratchGPR1HasStorage = false;
    
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
        CopiedAllocator* copiedAllocator = &globalData->heap.storageAllocator();
        
        if (!oldStructure->outOfLineCapacity()) {
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
        } else {
            size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
            ASSERT(newSize > oldSize);
            
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
            // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
            for (ptrdiff_t offset = 0; offset < static_cast<ptrdiff_t>(oldSize); offset += sizeof(void*)) {
                stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
                stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -(offset + sizeof(JSValue) + sizeof(void*))));
            }
        }
        
        stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
        scratchGPR1HasStorage = true;
    }

    stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif
            
    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;
            
    if (allocator.didReuseRegisters()) {
        allocator.restoreReusedRegistersByPopping(stubJit);
        success = stubJit.jump();

        failureCases.link(&stubJit);
        allocator.restoreReusedRegistersByPopping(stubJit);
        failure = stubJit.jump();
    } else
        success = stubJit.jump();
    
    MacroAssembler::Call operationCall;
    MacroAssembler::Jump successInSlowPath;
    
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        slowPath.link(&stubJit);
        
        allocator.restoreReusedRegistersByPopping(stubJit);
        ScratchBuffer* scratchBuffer = globalData->scratchBufferForSize(allocator.desiredScratchBufferSize());
        allocator.preserveUsedRegistersToScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
#if USE(JSVALUE64)
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
#else
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
#endif
        operationCall = stubJit.call();
        allocator.restoreUsedRegistersFromScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
        successInSlowPath = stubJit.jump();
    }
    
    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    if (allocator.didReuseRegisters())
        patchBuffer.link(failure, failureLabel);
    else
        patchBuffer.link(failureCases, failureLabel);
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
        patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    }
    
    stubRoutine =
        createJITStubRoutine(
            FINALIZE_DFG_CODE(
                patchBuffer,
                ("DFG PutById %stransition stub (%p -> %p) for %s, return point %p",
                    structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
                    oldStructure, structure,
                    toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                        stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
            *globalData,
            exec->codeBlock()->ownerExecutable(),
            structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
            structure);
}
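
// A put is only cached when the property lives on the base object itself.
// Puts that add a new property cache the structure transition; the cases the
// transition stub cannot handle (dictionary structures, a reallocating
// transition without enough free registers, indexing storage on the old
// structure) fall back to the generic operation.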
 
static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();
    
    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue) {
        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;
            
            // Skip optimizing the case where we need a realloc, if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return false;
            
            // Skip optimizing the case where we need realloc, and the structure has
            // indexing storage.
            if (hasIndexingHeader(oldStructure->indexingType()))
                return false;
            
            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return false;
            
            StructureChain* prototypeChain = structure->prototypeChain(exec);
            
            emitPutTransitionStub(
                exec, baseValue, ident, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase),
                stubInfo.stubRoutine);
            
            RepatchBuffer repatchBuffer(codeBlock);
            repatchBuffer.relink(
                stubInfo.callReturnLocation.jumpAtOffset(
                    stubInfo.patch.dfg.deltaCallToStructCheck),
                CodeLocationLabel(stubInfo.stubRoutine->code().code()));
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
            
            stubInfo.initPutByIdTransition(*globalData, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
            
            return true;
        }

        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
        stubInfo.initPutByIdReplace(*globalData, codeBlock->ownerExecutable(), structure);
        return true;
    }

    return false;
}
 
void dfgRepatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
 
static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();
    
    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();
    
    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;
    
    // Optimize self access.
    if (slot.base() == baseValue) {
        PolymorphicPutByIdList* list;
        RefPtr<JITStubRoutine> stubRoutine;
        
        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;
            
            // Skip optimizing the case where we need a realloc if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return false;
            
            // Also skip optimizing the case where we need a realloc and the
            // structure has indexing storage.
            if (hasIndexingHeader(oldStructure->indexingType()))
                return false;
            
            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return false;
            
            StructureChain* prototypeChain = structure->prototypeChain(exec);
            
            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
            
            emitPutTransitionStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                CodeLocationLabel(list->currentSlowPathTarget()),
                stubRoutine);
            
            list->addAccess(
                PutByIdAccess::transition(
                    *globalData, codeBlock->ownerExecutable(),
                    oldStructure, structure, prototypeChain,
                    stubRoutine));
        } else {
            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
            
            emitPutReplaceStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
            
            list->addAccess(
                PutByIdAccess::replace(
                    *globalData, codeBlock->ownerExecutable(),
                    structure, stubRoutine));
        }
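
        // Point the inline structure-check jump at the stub we just built; if
        // the list has hit its size limit, also go fully generic.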
 
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubRoutine->code().code()));
        
        if (list->isFull())
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
        
        return true;
    }
    
    return false;
}
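
// Slow-path hook once list building is underway: if we cannot add another
// case, fall back to the generic operation for good.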
 
void dfgBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
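
// Route a call site's slow path through the appropriate virtual call or
// virtual construct thunk.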
 
static void linkSlowFor(RepatchBuffer& repatchBuffer, JSGlobalData* globalData, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
{
    if (kind == CodeForCall) {
        repatchBuffer.relink(callLinkInfo.callReturnLocation, globalData->getCTIStub(virtualCallThunkGenerator).code());
        return;
    }
    ASSERT(kind == CodeForConstruct);
    repatchBuffer.relink(callLinkInfo.callReturnLocation, globalData->getCTIStub(virtualConstructThunkGenerator).code());
}
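
// Link a call site to a known callee. For calls, the slow path gets the
// closure-call linking thunk so that a callee mismatch can re-specialize the
// site; constructs fall back to the virtual construct thunk.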
 
void dfgLinkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind)
{
    ASSERT(!callLinkInfo.stub);
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    JSGlobalData* globalData = callerCodeBlock->globalData();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.callee.set(exec->callerFrame()->globalData(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->globalData(), callerCodeBlock->ownerExecutable(), callee);
    repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
    
    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(&callLinkInfo);
    
    if (kind == CodeForCall) {
        repatchBuffer.relink(callLinkInfo.callReturnLocation, globalData->getCTIStub(linkClosureCallThunkGenerator).code());
        return;
    }
    
    ASSERT(kind == CodeForConstruct);
    linkSlowFor(repatchBuffer, globalData, callLinkInfo, CodeForConstruct);
}
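
// Re-route an existing call site's slow path back through the virtual thunk.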
 
void dfgLinkSlowFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    JSGlobalData* globalData = callerCodeBlock->globalData();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    linkSlowFor(repatchBuffer, globalData, callLinkInfo, kind);
}
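
// Build a closure call stub: instead of guarding on callee identity, guard on
// the callee's structure and executable, so distinct closures of the same
// function all stay on the fast path.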
 
void dfgLinkClosureCall(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub);
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    JSGlobalData* globalData = callerCodeBlock->globalData();
    
    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
    
    CCallHelpers stubJit(globalData, callerCodeBlock);
    
    CCallHelpers::JumpList slowPath;
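
    // Guards: take the slow path unless the callee is a cell with the expected
    // structure and executable.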
 
#if USE(JSVALUE64)
    slowPath.append(
        stubJit.branchTest64(
            CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
#else
    // We would have already checked that the callee is a cell.
#endif
    
    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSCell::structureOffset()),
            CCallHelpers::TrustedImmPtr(structure)));
    
    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            CCallHelpers::TrustedImmPtr(executable)));
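
    // All checks passed: install the callee's scope chain into the call frame
    // before making the call.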
 
    stubJit.loadPtr(
        CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
        GPRInfo::returnValueGPR);
    
#if USE(JSVALUE64)
    stubJit.store64(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain)));
#else
    stubJit.storePtr(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
    stubJit.store32(
        CCallHelpers::TrustedImm32(JSValue::CellTag),
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
#endif
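
    // Fast path: call the known target directly, then jump back to the
    // instruction after the original call.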
 
    JITCompiler::Call call = stubJit.nearCall();
    JITCompiler::Jump done = stubJit.jump();
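
    // Slow path: put the return address back and tail-jump to the virtual call
    // thunk with the callee in the argument registers.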
 
    slowPath.link(&stubJit);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::nonArgGPR2);
    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::nonArgGPR2);
    stubJit.move(calleeGPR, GPRInfo::nonArgGPR0);
#if USE(JSVALUE32_64)
    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::nonArgGPR1);
#endif
    JITCompiler::Jump slow = stubJit.jump();
    
    LinkBuffer patchBuffer(*globalData, &stubJit, callerCodeBlock);
    
    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(globalData->getCTIStub(virtualCallThunkGenerator).code()));
    
    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_DFG_CODE(
            patchBuffer,
            ("DFG closure call stub for %s, return point %p, target %p (%s)",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(), toCString(*calleeCodeBlock).data())),
        *globalData, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
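
    // Patch the hot path's patchable branch to jump straight into the stub,
    // and send the slow path through the virtual call thunk.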
 
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    linkSlowFor(repatchBuffer, globalData, callLinkInfo, CodeForCall);
    
    callLinkInfo.stub = stubRoutine.release();
    
    ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
}
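
// Restore a get_by_id inline cache to its unpatched state and point its slow
// call back at the optimizing operation, so caching can start over.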
 
void dfgResetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
    CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
                JSCell::structureOffset()),
            reinterpret_cast<void*>(-1));
    }
    repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(-1));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}
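
// Same idea for put_by_id: map whichever put_by_id operation is currently
// linked back to its 'optimize' variant (preserving the strict/direct
// flavor), then revert the patched structure check and inline store.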
 
void dfgResetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
    V_DFGOperation_EJCI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
    CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
                JSCell::structureOffset()),
            reinterpret_cast<void*>(-1));
    }
    repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(-1));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)