/*
 * Copyright (C) 2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
27
#include "WasmLLIntGenerator.h"
29
#if ENABLE(WEBASSEMBLY)
31
#include "BytecodeGeneratorBaseInlines.h"
32
#include "BytecodeStructs.h"
33
#include "InstructionStream.h"
34
#include "JSCJSValueInlines.h"
36
#include "WasmCallingConvention.h"
37
#include "WasmContextInlines.h"
38
#include "WasmFunctionCodeBlock.h"
39
#include "WasmFunctionParser.h"
40
#include "WasmGeneratorTraits.h"
41
#include "WasmThunks.h"
42
#include <wtf/CompletionHandler.h>
43
#include <wtf/RefPtr.h>
44
#include <wtf/StdUnorderedMap.h>
45
#include <wtf/Variant.h>
47
namespace JSC { namespace Wasm {
49
class LLIntGenerator : public BytecodeGeneratorBase<GeneratorTraits> {
51
using ExpressionType = VirtualRegister;
57
struct ControlTopLevel {
64
Ref<Label> m_alternate;
67
struct ControlType : public Variant<ControlLoop, ControlTopLevel, ControlBlock, ControlIf> {
68
using Base = Variant<ControlLoop, ControlTopLevel, ControlBlock, ControlIf>;
71
: Base(ControlBlock { })
75
static ControlType topLevel(BlockSignature signature, unsigned stackSize, RefPtr<Label>&& continuation)
77
return ControlType(signature, stackSize, WTFMove(continuation), ControlTopLevel { });
80
static ControlType loop(BlockSignature signature, unsigned stackSize, Ref<Label>&& body, RefPtr<Label>&& continuation)
82
return ControlType(signature, stackSize - signature->argumentCount(), WTFMove(continuation), ControlLoop { WTFMove(body) });
85
static ControlType block(BlockSignature signature, unsigned stackSize, RefPtr<Label>&& continuation)
87
return ControlType(signature, stackSize - signature->argumentCount(), WTFMove(continuation), ControlBlock { });
90
static ControlType if_(BlockSignature signature, unsigned stackSize, Ref<Label>&& alternate, RefPtr<Label>&& continuation)
92
return ControlType(signature, stackSize - signature->argumentCount(), WTFMove(continuation), ControlIf { WTFMove(alternate) });
95
static bool isIf(const ControlType& control) { return WTF::holds_alternative<ControlIf>(control); }
96
static bool isTopLevel(const ControlType& control) { return WTF::holds_alternative<ControlTopLevel>(control); }
98
unsigned stackSize() const { return m_stackSize; }
99
BlockSignature signature() const { return m_signature; }
101
RefPtr<Label> targetLabelForBranch() const
103
if (WTF::holds_alternative<ControlLoop>(*this))
104
return WTF::get<ControlLoop>(*this).m_body.ptr();
105
return m_continuation;
108
SignatureArgCount branchTargetArity() const
110
if (WTF::holds_alternative<ControlLoop>(*this))
111
return m_signature->argumentCount();
112
return m_signature->returnCount();
115
Type branchTargetType(unsigned i) const
117
ASSERT(i < branchTargetArity());
118
if (WTF::holds_alternative<ControlLoop>(*this))
119
return m_signature->argument(i);
120
return m_signature->returnType(i);
123
BlockSignature m_signature;
124
unsigned m_stackSize;
125
RefPtr<Label> m_continuation;
129
ControlType(BlockSignature signature, unsigned stackSize, RefPtr<Label>&& continuation, T&& t)
131
, m_signature(signature)
132
, m_stackSize(stackSize)
133
, m_continuation(WTFMove(continuation))
138
using ErrorType = String;
139
using PartialResult = Expected<void, ErrorType>;
140
using UnexpectedResult = Unexpected<ErrorType>;
142
using ControlEntry = FunctionParser<LLIntGenerator>::ControlEntry;
143
using ControlStack = FunctionParser<LLIntGenerator>::ControlStack;
144
using ResultList = FunctionParser<LLIntGenerator>::ResultList;
145
using Stack = FunctionParser<LLIntGenerator>::Stack;
146
using TypedExpression = FunctionParser<LLIntGenerator>::TypedExpression;
148
static ExpressionType emptyExpression() { return { }; };
150
template <typename ...Args>
151
NEVER_INLINE UnexpectedResult WARN_UNUSED_RETURN fail(Args... args) const
153
using namespace FailureHelper; // See ADL comment in WasmParser.h.
154
return UnexpectedResult(makeString("WebAssembly.Module failed compiling: "_s, makeString(args)...));
157
LLIntGenerator(const ModuleInformation&, unsigned functionIndex, const Signature&);
159
std::unique_ptr<FunctionCodeBlock> finalize();
161
template<typename ExpressionListA, typename ExpressionListB>
162
void unifyValuesWithBlock(const ExpressionListA& destinations, const ExpressionListB& values)
164
ASSERT(destinations.size() <= values.size());
165
auto offset = values.size() - destinations.size();
166
for (size_t i = 0; i < destinations.size(); ++i)
167
WasmMov::emit(this, destinations[i], values[offset + i]);
170
enum NoConsistencyCheckTag { NoConsistencyCheck };
171
ExpressionType push(NoConsistencyCheckTag)
173
m_maxStackSize = std::max(m_maxStackSize, ++m_stackSize);
174
return virtualRegisterForLocal(m_stackSize - 1);
177
ExpressionType push()
180
return push(NoConsistencyCheck);
183
void didPopValueFromStack() { --m_stackSize; }
185
PartialResult WARN_UNUSED_RETURN addArguments(const Signature&);
186
PartialResult WARN_UNUSED_RETURN addLocal(Type, uint32_t);
187
ExpressionType addConstant(Type, int64_t);
190
PartialResult WARN_UNUSED_RETURN addRefIsNull(ExpressionType value, ExpressionType& result);
191
PartialResult WARN_UNUSED_RETURN addRefFunc(uint32_t index, ExpressionType& result);
194
PartialResult WARN_UNUSED_RETURN addTableGet(unsigned, ExpressionType index, ExpressionType& result);
195
PartialResult WARN_UNUSED_RETURN addTableSet(unsigned, ExpressionType index, ExpressionType value);
196
PartialResult WARN_UNUSED_RETURN addTableSize(unsigned, ExpressionType& result);
197
PartialResult WARN_UNUSED_RETURN addTableGrow(unsigned, ExpressionType fill, ExpressionType delta, ExpressionType& result);
198
PartialResult WARN_UNUSED_RETURN addTableFill(unsigned, ExpressionType offset, ExpressionType fill, ExpressionType count);
201
PartialResult WARN_UNUSED_RETURN getLocal(uint32_t index, ExpressionType& result);
202
PartialResult WARN_UNUSED_RETURN setLocal(uint32_t index, ExpressionType value);
205
PartialResult WARN_UNUSED_RETURN getGlobal(uint32_t index, ExpressionType& result);
206
PartialResult WARN_UNUSED_RETURN setGlobal(uint32_t index, ExpressionType value);
209
PartialResult WARN_UNUSED_RETURN load(LoadOpType, ExpressionType pointer, ExpressionType& result, uint32_t offset);
210
PartialResult WARN_UNUSED_RETURN store(StoreOpType, ExpressionType pointer, ExpressionType value, uint32_t offset);
211
PartialResult WARN_UNUSED_RETURN addGrowMemory(ExpressionType delta, ExpressionType& result);
212
PartialResult WARN_UNUSED_RETURN addCurrentMemory(ExpressionType& result);
216
PartialResult WARN_UNUSED_RETURN addOp(ExpressionType arg, ExpressionType& result);
218
PartialResult WARN_UNUSED_RETURN addOp(ExpressionType left, ExpressionType right, ExpressionType& result);
219
PartialResult WARN_UNUSED_RETURN addSelect(ExpressionType condition, ExpressionType nonZero, ExpressionType zero, ExpressionType& result);
222
ControlType WARN_UNUSED_RETURN addTopLevel(BlockSignature);
223
PartialResult WARN_UNUSED_RETURN addBlock(BlockSignature, Stack& enclosingStack, ControlType& newBlock, Stack& newStack);
224
PartialResult WARN_UNUSED_RETURN addLoop(BlockSignature, Stack& enclosingStack, ControlType& block, Stack& newStack, uint32_t loopIndex);
225
PartialResult WARN_UNUSED_RETURN addIf(ExpressionType condition, BlockSignature, Stack& enclosingStack, ControlType& result, Stack& newStack);
226
PartialResult WARN_UNUSED_RETURN addElse(ControlType&, Stack&);
227
PartialResult WARN_UNUSED_RETURN addElseToUnreachable(ControlType&);
229
PartialResult WARN_UNUSED_RETURN addReturn(const ControlType&, Stack& returnValues);
230
PartialResult WARN_UNUSED_RETURN addBranch(ControlType&, ExpressionType condition, Stack& returnValues);
231
PartialResult WARN_UNUSED_RETURN addSwitch(ExpressionType condition, const Vector<ControlType*>& targets, ControlType& defaultTargets, Stack& expressionStack);
232
PartialResult WARN_UNUSED_RETURN endBlock(ControlEntry&, Stack& expressionStack);
233
PartialResult WARN_UNUSED_RETURN addEndToUnreachable(ControlEntry&, const Stack& expressionStack = { }, bool unreachable = true);
234
PartialResult WARN_UNUSED_RETURN endTopLevel(BlockSignature, const Stack&);
237
PartialResult WARN_UNUSED_RETURN addCall(uint32_t calleeIndex, const Signature&, Vector<ExpressionType>& args, ResultList& results);
238
PartialResult WARN_UNUSED_RETURN addCallIndirect(unsigned tableIndex, const Signature&, Vector<ExpressionType>& args, ResultList& results);
239
PartialResult WARN_UNUSED_RETURN addUnreachable();
241
void didFinishParsingLocals();
243
void setParser(FunctionParser<LLIntGenerator>* parser) { m_parser = parser; };
245
// We need this for autogenerated templates used by JS bytecodes.
246
void setUsesCheckpoints() const { UNREACHABLE_FOR_PLATFORM(); }
248
void dump(const ControlStack&, const Stack*) { }
251
friend GenericLabel<Wasm::GeneratorTraits>;
253
struct LLIntCallInformation {
254
unsigned stackOffset;
255
unsigned numberOfStackArguments;
256
ResultList arguments;
257
CompletionHandler<void(ResultList&)> commitResults;
260
LLIntCallInformation callInformationForCaller(const Signature&);
261
Vector<VirtualRegister, 2> callInformationForCallee(const Signature&);
263
VirtualRegister virtualRegisterForWasmLocal(uint32_t index)
265
if (index < m_codeBlock->m_numArguments)
266
return m_normalizedArguments[index];
268
const auto& callingConvention = wasmCallingConvention();
269
const uint32_t gprCount = callingConvention.gprArgs.size();
270
const uint32_t fprCount = callingConvention.fprArgs.size();
271
return virtualRegisterForLocal(index - m_codeBlock->m_numArguments + gprCount + fprCount + numberOfLLIntCalleeSaveRegisters);
274
ExpressionType jsNullConstant()
276
if (UNLIKELY(!m_jsNullConstant.isValid())) {
277
m_jsNullConstant = VirtualRegister(FirstConstantRegisterIndex + m_codeBlock->m_constants.size());
278
m_codeBlock->m_constants.append(JSValue::encode(jsNull()));
279
if (UNLIKELY(Options::dumpGeneratedWasmBytecodes()))
280
m_codeBlock->m_constantTypes.append(Type::Anyref);
282
return m_jsNullConstant;
285
ExpressionType zeroConstant()
287
if (UNLIKELY(!m_zeroConstant.isValid())) {
288
m_zeroConstant = VirtualRegister(FirstConstantRegisterIndex + m_codeBlock->m_constants.size());
289
m_codeBlock->m_constants.append(0);
290
if (UNLIKELY(Options::dumpGeneratedWasmBytecodes()))
291
m_codeBlock->m_constantTypes.append(Type::I32);
293
return m_zeroConstant;
296
void getDropKeepCount(const ControlType& target, unsigned& startOffset, unsigned& drop, unsigned& keep)
298
startOffset = target.stackSize() + 1;
299
keep = target.branchTargetArity();
300
drop = m_stackSize - target.stackSize() - target.branchTargetArity();
303
void dropKeep(Stack& values, const ControlType& target, bool dropValues)
305
unsigned startOffset;
309
getDropKeepCount(target, startOffset, drop, keep);
318
WasmDropKeep::emit(this, startOffset, drop, keep);
321
template<typename Functor>
322
void walkExpressionStack(Stack& expressionStack, unsigned stackSize, const Functor& functor)
324
for (unsigned i = expressionStack.size(); i > 0; --i) {
325
VirtualRegister slot = virtualRegisterForLocal(stackSize - i);
326
functor(expressionStack[expressionStack.size() - i], slot);
330
template<typename Functor>
331
void walkExpressionStack(Stack& expressionStack, const Functor& functor)
333
walkExpressionStack(expressionStack, m_stackSize, functor);
336
template<typename Functor>
337
void walkExpressionStack(ControlEntry& entry, const Functor& functor)
339
walkExpressionStack(entry.enclosedExpressionStack, entry.controlData.stackSize(), functor);
342
void checkConsistency()
345
// The rules for locals and constants in the stack are:
346
// 1) Locals have to be materialized whenever a control entry is pushed to the control stack (i.e. every time we splitStack)
347
// NOTE: This is a trade-off so that set_local does not have to walk up the control stack looking for delayed get_locals
348
// 2) If the control entry is a loop, we also need to materialize constants in the newStack, since those slots will be written
349
// to from loop back edges
350
// 3) Both locals and constants have to be materialized before branches, since multiple branches might share the same target,
351
// we can't make any assumptions about the stack state at that point, so we materialize the stack.
352
for (ControlEntry& controlEntry : m_parser->controlStack()) {
353
walkExpressionStack(controlEntry, [&](VirtualRegister expression, VirtualRegister slot) {
354
ASSERT(expression == slot || expression.isConstant());
357
walkExpressionStack(m_parser->expressionStack(), [&](VirtualRegister expression, VirtualRegister slot) {
358
ASSERT(expression == slot || expression.isConstant() || expression.isArgument() || expression.toLocal() < m_codeBlock->m_numVars);
360
#endif // ASSERT_ENABLED
363
void materializeConstantsAndLocals(Stack& expressionStack)
365
if (expressionStack.isEmpty())
369
walkExpressionStack(expressionStack, [&](TypedExpression& expression, VirtualRegister slot) {
370
ASSERT(expression.value() == slot || expression.value().isConstant() || expression.value().isArgument() || expression.value().toLocal() < m_codeBlock->m_numVars);
371
if (expression.value() == slot)
373
WasmMov::emit(this, slot, expression);
374
expression = TypedExpression { expression.type(), slot };
379
void splitStack(BlockSignature signature, Stack& enclosingStack, Stack& newStack)
381
JSC::Wasm::splitStack(signature, enclosingStack, newStack);
383
m_stackSize -= newStack.size();
385
walkExpressionStack(enclosingStack, [&](TypedExpression& expression, VirtualRegister slot) {
386
ASSERT(expression.value() == slot || expression.value().isConstant() || expression.value().isArgument() || expression.value().toLocal() < m_codeBlock->m_numVars);
387
if (expression.value() == slot || expression.value().isConstant())
389
WasmMov::emit(this, slot, expression);
390
expression = TypedExpression { expression.type(), slot };
393
m_stackSize += newStack.size();
397
InstructionStream::Offset offset;
401
struct ConstantMapHashTraits : WTF::GenericHashTraits<EncodedJSValue> {
402
static constexpr bool emptyValueIsZero = true;
403
static void constructDeletedValue(EncodedJSValue& slot) { slot = JSValue::encode(jsNull()); }
404
static bool isDeletedValue(EncodedJSValue value) { return value == JSValue::encode(jsNull()); }
407
FunctionParser<LLIntGenerator>* m_parser { nullptr };
408
const ModuleInformation& m_info;
409
const unsigned m_functionIndex { UINT_MAX };
410
Vector<VirtualRegister> m_normalizedArguments;
411
HashMap<Label*, Vector<SwitchEntry>> m_switches;
412
ExpressionType m_jsNullConstant;
413
ExpressionType m_zeroConstant;
414
ResultList m_unitializedLocals;
415
HashMap<EncodedJSValue, VirtualRegister, WTF::IntHash<EncodedJSValue>, ConstantMapHashTraits> m_constantMap;
416
Vector<VirtualRegister, 2> m_results;
417
unsigned m_stackSize { 0 };
418
unsigned m_maxStackSize { 0 };
421
Expected<std::unique_ptr<FunctionCodeBlock>, String> parseAndCompileBytecode(const uint8_t* functionStart, size_t functionLength, const Signature& signature, const ModuleInformation& info, uint32_t functionIndex)
423
LLIntGenerator llintGenerator(info, functionIndex, signature);
424
FunctionParser<LLIntGenerator> parser(llintGenerator, functionStart, functionLength, signature, info);
425
WASM_FAIL_IF_HELPER_FAILS(parser.parse());
427
return llintGenerator.finalize();
430
LLIntGenerator::LLIntGenerator(const ModuleInformation& info, unsigned functionIndex, const Signature&)
431
: BytecodeGeneratorBase(makeUnique<FunctionCodeBlock>(functionIndex), 0)
433
, m_functionIndex(functionIndex)
435
m_codeBlock->m_numVars = numberOfLLIntCalleeSaveRegisters;
436
m_stackSize = numberOfLLIntCalleeSaveRegisters;
437
m_maxStackSize = numberOfLLIntCalleeSaveRegisters;
439
WasmEnter::emit(this);
442
std::unique_ptr<FunctionCodeBlock> LLIntGenerator::finalize()
444
RELEASE_ASSERT(m_codeBlock);
445
m_codeBlock->m_numCalleeLocals = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), m_maxStackSize);
446
m_codeBlock->setInstructions(m_writer.finalize());
447
return WTFMove(m_codeBlock);
450
// Generated from wasm.json
451
#include "WasmLLIntGeneratorInlines.h"
453
auto LLIntGenerator::callInformationForCaller(const Signature& signature) -> LLIntCallInformation
455
// This function sets up the stack layout for calls. The desired stack layout is:
474
// We need to allocate at least space for all GPRs and FPRs.
475
// Return values use the same allocation layout.
477
const auto initialStackSize = m_stackSize;
479
const auto& callingConvention = wasmCallingConvention();
480
const uint32_t gprCount = callingConvention.gprArgs.size();
481
const uint32_t fprCount = callingConvention.fprArgs.size();
483
uint32_t stackCount = 0;
484
uint32_t gprIndex = 0;
485
uint32_t fprIndex = 0;
486
uint32_t stackIndex = 0;
488
auto allocateStackRegister = [&](Type type) {
494
if (gprIndex < gprCount)
496
else if (stackIndex++ >= stackCount)
501
if (fprIndex < fprCount)
503
else if (stackIndex++ >= stackCount)
508
RELEASE_ASSERT_NOT_REACHED();
513
for (uint32_t i = 0; i < signature.argumentCount(); i++)
514
allocateStackRegister(signature.argument(i));
519
for (uint32_t i = 0; i < signature.returnCount(); i++)
520
allocateStackRegister(signature.returnType(i));
522
// FIXME: we are allocating the extra space for the argument/return count in order to avoid interference, but we could do better
523
// NOTE: We increase arg count by 1 for the case of indirect calls
524
m_stackSize += std::max(signature.argumentCount() + 1, signature.returnCount()) + gprCount + fprCount + stackCount + CallFrame::headerSizeInRegisters;
525
if (m_stackSize % stackAlignmentRegisters())
527
if (m_maxStackSize < m_stackSize)
528
m_maxStackSize = m_stackSize;
531
ResultList arguments(signature.argumentCount());
532
ResultList temporaryResults(signature.returnCount());
534
const unsigned stackOffset = m_stackSize;
535
const unsigned base = stackOffset - CallFrame::headerSizeInRegisters;
537
const uint32_t gprLimit = base - stackCount - gprCount;
538
const uint32_t fprLimit = gprLimit - fprCount;
541
gprIndex = base - stackCount;
542
fprIndex = gprIndex - gprCount;
543
for (uint32_t i = 0; i < signature.argumentCount(); i++) {
544
switch (signature.argument(i)) {
549
if (gprIndex > gprLimit)
550
arguments[i] = virtualRegisterForLocal(--gprIndex);
552
arguments[i] = virtualRegisterForLocal(--stackIndex);
556
if (fprIndex > fprLimit)
557
arguments[i] = virtualRegisterForLocal(--fprIndex);
559
arguments[i] = virtualRegisterForLocal(--stackIndex);
563
RELEASE_ASSERT_NOT_REACHED();
568
gprIndex = base - stackCount;
569
fprIndex = gprIndex - gprCount;
570
for (uint32_t i = 0; i < signature.returnCount(); i++) {
571
switch (signature.returnType(i)) {
576
if (gprIndex > gprLimit)
577
temporaryResults[i] = virtualRegisterForLocal(--gprIndex);
579
temporaryResults[i] = virtualRegisterForLocal(--stackIndex);
583
if (fprIndex > fprLimit)
584
temporaryResults[i] = virtualRegisterForLocal(--fprIndex);
586
temporaryResults[i] = virtualRegisterForLocal(--stackIndex);
590
RELEASE_ASSERT_NOT_REACHED();
594
m_stackSize = initialStackSize;
596
auto commitResults = [this, temporaryResults = WTFMove(temporaryResults)](ResultList& results) {
598
for (auto temporaryResult : temporaryResults) {
599
ExpressionType result = push(NoConsistencyCheck);
600
WasmMov::emit(this, result, temporaryResult);
601
results.append(result);
605
return LLIntCallInformation { stackOffset, stackCount, WTFMove(arguments), WTFMove(commitResults) };
608
auto LLIntGenerator::callInformationForCallee(const Signature& signature) -> Vector<VirtualRegister, 2>
610
if (m_results.size())
613
m_results.reserveInitialCapacity(signature.returnCount());
615
const auto& callingConvention = wasmCallingConvention();
616
const uint32_t gprCount = callingConvention.gprArgs.size();
617
const uint32_t fprCount = callingConvention.fprArgs.size();
619
uint32_t gprIndex = 0;
620
uint32_t fprIndex = gprCount;
621
uint32_t stackIndex = 0;
622
const uint32_t maxGPRIndex = gprCount;
623
const uint32_t maxFPRIndex = maxGPRIndex + fprCount;
625
for (uint32_t i = 0; i < signature.returnCount(); i++) {
626
switch (signature.returnType(i)) {
631
if (gprIndex < maxGPRIndex)
632
m_results.append(virtualRegisterForLocal(numberOfLLIntCalleeSaveRegisters + gprIndex++));
634
m_results.append(virtualRegisterForArgumentIncludingThis(stackIndex++));
638
if (fprIndex < maxFPRIndex)
639
m_results.append(virtualRegisterForLocal(numberOfLLIntCalleeSaveRegisters + fprIndex++));
641
m_results.append(virtualRegisterForArgumentIncludingThis(stackIndex++));
645
RELEASE_ASSERT_NOT_REACHED();
652
auto LLIntGenerator::addArguments(const Signature& signature) -> PartialResult
656
m_codeBlock->m_numArguments = signature.argumentCount();
657
m_normalizedArguments.resize(m_codeBlock->m_numArguments);
659
const auto& callingConvention = wasmCallingConvention();
660
const uint32_t gprCount = callingConvention.gprArgs.size();
661
const uint32_t fprCount = callingConvention.fprArgs.size();
662
const uint32_t maxGPRIndex = gprCount;
663
const uint32_t maxFPRIndex = gprCount + fprCount;
664
uint32_t gprIndex = 0;
665
uint32_t fprIndex = maxGPRIndex;
666
uint32_t stackIndex = 0;
668
Vector<VirtualRegister> registerArguments(gprCount + fprCount);
669
for (uint32_t i = 0; i < gprCount + fprCount; i++)
670
registerArguments[i] = push(NoConsistencyCheck);
672
const auto addArgument = [&](uint32_t index, uint32_t& count, uint32_t max) {
674
m_normalizedArguments[index] = registerArguments[count++];
676
m_normalizedArguments[index] = virtualRegisterForArgumentIncludingThis(stackIndex++);
679
for (uint32_t i = 0; i < signature.argumentCount(); i++) {
680
switch (signature.argument(i)) {
685
addArgument(i, gprIndex, maxGPRIndex);
689
addArgument(i, fprIndex, maxFPRIndex);
693
RELEASE_ASSERT_NOT_REACHED();
697
m_codeBlock->m_numVars += gprCount + fprCount;
702
auto LLIntGenerator::addLocal(Type type, uint32_t count) -> PartialResult
706
m_codeBlock->m_numVars += count;
711
m_unitializedLocals.append(push(NoConsistencyCheck));
714
m_stackSize += count;
720
void LLIntGenerator::didFinishParsingLocals()
722
if (m_unitializedLocals.isEmpty())
725
auto null = jsNullConstant();
726
for (auto local : m_unitializedLocals)
727
WasmMov::emit(this, local, null);
728
m_unitializedLocals.clear();
731
auto LLIntGenerator::addConstant(Type type, int64_t value) -> ExpressionType
733
auto constant = [&] {
735
return zeroConstant();
737
if (value == JSValue::encode(jsNull()))
738
return jsNullConstant();
740
VirtualRegister source(FirstConstantRegisterIndex + m_codeBlock->m_constants.size());
741
auto result = m_constantMap.add(value, source);
742
if (!result.isNewEntry)
743
return result.iterator->value;
744
m_codeBlock->m_constants.append(value);
745
if (UNLIKELY(Options::dumpGeneratedWasmBytecodes()))
746
m_codeBlock->m_constantTypes.append(type);
749
// leave a hole if we need to materialize the constant
754
auto LLIntGenerator::getLocal(uint32_t index, ExpressionType& result) -> PartialResult
756
// leave a hole if we need to materialize the local
758
result = virtualRegisterForWasmLocal(index);
762
auto LLIntGenerator::setLocal(uint32_t index, ExpressionType value) -> PartialResult
764
VirtualRegister target = virtualRegisterForWasmLocal(index);
766
// If this local is currently on the stack we need to materialize it, otherwise it'll see the new value instead of the old one
767
walkExpressionStack(m_parser->expressionStack(), [&](TypedExpression& expression, VirtualRegister slot) {
768
if (expression.value() != target)
770
WasmMov::emit(this, slot, expression);
771
expression = TypedExpression { expression.type(), slot };
774
WasmMov::emit(this, target, value);
779
auto LLIntGenerator::getGlobal(uint32_t index, ExpressionType& result) -> PartialResult
781
const Wasm::GlobalInformation& global = m_info.globals[index];
783
switch (global.bindingMode) {
784
case Wasm::GlobalInformation::BindingMode::EmbeddedInInstance:
785
WasmGetGlobal::emit(this, result, index);
787
case Wasm::GlobalInformation::BindingMode::Portable:
788
WasmGetGlobalPortableBinding::emit(this, result, index);
794
auto LLIntGenerator::setGlobal(uint32_t index, ExpressionType value) -> PartialResult
796
const Wasm::GlobalInformation& global = m_info.globals[index];
797
Type type = global.type;
798
switch (global.bindingMode) {
799
case Wasm::GlobalInformation::BindingMode::EmbeddedInInstance:
800
if (isSubtype(type, Anyref))
801
WasmSetGlobalRef::emit(this, index, value);
803
WasmSetGlobal::emit(this, index, value);
805
case Wasm::GlobalInformation::BindingMode::Portable:
806
if (isSubtype(type, Anyref))
807
WasmSetGlobalRefPortableBinding::emit(this, index, value);
809
WasmSetGlobalPortableBinding::emit(this, index, value);
815
auto LLIntGenerator::addLoop(BlockSignature signature, Stack& enclosingStack, ControlType& block, Stack& newStack, uint32_t loopIndex) -> PartialResult
817
splitStack(signature, enclosingStack, newStack);
818
materializeConstantsAndLocals(newStack);
820
Ref<Label> body = newEmittedLabel();
821
Ref<Label> continuation = newLabel();
823
block = ControlType::loop(signature, m_stackSize, WTFMove(body), WTFMove(continuation));
825
Vector<VirtualRegister> osrEntryData;
826
for (uint32_t i = 0; i < m_codeBlock->m_numArguments; i++)
827
osrEntryData.append(m_normalizedArguments[i]);
829
const auto& callingConvention = wasmCallingConvention();
830
const uint32_t gprCount = callingConvention.gprArgs.size();
831
const uint32_t fprCount = callingConvention.fprArgs.size();
832
for (int32_t i = gprCount + fprCount + numberOfLLIntCalleeSaveRegisters; i < m_codeBlock->m_numVars; i++)
833
osrEntryData.append(virtualRegisterForLocal(i));
834
for (unsigned controlIndex = 0; controlIndex < m_parser->controlStack().size(); ++controlIndex) {
835
Stack& expressionStack = m_parser->controlStack()[controlIndex].enclosedExpressionStack;
836
for (TypedExpression expression : expressionStack)
837
osrEntryData.append(expression);
839
for (TypedExpression expression : enclosingStack)
840
osrEntryData.append(expression);
842
WasmLoopHint::emit(this);
844
m_codeBlock->tierUpCounter().addOSREntryDataForLoop(m_lastInstruction.offset(), { loopIndex, WTFMove(osrEntryData) });
849
auto LLIntGenerator::addTopLevel(BlockSignature signature) -> ControlType
851
return ControlType::topLevel(signature, m_stackSize, newLabel());
854
auto LLIntGenerator::addBlock(BlockSignature signature, Stack& enclosingStack, ControlType& newBlock, Stack& newStack) -> PartialResult
856
splitStack(signature, enclosingStack, newStack);
857
newBlock = ControlType::block(signature, m_stackSize, newLabel());
861
auto LLIntGenerator::addIf(ExpressionType condition, BlockSignature signature, Stack& enclosingStack, ControlType& result, Stack& newStack) -> PartialResult
863
Ref<Label> alternate = newLabel();
864
Ref<Label> continuation = newLabel();
866
splitStack(signature, enclosingStack, newStack);
868
WasmJfalse::emit(this, condition, alternate->bind(this));
870
result = ControlType::if_(signature, m_stackSize, WTFMove(alternate), WTFMove(continuation));
874
auto LLIntGenerator::addElse(ControlType& data, Stack& expressionStack) -> PartialResult
876
ASSERT(WTF::holds_alternative<ControlIf>(data));
877
materializeConstantsAndLocals(expressionStack);
878
WasmJmp::emit(this, data.m_continuation->bind(this));
879
return addElseToUnreachable(data);
882
auto LLIntGenerator::addElseToUnreachable(ControlType& data) -> PartialResult
884
m_stackSize = data.stackSize() + data.m_signature->argumentCount();
886
ControlIf& control = WTF::get<ControlIf>(data);
887
emitLabel(control.m_alternate.get());
888
data = ControlType::block(data.m_signature, m_stackSize, WTFMove(data.m_continuation));
892
auto LLIntGenerator::addReturn(const ControlType& data, Stack& returnValues) -> PartialResult
894
if (!data.m_signature->returnCount()) {
895
WasmRetVoid::emit(this);
899
// no need to drop keep here, since we have to move anyway
900
unifyValuesWithBlock(callInformationForCallee(*data.m_signature), returnValues);
906
auto LLIntGenerator::addBranch(ControlType& data, ExpressionType condition, Stack& returnValues) -> PartialResult
908
RefPtr<Label> target = data.targetLabelForBranch();
909
RefPtr<Label> skip = nullptr;
911
materializeConstantsAndLocals(returnValues);
913
if (condition.isValid()) {
915
WasmJfalse::emit(this, condition, skip->bind(this));
918
dropKeep(returnValues, data, !skip);
919
WasmJmp::emit(this, target->bind(this));
927
auto LLIntGenerator::addSwitch(ExpressionType condition, const Vector<ControlType*>& targets, ControlType& defaultTarget, Stack& expressionStack) -> PartialResult
929
materializeConstantsAndLocals(expressionStack);
931
unsigned tableIndex = m_codeBlock->numberOfJumpTables();
932
FunctionCodeBlock::JumpTable& jumpTable = m_codeBlock->addJumpTable(targets.size() + 1);
934
WasmSwitch::emit(this, condition, tableIndex);
937
InstructionStream::Offset offset = m_lastInstruction.offset();
939
auto addTarget = [&](ControlType& target) {
940
RefPtr<Label> targetLabel = target.targetLabelForBranch();
942
getDropKeepCount(target, jumpTable[index].startOffset, jumpTable[index].dropCount, jumpTable[index].keepCount);
944
if (targetLabel->isForward()) {
945
auto result = m_switches.add(targetLabel.get(), Vector<SwitchEntry>());
946
ASSERT(!jumpTable[index].target);
947
result.iterator->value.append(SwitchEntry { offset, &jumpTable[index++].target });
949
int jumpTarget = targetLabel->location() - offset;
951
jumpTable[index++].target = jumpTarget;
955
for (const auto& target : targets)
957
addTarget(defaultTarget);
962
auto LLIntGenerator::endBlock(ControlEntry& entry, Stack& expressionStack) -> PartialResult
964
// FIXME: We only need to materialize constants here if there exists a jump to this label
965
// https://bugs.webkit.org/show_bug.cgi?id=203657
966
materializeConstantsAndLocals(expressionStack);
967
return addEndToUnreachable(entry, expressionStack, false);
971
// Shared tail for ending any block (reachable or not): resets the virtual stack
// to the block's entry height, re-pushes slots for the block's results, and
// either rewinds a redundant trailing jump or emits the continuation label.
auto LLIntGenerator::addEndToUnreachable(ControlEntry& entry, const Stack& expressionStack, bool unreachable) -> PartialResult
{
    ControlType& data = entry.controlData;

    RELEASE_ASSERT(unreachable || m_stackSize == data.stackSize() + data.m_signature->returnCount());

    m_stackSize = data.stackSize();

    for (unsigned i = 0; i < data.m_signature->returnCount(); ++i) {
        // We don't want to do a consistency check here because we just reset the stack size
        // are pushing new values, while we already have the same values in the stack.
        // The only reason we do things this way is so that it also works for unreachable blocks,
        // since they might not have the right number of values in the expression stack.
        // Instead, we do a stricter consistency check below.
        auto tmp = push(NoConsistencyCheck);
        ASSERT(unreachable || tmp == expressionStack[i].value());
        // NOTE(review): reconstructed branch — an unreachable block's expression stack may be
        // short, so synthesize a typed entry from the pushed slot; otherwise reuse the existing
        // stack entry. Confirm against upstream WasmLLIntGenerator.cpp.
        if (unreachable)
            entry.enclosedExpressionStack.constructAndAppend(data.m_signature->returnType(i), tmp);
        else
            entry.enclosedExpressionStack.append(expressionStack[i]);
    }

    // If the only jump targeting the continuation is the jmp we just emitted, it would land on
    // the very next instruction; rewind it instead of emitting a label.
    if (m_lastOpcodeID == wasm_jmp && data.m_continuation->unresolvedJumps().size() == 1 && data.m_continuation->unresolvedJumps()[0] == static_cast<int>(m_lastInstruction.offset())) {
        m_lastOpcodeID = wasm_unreachable;
        m_writer.rewind(m_lastInstruction);
    } else
        emitLabel(*data.m_continuation);

    return { };
}
// Ends the function's implicit top-level block by emitting the return sequence:
// a bare ret for void signatures, otherwise results are moved into the
// locations the callee-side calling convention expects before returning.
auto LLIntGenerator::endTopLevel(BlockSignature signature, const Stack& expressionStack) -> PartialResult
{
    RELEASE_ASSERT(expressionStack.size() == signature->returnCount());

    if (!signature->returnCount()) {
        WasmRetVoid::emit(this);
        return { };
    }

    unifyValuesWithBlock(callInformationForCallee(*signature), expressionStack);
    WasmRet::emit(this);

    return { };
}
// Emits a direct call: shuffles arguments into the caller-side calling
// convention's locations, picks the fast-TLS or non-TLS call opcode, and binds
// the callee's results into fresh stack slots.
auto LLIntGenerator::addCall(uint32_t functionIndex, const Signature& signature, Vector<ExpressionType>& args, ResultList& results) -> PartialResult
{
    ASSERT(signature.argumentCount() == args.size());
    LLIntCallInformation info = callInformationForCaller(signature);
    unifyValuesWithBlock(info.arguments, args);
    if (Context::useFastTLS())
        WasmCall::emit(this, functionIndex, info.stackOffset, info.numberOfStackArguments);
    else
        WasmCallNoTls::emit(this, functionIndex, info.stackOffset, info.numberOfStackArguments);
    info.commitResults(results);

    return { };
}
// Emits an indirect call through a funcref table. The callee index is the last
// parsed operand (popped off args); the signature is registered with the code
// block so the interpreter can type-check the callee at runtime.
auto LLIntGenerator::addCallIndirect(unsigned tableIndex, const Signature& signature, Vector<ExpressionType>& args, ResultList& results) -> PartialResult
{
    ExpressionType calleeIndex = args.takeLast();

    ASSERT(signature.argumentCount() == args.size());
    ASSERT(m_info.tableCount() > tableIndex);
    ASSERT(m_info.tables[tableIndex].type() == TableElementType::Funcref);

    LLIntCallInformation info = callInformationForCaller(signature);
    unifyValuesWithBlock(info.arguments, args);
    if (Context::useFastTLS())
        WasmCallIndirect::emit(this, calleeIndex, m_codeBlock->addSignature(signature), info.stackOffset, info.numberOfStackArguments, tableIndex);
    else
        WasmCallIndirectNoTls::emit(this, calleeIndex, m_codeBlock->addSignature(signature), info.stackOffset, info.numberOfStackArguments, tableIndex);
    info.commitResults(results);

    return { };
}
// ref.is_null: pushes a fresh slot for the boolean result.
// NOTE(review): `result = push();` reconstructed — the out-param must be bound
// to a destination slot before the emit; confirm against upstream.
auto LLIntGenerator::addRefIsNull(ExpressionType value, ExpressionType& result) -> PartialResult
{
    result = push();
    WasmRefIsNull::emit(this, result, value);

    return { };
}
// ref.func: materializes a function reference for the given function index.
// NOTE(review): `result = push();` reconstructed — confirm against upstream.
auto LLIntGenerator::addRefFunc(uint32_t index, ExpressionType& result) -> PartialResult
{
    result = push();
    WasmRefFunc::emit(this, result, index);

    return { };
}
// table.get: loads the element at `index` from the given table.
// NOTE(review): `result = push();` reconstructed — confirm against upstream.
auto LLIntGenerator::addTableGet(unsigned tableIndex, ExpressionType index, ExpressionType& result) -> PartialResult
{
    result = push();
    WasmTableGet::emit(this, result, index, tableIndex);

    return { };
}
// table.set: stores `value` at `index` in the given table. No result value.
auto LLIntGenerator::addTableSet(unsigned tableIndex, ExpressionType index, ExpressionType value) -> PartialResult
{
    WasmTableSet::emit(this, index, value, tableIndex);

    return { };
}
// table.size: pushes the current element count of the given table.
// NOTE(review): `result = push();` reconstructed — confirm against upstream.
auto LLIntGenerator::addTableSize(unsigned tableIndex, ExpressionType& result) -> PartialResult
{
    result = push();
    WasmTableSize::emit(this, result, tableIndex);

    return { };
}
// table.grow: grows the table by `delta` entries initialized to `fill`;
// the result is the previous size (or -1 on failure) per the wasm spec.
// NOTE(review): `result = push();` reconstructed — confirm against upstream.
auto LLIntGenerator::addTableGrow(unsigned tableIndex, ExpressionType fill, ExpressionType delta, ExpressionType& result) -> PartialResult
{
    result = push();
    WasmTableGrow::emit(this, result, fill, delta, tableIndex);

    return { };
}
// table.fill: writes `fill` into `count` consecutive slots starting at `offset`.
auto LLIntGenerator::addTableFill(unsigned tableIndex, ExpressionType offset, ExpressionType fill, ExpressionType count) -> PartialResult
{
    WasmTableFill::emit(this, offset, fill, count, tableIndex);

    return { };
}
// unreachable: emits the trapping opcode; execution never proceeds past it.
auto LLIntGenerator::addUnreachable() -> PartialResult
{
    WasmUnreachable::emit(this);

    return { };
}
// memory.size: pushes the current memory size (in pages).
// NOTE(review): `result = push();` reconstructed — confirm against upstream.
auto LLIntGenerator::addCurrentMemory(ExpressionType& result) -> PartialResult
{
    result = push();
    WasmCurrentMemory::emit(this, result);

    return { };
}
// memory.grow: grows memory by `delta` pages; result is the previous size in
// pages (or -1 on failure) per the wasm spec.
// NOTE(review): `result = push();` reconstructed — confirm against upstream.
auto LLIntGenerator::addGrowMemory(ExpressionType delta, ExpressionType& result) -> PartialResult
{
    result = push();
    WasmGrowMemory::emit(this, result, delta);

    return { };
}
// select: result = condition ? nonZero : zero, evaluated without branching.
// NOTE(review): `result = push();` reconstructed — confirm against upstream.
auto LLIntGenerator::addSelect(ExpressionType condition, ExpressionType nonZero, ExpressionType zero, ExpressionType& result) -> PartialResult
{
    result = push();
    WasmSelect::emit(this, result, condition, nonZero, zero);

    return { };
}
// Emits the load opcode matching `op`. Sign-extending variants get
// width-specific opcodes; zero-extending variants of the same width share one
// opcode (e.g. I32Load8U/I64Load8U both lower to WasmLoad8U).
// NOTE(review): `result = push();`, the switch scaffolding, and the `break`s
// are reconstructed from the visible case/emit pairs — confirm against upstream.
auto LLIntGenerator::load(LoadOpType op, ExpressionType pointer, ExpressionType& result, uint32_t offset) -> PartialResult
{
    result = push();
    switch (op) {
    case LoadOpType::I32Load8S:
        WasmI32Load8S::emit(this, result, pointer, offset);
        break;

    case LoadOpType::I64Load8S:
        WasmI64Load8S::emit(this, result, pointer, offset);
        break;

    case LoadOpType::I32Load8U:
    case LoadOpType::I64Load8U:
        WasmLoad8U::emit(this, result, pointer, offset);
        break;

    case LoadOpType::I32Load16S:
        WasmI32Load16S::emit(this, result, pointer, offset);
        break;

    case LoadOpType::I64Load16S:
        WasmI64Load16S::emit(this, result, pointer, offset);
        break;

    case LoadOpType::I32Load16U:
    case LoadOpType::I64Load16U:
        WasmLoad16U::emit(this, result, pointer, offset);
        break;

    case LoadOpType::I32Load:
    case LoadOpType::F32Load:
    case LoadOpType::I64Load32U:
        WasmLoad32U::emit(this, result, pointer, offset);
        break;

    case LoadOpType::I64Load32S:
        WasmI64Load32S::emit(this, result, pointer, offset);
        break;

    case LoadOpType::I64Load:
    case LoadOpType::F64Load:
        WasmLoad64U::emit(this, result, pointer, offset);
        break;
    }

    return { };
}
// Emits the store opcode matching `op`. Stores of the same width share one
// opcode regardless of value type (e.g. I32Store/F32Store/I64Store32 all lower
// to WasmStore32). Stores produce no result value.
// NOTE(review): switch scaffolding and `break`s reconstructed from the visible
// case/emit pairs — confirm against upstream.
auto LLIntGenerator::store(StoreOpType op, ExpressionType pointer, ExpressionType value, uint32_t offset) -> PartialResult
{
    switch (op) {
    case StoreOpType::I64Store8:
    case StoreOpType::I32Store8:
        WasmStore8::emit(this, pointer, value, offset);
        break;

    case StoreOpType::I64Store16:
    case StoreOpType::I32Store16:
        WasmStore16::emit(this, pointer, value, offset);
        break;

    case StoreOpType::I64Store32:
    case StoreOpType::I32Store:
    case StoreOpType::F32Store:
        WasmStore32::emit(this, pointer, value, offset);
        break;

    case StoreOpType::I64Store:
    case StoreOpType::F64Store:
        WasmStore64::emit(this, pointer, value, offset);
        break;
    }

    return { };
}
// Resolves a forward label once its bytecode location is known: back-patches
// any switch jump-table entries registered against this label, then patches
// every unresolved jump instruction to target it (spilling to an out-of-line
// jump table when the delta does not fit in the instruction).
// NOTE(review): the `template<>` prefix, brace structure, CASE macro tail, and
// the CASE(...) invocations are reconstructed; an explicit specialization must
// be declared in a namespace enclosing GenericLabel (i.e. outside namespace
// Wasm) — confirm placement relative to the surrounding namespace braces.
template<>
void GenericLabel<Wasm::GeneratorTraits>::setLocation(BytecodeGeneratorBase<Wasm::GeneratorTraits>& generator, unsigned location)
{
    RELEASE_ASSERT(isForward());

    m_location = location;

    Wasm::LLIntGenerator* llintGenerator = static_cast<Wasm::LLIntGenerator*>(&generator);

    auto it = llintGenerator->m_switches.find(this);
    if (it != llintGenerator->m_switches.end()) {
        for (const auto& entry : it->value) {
            ASSERT(!*entry.jumpTarget);
            // Jump-table targets are stored as deltas relative to the switch instruction.
            *entry.jumpTarget = m_location - entry.offset;
        }
        llintGenerator->m_switches.remove(it);
    }

    for (auto offset : m_unresolvedJumps) {
        auto instruction = generator.m_writer.ref(offset);
        int target = m_location - offset;

#define CASE(__op) \
    case __op::opcodeID: \
        instruction->cast<__op, WasmOpcodeTraits>()->setTargetLabel(BoundLabel(target), [&]() { \
            generator.m_codeBlock->addOutOfLineJumpTarget(instruction.offset(), target); \
            return BoundLabel(); \
        }); \
        break;

        switch (instruction->opcodeID<WasmOpcodeTraits>()) {
        CASE(WasmJmp)
        CASE(WasmJtrue)
        CASE(WasmJfalse)
        default:
            RELEASE_ASSERT_NOT_REACHED();
        }

#undef CASE
    }
}
} // namespace JSC::Wasm
1259
#endif // ENABLE(WEBASSEMBLY)