/*
 * Copyright (C) 2008 Apple Inc.
 * Copyright (C) 2009 University of Szeged
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
28
#ifndef MacroAssemblerARM_h
29
#define MacroAssemblerARM_h
31
#include <wtf/Platform.h>
33
#if ENABLE(ASSEMBLER) && PLATFORM(ARM_TRADITIONAL)
35
#include "ARMAssembler.h"
36
#include "AbstractMacroAssembler.h"
40
// Macro assembler for traditional (non-Thumb) ARM. Translates the
// platform-independent MacroAssembler interface into ARM instructions
// via ARMAssembler. S0/S1 are assembler scratch registers throughout.
class MacroAssemblerARM : public AbstractMacroAssembler<ARMAssembler> {
public:
    // Integer condition codes, expressed directly as ARM condition-field values.
    enum Condition {
        Equal = ARMAssembler::EQ,
        NotEqual = ARMAssembler::NE,
        Above = ARMAssembler::HI,
        AboveOrEqual = ARMAssembler::CS,
        Below = ARMAssembler::CC,
        BelowOrEqual = ARMAssembler::LS,
        GreaterThan = ARMAssembler::GT,
        GreaterThanOrEqual = ARMAssembler::GE,
        LessThan = ARMAssembler::LT,
        LessThanOrEqual = ARMAssembler::LE,
        Overflow = ARMAssembler::VS,
        Signed = ARMAssembler::MI,
        Zero = ARMAssembler::EQ,
        NonZero = ARMAssembler::NE
    };

    // Floating point condition codes, tested after fcmpd/fmstat copies the
    // VFP status flags into the CPSR (see branchDouble below).
    enum DoubleCondition {
        DoubleEqual = ARMAssembler::EQ,
        DoubleGreaterThan = ARMAssembler::GT,
        DoubleGreaterThanOrEqual = ARMAssembler::GE,
        DoubleLessThan = ARMAssembler::LT,
        DoubleLessThanOrEqual = ARMAssembler::LE,
    };

    static const RegisterID stackPointerRegister = ARMRegisters::sp;

    // Pointers are 32-bit on this target, so pointer-sized scaled indexing is *4.
    static const Scale ScalePtr = TimesFour;
void add32(RegisterID src, RegisterID dest)
73
m_assembler.adds_r(dest, dest, src);
76
void add32(Imm32 imm, Address address)
78
load32(address, ARMRegisters::S1);
79
add32(imm, ARMRegisters::S1);
80
store32(ARMRegisters::S1, address);
83
void add32(Imm32 imm, RegisterID dest)
85
m_assembler.adds_r(dest, dest, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
88
void add32(Address src, RegisterID dest)
90
load32(src, ARMRegisters::S1);
91
add32(ARMRegisters::S1, dest);
94
void and32(RegisterID src, RegisterID dest)
96
m_assembler.ands_r(dest, dest, src);
99
void and32(Imm32 imm, RegisterID dest)
101
ARMWord w = m_assembler.getImm(imm.m_value, ARMRegisters::S0, true);
102
if (w & ARMAssembler::OP2_INV_IMM)
103
m_assembler.bics_r(dest, dest, w & ~ARMAssembler::OP2_INV_IMM);
105
m_assembler.ands_r(dest, dest, w);
108
void lshift32(Imm32 imm, RegisterID dest)
110
m_assembler.movs_r(dest, m_assembler.lsl(dest, imm.m_value & 0x1f));
113
void lshift32(RegisterID shift_amount, RegisterID dest)
115
m_assembler.movs_r(dest, m_assembler.lsl_r(dest, shift_amount));
118
void mul32(RegisterID src, RegisterID dest)
121
move(src, ARMRegisters::S0);
122
src = ARMRegisters::S0;
124
m_assembler.muls_r(dest, dest, src);
127
void mul32(Imm32 imm, RegisterID src, RegisterID dest)
129
move(imm, ARMRegisters::S0);
130
m_assembler.muls_r(dest, src, ARMRegisters::S0);
133
void not32(RegisterID dest)
135
m_assembler.mvns_r(dest, dest);
138
void or32(RegisterID src, RegisterID dest)
140
m_assembler.orrs_r(dest, dest, src);
143
void or32(Imm32 imm, RegisterID dest)
145
m_assembler.orrs_r(dest, dest, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
148
void rshift32(RegisterID shift_amount, RegisterID dest)
150
m_assembler.movs_r(dest, m_assembler.asr_r(dest, shift_amount));
153
void rshift32(Imm32 imm, RegisterID dest)
155
m_assembler.movs_r(dest, m_assembler.asr(dest, imm.m_value & 0x1f));
158
void sub32(RegisterID src, RegisterID dest)
160
m_assembler.subs_r(dest, dest, src);
163
void sub32(Imm32 imm, RegisterID dest)
165
m_assembler.subs_r(dest, dest, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
168
void sub32(Imm32 imm, Address address)
170
load32(address, ARMRegisters::S1);
171
sub32(imm, ARMRegisters::S1);
172
store32(ARMRegisters::S1, address);
175
void sub32(Address src, RegisterID dest)
177
load32(src, ARMRegisters::S1);
178
sub32(ARMRegisters::S1, dest);
181
void xor32(RegisterID src, RegisterID dest)
183
m_assembler.eors_r(dest, dest, src);
186
void xor32(Imm32 imm, RegisterID dest)
188
m_assembler.eors_r(dest, dest, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
191
void load32(ImplicitAddress address, RegisterID dest)
193
m_assembler.dataTransfer32(true, dest, address.base, address.offset);
196
void load32(BaseIndex address, RegisterID dest)
198
m_assembler.baseIndexTransfer32(true, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
201
#if defined(ARM_REQUIRE_NATURAL_ALIGNMENT) && ARM_REQUIRE_NATURAL_ALIGNMENT
202
void load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest);
204
void load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest)
206
load32(address, dest);
210
DataLabel32 load32WithAddressOffsetPatch(Address address, RegisterID dest)
212
DataLabel32 dataLabel(this);
213
m_assembler.ldr_un_imm(ARMRegisters::S0, 0);
214
m_assembler.dtr_ur(true, dest, address.base, ARMRegisters::S0);
218
Label loadPtrWithPatchToLEA(Address address, RegisterID dest)
221
load32(address, dest);
225
void load16(BaseIndex address, RegisterID dest)
227
m_assembler.add_r(ARMRegisters::S0, address.base, m_assembler.lsl(address.index, address.scale));
228
if (address.offset>=0)
229
m_assembler.ldrh_u(dest, ARMRegisters::S0, ARMAssembler::getOp2Byte(address.offset));
231
m_assembler.ldrh_d(dest, ARMRegisters::S0, ARMAssembler::getOp2Byte(-address.offset));
234
DataLabel32 store32WithAddressOffsetPatch(RegisterID src, Address address)
236
DataLabel32 dataLabel(this);
237
m_assembler.ldr_un_imm(ARMRegisters::S0, 0);
238
m_assembler.dtr_ur(false, src, address.base, ARMRegisters::S0);
242
void store32(RegisterID src, ImplicitAddress address)
244
m_assembler.dataTransfer32(false, src, address.base, address.offset);
247
void store32(RegisterID src, BaseIndex address)
249
m_assembler.baseIndexTransfer32(false, src, address.base, address.index, static_cast<int>(address.scale), address.offset);
252
void store32(Imm32 imm, ImplicitAddress address)
255
m_assembler.ldr_un_imm(ARMRegisters::S1, imm.m_value);
257
move(imm, ARMRegisters::S1);
258
store32(ARMRegisters::S1, address);
261
void store32(RegisterID src, void* address)
263
m_assembler.ldr_un_imm(ARMRegisters::S0, reinterpret_cast<ARMWord>(address));
264
m_assembler.dtr_u(false, src, ARMRegisters::S0, 0);
267
void store32(Imm32 imm, void* address)
269
m_assembler.ldr_un_imm(ARMRegisters::S0, reinterpret_cast<ARMWord>(address));
271
m_assembler.ldr_un_imm(ARMRegisters::S1, imm.m_value);
273
m_assembler.moveImm(imm.m_value, ARMRegisters::S1);
274
m_assembler.dtr_u(false, ARMRegisters::S1, ARMRegisters::S0, 0);
277
void pop(RegisterID dest)
279
m_assembler.pop_r(dest);
282
void push(RegisterID src)
284
m_assembler.push_r(src);
287
void push(Address address)
289
load32(address, ARMRegisters::S1);
290
push(ARMRegisters::S1);
295
move(imm, ARMRegisters::S0);
296
push(ARMRegisters::S0);
299
void move(Imm32 imm, RegisterID dest)
302
m_assembler.ldr_un_imm(dest, imm.m_value);
304
m_assembler.moveImm(imm.m_value, dest);
307
void move(RegisterID src, RegisterID dest)
309
m_assembler.mov_r(dest, src);
312
void move(ImmPtr imm, RegisterID dest)
314
move(Imm32(imm), dest);
317
void swap(RegisterID reg1, RegisterID reg2)
319
m_assembler.mov_r(ARMRegisters::S0, reg1);
320
m_assembler.mov_r(reg1, reg2);
321
m_assembler.mov_r(reg2, ARMRegisters::S0);
324
void signExtend32ToPtr(RegisterID src, RegisterID dest)
330
void zeroExtend32ToPtr(RegisterID src, RegisterID dest)
336
Jump branch32(Condition cond, RegisterID left, RegisterID right, int useConstantPool = 0)
338
m_assembler.cmp_r(left, right);
339
return Jump(m_assembler.jmp(ARMCondition(cond), useConstantPool));
342
Jump branch32(Condition cond, RegisterID left, Imm32 right, int useConstantPool = 0)
344
if (right.m_isPointer) {
345
m_assembler.ldr_un_imm(ARMRegisters::S0, right.m_value);
346
m_assembler.cmp_r(left, ARMRegisters::S0);
348
m_assembler.cmp_r(left, m_assembler.getImm(right.m_value, ARMRegisters::S0));
349
return Jump(m_assembler.jmp(ARMCondition(cond), useConstantPool));
352
Jump branch32(Condition cond, RegisterID left, Address right)
354
load32(right, ARMRegisters::S1);
355
return branch32(cond, left, ARMRegisters::S1);
358
Jump branch32(Condition cond, Address left, RegisterID right)
360
load32(left, ARMRegisters::S1);
361
return branch32(cond, ARMRegisters::S1, right);
364
Jump branch32(Condition cond, Address left, Imm32 right)
366
load32(left, ARMRegisters::S1);
367
return branch32(cond, ARMRegisters::S1, right);
370
Jump branch32(Condition cond, BaseIndex left, Imm32 right)
372
load32(left, ARMRegisters::S1);
373
return branch32(cond, ARMRegisters::S1, right);
376
Jump branch32WithUnalignedHalfWords(Condition cond, BaseIndex left, Imm32 right)
378
load32WithUnalignedHalfWords(left, ARMRegisters::S1);
379
return branch32(cond, ARMRegisters::S1, right);
382
Jump branch16(Condition cond, BaseIndex left, RegisterID right)
387
ASSERT_NOT_REACHED();
391
Jump branch16(Condition cond, BaseIndex left, Imm32 right)
393
load16(left, ARMRegisters::S0);
394
move(right, ARMRegisters::S1);
395
m_assembler.cmp_r(ARMRegisters::S0, ARMRegisters::S1);
396
return m_assembler.jmp(ARMCondition(cond));
399
Jump branchTest32(Condition cond, RegisterID reg, RegisterID mask)
401
ASSERT((cond == Zero) || (cond == NonZero));
402
m_assembler.tst_r(reg, mask);
403
return Jump(m_assembler.jmp(ARMCondition(cond)));
406
Jump branchTest32(Condition cond, RegisterID reg, Imm32 mask = Imm32(-1))
408
ASSERT((cond == Zero) || (cond == NonZero));
409
ARMWord w = m_assembler.getImm(mask.m_value, ARMRegisters::S0, true);
410
if (w & ARMAssembler::OP2_INV_IMM)
411
m_assembler.bics_r(ARMRegisters::S0, reg, w & ~ARMAssembler::OP2_INV_IMM);
413
m_assembler.tst_r(reg, w);
414
return Jump(m_assembler.jmp(ARMCondition(cond)));
417
Jump branchTest32(Condition cond, Address address, Imm32 mask = Imm32(-1))
419
load32(address, ARMRegisters::S1);
420
return branchTest32(cond, ARMRegisters::S1, mask);
423
Jump branchTest32(Condition cond, BaseIndex address, Imm32 mask = Imm32(-1))
425
load32(address, ARMRegisters::S1);
426
return branchTest32(cond, ARMRegisters::S1, mask);
431
return Jump(m_assembler.jmp());
434
void jump(RegisterID target)
436
move(target, ARMRegisters::pc);
439
void jump(Address address)
441
load32(address, ARMRegisters::pc);
444
Jump branchAdd32(Condition cond, RegisterID src, RegisterID dest)
446
ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
448
return Jump(m_assembler.jmp(ARMCondition(cond)));
451
Jump branchAdd32(Condition cond, Imm32 imm, RegisterID dest)
453
ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
455
return Jump(m_assembler.jmp(ARMCondition(cond)));
458
void mull32(RegisterID src1, RegisterID src2, RegisterID dest)
461
move(src1, ARMRegisters::S0);
462
src1 = ARMRegisters::S0;
464
m_assembler.mull_r(ARMRegisters::S1, dest, src2, src1);
465
m_assembler.cmp_r(ARMRegisters::S1, m_assembler.asr(dest, 31));
468
Jump branchMul32(Condition cond, RegisterID src, RegisterID dest)
470
ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
471
if (cond == Overflow) {
472
mull32(src, dest, dest);
477
return Jump(m_assembler.jmp(ARMCondition(cond)));
480
Jump branchMul32(Condition cond, Imm32 imm, RegisterID src, RegisterID dest)
482
ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
483
if (cond == Overflow) {
484
move(imm, ARMRegisters::S0);
485
mull32(ARMRegisters::S0, src, dest);
489
mul32(imm, src, dest);
490
return Jump(m_assembler.jmp(ARMCondition(cond)));
493
Jump branchSub32(Condition cond, RegisterID src, RegisterID dest)
495
ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
497
return Jump(m_assembler.jmp(ARMCondition(cond)));
500
Jump branchSub32(Condition cond, Imm32 imm, RegisterID dest)
502
ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
504
return Jump(m_assembler.jmp(ARMCondition(cond)));
515
return Call(m_assembler.jmp(ARMAssembler::AL, true), Call::LinkableNear);
518
Call call(RegisterID target)
521
move(ARMRegisters::pc, target);
523
return Call(jmpSrc, Call::None);
526
void call(Address address)
528
call32(address.base, address.offset);
533
pop(ARMRegisters::pc);
536
void set32(Condition cond, RegisterID left, RegisterID right, RegisterID dest)
538
m_assembler.cmp_r(left, right);
539
m_assembler.mov_r(dest, ARMAssembler::getOp2(0));
540
m_assembler.mov_r(dest, ARMAssembler::getOp2(1), ARMCondition(cond));
543
void set32(Condition cond, RegisterID left, Imm32 right, RegisterID dest)
545
m_assembler.cmp_r(left, m_assembler.getImm(right.m_value, ARMRegisters::S0));
546
m_assembler.mov_r(dest, ARMAssembler::getOp2(0));
547
m_assembler.mov_r(dest, ARMAssembler::getOp2(1), ARMCondition(cond));
550
void setTest32(Condition cond, Address address, Imm32 mask, RegisterID dest)
552
load32(address, ARMRegisters::S1);
553
if (mask.m_value == -1)
554
m_assembler.cmp_r(0, ARMRegisters::S1);
556
m_assembler.tst_r(ARMRegisters::S1, m_assembler.getImm(mask.m_value, ARMRegisters::S0));
557
m_assembler.mov_r(dest, ARMAssembler::getOp2(0));
558
m_assembler.mov_r(dest, ARMAssembler::getOp2(1), ARMCondition(cond));
561
void add32(Imm32 imm, RegisterID src, RegisterID dest)
563
m_assembler.add_r(dest, src, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
566
void add32(Imm32 imm, AbsoluteAddress address)
568
m_assembler.ldr_un_imm(ARMRegisters::S1, reinterpret_cast<ARMWord>(address.m_ptr));
569
m_assembler.dtr_u(true, ARMRegisters::S1, ARMRegisters::S1, 0);
570
add32(imm, ARMRegisters::S1);
571
m_assembler.ldr_un_imm(ARMRegisters::S0, reinterpret_cast<ARMWord>(address.m_ptr));
572
m_assembler.dtr_u(false, ARMRegisters::S1, ARMRegisters::S0, 0);
575
void sub32(Imm32 imm, AbsoluteAddress address)
577
m_assembler.ldr_un_imm(ARMRegisters::S1, reinterpret_cast<ARMWord>(address.m_ptr));
578
m_assembler.dtr_u(true, ARMRegisters::S1, ARMRegisters::S1, 0);
579
sub32(imm, ARMRegisters::S1);
580
m_assembler.ldr_un_imm(ARMRegisters::S0, reinterpret_cast<ARMWord>(address.m_ptr));
581
m_assembler.dtr_u(false, ARMRegisters::S1, ARMRegisters::S0, 0);
584
void load32(void* address, RegisterID dest)
586
m_assembler.ldr_un_imm(ARMRegisters::S0, reinterpret_cast<ARMWord>(address));
587
m_assembler.dtr_u(true, dest, ARMRegisters::S0, 0);
590
Jump branch32(Condition cond, AbsoluteAddress left, RegisterID right)
592
load32(left.m_ptr, ARMRegisters::S1);
593
return branch32(cond, ARMRegisters::S1, right);
596
Jump branch32(Condition cond, AbsoluteAddress left, Imm32 right)
598
load32(left.m_ptr, ARMRegisters::S1);
599
return branch32(cond, ARMRegisters::S1, right);
605
return Call(m_assembler.jmp(ARMAssembler::AL, true), Call::Linkable);
608
Call tailRecursiveCall()
610
return Call::fromTailJump(jump());
613
Call makeTailRecursiveCall(Jump oldJump)
615
return Call::fromTailJump(oldJump);
618
DataLabelPtr moveWithPatch(ImmPtr initialValue, RegisterID dest)
620
DataLabelPtr dataLabel(this);
621
m_assembler.ldr_un_imm(dest, reinterpret_cast<ARMWord>(initialValue.m_value));
625
Jump branchPtrWithPatch(Condition cond, RegisterID left, DataLabelPtr& dataLabel, ImmPtr initialRightValue = ImmPtr(0))
627
dataLabel = moveWithPatch(initialRightValue, ARMRegisters::S1);
628
Jump jump = branch32(cond, left, ARMRegisters::S1, true);
632
Jump branchPtrWithPatch(Condition cond, Address left, DataLabelPtr& dataLabel, ImmPtr initialRightValue = ImmPtr(0))
634
load32(left, ARMRegisters::S1);
635
dataLabel = moveWithPatch(initialRightValue, ARMRegisters::S0);
636
Jump jump = branch32(cond, ARMRegisters::S0, ARMRegisters::S1, true);
640
DataLabelPtr storePtrWithPatch(ImmPtr initialValue, ImplicitAddress address)
642
DataLabelPtr dataLabel = moveWithPatch(initialValue, ARMRegisters::S1);
643
store32(ARMRegisters::S1, address);
647
DataLabelPtr storePtrWithPatch(ImplicitAddress address)
649
return storePtrWithPatch(ImmPtr(0), address);
652
// Floating point operators
653
bool supportsFloatingPoint() const
655
return s_isVFPPresent;
658
bool supportsFloatingPointTruncate() const
663
void loadDouble(ImplicitAddress address, FPRegisterID dest)
665
m_assembler.doubleTransfer(true, dest, address.base, address.offset);
668
void storeDouble(FPRegisterID src, ImplicitAddress address)
670
m_assembler.doubleTransfer(false, src, address.base, address.offset);
673
void addDouble(FPRegisterID src, FPRegisterID dest)
675
m_assembler.faddd_r(dest, dest, src);
678
void addDouble(Address src, FPRegisterID dest)
680
loadDouble(src, ARMRegisters::SD0);
681
addDouble(ARMRegisters::SD0, dest);
684
void subDouble(FPRegisterID src, FPRegisterID dest)
686
m_assembler.fsubd_r(dest, dest, src);
689
void subDouble(Address src, FPRegisterID dest)
691
loadDouble(src, ARMRegisters::SD0);
692
subDouble(ARMRegisters::SD0, dest);
695
void mulDouble(FPRegisterID src, FPRegisterID dest)
697
m_assembler.fmuld_r(dest, dest, src);
700
void mulDouble(Address src, FPRegisterID dest)
702
loadDouble(src, ARMRegisters::SD0);
703
mulDouble(ARMRegisters::SD0, dest);
706
void convertInt32ToDouble(RegisterID src, FPRegisterID dest)
708
m_assembler.fmsr_r(dest, src);
709
m_assembler.fsitod_r(dest, dest);
712
Jump branchDouble(DoubleCondition cond, FPRegisterID left, FPRegisterID right)
714
m_assembler.fcmpd_r(left, right);
715
m_assembler.fmstat();
716
return Jump(m_assembler.jmp(static_cast<ARMAssembler::Condition>(cond)));
719
// Truncates 'src' to an integer, and places the resulting 'dest'.
720
// If the result is not representable as a 32 bit value, branch.
721
// May also branch for some values that are representable in 32 bits
722
// (specifically, in this case, INT_MIN).
723
Jump branchTruncateDoubleToInt32(FPRegisterID src, RegisterID dest)
727
ASSERT_NOT_REACHED();
732
ARMAssembler::Condition ARMCondition(Condition cond)
734
return static_cast<ARMAssembler::Condition>(cond);
737
void ensureSpace(int insnSpace, int constSpace)
739
m_assembler.ensureSpace(insnSpace, constSpace);
742
int sizeOfConstantPool()
744
return m_assembler.sizeOfConstantPool();
749
ensureSpace(3 * sizeof(ARMWord), sizeof(ARMWord));
751
// S0 might be used for parameter passing
752
m_assembler.add_r(ARMRegisters::S1, ARMRegisters::pc, ARMAssembler::OP2_IMM | 0x4);
753
m_assembler.push_r(ARMRegisters::S1);
756
void call32(RegisterID base, int32_t offset)
758
if (base == ARMRegisters::sp)
762
if (offset <= 0xfff) {
764
m_assembler.dtr_u(true, ARMRegisters::pc, base, offset);
765
} else if (offset <= 0xfffff) {
766
m_assembler.add_r(ARMRegisters::S0, base, ARMAssembler::OP2_IMM | (offset >> 12) | (10 << 8));
768
m_assembler.dtr_u(true, ARMRegisters::pc, ARMRegisters::S0, offset & 0xfff);
770
ARMWord reg = m_assembler.getImm(offset, ARMRegisters::S0);
772
m_assembler.dtr_ur(true, ARMRegisters::pc, base, reg);
776
if (offset <= 0xfff) {
778
m_assembler.dtr_d(true, ARMRegisters::pc, base, offset);
779
} else if (offset <= 0xfffff) {
780
m_assembler.sub_r(ARMRegisters::S0, base, ARMAssembler::OP2_IMM | (offset >> 12) | (10 << 8));
782
m_assembler.dtr_d(true, ARMRegisters::pc, ARMRegisters::S0, offset & 0xfff);
784
ARMWord reg = m_assembler.getImm(offset, ARMRegisters::S0);
786
m_assembler.dtr_dr(true, ARMRegisters::pc, base, reg);
792
friend class LinkBuffer;
793
friend class RepatchBuffer;
795
static void linkCall(void* code, Call call, FunctionPtr function)
797
ARMAssembler::linkCall(code, call.m_jmp, function.value());
800
static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
802
ARMAssembler::relinkCall(call.dataLocation(), destination.executableAddress());
805
static void repatchCall(CodeLocationCall call, FunctionPtr destination)
807
ARMAssembler::relinkCall(call.dataLocation(), destination.executableAddress());
810
static const bool s_isVFPPresent;
815
#endif // ENABLE(ASSEMBLER) && PLATFORM(ARM_TRADITIONAL)
817
#endif // MacroAssemblerARM_h