Index: src/arm/macro-assembler-arm.cc |
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc |
index 4409e2643afa32b79a7c0d309f3a4d22f1a7d4a1..3901f48a68828c987ef608ce79b901f251d1f95e 100644 |
--- a/src/arm/macro-assembler-arm.cc |
+++ b/src/arm/macro-assembler-arm.cc |
@@ -108,7 +108,7 @@ void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode, |
int MacroAssembler::CallSize(Register target, Condition cond) { |
-#if USE_BLX |
+#ifdef USE_BLX |
return kInstrSize; |
#else |
return 2 * kInstrSize; |
@@ -121,7 +121,7 @@ void MacroAssembler::Call(Register target, Condition cond) { |
BlockConstPoolScope block_const_pool(this); |
Label start; |
bind(&start); |
-#if USE_BLX |
+#ifdef USE_BLX |
blx(target, cond); |
#else |
// set lr for return at current pc + 8 |
@@ -158,15 +158,29 @@ int MacroAssembler::CallSizeNotPredictableCodeSize( |
void MacroAssembler::Call(Address target, |
RelocInfo::Mode rmode, |
- Condition cond) { |
+ Condition cond, |
+ TargetAddressStorageMode mode) { |
// Block constant pool for the call instruction sequence. |
BlockConstPoolScope block_const_pool(this); |
Label start; |
bind(&start); |
-#if USE_BLX |
- // On ARMv5 and after the recommended call sequence is: |
- // ldr ip, [pc, #...] |
- // blx ip |
+ |
+ bool old_predictable_code_size = predictable_code_size(); |
+ if (mode == NEVER_INLINE_TARGET_ADDRESS) { |
+ set_predictable_code_size(true); |
+ } |
+ |
+#ifdef USE_BLX |
+ // Call sequence on V7 or later may be: |
+ // movw ip, #... @ call address low 16 |
+ // movt ip, #... @ call address high 16 |
+ // blx ip |
+ // @ return address |
+ // Or for pre-V7 or values that may be back-patched |
+ // to avoid ICache flushes: |
+ // ldr ip, [pc, #...] @ call address |
+ // blx ip |
+ // @ return address |
// Statement positions are expected to be recorded when the target |
// address is loaded. The mov method will automatically record |
@@ -177,15 +191,16 @@ void MacroAssembler::Call(Address target, |
mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode)); |
blx(ip, cond); |
- ASSERT(kCallTargetAddressOffset == 2 * kInstrSize); |
#else |
// Set lr for return at current pc + 8. |
mov(lr, Operand(pc), LeaveCC, cond); |
// Emit a ldr<cond> pc, [pc + offset of target in constant pool]. |
mov(pc, Operand(reinterpret_cast<int32_t>(target), rmode), LeaveCC, cond); |
- ASSERT(kCallTargetAddressOffset == kInstrSize); |
#endif |
ASSERT_EQ(CallSize(target, rmode, cond), SizeOfCodeGeneratedSince(&start)); |
+ if (mode == NEVER_INLINE_TARGET_ADDRESS) { |
+ set_predictable_code_size(old_predictable_code_size); |
+ } |
} |
@@ -200,7 +215,8 @@ int MacroAssembler::CallSize(Handle<Code> code, |
void MacroAssembler::Call(Handle<Code> code, |
RelocInfo::Mode rmode, |
TypeFeedbackId ast_id, |
- Condition cond) { |
+ Condition cond, |
+ TargetAddressStorageMode mode) { |
Label start; |
bind(&start); |
ASSERT(RelocInfo::IsCodeTarget(rmode)); |
@@ -209,9 +225,7 @@ void MacroAssembler::Call(Handle<Code> code, |
rmode = RelocInfo::CODE_TARGET_WITH_ID; |
} |
// 'code' is always generated ARM code, never THUMB code |
- Call(reinterpret_cast<Address>(code.location()), rmode, cond); |
- ASSERT_EQ(CallSize(code, rmode, ast_id, cond), |
- SizeOfCodeGeneratedSince(&start)); |
+ Call(reinterpret_cast<Address>(code.location()), rmode, cond, mode); |
} |
@@ -288,17 +302,15 @@ void MacroAssembler::Move(DoubleRegister dst, DoubleRegister src) { |
void MacroAssembler::And(Register dst, Register src1, const Operand& src2, |
Condition cond) { |
if (!src2.is_reg() && |
- !src2.must_use_constant_pool(this) && |
+ !src2.must_output_reloc_info(this) && |
src2.immediate() == 0) { |
mov(dst, Operand(0, RelocInfo::NONE), LeaveCC, cond); |
- |
} else if (!src2.is_single_instruction(this) && |
- !src2.must_use_constant_pool(this) && |
+ !src2.must_output_reloc_info(this) && |
CpuFeatures::IsSupported(ARMv7) && |
IsPowerOf2(src2.immediate() + 1)) { |
ubfx(dst, src1, 0, |
WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond); |
- |
} else { |
and_(dst, src1, src2, LeaveCC, cond); |
} |