| Index: src/arm/lithium-codegen-arm.cc
|
| diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
|
| index d5b67a7a1ba65078c8eec836adc8967404dd6f05..cabaa716387f9dee2a69e6fc48e1588490dd9881 100644
|
| --- a/src/arm/lithium-codegen-arm.cc
|
| +++ b/src/arm/lithium-codegen-arm.cc
|
| @@ -608,22 +608,24 @@ void LCodeGen::AddToTranslation(Translation* translation,
|
|
|
| void LCodeGen::CallCode(Handle<Code> code,
|
| RelocInfo::Mode mode,
|
| - LInstruction* instr) {
|
| - CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
|
| + LInstruction* instr,
|
| + TargetAddressStorageMode storage_mode) {
|
| + CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, storage_mode);
|
| }
|
|
|
|
|
| void LCodeGen::CallCodeGeneric(Handle<Code> code,
|
| RelocInfo::Mode mode,
|
| LInstruction* instr,
|
| - SafepointMode safepoint_mode) {
|
| + SafepointMode safepoint_mode,
|
| + TargetAddressStorageMode storage_mode) {
|
| ASSERT(instr != NULL);
|
| // Block literal pool emission to ensure nop indicating no inlined smi code
|
| // is in the correct position.
|
| Assembler::BlockConstPoolScope block_const_pool(masm());
|
| LPointerMap* pointers = instr->pointer_map();
|
| RecordPosition(pointers->position());
|
| - __ Call(code, mode);
|
| + __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode);
|
| RecordSafepointWithLazyDeopt(instr, safepoint_mode);
|
|
|
| // Signal that we don't inline smi code before these stubs in the
|
| @@ -2471,6 +2473,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
|
| // We use Factory::the_hole_value() on purpose instead of loading from the
|
| // root array to force relocation to be able to later patch with
|
| // the cached map.
|
| + PredictableCodeSizeScope predictable(masm_);
|
| Handle<JSGlobalPropertyCell> cell =
|
| factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
|
| __ mov(ip, Operand(Handle<Object>(cell)));
|
| @@ -2532,6 +2535,9 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
|
| ASSERT(temp.is(r4));
|
| __ LoadHeapObject(InstanceofStub::right(), instr->function());
|
| static const int kAdditionalDelta = 5;
|
| + // Make sure that code size is predictable, since we use specific constant
|
| + // offsets in the code to find embedded values.
|
| + PredictableCodeSizeScope predictable(masm_);
|
| int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
|
| Label before_push_delta;
|
| __ bind(&before_push_delta);
|
| @@ -2795,7 +2801,7 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
|
| if (need_generic) {
|
| __ mov(r2, Operand(name));
|
| Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
|
| - CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
| + CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
|
| }
|
| __ bind(&done);
|
| }
|
| @@ -2808,7 +2814,7 @@ void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
|
| // Name is always in r2.
|
| __ mov(r2, Operand(instr->name()));
|
| Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
|
| - CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
| + CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
|
| }
|
|
|
|
|
| @@ -3119,7 +3125,7 @@ void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
|
| ASSERT(ToRegister(instr->key()).is(r0));
|
|
|
| Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
|
| - CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
| + CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
|
| }
|
|
|
|
|
| @@ -3814,7 +3820,7 @@ void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
|
| int arity = instr->arity();
|
| Handle<Code> ic =
|
| isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
|
| - CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
| + CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
|
| __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| }
|
|
|
| @@ -3827,7 +3833,7 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
|
| Handle<Code> ic =
|
| isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
|
| __ mov(r2, Operand(instr->name()));
|
| - CallCode(ic, mode, instr);
|
| + CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS);
|
| // Restore context register.
|
| __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| }
|
| @@ -3852,7 +3858,7 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
|
| Handle<Code> ic =
|
| isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
|
| __ mov(r2, Operand(instr->name()));
|
| - CallCode(ic, mode, instr);
|
| + CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS);
|
| __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| }
|
|
|
| @@ -3952,7 +3958,7 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
|
| Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
|
| ? isolate()->builtins()->StoreIC_Initialize_Strict()
|
| : isolate()->builtins()->StoreIC_Initialize();
|
| - CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
| + CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
|
| }
|
|
|
|
|
| @@ -4166,7 +4172,7 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
|
| Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
|
| ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
|
| : isolate()->builtins()->KeyedStoreIC_Initialize();
|
| - CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
| + CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
|
| }
|
|
|
|
|
| @@ -5541,6 +5547,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
|
| __ cmp(sp, Operand(ip));
|
| __ b(hs, &done);
|
| StackCheckStub stub;
|
| + PredictableCodeSizeScope predictable(masm_);
|
| CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
|
| EnsureSpaceForLazyDeopt();
|
| __ bind(&done);
|
|
|