| Index: src/x64/lithium-codegen-x64.cc
|
| diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
|
| index 85895b39039d2eb460e4f9bd828c781c1cbd9830..72cfdb01990e4f12d120f969c28a60961681f90c 100644
|
| --- a/src/x64/lithium-codegen-x64.cc
|
| +++ b/src/x64/lithium-codegen-x64.cc
|
| @@ -266,6 +266,7 @@ bool LCodeGen::GenerateJumpTable() {
|
| __ jmp(&needs_frame);
|
| } else {
|
| __ bind(&needs_frame);
|
| + __ movq(rsi, MemOperand(rbp, StandardFrameConstants::kContextOffset));
|
| __ push(rbp);
|
| __ movq(rbp, rsp);
|
| __ push(rsi);
|
| @@ -589,10 +590,30 @@ void LCodeGen::CallRuntime(const Runtime::Function* function,
|
| }
|
|
|
|
|
| +void LCodeGen::LoadContextFromDeferred(LOperand* context) {
|
| + if (context->IsRegister()) {
|
| + if (!ToRegister(context).is(rsi)) {
|
| + __ movq(rsi, ToRegister(context));
|
| + }
|
| + } else if (context->IsStackSlot()) {
|
| + __ movq(rsi, ToOperand(context));
|
| + } else if (context->IsConstantOperand()) {
|
| + HConstant* constant =
|
| + chunk_->LookupConstant(LConstantOperand::cast(context));
|
| + __ Move(rsi, Handle<Object>::cast(constant->handle(isolate())));
|
| + } else {
|
| + UNREACHABLE();
|
| + }
|
| +}
|
| +
|
| +
|
| +
|
| void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
|
| int argc,
|
| - LInstruction* instr) {
|
| - __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| + LInstruction* instr,
|
| + LOperand* context) {
|
| + LoadContextFromDeferred(context);
|
| +
|
| __ CallRuntimeSaveDoubles(id);
|
| RecordSafepointWithRegisters(
|
| instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
|
| @@ -816,10 +837,6 @@ void LCodeGen::RecordSafepoint(
|
| safepoint.DefinePointerRegister(ToRegister(pointer), zone());
|
| }
|
| }
|
| - if (kind & Safepoint::kWithRegisters) {
|
| - // Register rsi always contains a pointer to the context.
|
| - safepoint.DefinePointerRegister(rsi, zone());
|
| - }
|
| }
|
|
|
|
|
| @@ -895,6 +912,7 @@ void LCodeGen::DoParameter(LParameter* instr) {
|
|
|
|
|
| void LCodeGen::DoCallStub(LCallStub* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->result()).is(rax));
|
| switch (instr->hydrogen()->major_key()) {
|
| case CodeStub::RegExpConstructResult: {
|
| @@ -1626,7 +1644,6 @@ void LCodeGen::DoDateField(LDateField* instr) {
|
| __ movq(arg_reg_1, object);
|
| __ movq(arg_reg_2, index, RelocInfo::NONE64);
|
| __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
|
| - __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| __ bind(&done);
|
| }
|
| }
|
| @@ -1664,6 +1681,7 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
|
|
|
| void LCodeGen::DoThrow(LThrow* instr) {
|
| __ push(ToRegister(instr->value()));
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| CallRuntime(Runtime::kThrow, 1, instr);
|
|
|
| if (FLAG_debug_code) {
|
| @@ -1818,7 +1836,6 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
|
| ASSERT(right.is(xmm1));
|
| __ CallCFunction(
|
| ExternalReference::double_fp_operation(Token::MOD, isolate()), 2);
|
| - __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| __ movaps(result, xmm_scratch);
|
| break;
|
| }
|
| @@ -1830,6 +1847,7 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
|
|
|
|
|
| void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->left()).is(rdx));
|
| ASSERT(ToRegister(instr->right()).is(rax));
|
| ASSERT(ToRegister(instr->result()).is(rax));
|
| @@ -2244,6 +2262,7 @@ void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
|
|
|
|
|
| void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| Token::Value op = instr->op();
|
|
|
| Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
|
| @@ -2400,6 +2419,7 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
|
|
|
|
|
| void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| InstanceofStub stub(InstanceofStub::kNoFlags);
|
| __ push(ToRegister(instr->left()));
|
| __ push(ToRegister(instr->right()));
|
| @@ -2431,7 +2451,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
|
| Label map_check_;
|
| };
|
|
|
| -
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| DeferredInstanceOfKnownGlobal* deferred;
|
| deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
|
|
|
| @@ -2526,6 +2546,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
|
|
|
|
|
| void LCodeGen::DoCmpT(LCmpT* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| Token::Value op = instr->op();
|
|
|
| Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
|
| @@ -2545,9 +2566,12 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
|
|
|
| void LCodeGen::DoReturn(LReturn* instr) {
|
| if (FLAG_trace && info()->IsOptimizing()) {
|
| - // Preserve the return value on the stack and rely on the runtime
|
| - // call to return the value in the same register.
|
| + // Preserve the return value on the stack and rely on the runtime call
|
| + // to return the value in the same register. Since we're leaving the code
|
| + // managed by the register allocator and tearing down the frame, it's
|
| + // safe to write to the context register.
|
| __ push(rax);
|
| + __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| __ CallRuntime(Runtime::kTraceExit, 1);
|
| }
|
| if (info()->saves_caller_doubles()) {
|
| @@ -2598,6 +2622,7 @@ void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
|
|
|
|
|
| void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->global_object()).is(rax));
|
| ASSERT(ToRegister(instr->result()).is(rax));
|
|
|
| @@ -2636,6 +2661,7 @@ void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
|
|
|
|
|
| void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->global_object()).is(rdx));
|
| ASSERT(ToRegister(instr->value()).is(rax));
|
|
|
| @@ -2735,6 +2761,7 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
|
|
|
|
|
| void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->object()).is(rax));
|
| ASSERT(ToRegister(instr->result()).is(rax));
|
|
|
| @@ -3014,6 +3041,7 @@ Operand LCodeGen::BuildFastArrayOperand(
|
|
|
|
|
| void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->object()).is(rdx));
|
| ASSERT(ToRegister(instr->key()).is(rax));
|
|
|
| @@ -3116,7 +3144,8 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
|
| // TODO(kmillikin): We have a hydrogen value for the global object. See
|
| // if it's better to use it than to explicitly fetch it from the context
|
| // here.
|
| - __ movq(receiver, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
|
| + __ movq(receiver, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| + __ movq(receiver, ContextOperand(receiver, Context::GLOBAL_OBJECT_INDEX));
|
| __ movq(receiver,
|
| FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
|
| __ bind(&receiver_ok);
|
| @@ -3163,7 +3192,6 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
|
| ParameterCount actual(rax);
|
| __ InvokeFunction(function, actual, CALL_FUNCTION,
|
| safepoint_generator, CALL_AS_METHOD);
|
| - __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| }
|
|
|
|
|
| @@ -3186,7 +3214,12 @@ void LCodeGen::DoThisFunction(LThisFunction* instr) {
|
|
|
| void LCodeGen::DoContext(LContext* instr) {
|
| Register result = ToRegister(instr->result());
|
| - __ movq(result, rsi);
|
| + if (info()->IsOptimizing()) {
|
| + __ movq(result, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| + } else {
|
| + // If there is no frame, the context must be in rsi.
|
| + ASSERT(result.is(rsi));
|
| + }
|
| }
|
|
|
|
|
| @@ -3199,6 +3232,7 @@ void LCodeGen::DoOuterContext(LOuterContext* instr) {
|
|
|
|
|
| void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| __ push(rsi); // The context is the first argument.
|
| __ Push(instr->hydrogen()->pairs());
|
| __ Push(Smi::FromInt(instr->hydrogen()->flags()));
|
| @@ -3207,8 +3241,10 @@ void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
|
|
|
|
|
| void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
|
| + Register context = ToRegister(instr->context());
|
| Register result = ToRegister(instr->result());
|
| - __ movq(result, GlobalObjectOperand());
|
| + __ movq(result,
|
| + Operand(context, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
|
| }
|
|
|
|
|
| @@ -3265,9 +3301,6 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
|
| __ InvokeFunction(
|
| function, expected, count, CALL_FUNCTION, generator, call_kind);
|
| }
|
| -
|
| - // Restore context.
|
| - __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| }
|
|
|
|
|
| @@ -3308,7 +3341,8 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
|
|
|
| // Slow case: Call the runtime system to do the number allocation.
|
| __ bind(&slow);
|
| - CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
|
| + CallRuntimeFromDeferred(
|
| + Runtime::kAllocateHeapNumber, 0, instr, instr->context());
|
| // Set the pointer to the new heap number in tmp.
|
| if (!tmp.is(rax)) __ movq(tmp, rax);
|
| // Restore input_reg after call to runtime.
|
| @@ -3687,6 +3721,9 @@ void LCodeGen::DoMathLog(LMathLog* instr) {
|
|
|
| void LCodeGen::DoMathTan(LMathTan* instr) {
|
| ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
|
| + // Set the context register to a GC-safe fake value. Clobbering it is
|
| + // OK because this instruction is marked as a call.
|
| + __ Set(rsi, 0);
|
| TranscendentalCacheStub stub(TranscendentalCache::TAN,
|
| TranscendentalCacheStub::UNTAGGED);
|
| CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
|
| @@ -3695,6 +3732,9 @@ void LCodeGen::DoMathTan(LMathTan* instr) {
|
|
|
| void LCodeGen::DoMathCos(LMathCos* instr) {
|
| ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
|
| + // Set the context register to a GC-safe fake value. Clobbering it is
|
| + // OK because this instruction is marked as a call.
|
| + __ Set(rsi, 0);
|
| TranscendentalCacheStub stub(TranscendentalCache::COS,
|
| TranscendentalCacheStub::UNTAGGED);
|
| CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
|
| @@ -3703,6 +3743,9 @@ void LCodeGen::DoMathCos(LMathCos* instr) {
|
|
|
| void LCodeGen::DoMathSin(LMathSin* instr) {
|
| ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
|
| + // Set the context register to a GC-safe fake value. Clobbering it is
|
| + // OK because this instruction is marked as a call.
|
| + __ Set(rsi, 0);
|
| TranscendentalCacheStub stub(TranscendentalCache::SIN,
|
| TranscendentalCacheStub::UNTAGGED);
|
| CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
|
| @@ -3710,6 +3753,7 @@ void LCodeGen::DoMathSin(LMathSin* instr) {
|
|
|
|
|
| void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->function()).is(rdi));
|
| ASSERT(instr->HasPointerMap());
|
|
|
| @@ -3719,7 +3763,6 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
|
| SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
|
| ParameterCount count(instr->arity());
|
| __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
|
| - __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| } else {
|
| CallKnownFunction(known_function,
|
| instr->hydrogen()->formal_parameter_count(),
|
| @@ -3732,6 +3775,7 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
|
|
|
|
|
| void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->key()).is(rcx));
|
| ASSERT(ToRegister(instr->result()).is(rax));
|
|
|
| @@ -3739,11 +3783,11 @@ void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
|
| Handle<Code> ic =
|
| isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
|
| CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
| - __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| }
|
|
|
|
|
| void LCodeGen::DoCallNamed(LCallNamed* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->result()).is(rax));
|
|
|
| int arity = instr->arity();
|
| @@ -3752,22 +3796,22 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
|
| isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
|
| __ Move(rcx, instr->name());
|
| CallCode(ic, mode, instr);
|
| - __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| }
|
|
|
|
|
| void LCodeGen::DoCallFunction(LCallFunction* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->function()).is(rdi));
|
| ASSERT(ToRegister(instr->result()).is(rax));
|
|
|
| int arity = instr->arity();
|
| CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
|
| CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
|
| - __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| }
|
|
|
|
|
| void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->result()).is(rax));
|
| int arity = instr->arity();
|
| RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
|
| @@ -3775,7 +3819,6 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
|
| isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
|
| __ Move(rcx, instr->name());
|
| CallCode(ic, mode, instr);
|
| - __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| }
|
|
|
|
|
| @@ -3791,6 +3834,7 @@ void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
|
|
|
|
|
| void LCodeGen::DoCallNew(LCallNew* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->constructor()).is(rdi));
|
| ASSERT(ToRegister(instr->result()).is(rax));
|
|
|
| @@ -3804,6 +3848,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
|
|
|
|
|
| void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->constructor()).is(rdi));
|
| ASSERT(ToRegister(instr->result()).is(rax));
|
|
|
| @@ -3848,6 +3893,7 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
|
|
|
|
|
| void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles());
|
| }
|
|
|
| @@ -3980,6 +4026,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
|
|
|
|
|
| void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->object()).is(rdx));
|
| ASSERT(ToRegister(instr->value()).is(rax));
|
|
|
| @@ -4217,6 +4264,7 @@ void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
|
|
|
|
|
| void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| ASSERT(ToRegister(instr->object()).is(rdx));
|
| ASSERT(ToRegister(instr->key()).is(rcx));
|
| ASSERT(ToRegister(instr->value()).is(rax));
|
| @@ -4252,6 +4300,7 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
|
| if (!object_reg.is(rax)) {
|
| __ movq(rax, object_reg);
|
| }
|
| + LoadContextFromDeferred(instr->context());
|
| __ Move(rbx, to_map);
|
| TransitionElementsKindStub stub(from_kind, to_kind);
|
| __ CallStub(&stub);
|
| @@ -4273,6 +4322,7 @@ void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
|
|
|
|
|
| void LCodeGen::DoStringAdd(LStringAdd* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| EmitPushTaggedOperand(instr->left());
|
| EmitPushTaggedOperand(instr->right());
|
| StringAddStub stub(instr->hydrogen()->flags());
|
| @@ -4327,7 +4377,8 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
|
| __ Integer32ToSmi(index, index);
|
| __ push(index);
|
| }
|
| - CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
|
| + CallRuntimeFromDeferred(
|
| + Runtime::kStringCharCodeAt, 2, instr, instr->context());
|
| __ AssertSmi(rax);
|
| __ SmiToInteger32(rax, rax);
|
| __ StoreToSafepointRegisterSlot(result, rax);
|
| @@ -4380,7 +4431,7 @@ void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
|
| PushSafepointRegistersScope scope(this);
|
| __ Integer32ToSmi(char_code, char_code);
|
| __ push(char_code);
|
| - CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
|
| + CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
|
| __ StoreToSafepointRegisterSlot(result, rax);
|
| }
|
|
|
| @@ -4484,7 +4535,16 @@ void LCodeGen::DoDeferredNumberTagU(LNumberTagU* instr) {
|
| // integer value.
|
| __ StoreToSafepointRegisterSlot(reg, Immediate(0));
|
|
|
| - CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
|
| + // NumberTagU uses the context from the frame, rather than
|
| + // the environment's HContext or HInlinedContext value.
|
| + // It only calls Runtime::kAllocateHeapNumber.
|
| + // The corresponding HChange instructions are added in a phase that does
|
| + // not have easy access to the local context.
|
| + __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| + __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
|
| + RecordSafepointWithRegisters(
|
| + instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
|
| +
|
| if (!reg.is(rax)) __ movq(reg, rax);
|
|
|
| // Done. Put the value in temp_xmm into the value of the allocated heap
|
| @@ -4532,8 +4592,15 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
|
|
|
| {
|
| PushSafepointRegistersScope scope(this);
|
| - CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
|
| - // Ensure that value in rax survives popping registers.
|
| + // NumberTagD uses the context from the frame, rather than
|
| + // the environment's HContext or HInlinedContext value.
|
| + // It only calls Runtime::kAllocateHeapNumber.
|
| + // The corresponding HChange instructions are added in a phase that does
|
| + // not have easy access to the local context.
|
| + __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
| + __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
|
| + RecordSafepointWithRegisters(
|
| + instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
|
| __ movq(kScratchRegister, rax);
|
| }
|
| __ movq(reg, kScratchRegister);
|
| @@ -4839,7 +4906,11 @@ void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
|
| {
|
| PushSafepointRegistersScope scope(this);
|
| __ push(object);
|
| - CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr);
|
| + __ Set(rsi, 0);
|
| + __ CallRuntimeSaveDoubles(Runtime::kMigrateInstance);
|
| + RecordSafepointWithRegisters(
|
| + instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
|
| +
|
| __ testq(rax, Immediate(kSmiTagMask));
|
| }
|
| DeoptimizeIf(zero, instr->environment());
|
| @@ -5031,12 +5102,15 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
|
| if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
|
| ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
|
| ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
|
| - CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr);
|
| + CallRuntimeFromDeferred(
|
| + Runtime::kAllocateInOldPointerSpace, 1, instr, instr->context());
|
| } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
|
| ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
|
| - CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr);
|
| + CallRuntimeFromDeferred(
|
| + Runtime::kAllocateInOldDataSpace, 1, instr, instr->context());
|
| } else {
|
| - CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
|
| + CallRuntimeFromDeferred(
|
| + Runtime::kAllocateInNewSpace, 1, instr, instr->context());
|
| }
|
| __ StoreToSafepointRegisterSlot(result, rax);
|
| }
|
| @@ -5050,6 +5124,7 @@ void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
|
|
|
|
|
| void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| Label materialized;
|
| // Registers will be used as follows:
|
| // rcx = literals array.
|
| @@ -5100,6 +5175,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
|
|
|
|
|
| void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| // Use the fast case closure allocation code that allocates in new
|
| // space for nested functions that don't need literals cloning.
|
| bool pretenure = instr->hydrogen()->pretenure();
|
| @@ -5119,6 +5195,7 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
|
|
|
|
|
| void LCodeGen::DoTypeof(LTypeof* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| LOperand* input = instr->value();
|
| EmitPushTaggedOperand(input);
|
| CallRuntime(Runtime::kTypeof, 1, instr);
|
| @@ -5326,6 +5403,9 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
|
| Label done;
|
| __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
|
| __ j(above_equal, &done, Label::kNear);
|
| +
|
| + ASSERT(instr->context()->IsRegister());
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| CallCode(isolate()->builtins()->StackCheck(),
|
| RelocInfo::CODE_TARGET,
|
| instr);
|
| @@ -5369,6 +5449,7 @@ void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
|
|
|
|
|
| void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(rsi));
|
| __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
|
| DeoptimizeIf(equal, instr->environment());
|
|
|
|
|