Index: src/x64/lithium-codegen-x64.cc
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 3f01d3ba37021de6e89aabd7e6461d3b0151463c..9217a9407fc942e2ea077bec5cee802b8737299b 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -119,45 +119,35 @@ void LCodeGen::Comment(const char* format, ...) {
 bool LCodeGen::GeneratePrologue() {
   ASSERT(is_generating());
 
-  if (info()->IsOptimizing()) {
-    ProfileEntryHookStub::MaybeCallEntryHook(masm_);
+  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
 
 #ifdef DEBUG
-    if (strlen(FLAG_stop_at) > 0 &&
-        info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
-      __ int3();
-    }
+  if (strlen(FLAG_stop_at) > 0 &&
+      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
+    __ int3();
+  }
 #endif
 
-    // Strict mode functions need to replace the receiver with undefined
-    // when called as functions (without an explicit receiver
-    // object). rcx is zero for method calls and non-zero for function
-    // calls.
-    if (!info_->is_classic_mode() || info_->is_native()) {
-      Label ok;
-      __ testq(rcx, rcx);
-      __ j(zero, &ok, Label::kNear);
-      // +1 for return address.
-      int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
-      __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
-      __ movq(Operand(rsp, receiver_offset), kScratchRegister);
-      __ bind(&ok);
-    }
+  // Strict mode functions need to replace the receiver with undefined
+  // when called as functions (without an explicit receiver
+  // object). rcx is zero for method calls and non-zero for function
+  // calls.
+  if (!info_->is_classic_mode() || info_->is_native()) {
+    Label ok;
+    __ testq(rcx, rcx);
+    __ j(zero, &ok, Label::kNear);
+    // +1 for return address.
+    int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
+    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
+    __ movq(Operand(rsp, receiver_offset), kScratchRegister);
+    __ bind(&ok);
   }
 
   info()->set_prologue_offset(masm_->pc_offset());
-  if (NeedsEagerFrame()) {
-    ASSERT(!frame_is_built_);
-    frame_is_built_ = true;
-    __ push(rbp);  // Caller's frame pointer.
-    __ movq(rbp, rsp);
-    __ push(rsi);  // Callee's context.
-    if (info()->IsStub()) {
-      __ Push(Smi::FromInt(StackFrame::STUB));
-    } else {
-      __ push(rdi);  // Callee's JS function.
-    }
-  }
+  __ push(rbp);  // Caller's frame pointer.
+  __ movq(rbp, rsp);
+  __ push(rsi);  // Callee's context.
+  __ push(rdi);  // Callee's JS function.
 
   // Reserve space for the stack slots needed by the code.
   int slots = GetStackSlotCount();
@@ -187,7 +177,7 @@ bool LCodeGen::GeneratePrologue() {
   }
 
   // Possibly allocate a local context.
-  int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
   if (heap_slots > 0) {
     Comment(";;; Allocate local context");
     // Argument to NewContext is the function, which is still in rdi.
@@ -223,7 +213,7 @@ bool LCodeGen::GeneratePrologue() {
   }
 
   // Trace the call.
-  if (FLAG_trace && info()->IsOptimizing()) {
+  if (FLAG_trace) {
     __ CallRuntime(Runtime::kTraceEnter, 0);
   }
   return !is_aborted();
@@ -276,55 +266,9 @@ bool LCodeGen::GenerateBody() {
 
 
 bool LCodeGen::GenerateJumpTable() {
-  Label needs_frame_not_call;
-  Label needs_frame_is_call;
   for (int i = 0; i < jump_table_.length(); i++) {
     __ bind(&jump_table_[i].label);
-    Address entry = jump_table_[i].address;
-    if (jump_table_[i].needs_frame) {
-      __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
-      if (jump_table_[i].is_lazy_deopt) {
-        if (needs_frame_is_call.is_bound()) {
-          __ jmp(&needs_frame_is_call);
-        } else {
-          __ bind(&needs_frame_is_call);
-          __ push(rbp);
-          __ movq(rbp, rsp);
-          __ push(rsi);
-          // This variant of deopt can only be used with stubs. Since we don't
-          // have a function pointer to install in the stack frame that we're
-          // building, install a special marker there instead.
-          ASSERT(info()->IsStub());
-          __ Move(rsi, Smi::FromInt(StackFrame::STUB));
-          __ push(rsi);
-          __ movq(rsi, MemOperand(rsp, kPointerSize));
-          __ call(kScratchRegister);
-        }
-      } else {
-        if (needs_frame_not_call.is_bound()) {
-          __ jmp(&needs_frame_not_call);
-        } else {
-          __ bind(&needs_frame_not_call);
-          __ push(rbp);
-          __ movq(rbp, rsp);
-          __ push(r8);
-          // This variant of deopt can only be used with stubs. Since we don't
-          // have a function pointer to install in the stack frame that we're
-          // building, install a special marker there instead.
-          ASSERT(info()->IsStub());
-          __ Move(rsi, Smi::FromInt(StackFrame::STUB));
-          __ push(rsi);
-          __ movq(rsi, MemOperand(rsp, kPointerSize));
-          __ jmp(kScratchRegister);
-        }
-      }
-    } else {
-      if (jump_table_[i].is_lazy_deopt) {
-        __ Call(entry, RelocInfo::RUNTIME_ENTRY);
-      } else {
-        __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
-      }
-    }
+    __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY);
   }
   return !is_aborted();
 }
@@ -336,32 +280,10 @@ bool LCodeGen::GenerateDeferredCode() {
     for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
       LDeferredCode* code = deferred_[i];
       __ bind(code->entry());
-      if (NeedsDeferredFrame()) {
-        Comment(";;; Deferred build frame",
-                code->instruction_index(),
-                code->instr()->Mnemonic());
-        ASSERT(!frame_is_built_);
-        ASSERT(info()->IsStub());
-        frame_is_built_ = true;
-        // Build the frame in such a way that esi isn't trashed.
-        __ push(rbp);  // Caller's frame pointer.
-        __ push(Operand(rbp, StandardFrameConstants::kContextOffset));
-        __ Push(Smi::FromInt(StackFrame::STUB));
-        __ lea(rbp, Operand(rsp, 2 * kPointerSize));
-      }
       Comment(";;; Deferred code @%d: %s.",
               code->instruction_index(),
               code->instr()->Mnemonic());
       code->Generate();
-      if (NeedsDeferredFrame()) {
-        Comment(";;; Deferred destroy frame",
-                code->instruction_index(),
-                code->instr()->Mnemonic());
-        ASSERT(frame_is_built_);
-        frame_is_built_ = false;
-        __ movq(rsp, rbp);
-        __ pop(rbp);
-      }
       __ jmp(code->exit());
     }
   }
@@ -474,9 +396,7 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
                    translation,
                    arguments_index,
                    arguments_count);
-  bool has_closure_id = !info()->closure().is_null() &&
-      *info()->closure() != *environment->closure();
-  int closure_id = has_closure_id
+  int closure_id = *info()->closure() != *environment->closure()
       ? DefineDeoptimizationLiteral(environment->closure())
       : Translation::kSelfLiteralId;
 
@@ -500,9 +420,6 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
     case ARGUMENTS_ADAPTOR:
      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
       break;
-    case STUB:
-      translation->BeginCompiledStubFrame();
-      break;
   }
 
   // Inlined frames which push their arguments cause the index to be
@@ -693,33 +610,20 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
   RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
   ASSERT(environment->HasBeenRegistered());
   int id = environment->deoptimization_index();
-  ASSERT(info()->IsOptimizing() || info()->IsStub());
-  Deoptimizer::BailoutType bailout_type = info()->IsStub()
-      ? Deoptimizer::LAZY
-      : Deoptimizer::EAGER;
-  Address entry = Deoptimizer::GetDeoptimizationEntry(id, bailout_type);
+  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
   if (entry == NULL) {
     Abort("bailout was not prepared");
     return;
   }
 
-  ASSERT(info()->IsStub() || frame_is_built_);
-  bool lazy_deopt = info()->IsStub();
   if (cc == no_condition) {
-    if (lazy_deopt) {
-      __ Call(entry, RelocInfo::RUNTIME_ENTRY);
-    } else {
-      __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
-    }
+    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
   } else {
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
     if (jump_table_.is_empty() ||
-        jump_table_.last().address != entry ||
-        jump_table_.last().needs_frame != !frame_is_built_ ||
-        jump_table_.last().is_lazy_deopt != lazy_deopt) {
-      JumpTableEntry table_entry(entry, !frame_is_built_, lazy_deopt);
-      jump_table_.Add(table_entry, zone());
+        jump_table_.last().address != entry) {
+      jump_table_.Add(JumpTableEntry(entry), zone());
     }
     __ j(cc, &jump_table_.last().label);
   }
@@ -2384,22 +2288,15 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
 
 
 void LCodeGen::DoReturn(LReturn* instr) {
-  if (FLAG_trace && info()->IsOptimizing()) {
+  if (FLAG_trace) {
     // Preserve the return value on the stack and rely on the runtime
     // call to return the value in the same register.
     __ push(rax);
     __ CallRuntime(Runtime::kTraceExit, 1);
   }
-  if (NeedsEagerFrame()) {
-    __ movq(rsp, rbp);
-    __ pop(rbp);
-  }
-  if (info()->IsStub()) {
-    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
-    __ Ret(0, r10);
-  } else {
-    __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
-  }
+  __ movq(rsp, rbp);
+  __ pop(rbp);
+  __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
 }
 
 
@@ -4630,10 +4527,10 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
 void LCodeGen::DoCheckMapCommon(Register reg,
                                 Handle<Map> map,
                                 CompareMapMode mode,
-                                LInstruction* instr) {
+                                LEnvironment* env) {
   Label success;
   __ CompareMap(reg, map, &success, mode);
-  DeoptimizeIf(not_equal, instr->environment());
+  DeoptimizeIf(not_equal, env);
   __ bind(&success);
 }
 
@@ -4651,7 +4548,7 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
     __ j(equal, &success);
   }
   Handle<Map> map = map_set->last();
-  DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr);
+  DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr->environment());
   __ bind(&success);
 }
 
@@ -4718,7 +4615,7 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
   // Check prototype maps up to the holder.
   while (!current_prototype.is_identical_to(holder)) {
     DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
-                     ALLOW_ELEMENT_TRANSITION_MAPS, instr);
+                     ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
     current_prototype =
         Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
     // Load next prototype object.
@@ -4727,7 +4624,7 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
 
   // Check the holder map.
   DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
-                   ALLOW_ELEMENT_TRANSITION_MAPS, instr);
+                   ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
 }
 
 
@@ -5263,7 +5160,6 @@ void LCodeGen::EmitIsConstructCall(Register temp) {
 
 
 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
-  if (info()->IsStub()) return;
   // Ensure that we have enough space after the previous lazy-bailout
   // instruction for patching the code here.
   int current_pc = masm()->pc_offset();
|