| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
| 6 | 6 |
| 7 #include "src/ast/scopes.h" | 7 #include "src/ast/scopes.h" |
| 8 #include "src/compiler/code-generator-impl.h" | 8 #include "src/compiler/code-generator-impl.h" |
| 9 #include "src/compiler/gap-resolver.h" | 9 #include "src/compiler/gap-resolver.h" |
| 10 #include "src/compiler/node-matchers.h" | 10 #include "src/compiler/node-matchers.h" |
| (...skipping 312 matching lines...) |
| 323 Label done; \ | 323 Label done; \ |
| 324 __ j(above_equal, &done, Label::kNear); \ | 324 __ j(above_equal, &done, Label::kNear); \ |
| 325 if (instr->InputAt(2)->IsRegister()) { \ | 325 if (instr->InputAt(2)->IsRegister()) { \ |
| 326 __ asm_instr(i.MemoryOperand(3), i.InputRegister(2)); \ | 326 __ asm_instr(i.MemoryOperand(3), i.InputRegister(2)); \ |
| 327 } else { \ | 327 } else { \ |
| 328 __ asm_instr(i.MemoryOperand(3), i.InputImmediate(2)); \ | 328 __ asm_instr(i.MemoryOperand(3), i.InputImmediate(2)); \ |
| 329 } \ | 329 } \ |
| 330 __ bind(&done); \ | 330 __ bind(&done); \ |
| 331 } while (false) | 331 } while (false) |
| 332 | 332 |
| 333 void CodeGenerator::AssembleDeconstructFrame() { |
| 334 __ mov(esp, ebp); |
| 335 __ pop(ebp); |
| 336 } |
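Not part of the patch: an annotated restatement of the new helper above, for readers skimming the hunk. The comments are descriptive additions, not text from the CL.

```cpp
// Same two instructions as new lines 334-335, with comments added for clarity.
void CodeGenerator::AssembleDeconstructFrame() {
  __ mov(esp, ebp);  // Discard spill slots and locals; esp returns to the saved-ebp slot.
  __ pop(ebp);       // Restore the caller's frame pointer; esp now points at the return address.
}
```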
| 333 | 337 |
| 334 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 338 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { |
| 335 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 339 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
| 336 if (sp_slot_delta > 0) { | 340 if (sp_slot_delta > 0) { |
| 337 __ add(esp, Immediate(sp_slot_delta * kPointerSize)); | 341 __ add(esp, Immediate(sp_slot_delta * kPointerSize)); |
| 338 } | 342 } |
| 339 frame_access_state()->SetFrameAccessToDefault(); | 343 frame_access_state()->SetFrameAccessToDefault(); |
| 340 } | 344 } |
| 341 | 345 |
| 342 | 346 |
| 343 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 347 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { |
| 344 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 348 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
| 345 if (sp_slot_delta < 0) { | 349 if (sp_slot_delta < 0) { |
| 346 __ sub(esp, Immediate(-sp_slot_delta * kPointerSize)); | 350 __ sub(esp, Immediate(-sp_slot_delta * kPointerSize)); |
| 347 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); | 351 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); |
| 348 } | 352 } |
| 349 if (frame()->needs_frame()) { | 353 if (frame_access_state()->access_frame_with_fp()) { |
| 350 __ mov(ebp, MemOperand(ebp, 0)); | 354 __ mov(ebp, MemOperand(ebp, 0)); |
| 351 } | 355 } |
| 352 frame_access_state()->SetFrameAccessToSP(); | 356 frame_access_state()->SetFrameAccessToSP(); |
| 353 } | 357 } |
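A worked illustration of the esp adjustments in the two tail-call helpers above, using assumed slot deltas (TailCallFrameStackSlotDelta() is defined outside this hunk). This is a standalone sketch, not V8 code; it only shows how a slot delta translates into the byte adjustment the generated code applies, assuming kPointerSize is 4 on ia32.

```cpp
// Standalone illustration of the tail-call stack adjustments (not V8 code).
#include <cstdio>

int main() {
  const int kPointerSize = 4;      // ia32
  int deltas[] = {3, -2};          // hypothetical results of TailCallFrameStackSlotDelta()
  for (int sp_slot_delta : deltas) {
    if (sp_slot_delta > 0) {
      // AssembleDeconstructActivationRecord: pop slots the callee no longer needs.
      std::printf("add esp, %d\n", sp_slot_delta * kPointerSize);
    } else if (sp_slot_delta < 0) {
      // AssemblePrepareTailCall: grow the stack and record the SP delta so later
      // frame accesses stay consistent.
      std::printf("sub esp, %d  ; IncreaseSPDelta(%d)\n",
                  -sp_slot_delta * kPointerSize, -sp_slot_delta);
    }
  }
  return 0;
}
```

For the assumed deltas this prints "add esp, 12" and "sub esp, 8  ; IncreaseSPDelta(2)", matching what the emitted `add`/`sub` of `Immediate(... * kPointerSize)` above would produce.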
| 354 | 358 |
| 355 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, | 359 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, |
| 356 Register, Register, | 360 Register, Register, |
| 357 Register) { | 361 Register) { |
| 358 // There are not enough temp registers left on ia32 for a call instruction | 362 // There are not enough temp registers left on ia32 for a call instruction |
| 359 // so we pick some scratch registers and save/restore them manually here. | 363 // so we pick some scratch registers and save/restore them manually here. |
| (...skipping 147 matching lines...) |
| 507 case kArchRet: | 511 case kArchRet: |
| 508 AssembleReturn(); | 512 AssembleReturn(); |
| 509 break; | 513 break; |
| 510 case kArchStackPointer: | 514 case kArchStackPointer: |
| 511 __ mov(i.OutputRegister(), esp); | 515 __ mov(i.OutputRegister(), esp); |
| 512 break; | 516 break; |
| 513 case kArchFramePointer: | 517 case kArchFramePointer: |
| 514 __ mov(i.OutputRegister(), ebp); | 518 __ mov(i.OutputRegister(), ebp); |
| 515 break; | 519 break; |
| 516 case kArchParentFramePointer: | 520 case kArchParentFramePointer: |
| 517 if (frame_access_state()->frame()->needs_frame()) { | 521 if (frame_access_state()->access_frame_with_fp()) { |
| 518 __ mov(i.OutputRegister(), Operand(ebp, 0)); | 522 __ mov(i.OutputRegister(), Operand(ebp, 0)); |
| 519 } else { | 523 } else { |
| 520 __ mov(i.OutputRegister(), ebp); | 524 __ mov(i.OutputRegister(), ebp); |
| 521 } | 525 } |
| 522 break; | 526 break; |
| 523 case kArchTruncateDoubleToI: { | 527 case kArchTruncateDoubleToI: { |
| 524 auto result = i.OutputRegister(); | 528 auto result = i.OutputRegister(); |
| 525 auto input = i.InputDoubleRegister(0); | 529 auto input = i.InputDoubleRegister(0); |
| 526 auto ool = new (zone()) OutOfLineTruncateDoubleToI(this, result, input); | 530 auto ool = new (zone()) OutOfLineTruncateDoubleToI(this, result, input); |
| 527 __ cvttsd2si(result, Operand(input)); | 531 __ cvttsd2si(result, Operand(input)); |
| (...skipping 811 matching lines...) |
| 1339 default: | 1343 default: |
| 1340 UNREACHABLE(); | 1344 UNREACHABLE(); |
| 1341 break; | 1345 break; |
| 1342 } | 1346 } |
| 1343 // Add a jump if not falling through to the next block. | 1347 // Add a jump if not falling through to the next block. |
| 1344 if (!branch->fallthru) __ jmp(flabel); | 1348 if (!branch->fallthru) __ jmp(flabel); |
| 1345 } | 1349 } |
| 1346 | 1350 |
| 1347 | 1351 |
| 1348 void CodeGenerator::AssembleArchJump(RpoNumber target) { | 1352 void CodeGenerator::AssembleArchJump(RpoNumber target) { |
| 1353 AssembleDeconstructFrameBetweenBlocks(); |
| 1349 if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target)); | 1354 if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target)); |
| 1350 } | 1355 } |
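AssembleDeconstructFrameBetweenBlocks() is called here but defined outside the visible hunk. A minimal sketch of what an ia32 body could look like, under the assumption that it reuses the new AssembleDeconstructFrame() primitive and only fires when the jump leaves the framed part of the function; the guard below is a hypothetical placeholder, not taken from the CL.

```cpp
// Sketch only; the real definition is not shown in this diff.
void CodeGenerator::AssembleDeconstructFrameBetweenBlocks() {
  // Hypothetical guard: some per-block bookkeeping would need to say whether
  // the outgoing edge crosses from framed code into frame-elided code.
  bool leaving_framed_region = false;  // placeholder, not a real field
  if (leaving_framed_region) {
    AssembleDeconstructFrame();  // mov esp, ebp; pop ebp (new lines 333-336)
  }
}
```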
| 1351 | 1356 |
| 1352 | 1357 |
| 1353 // Assembles boolean materializations after an instruction. | 1358 // Assembles boolean materializations after an instruction. |
| 1354 void CodeGenerator::AssembleArchBoolean(Instruction* instr, | 1359 void CodeGenerator::AssembleArchBoolean(Instruction* instr, |
| 1355 FlagsCondition condition) { | 1360 FlagsCondition condition) { |
| 1356 IA32OperandConverter i(this, instr); | 1361 IA32OperandConverter i(this, instr); |
| 1357 Label done; | 1362 Label done; |
| 1358 | 1363 |
| (...skipping 229 matching lines...) |
| 1588 // | FP | RET | args | caller frame | | 1593 // | FP | RET | args | caller frame | |
| 1589 // ^ esp,ebp | 1594 // ^ esp,ebp |
| 1590 | 1595 |
| 1591 // --{ pop ebp }---------------------------------------------------------------- | 1596 // --{ pop ebp }---------------------------------------------------------------- |
| 1592 // | RET | args | caller frame | | 1597 // | RET | args | caller frame | |
| 1593 // ^ esp ^ ebp | 1598 // ^ esp ^ ebp |
| 1594 | 1599 |
| 1595 | 1600 |
| 1596 void CodeGenerator::AssemblePrologue() { | 1601 void CodeGenerator::AssemblePrologue() { |
| 1597 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1602 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
| 1598 if (frame()->needs_frame()) { | 1603 if (frame_access_state()->access_frame_with_fp()) { |
| 1599 if (descriptor->IsCFunctionCall()) { | 1604 if (descriptor->IsCFunctionCall()) { |
| 1600 __ push(ebp); | 1605 __ push(ebp); |
| 1601 __ mov(ebp, esp); | 1606 __ mov(ebp, esp); |
| 1602 } else if (descriptor->IsJSFunctionCall()) { | 1607 } else if (descriptor->IsJSFunctionCall()) { |
| 1603 __ Prologue(this->info()->GeneratePreagedPrologue()); | 1608 __ Prologue(this->info()->GeneratePreagedPrologue()); |
| 1604 } else { | 1609 } else { |
| 1605 __ StubPrologue(info()->GetOutputStackFrameType()); | 1610 __ StubPrologue(info()->GetOutputStackFrameType()); |
| 1606 } | 1611 } |
| 1607 } else { | |
| 1608 frame()->SetElidedFrameSizeInSlots(kPCOnStackSize / kPointerSize); | |
| 1609 } | 1612 } |
| 1610 frame_access_state()->SetFrameAccessToDefault(); | |
| 1611 | |
| 1612 int stack_shrink_slots = frame()->GetSpillSlotCount(); | 1613 int stack_shrink_slots = frame()->GetSpillSlotCount(); |
| 1613 if (info()->is_osr()) { | 1614 if (info()->is_osr()) { |
| 1614 // TurboFan OSR-compiled functions cannot be entered directly. | 1615 // TurboFan OSR-compiled functions cannot be entered directly. |
| 1615 __ Abort(kShouldNotDirectlyEnterOsrFunction); | 1616 __ Abort(kShouldNotDirectlyEnterOsrFunction); |
| 1616 | 1617 |
| 1617 // Unoptimized code jumps directly to this entrypoint while the unoptimized | 1618 // Unoptimized code jumps directly to this entrypoint while the unoptimized |
| 1618 // frame is still on the stack. Optimized code uses OSR values directly from | 1619 // frame is still on the stack. Optimized code uses OSR values directly from |
| 1619 // the unoptimized frame. Thus, all that needs to be done is to allocate the | 1620 // the unoptimized frame. Thus, all that needs to be done is to allocate the |
| 1620 // remaining stack slots. | 1621 // remaining stack slots. |
| 1621 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --"); | 1622 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --"); |
| (...skipping 25 matching lines...) |
| 1647 const RegList saves = descriptor->CalleeSavedRegisters(); | 1648 const RegList saves = descriptor->CalleeSavedRegisters(); |
| 1648 // Restore registers. | 1649 // Restore registers. |
| 1649 if (saves != 0) { | 1650 if (saves != 0) { |
| 1650 for (int i = 0; i < Register::kNumRegisters; i++) { | 1651 for (int i = 0; i < Register::kNumRegisters; i++) { |
| 1651 if (!((1 << i) & saves)) continue; | 1652 if (!((1 << i) & saves)) continue; |
| 1652 __ pop(Register::from_code(i)); | 1653 __ pop(Register::from_code(i)); |
| 1653 } | 1654 } |
| 1654 } | 1655 } |
| 1655 | 1656 |
| 1656 if (descriptor->IsCFunctionCall()) { | 1657 if (descriptor->IsCFunctionCall()) { |
| 1657 __ mov(esp, ebp); // Move stack pointer back to frame pointer. | 1658 AssembleDeconstructFrameWhenLeaving(); |
| 1658 __ pop(ebp); // Pop caller's frame pointer. | 1659 } else if (frame_access_state()->access_frame_with_fp()) { |
| 1659 } else if (frame()->needs_frame()) { | |
| 1660 // Canonicalize JSFunction return sites for now. | 1660 // Canonicalize JSFunction return sites for now. |
| 1661 if (return_label_.is_bound()) { | 1661 if (return_label_.is_bound()) { |
| 1662 AssembleDeconstructFrameBetweenBlocks(); |
| 1662 __ jmp(&return_label_); | 1663 __ jmp(&return_label_); |
| 1663 return; | 1664 return; |
| 1664 } else { | 1665 } else { |
| 1665 __ bind(&return_label_); | 1666 __ bind(&return_label_); |
| 1666 __ mov(esp, ebp); // Move stack pointer back to frame pointer. | 1667 AssembleDeconstructFrameWhenLeaving(); |
| 1667 __ pop(ebp); // Pop caller's frame pointer. | |
| 1668 } | 1668 } |
| 1669 } | 1669 } |
| 1670 size_t pop_size = descriptor->StackParameterCount() * kPointerSize; | 1670 size_t pop_size = descriptor->StackParameterCount() * kPointerSize; |
| 1671 // Might need ecx for scratch if pop_size is too big. | 1671 // Might need ecx for scratch if pop_size is too big. |
| 1672 DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & ecx.bit()); | 1672 DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & ecx.bit()); |
| 1673 __ Ret(static_cast<int>(pop_size), ecx); | 1673 __ Ret(static_cast<int>(pop_size), ecx); |
| 1674 } | 1674 } |
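AssembleDeconstructFrameWhenLeaving() is likewise only referenced in this hunk. Since it replaces the OLD inline `mov esp, ebp; pop ebp` sequence at both return sites, a plausible sketch (an assumption, not the CL's actual body) is a guarded call to AssembleDeconstructFrame():

```cpp
// Sketch only; the real definition is outside the visible hunk.
void CodeGenerator::AssembleDeconstructFrameWhenLeaving() {
  // Assumed guard, mirroring the access_frame_with_fp() checks added elsewhere
  // in this patch: only emit the teardown if a frame was actually constructed.
  if (frame_access_state()->access_frame_with_fp()) {
    AssembleDeconstructFrame();  // mov esp, ebp; pop ebp
  }
}
```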
| 1675 | 1675 |
| 1676 | 1676 |
| 1677 void CodeGenerator::AssembleMove(InstructionOperand* source, | 1677 void CodeGenerator::AssembleMove(InstructionOperand* source, |
| (...skipping 196 matching lines...) |
| 1874 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 1874 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
| 1875 __ Nop(padding_size); | 1875 __ Nop(padding_size); |
| 1876 } | 1876 } |
| 1877 } | 1877 } |
| 1878 | 1878 |
| 1879 #undef __ | 1879 #undef __ |
| 1880 | 1880 |
| 1881 } // namespace compiler | 1881 } // namespace compiler |
| 1882 } // namespace internal | 1882 } // namespace internal |
| 1883 } // namespace v8 | 1883 } // namespace v8 |