| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
| 6 | 6 |
| 7 #include "src/compiler/code-generator-impl.h" | 7 #include "src/compiler/code-generator-impl.h" |
| 8 #include "src/compiler/gap-resolver.h" | 8 #include "src/compiler/gap-resolver.h" |
| 9 #include "src/compiler/node-matchers.h" | 9 #include "src/compiler/node-matchers.h" |
| 10 #include "src/compiler/osr.h" | 10 #include "src/compiler/osr.h" |
| (...skipping 24 matching lines...) |
| 35 } | 35 } |
| 36 | 36 |
| 37 Operand OutputOperand() { return ToOperand(instr_->Output()); } | 37 Operand OutputOperand() { return ToOperand(instr_->Output()); } |
| 38 | 38 |
| 39 Operand ToOperand(InstructionOperand* op, int extra = 0) { | 39 Operand ToOperand(InstructionOperand* op, int extra = 0) { |
| 40 if (op->IsRegister()) { | 40 if (op->IsRegister()) { |
| 41 DCHECK(extra == 0); | 41 DCHECK(extra == 0); |
| 42 return Operand(ToRegister(op)); | 42 return Operand(ToRegister(op)); |
| 43 } | 43 } |
| 44 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); | 44 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); |
| 45 FrameOffset offset = | 45 FrameOffset offset = frame_access_state()->GetFrameOffset( |
| 46 linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame()); | 46 AllocatedOperand::cast(op)->index()); |
| 47 return Operand(offset.from_stack_pointer() ? esp : ebp, | 47 return Operand(offset.from_stack_pointer() ? esp : ebp, |
| 48 offset.offset() + extra); | 48 offset.offset() + extra); |
| 49 } | 49 } |
| 50 | 50 |
| 51 Operand ToMaterializableOperand(int materializable_offset) { |
| 52 FrameOffset offset = frame_access_state()->GetFrameOffset( |
| 53 Frame::FPOffsetToSlot(materializable_offset)); |
| 54 return Operand(offset.from_stack_pointer() ? esp : ebp, offset.offset()); |
| 55 } |
| 56 |
| 51 Operand HighOperand(InstructionOperand* op) { | 57 Operand HighOperand(InstructionOperand* op) { |
| 52 DCHECK(op->IsDoubleStackSlot()); | 58 DCHECK(op->IsDoubleStackSlot()); |
| 53 return ToOperand(op, kPointerSize); | 59 return ToOperand(op, kPointerSize); |
| 54 } | 60 } |
| 55 | 61 |
| 56 Immediate ToImmediate(InstructionOperand* operand) { | 62 Immediate ToImmediate(InstructionOperand* operand) { |
| 57 Constant constant = ToConstant(operand); | 63 Constant constant = ToConstant(operand); |
| 58 switch (constant.type()) { | 64 switch (constant.type()) { |
| 59 case Constant::kInt32: | 65 case Constant::kInt32: |
| 60 return Immediate(constant.ToInt32()); | 66 return Immediate(constant.ToInt32()); |
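
Note: the key change in this hunk is that ToOperand() (and the new ToMaterializableOperand()) ask frame_access_state()->GetFrameOffset() for a slot's location instead of going through the linkage, and the returned FrameOffset decides whether the operand ends up esp- or ebp-relative. A minimal sketch of that last step, with simplified stand-in types rather than V8's real classes:

```cpp
// Sketch only: how a resolved FrameOffset picks the base register, mirroring
// the esp/ebp choice in ToOperand() above. Types are simplified stand-ins.
#include <cstdio>

struct FrameOffset {
  bool from_stack_pointer;  // true: address relative to esp; false: to ebp
  int offset;               // byte offset from the chosen base register
};

const char* BaseRegisterFor(const FrameOffset& off) {
  return off.from_stack_pointer ? "esp" : "ebp";
}

int main() {
  FrameOffset fp_relative{false, -8};  // e.g. a spill slot under a full frame
  FrameOffset sp_relative{true, 12};   // the same logical slot with the frame elided
  std::printf("[%s%+d]\n", BaseRegisterFor(fp_relative), fp_relative.offset);
  std::printf("[%s%+d]\n", BaseRegisterFor(sp_relative), sp_relative.offset);
  return 0;
}
```
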
| (...skipping 267 matching lines...) |
| 328 } \ | 334 } \ |
| 329 __ bind(&done); \ | 335 __ bind(&done); \ |
| 330 } while (false) | 336 } while (false) |
| 331 | 337 |
| 332 | 338 |
| 333 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 339 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { |
| 334 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 340 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
| 335 if (sp_slot_delta > 0) { | 341 if (sp_slot_delta > 0) { |
| 336 __ add(esp, Immediate(sp_slot_delta * kPointerSize)); | 342 __ add(esp, Immediate(sp_slot_delta * kPointerSize)); |
| 337 } | 343 } |
| 338 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 344 if (frame()->needs_frame()) { |
| 339 int spill_slots = frame()->GetSpillSlotCount(); | |
| 340 bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0; | |
| 341 if (has_frame) { | |
| 342 __ pop(ebp); | 345 __ pop(ebp); |
| 343 } | 346 } |
| 347 frame_access_state()->SetFrameAccessToDefault(); |
| 344 } | 348 } |
| 345 | 349 |
| 346 | 350 |
| 347 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 351 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { |
| 348 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 352 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
| 349 if (sp_slot_delta < 0) { | 353 if (sp_slot_delta < 0) { |
| 350 __ sub(esp, Immediate(-sp_slot_delta * kPointerSize)); | 354 __ sub(esp, Immediate(-sp_slot_delta * kPointerSize)); |
| 355 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); |
| 351 } | 356 } |
| 357 frame_access_state()->SetFrameAccessToSP(); |
| 352 } | 358 } |
| 353 | 359 |
| 354 | 360 |
| 355 // Assembles an instruction after register allocation, producing machine code. | 361 // Assembles an instruction after register allocation, producing machine code. |
| 356 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { | 362 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { |
| 357 X87OperandConverter i(this, instr); | 363 X87OperandConverter i(this, instr); |
| 358 | 364 |
| 359 switch (ArchOpcodeField::decode(instr->opcode())) { | 365 switch (ArchOpcodeField::decode(instr->opcode())) { |
| 360 case kArchCallCodeObject: { | 366 case kArchCallCodeObject: { |
| 361 EnsureSpaceForLazyDeopt(); | 367 EnsureSpaceForLazyDeopt(); |
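
Note: the frame_access_state() calls threaded through the tail-call paths in this hunk all maintain one piece of state: whether frame slots are currently addressed via ebp or via esp, and how many extra words have been pushed on esp in the meantime. A rough sketch of that bookkeeping, as a simplified stand-in rather than the real FrameAccessState:

```cpp
// Hedged sketch of the bookkeeping behind SetFrameAccessToSP/FP/Default,
// IncreaseSPDelta and ClearSPDelta. The point: an esp-relative slot offset
// must include every word pushed since the frame was set up, while an
// ebp-relative one does not move.
#include <cassert>

class FrameAccessStateSketch {
 public:
  void SetFrameAccessToFP() { access_via_sp_ = false; }
  void SetFrameAccessToSP() { access_via_sp_ = true; }
  void SetFrameAccessToDefault() { access_via_sp_ = false; }  // sketch: assume a frame exists
  void IncreaseSPDelta(int slots) { sp_delta_ += slots; }
  void ClearSPDelta() { sp_delta_ = 0; }

  int ByteOffsetOfSlot(int slot, int pointer_size) const {
    int base = slot * pointer_size;
    return access_via_sp_ ? base + sp_delta_ * pointer_size : base;
  }

 private:
  bool access_via_sp_ = false;
  int sp_delta_ = 0;
};

int main() {
  FrameAccessStateSketch state;
  state.SetFrameAccessToSP();                   // e.g. after AssemblePrepareTailCall
  int before = state.ByteOffsetOfSlot(2, 4);
  state.IncreaseSPDelta(1);                     // one extra word pushed on esp
  assert(state.ByteOffsetOfSlot(2, 4) == before + 4);  // same slot, 4 bytes further away
  state.ClearSPDelta();                         // e.g. once a call has returned
  state.SetFrameAccessToDefault();
  return 0;
}
```
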
| (...skipping 12 matching lines...) |
| 374 __ lea(esp, Operand(esp, -kDoubleSize)); | 380 __ lea(esp, Operand(esp, -kDoubleSize)); |
| 375 __ fstp_d(Operand(esp, 0)); | 381 __ fstp_d(Operand(esp, 0)); |
| 376 } | 382 } |
| 377 __ fninit(); | 383 __ fninit(); |
| 378 if (double_result) { | 384 if (double_result) { |
| 379 __ fld_d(Operand(esp, 0)); | 385 __ fld_d(Operand(esp, 0)); |
| 380 __ lea(esp, Operand(esp, kDoubleSize)); | 386 __ lea(esp, Operand(esp, kDoubleSize)); |
| 381 } else { | 387 } else { |
| 382 __ fld1(); | 388 __ fld1(); |
| 383 } | 389 } |
| 390 frame_access_state()->ClearSPDelta(); |
| 384 break; | 391 break; |
| 385 } | 392 } |
| 386 case kArchTailCallCodeObject: { | 393 case kArchTailCallCodeObject: { |
| 387 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 394 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
| 388 AssembleDeconstructActivationRecord(stack_param_delta); | 395 AssembleDeconstructActivationRecord(stack_param_delta); |
| 389 if (HasImmediateInput(instr, 0)) { | 396 if (HasImmediateInput(instr, 0)) { |
| 390 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0)); | 397 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0)); |
| 391 __ jmp(code, RelocInfo::CODE_TARGET); | 398 __ jmp(code, RelocInfo::CODE_TARGET); |
| 392 } else { | 399 } else { |
| 393 Register reg = i.InputRegister(0); | 400 Register reg = i.InputRegister(0); |
| 394 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag)); | 401 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
| 395 __ jmp(reg); | 402 __ jmp(reg); |
| 396 } | 403 } |
| 404 frame_access_state()->ClearSPDelta(); |
| 397 break; | 405 break; |
| 398 } | 406 } |
| 399 case kArchCallJSFunction: { | 407 case kArchCallJSFunction: { |
| 400 EnsureSpaceForLazyDeopt(); | 408 EnsureSpaceForLazyDeopt(); |
| 401 Register func = i.InputRegister(0); | 409 Register func = i.InputRegister(0); |
| 402 if (FLAG_debug_code) { | 410 if (FLAG_debug_code) { |
| 403 // Check the function's context matches the context argument. | 411 // Check the function's context matches the context argument. |
| 404 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); | 412 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); |
| 405 __ Assert(equal, kWrongFunctionContext); | 413 __ Assert(equal, kWrongFunctionContext); |
| 406 } | 414 } |
| 407 __ call(FieldOperand(func, JSFunction::kCodeEntryOffset)); | 415 __ call(FieldOperand(func, JSFunction::kCodeEntryOffset)); |
| 408 RecordCallPosition(instr); | 416 RecordCallPosition(instr); |
| 409 bool double_result = | 417 bool double_result = |
| 410 instr->HasOutput() && instr->Output()->IsDoubleRegister(); | 418 instr->HasOutput() && instr->Output()->IsDoubleRegister(); |
| 411 if (double_result) { | 419 if (double_result) { |
| 412 __ lea(esp, Operand(esp, -kDoubleSize)); | 420 __ lea(esp, Operand(esp, -kDoubleSize)); |
| 413 __ fstp_d(Operand(esp, 0)); | 421 __ fstp_d(Operand(esp, 0)); |
| 414 } | 422 } |
| 415 __ fninit(); | 423 __ fninit(); |
| 416 if (double_result) { | 424 if (double_result) { |
| 417 __ fld_d(Operand(esp, 0)); | 425 __ fld_d(Operand(esp, 0)); |
| 418 __ lea(esp, Operand(esp, kDoubleSize)); | 426 __ lea(esp, Operand(esp, kDoubleSize)); |
| 419 } else { | 427 } else { |
| 420 __ fld1(); | 428 __ fld1(); |
| 421 } | 429 } |
| 430 frame_access_state()->ClearSPDelta(); |
| 422 break; | 431 break; |
| 423 } | 432 } |
| 424 case kArchTailCallJSFunction: { | 433 case kArchTailCallJSFunction: { |
| 425 Register func = i.InputRegister(0); | 434 Register func = i.InputRegister(0); |
| 426 if (FLAG_debug_code) { | 435 if (FLAG_debug_code) { |
| 427 // Check the function's context matches the context argument. | 436 // Check the function's context matches the context argument. |
| 428 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); | 437 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); |
| 429 __ Assert(equal, kWrongFunctionContext); | 438 __ Assert(equal, kWrongFunctionContext); |
| 430 } | 439 } |
| 431 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 440 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
| 432 AssembleDeconstructActivationRecord(stack_param_delta); | 441 AssembleDeconstructActivationRecord(stack_param_delta); |
| 433 __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset)); | 442 __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset)); |
| 443 frame_access_state()->ClearSPDelta(); |
| 434 break; | 444 break; |
| 435 } | 445 } |
| 436 case kArchLazyBailout: { | 446 case kArchLazyBailout: { |
| 437 EnsureSpaceForLazyDeopt(); | 447 EnsureSpaceForLazyDeopt(); |
| 438 RecordCallPosition(instr); | 448 RecordCallPosition(instr); |
| 439 break; | 449 break; |
| 440 } | 450 } |
| 441 case kArchPrepareCallCFunction: { | 451 case kArchPrepareCallCFunction: { |
| 452 // Frame alignment requires using FP-relative frame addressing. |
| 453 frame_access_state()->SetFrameAccessToFP(); |
| 442 int const num_parameters = MiscField::decode(instr->opcode()); | 454 int const num_parameters = MiscField::decode(instr->opcode()); |
| 443 __ PrepareCallCFunction(num_parameters, i.TempRegister(0)); | 455 __ PrepareCallCFunction(num_parameters, i.TempRegister(0)); |
| 444 break; | 456 break; |
| 445 } | 457 } |
| 446 case kArchPrepareTailCall: | 458 case kArchPrepareTailCall: |
| 447 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); | 459 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); |
| 448 break; | 460 break; |
| 449 case kArchCallCFunction: { | 461 case kArchCallCFunction: { |
| 450 int const num_parameters = MiscField::decode(instr->opcode()); | 462 int const num_parameters = MiscField::decode(instr->opcode()); |
| 451 if (HasImmediateInput(instr, 0)) { | 463 if (HasImmediateInput(instr, 0)) { |
| 452 ExternalReference ref = i.InputExternalReference(0); | 464 ExternalReference ref = i.InputExternalReference(0); |
| 453 __ CallCFunction(ref, num_parameters); | 465 __ CallCFunction(ref, num_parameters); |
| 454 } else { | 466 } else { |
| 455 Register func = i.InputRegister(0); | 467 Register func = i.InputRegister(0); |
| 456 __ CallCFunction(func, num_parameters); | 468 __ CallCFunction(func, num_parameters); |
| 457 } | 469 } |
| 470 frame_access_state()->SetFrameAccessToDefault(); |
| 471 frame_access_state()->ClearSPDelta(); |
| 458 break; | 472 break; |
| 459 } | 473 } |
| 460 case kArchJmp: | 474 case kArchJmp: |
| 461 AssembleArchJump(i.InputRpo(0)); | 475 AssembleArchJump(i.InputRpo(0)); |
| 462 break; | 476 break; |
| 463 case kArchLookupSwitch: | 477 case kArchLookupSwitch: |
| 464 AssembleArchLookupSwitch(instr); | 478 AssembleArchLookupSwitch(instr); |
| 465 break; | 479 break; |
| 466 case kArchTableSwitch: | 480 case kArchTableSwitch: |
| 467 AssembleArchTableSwitch(instr); | 481 AssembleArchTableSwitch(instr); |
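
Note: the kArchPrepareCallCFunction / kArchCallCFunction changes switch to FP-relative addressing around C calls; as the new comment says, frame alignment is the reason. A small illustration of why an aligned esp breaks SP-relative offsets, with hypothetical numbers and assuming the usual `and esp, -alignment` style of alignment:

```cpp
// Sketch: aligning esp drops a run-time-dependent number of bytes, so no
// compile-time SP delta can describe where the frame slots are until the
// C call is done and esp is restored.
#include <cstdint>
#include <cstdio>

uintptr_t AlignDown(uintptr_t esp, uintptr_t alignment) {
  return esp & ~(alignment - 1);  // the usual "and esp, -alignment" idiom
}

int main() {
  const uintptr_t kAlignment = 16;  // hypothetical ABI stack alignment
  const uintptr_t esps[] = {0x1000, 0x1004, 0x100C};
  for (uintptr_t esp : esps) {
    std::printf("esp=%#zx -> aligned=%#zx (lost %zu bytes)\n",
                (size_t)esp, (size_t)AlignDown(esp, kAlignment),
                (size_t)(esp - AlignDown(esp, kAlignment)));
  }
  return 0;
}
```
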
| (...skipping 779 matching lines...) |
| 1247 if (allocated.machine_type() == kRepFloat32) { | 1261 if (allocated.machine_type() == kRepFloat32) { |
| 1248 __ sub(esp, Immediate(kDoubleSize)); | 1262 __ sub(esp, Immediate(kDoubleSize)); |
| 1249 __ fld_s(i.InputOperand(0)); | 1263 __ fld_s(i.InputOperand(0)); |
| 1250 __ fstp_s(MemOperand(esp, 0)); | 1264 __ fstp_s(MemOperand(esp, 0)); |
| 1251 } else { | 1265 } else { |
| 1252 DCHECK(allocated.machine_type() == kRepFloat64); | 1266 DCHECK(allocated.machine_type() == kRepFloat64); |
| 1253 __ sub(esp, Immediate(kDoubleSize)); | 1267 __ sub(esp, Immediate(kDoubleSize)); |
| 1254 __ fld_d(i.InputOperand(0)); | 1268 __ fld_d(i.InputOperand(0)); |
| 1255 __ fstp_d(MemOperand(esp, 0)); | 1269 __ fstp_d(MemOperand(esp, 0)); |
| 1256 } | 1270 } |
| 1271 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize); |
| 1257 } else if (HasImmediateInput(instr, 0)) { | 1272 } else if (HasImmediateInput(instr, 0)) { |
| 1258 __ push(i.InputImmediate(0)); | 1273 __ push(i.InputImmediate(0)); |
| 1274 frame_access_state()->IncreaseSPDelta(1); |
| 1259 } else { | 1275 } else { |
| 1260 __ push(i.InputOperand(0)); | 1276 __ push(i.InputOperand(0)); |
| 1277 frame_access_state()->IncreaseSPDelta(1); |
| 1261 } | 1278 } |
| 1262 break; | 1279 break; |
| 1263 case kX87Poke: { | 1280 case kX87Poke: { |
| 1264 int const slot = MiscField::decode(instr->opcode()); | 1281 int const slot = MiscField::decode(instr->opcode()); |
| 1265 if (HasImmediateInput(instr, 0)) { | 1282 if (HasImmediateInput(instr, 0)) { |
| 1266 __ mov(Operand(esp, slot * kPointerSize), i.InputImmediate(0)); | 1283 __ mov(Operand(esp, slot * kPointerSize), i.InputImmediate(0)); |
| 1267 } else { | 1284 } else { |
| 1268 __ mov(Operand(esp, slot * kPointerSize), i.InputRegister(0)); | 1285 __ mov(Operand(esp, slot * kPointerSize), i.InputRegister(0)); |
| 1269 } | 1286 } |
| 1270 break; | 1287 break; |
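
Note: the IncreaseSPDelta() calls added after each push record how far esp has moved, so any esp-relative operand converted later in the sequence still lands on the intended slot; the float/double path reserves a double-sized slot, a plain push one word, and kX87Poke moves no esp at all. A tiny worked example of those sizes, using the ia32 constants (sketch only):

```cpp
// Delta sizes recorded by the push cases above, on ia32/x87 (sketch).
#include <cstdio>

int main() {
  const int kPointerSize = 4;  // ia32 machine word
  const int kDoubleSize = 8;
  std::printf("push float/double -> delta += %d slots\n", kDoubleSize / kPointerSize);
  std::printf("push word         -> delta += 1 slot\n");
  // kX87Poke writes into an existing slot without moving esp: no delta change.
  return 0;
}
```
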
| (...skipping 372 matching lines...) |
| 1643 // | FP | RET | args | caller frame | | 1660 // | FP | RET | args | caller frame | |
| 1644 // ^ esp,ebp | 1661 // ^ esp,ebp |
| 1645 | 1662 |
| 1646 // --{ pop ebp }---------------------------------------------------------------- | 1663 // --{ pop ebp }---------------------------------------------------------------- |
| 1647 // | RET | args | caller frame | | 1664 // | RET | args | caller frame | |
| 1648 // ^ esp ^ ebp | 1665 // ^ esp ^ ebp |
| 1649 | 1666 |
| 1650 | 1667 |
| 1651 void CodeGenerator::AssemblePrologue() { | 1668 void CodeGenerator::AssemblePrologue() { |
| 1652 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1669 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
| 1653 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1670 if (descriptor->IsCFunctionCall()) { |
| 1654 // Assemble a prologue similar to the cdecl calling convention. | 1671 // Assemble a prologue similar to the cdecl calling convention. |
| 1655 __ push(ebp); | 1672 __ push(ebp); |
| 1656 __ mov(ebp, esp); | 1673 __ mov(ebp, esp); |
| 1657 } else if (descriptor->IsJSFunctionCall()) { | 1674 } else if (descriptor->IsJSFunctionCall()) { |
| 1658 // TODO(turbofan): this prologue is redundant with OSR, but needed for | 1675 // TODO(turbofan): this prologue is redundant with OSR, but needed for |
| 1659 // code aging. | 1676 // code aging. |
| 1660 CompilationInfo* info = this->info(); | 1677 CompilationInfo* info = this->info(); |
| 1661 __ Prologue(info->IsCodePreAgingActive()); | 1678 __ Prologue(info->IsCodePreAgingActive()); |
| 1662 } else if (needs_frame_) { | 1679 } else if (frame()->needs_frame()) { |
| 1663 __ StubPrologue(); | 1680 __ StubPrologue(); |
| 1664 } else { | 1681 } else { |
| 1665 frame()->SetElidedFrameSizeInSlots(kPCOnStackSize / kPointerSize); | 1682 frame()->SetElidedFrameSizeInSlots(kPCOnStackSize / kPointerSize); |
| 1666 } | 1683 } |
| 1684 frame_access_state()->SetFrameAccessToDefault(); |
| 1667 | 1685 |
| 1668 int stack_shrink_slots = frame()->GetSpillSlotCount(); | 1686 int stack_shrink_slots = frame()->GetSpillSlotCount(); |
| 1669 if (info()->is_osr()) { | 1687 if (info()->is_osr()) { |
| 1670 // TurboFan OSR-compiled functions cannot be entered directly. | 1688 // TurboFan OSR-compiled functions cannot be entered directly. |
| 1671 __ Abort(kShouldNotDirectlyEnterOsrFunction); | 1689 __ Abort(kShouldNotDirectlyEnterOsrFunction); |
| 1672 | 1690 |
| 1673 // Unoptimized code jumps directly to this entrypoint while the unoptimized | 1691 // Unoptimized code jumps directly to this entrypoint while the unoptimized |
| 1674 // frame is still on the stack. Optimized code uses OSR values directly from | 1692 // frame is still on the stack. Optimized code uses OSR values directly from |
| 1675 // the unoptimized frame. Thus, all that needs to be done is to allocate the | 1693 // the unoptimized frame. Thus, all that needs to be done is to allocate the |
| 1676 // remaining stack slots. | 1694 // remaining stack slots. |
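
Note: AssemblePrologue now branches on the descriptor and on frame()->needs_frame() rather than the old needs_frame_ flag. A sketch of that decision with plain booleans standing in for the CallDescriptor (not V8 code):

```cpp
// Sketch of the frame-shape decision made in AssemblePrologue above.
#include <cstdio>

enum class FrameKind { kCdecl, kJSFrame, kStubFrame, kElided };

FrameKind ChoosePrologue(bool is_c_function_call, bool is_js_function_call,
                         bool needs_frame) {
  if (is_c_function_call) return FrameKind::kCdecl;      // push ebp; mov ebp, esp
  if (is_js_function_call) return FrameKind::kJSFrame;   // full JS prologue
  if (needs_frame) return FrameKind::kStubFrame;         // StubPrologue()
  return FrameKind::kElided;                             // nothing but the return address
}

int main() {
  // A stub that needs no frame keeps only the return address on the stack:
  // on ia32 that is kPCOnStackSize / kPointerSize = 4 / 4 = 1 elided slot.
  if (ChoosePrologue(false, false, false) == FrameKind::kElided) {
    std::printf("frame elided: 1 slot (the return address)\n");
  }
  return 0;
}
```
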
| (...skipping 32 matching lines...) |
| 1709 int pop_count = static_cast<int>(descriptor->StackParameterCount()); | 1727 int pop_count = static_cast<int>(descriptor->StackParameterCount()); |
| 1710 const RegList saves = descriptor->CalleeSavedRegisters(); | 1728 const RegList saves = descriptor->CalleeSavedRegisters(); |
| 1711 // Restore registers. | 1729 // Restore registers. |
| 1712 if (saves != 0) { | 1730 if (saves != 0) { |
| 1713 for (int i = 0; i < Register::kNumRegisters; i++) { | 1731 for (int i = 0; i < Register::kNumRegisters; i++) { |
| 1714 if (!((1 << i) & saves)) continue; | 1732 if (!((1 << i) & saves)) continue; |
| 1715 __ pop(Register::from_code(i)); | 1733 __ pop(Register::from_code(i)); |
| 1716 } | 1734 } |
| 1717 } | 1735 } |
| 1718 | 1736 |
| 1719 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1737 if (descriptor->IsCFunctionCall()) { |
| 1720 __ mov(esp, ebp); // Move stack pointer back to frame pointer. | 1738 __ mov(esp, ebp); // Move stack pointer back to frame pointer. |
| 1721 __ pop(ebp); // Pop caller's frame pointer. | 1739 __ pop(ebp); // Pop caller's frame pointer. |
| 1722 } else if (descriptor->IsJSFunctionCall() || needs_frame_) { | 1740 } else if (frame()->needs_frame()) { |
| 1723 // Canonicalize JSFunction return sites for now. | 1741 // Canonicalize JSFunction return sites for now. |
| 1724 if (return_label_.is_bound()) { | 1742 if (return_label_.is_bound()) { |
| 1725 __ jmp(&return_label_); | 1743 __ jmp(&return_label_); |
| 1726 return; | 1744 return; |
| 1727 } else { | 1745 } else { |
| 1728 __ bind(&return_label_); | 1746 __ bind(&return_label_); |
| 1729 __ mov(esp, ebp); // Move stack pointer back to frame pointer. | 1747 __ mov(esp, ebp); // Move stack pointer back to frame pointer. |
| 1730 __ pop(ebp); // Pop caller's frame pointer. | 1748 __ pop(ebp); // Pop caller's frame pointer. |
| 1731 } | 1749 } |
| 1732 } | 1750 } |
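
Note: the return path in this hunk keeps the existing "canonical return site" pattern: the frame teardown is emitted once and later return sites jump to it. A toy sketch of that pattern, with a minimal stand-in for the assembler API (not V8 code):

```cpp
// Sketch of the single canonical return site used in AssembleReturn above.
#include <cstdio>
#include <string>
#include <vector>

struct Label {
  bool bound = false;
};

struct MiniAssembler {
  std::vector<std::string> code;
  void jmp_to_return() { code.push_back("  jmp return_label"); }
  void bind_return() { code.push_back("return_label:"); }
  void emit(const std::string& s) { code.push_back("  " + s); }
};

void AssembleReturnSketch(MiniAssembler* masm, Label* return_label) {
  if (return_label->bound) {    // a teardown already exists: reuse it
    masm->jmp_to_return();
    return;
  }
  return_label->bound = true;   // first return: emit the teardown here
  masm->bind_return();
  masm->emit("mov esp, ebp");
  masm->emit("pop ebp");
  masm->emit("ret");
}

int main() {
  MiniAssembler masm;
  Label ret;
  AssembleReturnSketch(&masm, &ret);  // emits the teardown and binds the label
  AssembleReturnSketch(&masm, &ret);  // later return sites just jump to it
  for (const std::string& line : masm.code) std::printf("%s\n", line.c_str());
  return 0;
}
```
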
| (...skipping 27 matching lines...) |
| 1760 __ pop(dst); | 1778 __ pop(dst); |
| 1761 } | 1779 } |
| 1762 } else if (source->IsConstant()) { | 1780 } else if (source->IsConstant()) { |
| 1763 Constant src_constant = g.ToConstant(source); | 1781 Constant src_constant = g.ToConstant(source); |
| 1764 if (src_constant.type() == Constant::kHeapObject) { | 1782 if (src_constant.type() == Constant::kHeapObject) { |
| 1765 Handle<HeapObject> src = src_constant.ToHeapObject(); | 1783 Handle<HeapObject> src = src_constant.ToHeapObject(); |
| 1766 int offset; | 1784 int offset; |
| 1767 if (IsMaterializableFromFrame(src, &offset)) { | 1785 if (IsMaterializableFromFrame(src, &offset)) { |
| 1768 if (destination->IsRegister()) { | 1786 if (destination->IsRegister()) { |
| 1769 Register dst = g.ToRegister(destination); | 1787 Register dst = g.ToRegister(destination); |
| 1770 __ mov(dst, Operand(ebp, offset)); | 1788 __ mov(dst, g.ToMaterializableOperand(offset)); |
| 1771 } else { | 1789 } else { |
| 1772 DCHECK(destination->IsStackSlot()); | 1790 DCHECK(destination->IsStackSlot()); |
| 1773 Operand dst = g.ToOperand(destination); | 1791 Operand dst = g.ToOperand(destination); |
| 1774 __ push(Operand(ebp, offset)); | 1792 __ push(g.ToMaterializableOperand(offset)); |
| 1775 __ pop(dst); | 1793 __ pop(dst); |
| 1776 } | 1794 } |
| 1777 } else if (destination->IsRegister()) { | 1795 } else if (destination->IsRegister()) { |
| 1778 Register dst = g.ToRegister(destination); | 1796 Register dst = g.ToRegister(destination); |
| 1779 __ LoadHeapObject(dst, src); | 1797 __ LoadHeapObject(dst, src); |
| 1780 } else { | 1798 } else { |
| 1781 DCHECK(destination->IsStackSlot()); | 1799 DCHECK(destination->IsStackSlot()); |
| 1782 Operand dst = g.ToOperand(destination); | 1800 Operand dst = g.ToOperand(destination); |
| 1783 AllowDeferredHandleDereference embedding_raw_address; | 1801 AllowDeferredHandleDereference embedding_raw_address; |
| 1784 if (isolate()->heap()->InNewSpace(*src)) { | 1802 if (isolate()->heap()->InNewSpace(*src)) { |
| (...skipping 105 matching lines...) |
| 1890 if (source->IsRegister() && destination->IsRegister()) { | 1908 if (source->IsRegister() && destination->IsRegister()) { |
| 1891 // Register-register. | 1909 // Register-register. |
| 1892 Register src = g.ToRegister(source); | 1910 Register src = g.ToRegister(source); |
| 1893 Register dst = g.ToRegister(destination); | 1911 Register dst = g.ToRegister(destination); |
| 1894 __ xchg(dst, src); | 1912 __ xchg(dst, src); |
| 1895 } else if (source->IsRegister() && destination->IsStackSlot()) { | 1913 } else if (source->IsRegister() && destination->IsStackSlot()) { |
| 1896 // Register-memory. | 1914 // Register-memory. |
| 1897 __ xchg(g.ToRegister(source), g.ToOperand(destination)); | 1915 __ xchg(g.ToRegister(source), g.ToOperand(destination)); |
| 1898 } else if (source->IsStackSlot() && destination->IsStackSlot()) { | 1916 } else if (source->IsStackSlot() && destination->IsStackSlot()) { |
| 1899 // Memory-memory. | 1917 // Memory-memory. |
| 1900 Operand src = g.ToOperand(source); | 1918 Operand dst1 = g.ToOperand(destination); |
| 1901 Operand dst = g.ToOperand(destination); | 1919 __ push(dst1); |
| 1902 __ push(dst); | 1920 frame_access_state()->IncreaseSPDelta(1); |
| 1903 __ push(src); | 1921 Operand src1 = g.ToOperand(source); |
| 1904 __ pop(dst); | 1922 __ push(src1); |
| 1905 __ pop(src); | 1923 Operand dst2 = g.ToOperand(destination); |
| 1924 __ pop(dst2); |
| 1925 frame_access_state()->IncreaseSPDelta(-1); |
| 1926 Operand src2 = g.ToOperand(source); |
| 1927 __ pop(src2); |
| 1906 } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) { | 1928 } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) { |
| 1907 UNREACHABLE(); | 1929 UNREACHABLE(); |
| 1908 } else if (source->IsDoubleRegister() && destination->IsDoubleStackSlot()) { | 1930 } else if (source->IsDoubleRegister() && destination->IsDoubleStackSlot()) { |
| 1909 auto allocated = AllocatedOperand::cast(*source); | 1931 auto allocated = AllocatedOperand::cast(*source); |
| 1910 switch (allocated.machine_type()) { | 1932 switch (allocated.machine_type()) { |
| 1911 case kRepFloat32: | 1933 case kRepFloat32: |
| 1912 __ fld_s(g.ToOperand(destination)); | 1934 __ fld_s(g.ToOperand(destination)); |
| 1913 __ fxch(); | 1935 __ fxch(); |
| 1914 __ fstp_s(g.ToOperand(destination)); | 1936 __ fstp_s(g.ToOperand(destination)); |
| 1915 break; | 1937 break; |
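
Note: the memory-to-memory swap above is rewritten so each Operand is recomputed between the pushes and pops, because every push moves esp and an esp-relative operand captured earlier would point one word off. The sketch below models that with a vector playing the stack and an index playing the operand; the delta handling here is deliberately simplified (every push/pop adjusts it), whereas the hunk itself only needs one IncreaseSPDelta(+1)/(-1) pair around the first push and first pop.

```cpp
// Sketch (not V8 code): why the swap recomputes its operands after each push.
#include <cassert>
#include <vector>

struct StackSketch {
  std::vector<int> mem;  // back() is the top of the stack ("esp")
  int sp_delta = 0;      // extra words pushed since the frame slots were laid out

  // Recomputed "operand" for frame slot `slot`; like ToOperand() after
  // IncreaseSPDelta(), it folds the current delta into the address.
  int& Slot(int slot) { return mem[mem.size() - 1 - (slot + sp_delta)]; }

  void Push(int value) { mem.push_back(value); ++sp_delta; }
  int Pop() { int v = mem.back(); mem.pop_back(); --sp_delta; return v; }
};

int main() {
  StackSketch s;
  s.mem = {20, 10};       // slot 1 holds 20, slot 0 (the top) holds 10
  s.Push(s.Slot(1));      // push dst               (like: push dst1)
  s.Push(s.Slot(0));      // push src               (like: push src1)
  int top = s.Pop();      // pop ...
  s.Slot(1) = top;        // ... into dst           (like: pop dst2)
  top = s.Pop();          // pop ...
  s.Slot(0) = top;        // ... into src           (like: pop src2)
  assert(s.Slot(0) == 20 && s.Slot(1) == 10);  // values swapped
  return 0;
}
```
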
| (...skipping 53 matching lines...) |
| 1969 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 1991 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
| 1970 __ Nop(padding_size); | 1992 __ Nop(padding_size); |
| 1971 } | 1993 } |
| 1972 } | 1994 } |
| 1973 | 1995 |
| 1974 #undef __ | 1996 #undef __ |
| 1975 | 1997 |
| 1976 } // namespace compiler | 1998 } // namespace compiler |
| 1977 } // namespace internal | 1999 } // namespace internal |
| 1978 } // namespace v8 | 2000 } // namespace v8 |