OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 #include "src/compiler/code-generator-impl.h" | 6 #include "src/compiler/code-generator-impl.h" |
7 #include "src/compiler/gap-resolver.h" | 7 #include "src/compiler/gap-resolver.h" |
8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
9 #include "src/compiler/osr.h" | 9 #include "src/compiler/osr.h" |
10 #include "src/mips/macro-assembler-mips.h" | 10 #include "src/mips/macro-assembler-mips.h" |
(...skipping 102 matching lines...)
113 } | 113 } |
114 UNREACHABLE(); | 114 UNREACHABLE(); |
115 return MemOperand(no_reg); | 115 return MemOperand(no_reg); |
116 } | 116 } |
117 | 117 |
118 MemOperand MemoryOperand(size_t index = 0) { return MemoryOperand(&index); } | 118 MemOperand MemoryOperand(size_t index = 0) { return MemoryOperand(&index); } |
119 | 119 |
120 MemOperand ToMemOperand(InstructionOperand* op) const { | 120 MemOperand ToMemOperand(InstructionOperand* op) const { |
121 DCHECK(op != NULL); | 121 DCHECK(op != NULL); |
122 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); | 122 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); |
123 FrameOffset offset = | 123 FrameOffset offset = frame_access_state()->GetFrameOffset( |
124 linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame()); | 124 AllocatedOperand::cast(op)->index()); |
125 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset()); | 125 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset()); |
126 } | 126 } |
127 }; | 127 }; |
128 | 128 |
129 | 129 |
130 static inline bool HasRegisterInput(Instruction* instr, size_t index) { | 130 static inline bool HasRegisterInput(Instruction* instr, size_t index) { |
131 return instr->InputAt(index)->IsRegister(); | 131 return instr->InputAt(index)->IsRegister(); |
132 } | 132 } |
133 | 133 |
134 | 134 |
(...skipping 316 matching lines...)
451 __ bind(ool->exit()); \ | 451 __ bind(ool->exit()); \ |
452 __ bind(&done); \ | 452 __ bind(&done); \ |
453 } while (0) | 453 } while (0) |
454 | 454 |
455 | 455 |
456 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 456 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { |
457 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 457 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
458 if (sp_slot_delta > 0) { | 458 if (sp_slot_delta > 0) { |
459 __ daddiu(sp, sp, sp_slot_delta * kPointerSize); | 459 __ daddiu(sp, sp, sp_slot_delta * kPointerSize); |
460 } | 460 } |
461 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 461 if (frame()->needs_frame()) { |
462 int spill_slots = frame()->GetSpillSlotCount(); | |
463 bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0; | |
464 if (has_frame) { | |
465 __ Pop(ra, fp); | 462 __ Pop(ra, fp); |
466 } | 463 } |
| 464 frame_access_state()->SetFrameAccessToDefault(); |
467 } | 465 } |
468 | 466 |
469 | 467 |
470 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 468 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { |
471 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 469 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
472 if (sp_slot_delta < 0) { | 470 if (sp_slot_delta < 0) { |
473 __ Dsubu(sp, sp, Operand(-sp_slot_delta * kPointerSize)); | 471 __ Dsubu(sp, sp, Operand(-sp_slot_delta * kPointerSize)); |
| 472 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); |
474 } | 473 } |
| 474 frame_access_state()->SetFrameAccessToSP(); |
475 } | 475 } |
476 | 476 |
477 | 477 |
478 // Assembles an instruction after register allocation, producing machine code. | 478 // Assembles an instruction after register allocation, producing machine code. |
479 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { | 479 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { |
480 MipsOperandConverter i(this, instr); | 480 MipsOperandConverter i(this, instr); |
481 InstructionCode opcode = instr->opcode(); | 481 InstructionCode opcode = instr->opcode(); |
482 | 482 |
483 switch (ArchOpcodeField::decode(opcode)) { | 483 switch (ArchOpcodeField::decode(opcode)) { |
484 case kArchCallCodeObject: { | 484 case kArchCallCodeObject: { |
485 EnsureSpaceForLazyDeopt(); | 485 EnsureSpaceForLazyDeopt(); |
486 if (instr->InputAt(0)->IsImmediate()) { | 486 if (instr->InputAt(0)->IsImmediate()) { |
487 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), | 487 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), |
488 RelocInfo::CODE_TARGET); | 488 RelocInfo::CODE_TARGET); |
489 } else { | 489 } else { |
490 __ daddiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag); | 490 __ daddiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag); |
491 __ Call(at); | 491 __ Call(at); |
492 } | 492 } |
493 RecordCallPosition(instr); | 493 RecordCallPosition(instr); |
| 494 frame_access_state()->ClearSPDelta(); |
494 break; | 495 break; |
495 } | 496 } |
496 case kArchTailCallCodeObject: { | 497 case kArchTailCallCodeObject: { |
497 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 498 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
498 AssembleDeconstructActivationRecord(stack_param_delta); | 499 AssembleDeconstructActivationRecord(stack_param_delta); |
499 if (instr->InputAt(0)->IsImmediate()) { | 500 if (instr->InputAt(0)->IsImmediate()) { |
500 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), | 501 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), |
501 RelocInfo::CODE_TARGET); | 502 RelocInfo::CODE_TARGET); |
502 } else { | 503 } else { |
503 __ daddiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag); | 504 __ daddiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag); |
504 __ Jump(at); | 505 __ Jump(at); |
505 } | 506 } |
| 507 frame_access_state()->ClearSPDelta(); |
506 break; | 508 break; |
507 } | 509 } |
508 case kArchCallJSFunction: { | 510 case kArchCallJSFunction: { |
509 EnsureSpaceForLazyDeopt(); | 511 EnsureSpaceForLazyDeopt(); |
510 Register func = i.InputRegister(0); | 512 Register func = i.InputRegister(0); |
511 if (FLAG_debug_code) { | 513 if (FLAG_debug_code) { |
512 // Check the function's context matches the context argument. | 514 // Check the function's context matches the context argument. |
513 __ ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); | 515 __ ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); |
514 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); | 516 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); |
515 } | 517 } |
516 __ ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 518 __ ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
517 __ Call(at); | 519 __ Call(at); |
518 RecordCallPosition(instr); | 520 RecordCallPosition(instr); |
| 521 frame_access_state()->ClearSPDelta(); |
519 break; | 522 break; |
520 } | 523 } |
521 case kArchTailCallJSFunction: { | 524 case kArchTailCallJSFunction: { |
522 Register func = i.InputRegister(0); | 525 Register func = i.InputRegister(0); |
523 if (FLAG_debug_code) { | 526 if (FLAG_debug_code) { |
524 // Check the function's context matches the context argument. | 527 // Check the function's context matches the context argument. |
525 __ ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); | 528 __ ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); |
526 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); | 529 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); |
527 } | 530 } |
528 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 531 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
529 AssembleDeconstructActivationRecord(stack_param_delta); | 532 AssembleDeconstructActivationRecord(stack_param_delta); |
530 __ ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 533 __ ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
531 __ Jump(at); | 534 __ Jump(at); |
| 535 frame_access_state()->ClearSPDelta(); |
532 break; | 536 break; |
533 } | 537 } |
534 case kArchLazyBailout: { | 538 case kArchLazyBailout: { |
535 EnsureSpaceForLazyDeopt(); | 539 EnsureSpaceForLazyDeopt(); |
536 RecordCallPosition(instr); | 540 RecordCallPosition(instr); |
537 break; | 541 break; |
538 } | 542 } |
539 case kArchPrepareCallCFunction: { | 543 case kArchPrepareCallCFunction: { |
540 int const num_parameters = MiscField::decode(instr->opcode()); | 544 int const num_parameters = MiscField::decode(instr->opcode()); |
541 __ PrepareCallCFunction(num_parameters, kScratchReg); | 545 __ PrepareCallCFunction(num_parameters, kScratchReg); |
| 546 // Frame alignment requires using FP-relative frame addressing. |
| 547 frame_access_state()->SetFrameAccessToFP(); |
542 break; | 548 break; |
543 } | 549 } |
544 case kArchPrepareTailCall: | 550 case kArchPrepareTailCall: |
545 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); | 551 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); |
546 break; | 552 break; |
547 case kArchCallCFunction: { | 553 case kArchCallCFunction: { |
548 int const num_parameters = MiscField::decode(instr->opcode()); | 554 int const num_parameters = MiscField::decode(instr->opcode()); |
549 if (instr->InputAt(0)->IsImmediate()) { | 555 if (instr->InputAt(0)->IsImmediate()) { |
550 ExternalReference ref = i.InputExternalReference(0); | 556 ExternalReference ref = i.InputExternalReference(0); |
551 __ CallCFunction(ref, num_parameters); | 557 __ CallCFunction(ref, num_parameters); |
552 } else { | 558 } else { |
553 Register func = i.InputRegister(0); | 559 Register func = i.InputRegister(0); |
554 __ CallCFunction(func, num_parameters); | 560 __ CallCFunction(func, num_parameters); |
555 } | 561 } |
| 562 frame_access_state()->SetFrameAccessToDefault(); |
| 563 frame_access_state()->ClearSPDelta(); |
556 break; | 564 break; |
557 } | 565 } |
558 case kArchJmp: | 566 case kArchJmp: |
559 AssembleArchJump(i.InputRpo(0)); | 567 AssembleArchJump(i.InputRpo(0)); |
560 break; | 568 break; |
561 case kArchLookupSwitch: | 569 case kArchLookupSwitch: |
562 AssembleArchLookupSwitch(instr); | 570 AssembleArchLookupSwitch(instr); |
563 break; | 571 break; |
564 case kArchTableSwitch: | 572 case kArchTableSwitch: |
565 AssembleArchTableSwitch(instr); | 573 AssembleArchTableSwitch(instr); |
(...skipping 499 matching lines...)
1065 case kMips64Ldc1: | 1073 case kMips64Ldc1: |
1066 __ ldc1(i.OutputDoubleRegister(), i.MemoryOperand()); | 1074 __ ldc1(i.OutputDoubleRegister(), i.MemoryOperand()); |
1067 break; | 1075 break; |
1068 case kMips64Sdc1: | 1076 case kMips64Sdc1: |
1069 __ sdc1(i.InputDoubleRegister(2), i.MemoryOperand()); | 1077 __ sdc1(i.InputDoubleRegister(2), i.MemoryOperand()); |
1070 break; | 1078 break; |
1071 case kMips64Push: | 1079 case kMips64Push: |
1072 if (instr->InputAt(0)->IsDoubleRegister()) { | 1080 if (instr->InputAt(0)->IsDoubleRegister()) { |
1073 __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize)); | 1081 __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize)); |
1074 __ Subu(sp, sp, Operand(kDoubleSize)); | 1082 __ Subu(sp, sp, Operand(kDoubleSize)); |
| 1083 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize); |
1075 } else { | 1084 } else { |
1076 __ Push(i.InputRegister(0)); | 1085 __ Push(i.InputRegister(0)); |
| 1086 frame_access_state()->IncreaseSPDelta(1); |
1077 } | 1087 } |
1078 break; | 1088 break; |
1079 case kMips64StackClaim: { | 1089 case kMips64StackClaim: { |
1080 __ Dsubu(sp, sp, Operand(i.InputInt32(0))); | 1090 __ Dsubu(sp, sp, Operand(i.InputInt32(0))); |
| 1091 frame_access_state()->IncreaseSPDelta(i.InputInt32(0) / kPointerSize); |
1081 break; | 1092 break; |
1082 } | 1093 } |
1083 case kMips64StoreToStackSlot: { | 1094 case kMips64StoreToStackSlot: { |
1084 if (instr->InputAt(0)->IsDoubleRegister()) { | 1095 if (instr->InputAt(0)->IsDoubleRegister()) { |
1085 __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, i.InputInt32(1))); | 1096 __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, i.InputInt32(1))); |
1086 } else { | 1097 } else { |
1087 __ sd(i.InputRegister(0), MemOperand(sp, i.InputInt32(1))); | 1098 __ sd(i.InputRegister(0), MemOperand(sp, i.InputInt32(1))); |
1088 } | 1099 } |
1089 break; | 1100 break; |
1090 } | 1101 } |
(...skipping 319 matching lines...)
1410 void CodeGenerator::AssembleDeoptimizerCall( | 1421 void CodeGenerator::AssembleDeoptimizerCall( |
1411 int deoptimization_id, Deoptimizer::BailoutType bailout_type) { | 1422 int deoptimization_id, Deoptimizer::BailoutType bailout_type) { |
1412 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( | 1423 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( |
1413 isolate(), deoptimization_id, bailout_type); | 1424 isolate(), deoptimization_id, bailout_type); |
1414 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); | 1425 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); |
1415 } | 1426 } |
1416 | 1427 |
1417 | 1428 |
1418 void CodeGenerator::AssemblePrologue() { | 1429 void CodeGenerator::AssemblePrologue() { |
1419 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1430 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
1420 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1431 if (descriptor->IsCFunctionCall()) { |
1421 __ Push(ra, fp); | 1432 __ Push(ra, fp); |
1422 __ mov(fp, sp); | 1433 __ mov(fp, sp); |
1423 } else if (descriptor->IsJSFunctionCall()) { | 1434 } else if (descriptor->IsJSFunctionCall()) { |
1424 CompilationInfo* info = this->info(); | 1435 CompilationInfo* info = this->info(); |
1425 __ Prologue(info->IsCodePreAgingActive()); | 1436 __ Prologue(info->IsCodePreAgingActive()); |
1426 } else if (needs_frame_) { | 1437 } else if (frame()->needs_frame()) { |
1427 __ StubPrologue(); | 1438 __ StubPrologue(); |
1428 } else { | 1439 } else { |
1429 frame()->SetElidedFrameSizeInSlots(0); | 1440 frame()->SetElidedFrameSizeInSlots(0); |
1430 } | 1441 } |
| 1442 frame_access_state()->SetFrameAccessToDefault(); |
1431 | 1443 |
1432 int stack_shrink_slots = frame()->GetSpillSlotCount(); | 1444 int stack_shrink_slots = frame()->GetSpillSlotCount(); |
1433 if (info()->is_osr()) { | 1445 if (info()->is_osr()) { |
1434 // TurboFan OSR-compiled functions cannot be entered directly. | 1446 // TurboFan OSR-compiled functions cannot be entered directly. |
1435 __ Abort(kShouldNotDirectlyEnterOsrFunction); | 1447 __ Abort(kShouldNotDirectlyEnterOsrFunction); |
1436 | 1448 |
1437 // Unoptimized code jumps directly to this entrypoint while the unoptimized | 1449 // Unoptimized code jumps directly to this entrypoint while the unoptimized |
1438 // frame is still on the stack. Optimized code uses OSR values directly from | 1450 // frame is still on the stack. Optimized code uses OSR values directly from |
1439 // the unoptimized frame. Thus, all that needs to be done is to allocate the | 1451 // the unoptimized frame. Thus, all that needs to be done is to allocate the |
1440 // remaining stack slots. | 1452 // remaining stack slots. |
(...skipping 39 matching lines...)
1480 if (saves != 0) { | 1492 if (saves != 0) { |
1481 __ MultiPop(saves); | 1493 __ MultiPop(saves); |
1482 } | 1494 } |
1483 | 1495 |
1484 // Restore FPU registers. | 1496 // Restore FPU registers. |
1485 const RegList saves_fpu = descriptor->CalleeSavedFPRegisters(); | 1497 const RegList saves_fpu = descriptor->CalleeSavedFPRegisters(); |
1486 if (saves_fpu != 0) { | 1498 if (saves_fpu != 0) { |
1487 __ MultiPopFPU(saves_fpu); | 1499 __ MultiPopFPU(saves_fpu); |
1488 } | 1500 } |
1489 | 1501 |
1490 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1502 if (descriptor->IsCFunctionCall()) { |
1491 __ mov(sp, fp); | 1503 __ mov(sp, fp); |
1492 __ Pop(ra, fp); | 1504 __ Pop(ra, fp); |
1493 } else if (descriptor->IsJSFunctionCall() || needs_frame_) { | 1505 } else if (frame()->needs_frame()) { |
1494 // Canonicalize JSFunction return sites for now. | 1506 // Canonicalize JSFunction return sites for now. |
1495 if (return_label_.is_bound()) { | 1507 if (return_label_.is_bound()) { |
1496 __ Branch(&return_label_); | 1508 __ Branch(&return_label_); |
1497 return; | 1509 return; |
1498 } else { | 1510 } else { |
1499 __ bind(&return_label_); | 1511 __ bind(&return_label_); |
1500 __ mov(sp, fp); | 1512 __ mov(sp, fp); |
1501 __ Pop(ra, fp); | 1513 __ Pop(ra, fp); |
1502 } | 1514 } |
1503 } | 1515 } |
(...skipping 212 matching lines...)
1716 padding_size -= v8::internal::Assembler::kInstrSize; | 1728 padding_size -= v8::internal::Assembler::kInstrSize; |
1717 } | 1729 } |
1718 } | 1730 } |
1719 } | 1731 } |
1720 | 1732 |
1721 #undef __ | 1733 #undef __ |
1722 | 1734 |
1723 } // namespace compiler | 1735 } // namespace compiler |
1724 } // namespace internal | 1736 } // namespace internal |
1725 } // namespace v8 | 1737 } // namespace v8 |
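Reviewer note: throughout the NEW column, direct bookkeeping via linkage()->GetFrameOffset(), needs_frame_ and GetSpillSlotCount() is replaced by queries and updates on frame_access_state(). As a rough mental model only (FrameAccessStateSketch, the FrameOffset fields and the offset arithmetic below are illustrative stand-ins, not V8's actual class or sign conventions), the state records whether stack slots are addressed relative to fp or sp plus a running sp delta from pushes and stack claims, and folds both into the operand that ToMemOperand() produces. A minimal sketch under those assumptions:

#include <cassert>
#include <cstdio>

// Illustrative stand-ins only: V8's real FrameOffset/FrameAccessState have
// richer interfaces; this model just shows how fp/sp selection and the
// running sp delta combine into the operand produced by ToMemOperand().
struct FrameOffset {
  bool from_stack_pointer;
  int offset;  // byte offset from sp or fp
};

class FrameAccessStateSketch {
 public:
  FrameAccessStateSketch(int frame_slot_count, bool has_frame)
      : frame_slot_count_(frame_slot_count),
        has_frame_(has_frame),
        access_frame_pointer_(has_frame),
        sp_delta_(0) {}

  // The calls introduced throughout the NEW column.
  void SetFrameAccessToFP() { access_frame_pointer_ = true; }
  void SetFrameAccessToSP() { access_frame_pointer_ = false; }
  void SetFrameAccessToDefault() { access_frame_pointer_ = has_frame_; }
  void IncreaseSPDelta(int slots) { sp_delta_ += slots; }
  void ClearSPDelta() { sp_delta_ = 0; }

  // Spill-slot index -> fp- or sp-relative byte offset (kPointerSize is 8
  // on mips64). The sign conventions here are illustrative, not V8's.
  FrameOffset GetFrameOffset(int spill_slot) const {
    const int kPointerSize = 8;
    if (access_frame_pointer_) {
      // fp-relative: unaffected by anything pushed after the prologue.
      return FrameOffset{false, -(spill_slot + 1) * kPointerSize};
    }
    // sp-relative: must fold in every slot pushed since frame construction,
    // which is what IncreaseSPDelta() records for kMips64Push and
    // kMips64StackClaim in the diff above.
    int slots_above = frame_slot_count_ - spill_slot - 1 + sp_delta_;
    return FrameOffset{true, slots_above * kPointerSize};
  }

 private:
  int frame_slot_count_;
  bool has_frame_;
  bool access_frame_pointer_;
  int sp_delta_;
};

int main() {
  FrameAccessStateSketch state(/*frame_slot_count=*/4, /*has_frame=*/true);
  state.IncreaseSPDelta(1);    // e.g. kMips64Push of a GP register
  state.SetFrameAccessToFP();  // e.g. kArchPrepareCallCFunction
  FrameOffset off = state.GetFrameOffset(2);
  assert(!off.from_stack_pointer);
  std::printf("slot 2 -> fp%+d\n", off.offset);
  return 0;
}

This also explains why the CL clears or resets the state around calls: completed calls pop their outgoing arguments, so ClearSPDelta() and SetFrameAccessToDefault() restore the baseline, while kArchPrepareCallCFunction switches to fp-relative addressing because, as the added comment in the diff notes, frame alignment for the C call may move sp by an amount not known statically.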