OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 #include "src/compiler/code-generator-impl.h" | 6 #include "src/compiler/code-generator-impl.h" |
7 #include "src/compiler/gap-resolver.h" | 7 #include "src/compiler/gap-resolver.h" |
8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
9 #include "src/compiler/osr.h" | 9 #include "src/compiler/osr.h" |
10 #include "src/mips/macro-assembler-mips.h" | 10 #include "src/mips/macro-assembler-mips.h" |
(...skipping 102 matching lines...)
113 } | 113 } |
114 UNREACHABLE(); | 114 UNREACHABLE(); |
115 return MemOperand(no_reg); | 115 return MemOperand(no_reg); |
116 } | 116 } |
117 | 117 |
118 MemOperand MemoryOperand(size_t index = 0) { return MemoryOperand(&index); } | 118 MemOperand MemoryOperand(size_t index = 0) { return MemoryOperand(&index); } |
119 | 119 |
120 MemOperand ToMemOperand(InstructionOperand* op) const { | 120 MemOperand ToMemOperand(InstructionOperand* op) const { |
121 DCHECK(op != NULL); | 121 DCHECK(op != NULL); |
122 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); | 122 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); |
123 FrameOffset offset = | 123 FrameOffset offset = frame_access_state()->GetFrameOffset( |
124 linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame()); | 124 AllocatedOperand::cast(op)->index()); |
125 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset()); | 125 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset()); |
126 } | 126 } |
127 }; | 127 }; |
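
The change to ToMemOperand above is the heart of this patch: spill-slot addresses now come from frame_access_state()->GetFrameOffset(), which chooses between an sp- and an fp-relative base and folds in any tracked stack-pointer delta, instead of going through linkage()->GetFrameOffset(..., frame()). Below is a minimal, self-contained C++ model of that idea; the class and method names only mirror the diff, the offset arithmetic is made up for illustration, and none of this is the real V8 implementation.

    // Minimal model of the FrameAccessState idea this patch switches to.  The
    // names mirror the diff; the offset arithmetic is illustrative only.
    #include <cassert>
    #include <cstdio>

    struct FrameOffset {
      bool from_sp;  // true: offset is sp-relative, false: fp-relative
      int offset;    // byte offset from the chosen base register
    };

    class FrameAccessState {
     public:
      void SetFrameAccessToSP() { use_sp_ = true; }
      void SetFrameAccessToFP() { use_sp_ = false; }
      void SetFrameAccessToDefault() { use_sp_ = !has_frame_; }
      void IncreaseSPDelta(int slots) { sp_delta_ += slots; }
      void ClearSPDelta() { sp_delta_ = 0; }

      // Map an allocated spill-slot index to a base register plus offset.
      // sp-relative access must add whatever has been pushed since the frame
      // was set up (sp_delta_); fp-relative access does not.
      FrameOffset GetFrameOffset(int spill_slot) const {
        const int kPointerSize = 4;  // MIPS32 word size
        if (use_sp_) return {true, (spill_slot + sp_delta_) * kPointerSize};
        return {false, -(spill_slot + 1) * kPointerSize};  // made-up fp layout
      }

     private:
      bool has_frame_ = true;
      bool use_sp_ = false;
      int sp_delta_ = 0;
    };

    int main() {
      FrameAccessState state;
      state.SetFrameAccessToDefault();
      assert(!state.GetFrameOffset(2).from_sp);  // frame present -> fp base

      // After two words are pushed for outgoing arguments, sp-relative
      // offsets must be biased by the tracked delta.
      state.SetFrameAccessToSP();
      state.IncreaseSPDelta(2);
      FrameOffset off = state.GetFrameOffset(2);
      std::printf("sp + %d bytes\n", off.offset);  // prints "sp + 16 bytes"
      assert(off.from_sp);
      return 0;
    }

The remaining hunks in this diff are mostly call sites keeping such a state object up to date.
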
128 | 128 |
129 | 129 |
130 static inline bool HasRegisterInput(Instruction* instr, size_t index) { | 130 static inline bool HasRegisterInput(Instruction* instr, size_t index) { |
131 return instr->InputAt(index)->IsRegister(); | 131 return instr->InputAt(index)->IsRegister(); |
132 } | 132 } |
133 | 133 |
134 | 134 |
(...skipping 316 matching lines...)
451 __ bind(ool->exit()); \ | 451 __ bind(ool->exit()); \ |
452 __ bind(&done); \ | 452 __ bind(&done); \ |
453 } while (0) | 453 } while (0) |
454 | 454 |
455 | 455 |
456 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 456 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { |
457 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 457 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
458 if (sp_slot_delta > 0) { | 458 if (sp_slot_delta > 0) { |
459 __ addiu(sp, sp, sp_slot_delta * kPointerSize); | 459 __ addiu(sp, sp, sp_slot_delta * kPointerSize); |
460 } | 460 } |
461 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 461 if (frame()->needs_frame()) { |
462 int spill_slots = frame()->GetSpillSlotCount(); | |
463 bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0; | |
464 if (has_frame) { | |
465 __ Pop(ra, fp); | 462 __ Pop(ra, fp); |
466 } | 463 } |
| 464 frame_access_state()->SetFrameAccessToDefault(); |
467 } | 465 } |
468 | 466 |
469 | 467 |
470 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 468 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { |
471 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 469 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
472 if (sp_slot_delta < 0) { | 470 if (sp_slot_delta < 0) { |
473 __ Subu(sp, sp, Operand(-sp_slot_delta * kPointerSize)); | 471 __ Subu(sp, sp, Operand(-sp_slot_delta * kPointerSize)); |
| 472 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); |
474 } | 473 } |
| 474 frame_access_state()->SetFrameAccessToSP(); |
475 } | 475 } |
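
Taken together, the two helpers above show the intended tail-call sequence: preparation may grow the stack and switches spill addressing to sp, while deconstruction frees surplus slots, pops the frame if one exists, and restores the default base. A stand-alone trace of that sequence follows; the struct, its fields, and the slot arithmetic are illustrative stand-ins, not V8 API.

    // Stand-alone trace of the tail-call sequence modelled on the two helpers
    // above; everything here is an illustrative stand-in, not V8 code.
    #include <cassert>

    constexpr int kPointerSize = 4;  // MIPS32 word size

    struct TailCallModel {
      int sp_bytes = 0;        // net sp movement in bytes (negative = grown)
      int sp_delta_slots = 0;  // slots tracked for sp-relative addressing
      bool access_via_sp = false;

      // AssemblePrepareTailCall: claim extra slots when the callee needs more
      // stack parameters than the caller received, then address spill slots
      // through sp while the frame is being dismantled.
      void Prepare(int sp_slot_delta) {
        if (sp_slot_delta < 0) {
          sp_bytes -= -sp_slot_delta * kPointerSize;
          sp_delta_slots += -sp_slot_delta;
        }
        access_via_sp = true;
      }

      // AssembleDeconstructActivationRecord: free surplus slots, pop ra/fp if
      // a frame was built, and fall back to default frame access.
      void Deconstruct(int sp_slot_delta, bool needs_frame) {
        if (sp_slot_delta > 0) sp_bytes += sp_slot_delta * kPointerSize;
        if (needs_frame) sp_bytes += 2 * kPointerSize;  // Pop(ra, fp)
        access_via_sp = false;
        sp_delta_slots = 0;
      }
    };

    int main() {
      TailCallModel m;
      m.Prepare(-2);  // callee takes two more stack parameters
      assert(m.sp_bytes == -8 && m.access_via_sp);
      m.Deconstruct(-2, /*needs_frame=*/true);
      assert(!m.access_via_sp && m.sp_delta_slots == 0);
      return 0;
    }
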
476 | 476 |
477 | 477 |
478 // Assembles an instruction after register allocation, producing machine code. | 478 // Assembles an instruction after register allocation, producing machine code. |
479 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { | 479 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { |
480 MipsOperandConverter i(this, instr); | 480 MipsOperandConverter i(this, instr); |
481 InstructionCode opcode = instr->opcode(); | 481 InstructionCode opcode = instr->opcode(); |
482 | 482 |
483 switch (ArchOpcodeField::decode(opcode)) { | 483 switch (ArchOpcodeField::decode(opcode)) { |
484 case kArchCallCodeObject: { | 484 case kArchCallCodeObject: { |
485 EnsureSpaceForLazyDeopt(); | 485 EnsureSpaceForLazyDeopt(); |
486 if (instr->InputAt(0)->IsImmediate()) { | 486 if (instr->InputAt(0)->IsImmediate()) { |
487 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), | 487 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), |
488 RelocInfo::CODE_TARGET); | 488 RelocInfo::CODE_TARGET); |
489 } else { | 489 } else { |
490 __ addiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag); | 490 __ addiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag); |
491 __ Call(at); | 491 __ Call(at); |
492 } | 492 } |
493 RecordCallPosition(instr); | 493 RecordCallPosition(instr); |
| 494 frame_access_state()->ClearSPDelta(); |
494 break; | 495 break; |
495 } | 496 } |
496 case kArchTailCallCodeObject: { | 497 case kArchTailCallCodeObject: { |
497 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 498 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
498 AssembleDeconstructActivationRecord(stack_param_delta); | 499 AssembleDeconstructActivationRecord(stack_param_delta); |
499 if (instr->InputAt(0)->IsImmediate()) { | 500 if (instr->InputAt(0)->IsImmediate()) { |
500 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), | 501 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)), |
501 RelocInfo::CODE_TARGET); | 502 RelocInfo::CODE_TARGET); |
502 } else { | 503 } else { |
503 __ addiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag); | 504 __ addiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag); |
504 __ Jump(at); | 505 __ Jump(at); |
505 } | 506 } |
| 507 frame_access_state()->ClearSPDelta(); |
506 break; | 508 break; |
507 } | 509 } |
508 case kArchCallJSFunction: { | 510 case kArchCallJSFunction: { |
509 EnsureSpaceForLazyDeopt(); | 511 EnsureSpaceForLazyDeopt(); |
510 Register func = i.InputRegister(0); | 512 Register func = i.InputRegister(0); |
511 if (FLAG_debug_code) { | 513 if (FLAG_debug_code) { |
512 // Check the function's context matches the context argument. | 514 // Check the function's context matches the context argument. |
513 __ lw(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); | 515 __ lw(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); |
514 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); | 516 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); |
515 } | 517 } |
516 | 518 |
517 __ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 519 __ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
518 __ Call(at); | 520 __ Call(at); |
519 RecordCallPosition(instr); | 521 RecordCallPosition(instr); |
| 522 frame_access_state()->ClearSPDelta(); |
520 break; | 523 break; |
521 } | 524 } |
522 case kArchTailCallJSFunction: { | 525 case kArchTailCallJSFunction: { |
523 Register func = i.InputRegister(0); | 526 Register func = i.InputRegister(0); |
524 if (FLAG_debug_code) { | 527 if (FLAG_debug_code) { |
525 // Check the function's context matches the context argument. | 528 // Check the function's context matches the context argument. |
526 __ lw(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); | 529 __ lw(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); |
527 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); | 530 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); |
528 } | 531 } |
529 | 532 |
530 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); | 533 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); |
531 AssembleDeconstructActivationRecord(stack_param_delta); | 534 AssembleDeconstructActivationRecord(stack_param_delta); |
532 __ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 535 __ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
533 __ Jump(at); | 536 __ Jump(at); |
| 537 frame_access_state()->ClearSPDelta(); |
534 break; | 538 break; |
535 } | 539 } |
536 case kArchLazyBailout: { | 540 case kArchLazyBailout: { |
537 EnsureSpaceForLazyDeopt(); | 541 EnsureSpaceForLazyDeopt(); |
538 RecordCallPosition(instr); | 542 RecordCallPosition(instr); |
539 break; | 543 break; |
540 } | 544 } |
541 case kArchPrepareCallCFunction: { | 545 case kArchPrepareCallCFunction: { |
542 int const num_parameters = MiscField::decode(instr->opcode()); | 546 int const num_parameters = MiscField::decode(instr->opcode()); |
543 __ PrepareCallCFunction(num_parameters, kScratchReg); | 547 __ PrepareCallCFunction(num_parameters, kScratchReg); |
| 548 // Frame alignment requires using FP-relative frame addressing. |
| 549 frame_access_state()->SetFrameAccessToFP(); |
544 break; | 550 break; |
545 } | 551 } |
546 case kArchPrepareTailCall: | 552 case kArchPrepareTailCall: |
547 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); | 553 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); |
548 break; | 554 break; |
549 case kArchCallCFunction: { | 555 case kArchCallCFunction: { |
550 int const num_parameters = MiscField::decode(instr->opcode()); | 556 int const num_parameters = MiscField::decode(instr->opcode()); |
551 if (instr->InputAt(0)->IsImmediate()) { | 557 if (instr->InputAt(0)->IsImmediate()) { |
552 ExternalReference ref = i.InputExternalReference(0); | 558 ExternalReference ref = i.InputExternalReference(0); |
553 __ CallCFunction(ref, num_parameters); | 559 __ CallCFunction(ref, num_parameters); |
554 } else { | 560 } else { |
555 Register func = i.InputRegister(0); | 561 Register func = i.InputRegister(0); |
556 __ CallCFunction(func, num_parameters); | 562 __ CallCFunction(func, num_parameters); |
557 } | 563 } |
| 564 frame_access_state()->SetFrameAccessToDefault(); |
| 565 frame_access_state()->ClearSPDelta(); |
558 break; | 566 break; |
559 } | 567 } |
560 case kArchJmp: | 568 case kArchJmp: |
561 AssembleArchJump(i.InputRpo(0)); | 569 AssembleArchJump(i.InputRpo(0)); |
562 break; | 570 break; |
563 case kArchLookupSwitch: | 571 case kArchLookupSwitch: |
564 AssembleArchLookupSwitch(instr); | 572 AssembleArchLookupSwitch(instr); |
565 break; | 573 break; |
566 case kArchTableSwitch: | 574 case kArchTableSwitch: |
567 AssembleArchTableSwitch(instr); | 575 AssembleArchTableSwitch(instr); |
(...skipping 388 matching lines...)
956 case kMipsLdc1: | 964 case kMipsLdc1: |
957 __ ldc1(i.OutputDoubleRegister(), i.MemoryOperand()); | 965 __ ldc1(i.OutputDoubleRegister(), i.MemoryOperand()); |
958 break; | 966 break; |
959 case kMipsSdc1: | 967 case kMipsSdc1: |
960 __ sdc1(i.InputDoubleRegister(2), i.MemoryOperand()); | 968 __ sdc1(i.InputDoubleRegister(2), i.MemoryOperand()); |
961 break; | 969 break; |
962 case kMipsPush: | 970 case kMipsPush: |
963 if (instr->InputAt(0)->IsDoubleRegister()) { | 971 if (instr->InputAt(0)->IsDoubleRegister()) { |
964 __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize)); | 972 __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize)); |
965 __ Subu(sp, sp, Operand(kDoubleSize)); | 973 __ Subu(sp, sp, Operand(kDoubleSize)); |
| 974 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize); |
966 } else { | 975 } else { |
967 __ Push(i.InputRegister(0)); | 976 __ Push(i.InputRegister(0)); |
| 977 frame_access_state()->IncreaseSPDelta(1); |
968 } | 978 } |
969 break; | 979 break; |
970 case kMipsStackClaim: { | 980 case kMipsStackClaim: { |
971 __ Subu(sp, sp, Operand(i.InputInt32(0))); | 981 __ Subu(sp, sp, Operand(i.InputInt32(0))); |
| 982 frame_access_state()->IncreaseSPDelta(i.InputInt32(0) / kPointerSize); |
972 break; | 983 break; |
973 } | 984 } |
974 case kMipsStoreToStackSlot: { | 985 case kMipsStoreToStackSlot: { |
975 if (instr->InputAt(0)->IsDoubleRegister()) { | 986 if (instr->InputAt(0)->IsDoubleRegister()) { |
976 __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, i.InputInt32(1))); | 987 __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, i.InputInt32(1))); |
977 } else { | 988 } else { |
978 __ sw(i.InputRegister(0), MemOperand(sp, i.InputInt32(1))); | 989 __ sw(i.InputRegister(0), MemOperand(sp, i.InputInt32(1))); |
979 } | 990 } |
980 break; | 991 break; |
981 } | 992 } |
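
The kMipsPush and kMipsStackClaim hunks above add the bookkeeping that keeps sp-relative spill access correct while values are pushed for an outgoing call: every push or claim is recorded in pointer-sized slots. A small self-contained sketch of that accounting (the tracker type and constants are illustrative, not V8's):

    // Sketch of the sp-delta accounting added around kMipsPush and
    // kMipsStackClaim; the tracker type is illustrative, not FrameAccessState.
    #include <cassert>

    constexpr int kPointerSize = 4;  // MIPS32 word
    constexpr int kDoubleSize = 8;   // FPU double

    struct SPDeltaTracker {
      int delta_slots = 0;  // pointer-sized slots pushed since frame setup

      // kMipsPush: a double push moves sp by kDoubleSize (two slots), a plain
      // register push by one slot.
      void Push(bool is_double) {
        delta_slots += is_double ? kDoubleSize / kPointerSize : 1;
      }

      // kMipsStackClaim: a raw claim of `bytes` adds bytes / kPointerSize slots.
      void Claim(int bytes) { delta_slots += bytes / kPointerSize; }

      // An sp-relative spill-slot access must add the tracked delta so it
      // still names the same slot after the pushes.
      int SPRelativeOffset(int spill_slot) const {
        return (spill_slot + delta_slots) * kPointerSize;
      }
    };

    int main() {
      SPDeltaTracker t;
      t.Push(/*is_double=*/true);   // kMipsPush of a double register
      t.Push(/*is_double=*/false);  // kMipsPush of a GP register
      t.Claim(16);                  // kMipsStackClaim of 16 bytes
      assert(t.delta_slots == 2 + 1 + 4);
      assert(t.SPRelativeOffset(0) == 7 * kPointerSize);
      return 0;
    }
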
(...skipping 310 matching lines...)
1292 int deoptimization_id, Deoptimizer::BailoutType bailout_type) { | 1303 int deoptimization_id, Deoptimizer::BailoutType bailout_type) { |
1293 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( | 1304 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry( |
1294 isolate(), deoptimization_id, bailout_type); | 1305 isolate(), deoptimization_id, bailout_type); |
1295 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); | 1306 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); |
1296 } | 1307 } |
1297 | 1308 |
1298 | 1309 |
1299 void CodeGenerator::AssemblePrologue() { | 1310 void CodeGenerator::AssemblePrologue() { |
1300 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1311 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
1301 int stack_shrink_slots = frame()->GetSpillSlotCount(); | 1312 int stack_shrink_slots = frame()->GetSpillSlotCount(); |
1302 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1313 if (descriptor->IsCFunctionCall()) { |
1303 __ Push(ra, fp); | 1314 __ Push(ra, fp); |
1304 __ mov(fp, sp); | 1315 __ mov(fp, sp); |
1305 } else if (descriptor->IsJSFunctionCall()) { | 1316 } else if (descriptor->IsJSFunctionCall()) { |
1306 CompilationInfo* info = this->info(); | 1317 CompilationInfo* info = this->info(); |
1307 __ Prologue(info->IsCodePreAgingActive()); | 1318 __ Prologue(info->IsCodePreAgingActive()); |
1308 } else if (needs_frame_) { | 1319 } else if (frame()->needs_frame()) { |
1309 __ StubPrologue(); | 1320 __ StubPrologue(); |
1310 } else { | 1321 } else { |
1311 frame()->SetElidedFrameSizeInSlots(0); | 1322 frame()->SetElidedFrameSizeInSlots(0); |
1312 } | 1323 } |
| 1324 frame_access_state()->SetFrameAccessToDefault(); |
1313 | 1325 |
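
The prologue now dispatches on descriptor->IsCFunctionCall(), descriptor->IsJSFunctionCall(), and frame()->needs_frame() instead of comparing descriptor kinds and re-deriving the frame requirement locally. A rough stand-alone sketch of that dispatch, with the Descriptor struct and FrameKind enum as hypothetical stand-ins for the real CallDescriptor and Frame queries:

    // Rough sketch of the prologue dispatch rewritten above; Descriptor and
    // FrameKind are hypothetical stand-ins, not V8 types.
    #include <cstdio>

    enum class FrameKind { kCFunction, kJSFunction, kStub, kElided };

    struct Descriptor {
      bool is_c_function_call = false;
      bool is_js_function_call = false;
    };

    // needs_frame stands in for frame()->needs_frame(): a stub frame is still
    // required for code that is neither a C nor a JS call once it has spill
    // slots or makes calls that need one.
    FrameKind SelectPrologue(const Descriptor& d, bool needs_frame) {
      if (d.is_c_function_call) return FrameKind::kCFunction;    // Push ra, fp; mov fp, sp
      if (d.is_js_function_call) return FrameKind::kJSFunction;  // __ Prologue(...)
      if (needs_frame) return FrameKind::kStub;                  // __ StubPrologue()
      return FrameKind::kElided;                                 // no frame at all
    }

    int main() {
      Descriptor stub;  // neither a C nor a JS call
      FrameKind kind = SelectPrologue(stub, /*needs_frame=*/true);
      std::printf("stub frame? %s\n", kind == FrameKind::kStub ? "yes" : "no");
      return 0;
    }
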
1314 if (info()->is_osr()) { | 1326 if (info()->is_osr()) { |
1315 // TurboFan OSR-compiled functions cannot be entered directly. | 1327 // TurboFan OSR-compiled functions cannot be entered directly. |
1316 __ Abort(kShouldNotDirectlyEnterOsrFunction); | 1328 __ Abort(kShouldNotDirectlyEnterOsrFunction); |
1317 | 1329 |
1318 // Unoptimized code jumps directly to this entrypoint while the unoptimized | 1330 // Unoptimized code jumps directly to this entrypoint while the unoptimized |
1319 // frame is still on the stack. Optimized code uses OSR values directly from | 1331 // frame is still on the stack. Optimized code uses OSR values directly from |
1320 // the unoptimized frame. Thus, all that needs to be done is to allocate the | 1332 // the unoptimized frame. Thus, all that needs to be done is to allocate the |
1321 // remaining stack slots. | 1333 // remaining stack slots. |
1322 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --"); | 1334 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --"); |
(...skipping 42 matching lines...)
1365 if (saves != 0) { | 1377 if (saves != 0) { |
1366 __ MultiPop(saves); | 1378 __ MultiPop(saves); |
1367 } | 1379 } |
1368 | 1380 |
1369 // Restore FPU registers. | 1381 // Restore FPU registers. |
1370 const RegList saves_fpu = descriptor->CalleeSavedFPRegisters(); | 1382 const RegList saves_fpu = descriptor->CalleeSavedFPRegisters(); |
1371 if (saves_fpu != 0) { | 1383 if (saves_fpu != 0) { |
1372 __ MultiPopFPU(saves_fpu); | 1384 __ MultiPopFPU(saves_fpu); |
1373 } | 1385 } |
1374 | 1386 |
1375 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1387 if (descriptor->IsCFunctionCall()) { |
1376 __ mov(sp, fp); | 1388 __ mov(sp, fp); |
1377 __ Pop(ra, fp); | 1389 __ Pop(ra, fp); |
1378 } else if (descriptor->IsJSFunctionCall() || needs_frame_) { | 1390 } else if (frame()->needs_frame()) { |
1379 // Canonicalize JSFunction return sites for now. | 1391 // Canonicalize JSFunction return sites for now. |
1380 if (return_label_.is_bound()) { | 1392 if (return_label_.is_bound()) { |
1381 __ Branch(&return_label_); | 1393 __ Branch(&return_label_); |
1382 return; | 1394 return; |
1383 } else { | 1395 } else { |
1384 __ bind(&return_label_); | 1396 __ bind(&return_label_); |
1385 __ mov(sp, fp); | 1397 __ mov(sp, fp); |
1386 __ Pop(ra, fp); | 1398 __ Pop(ra, fp); |
1387 } | 1399 } |
1388 } | 1400 } |
(...skipping 211 matching lines...)
1600 padding_size -= v8::internal::Assembler::kInstrSize; | 1612 padding_size -= v8::internal::Assembler::kInstrSize; |
1601 } | 1613 } |
1602 } | 1614 } |
1603 } | 1615 } |
1604 | 1616 |
1605 #undef __ | 1617 #undef __ |
1606 | 1618 |
1607 } // namespace compiler | 1619 } // namespace compiler |
1608 } // namespace internal | 1620 } // namespace internal |
1609 } // namespace v8 | 1621 } // namespace v8 |