Chromium Code Reviews

Unified Diff: src/compiler/arm64/code-generator-arm64.cc

Issue 1811283003: [wasm] WIP fix arm64 frame alignment. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 9 months ago
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/compiler/code-generator.h"

 #include "src/arm64/frames-arm64.h"
 #include "src/arm64/macro-assembler-arm64.h"
 #include "src/ast/scopes.h"
 #include "src/compiler/code-generator-impl.h"
(...skipping 512 matching lines...)
     case kArchCallCodeObject: {
       EnsureSpaceForLazyDeopt();
       if (instr->InputAt(0)->IsImmediate()) {
         __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
                 RelocInfo::CODE_TARGET);
       } else {
         Register target = i.InputRegister(0);
         __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
         __ Call(target);
       }
+      RecordCallPosition(instr);
       // TODO(titzer): this is ugly. JSSP should be a caller-save register
       // in this case, but it is not possible to express in the register
       // allocator.
       CallDescriptor::Flags flags =
           static_cast<CallDescriptor::Flags>(MiscField::decode(opcode));
ahaas 2016/03/29 08:33:58: You could use "CallDescriptor::Flags flags(MiscField::decode(opcode))".
titzer 2016/03/29 09:10:53: Done.
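For reference, a standalone analogue of the spelling ahaas suggests (not V8 code; the Flags wrapper and DecodeMiscField helper below are made up for illustration): a Flags-style type with an explicit constructor from an integer can be constructed directly from the decoded field, making the static_cast unnecessary.

#include <cassert>

// Stand-in for CallDescriptor::Flags: a thin wrapper over an integer bitfield.
class Flags {
 public:
  explicit Flags(int bits) : bits_(bits) {}
  int bits() const { return bits_; }

 private:
  int bits_;
};

// Hypothetical stand-in for MiscField::decode(opcode).
int DecodeMiscField(int opcode) { return opcode >> 9; }

int main() {
  int opcode = 3 << 9;
  // The form in this patch set, spelled with an explicit cast.
  Flags a = static_cast<Flags>(DecodeMiscField(opcode));
  // The form from the review comment: construct the flags directly.
  Flags b(DecodeMiscField(opcode));
  assert(a.bits() == b.bits());
  return 0;
}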
       if (flags & CallDescriptor::kRestoreJSSP) {
-        __ mov(jssp, csp);
+        __ Ldr(jssp, MemOperand(csp));
+        __ Mov(csp, jssp);
+      }
+      if (flags & CallDescriptor::kRestoreCSP) {
+        __ Mov(csp, jssp);
+        __ AssertCspAligned();
       }
       frame_access_state()->ClearSPDelta();
-      RecordCallPosition(instr);
       break;
     }
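A note on how the new kRestoreJSSP sequence pairs with the kArm64ClaimCSP case further down: when the claim switches from jssp to an aligned csp, it saves the old jssp in the slot the new csp points at; after the call, Ldr(jssp, MemOperand(csp)) reloads that value and Mov(csp, jssp) gives the slot back. Below is a small host-side model of that round trip (not V8 code; it assumes the simplest case where no extra slots are claimed and csp is back at the save slot when the call returns).

#include <cassert>
#include <cstdint>

constexpr uint64_t kPointerSize = 8;
constexpr uint64_t kFrameAlignment = 16;  // arm64 activation frame alignment

int main() {
  // Word-granular model of a small stack region; the stack grows downwards.
  uint64_t slots[64] = {};
  const uint64_t base = 64 * kPointerSize;
  auto at = [&](uint64_t addr) -> uint64_t& { return slots[addr / kPointerSize]; };

  uint64_t jssp = base - 3 * kPointerSize;  // JS stack pointer, not 16-byte aligned

  // kArm64ClaimCSP with prev == jssp and count == 0: align csp below jssp
  // and store the previous jssp in the slot csp now points at.
  uint64_t csp = (jssp - kPointerSize) & ~(kFrameAlignment - 1);
  at(csp) = jssp;

  // ... the call runs on csp and restores it before returning ...

  // kRestoreJSSP after the call: reload the saved jssp, then hand the
  // stack back to it.
  jssp = at(csp);
  csp = jssp;

  assert(jssp == base - 3 * kPointerSize);
  assert(csp == jssp);
  return 0;
}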
     case kArchTailCallCodeObjectFromJSFunction:
     case kArchTailCallCodeObject: {
       int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
       AssembleDeconstructActivationRecord(stack_param_delta);
       if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
         AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                          i.TempRegister(0), i.TempRegister(1),
                                          i.TempRegister(2));
(...skipping 15 matching lines...)
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
         UseScratchRegisterScope scope(masm());
         Register temp = scope.AcquireX();
         __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
         __ cmp(cp, temp);
         __ Assert(eq, kWrongFunctionContext);
       }
       __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
       __ Call(x10);
+      RecordCallPosition(instr);
       // TODO(titzer): this is ugly. JSSP should be a caller-save register
       // in this case, but it is not possible to express in the register
       // allocator.
       CallDescriptor::Flags flags =
           static_cast<CallDescriptor::Flags>(MiscField::decode(opcode));
       if (flags & CallDescriptor::kRestoreJSSP) {
-        __ mov(jssp, csp);
+        __ Ldr(jssp, MemOperand(csp));
+        __ Mov(csp, jssp);
+      }
+      if (flags & CallDescriptor::kRestoreCSP) {
+        __ Mov(csp, jssp);
+        __ AssertCspAligned();
       }
       frame_access_state()->ClearSPDelta();
-      RecordCallPosition(instr);
       break;
     }
     case kArchTailCallJSFunctionFromJSFunction:
     case kArchTailCallJSFunction: {
       Register func = i.InputRegister(0);
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
         UseScratchRegisterScope scope(masm());
         Register temp = scope.AcquireX();
         __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
(...skipping 362 matching lines...)
                  i.InputInt6(3));
       break;
     case kArm64TestAndBranch32:
     case kArm64TestAndBranch:
       // Pseudo instructions turned into tbz/tbnz in AssembleArchBranch.
       break;
     case kArm64CompareAndBranch32:
       // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch.
       break;
     case kArm64ClaimCSP: {
-      int count = i.InputInt32(0);
+      int count = RoundUp(i.InputInt32(0), 2);
       Register prev = __ StackPointer();
       if (prev.Is(jssp)) {
-        __ AlignAndSetCSPForFrame();
+        // TODO(titzer): make this a macro-assembler method.
+        // Align the CSP and store the previous JSSP on the stack.
+        UseScratchRegisterScope scope(masm());
+        Register tmp = scope.AcquireX();
+
+        int sp_alignment = __ ActivationFrameAlignment();
+        __ Sub(tmp, jssp, kPointerSize);
+        __ And(tmp, tmp, Operand(~static_cast<uint64_t>(sp_alignment - 1)));
+        __ Mov(csp, tmp);
+        __ Str(jssp, MemOperand(csp));
+        if (count > 0) {
+          __ SetStackPointer(csp);
+          __ Claim(count);
+          __ SetStackPointer(prev);
+        }
+      } else {
+        __ AssertCspAligned();
+        if (count > 0) {
ahaas 2016/03/29 08:33:58: count is <= 0 here, so this condition is always false.
titzer 2016/03/29 09:10:53: There's no need to rely on that invariant, since it…
+          __ Claim(count);
+          frame_access_state()->IncreaseSPDelta(count);
+        }
       }
-      if (count > 0) {
-        __ Claim(count);
-      }
-      __ SetStackPointer(prev);
-      frame_access_state()->IncreaseSPDelta(count);
       break;
     }
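The RoundUp(i.InputInt32(0), 2) change above is what keeps csp 16-byte aligned while stack slots are 8 bytes: the claim always consumes an even number of slots. A minimal standalone sketch of that arithmetic (not V8 code; the RoundUp helper is assumed to behave like v8's for non-negative values):

#include <cassert>
#include <cstdint>

// Assumed to match v8's RoundUp for non-negative values.
int RoundUp(int value, int multiple) {
  return ((value + multiple - 1) / multiple) * multiple;
}

int main() {
  const uint64_t kPointerSize = 8;
  uint64_t csp = 0x10000;               // 16-byte aligned to start with
  int requested = 3;                    // an odd number of slots
  int claimed = RoundUp(requested, 2);  // rounded up to 4 slots
  csp -= claimed * kPointerSize;
  assert(csp % 16 == 0);                // alignment is preserved
  return 0;
}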
     case kArm64ClaimJSSP: {
       int count = i.InputInt32(0);
       if (csp.Is(__ StackPointer())) {
-        // No JSP is set up. Compute it from the CSP.
-        int even = RoundUp(count, 2);
-        __ Sub(jssp, csp, count * kPointerSize);
-        __ Sub(csp, csp, even * kPointerSize);  // Must always be aligned.
-        frame_access_state()->IncreaseSPDelta(even);
+        // No JSSP is set up. Compute it from the CSP.
+        __ AssertCspAligned();
+        if (count > 0) {
+          int even = RoundUp(count, 2);
+          __ Sub(jssp, csp, count * kPointerSize);
+          __ Sub(csp, csp, even * kPointerSize);  // Must always be aligned.
+          frame_access_state()->IncreaseSPDelta(even);
+        } else {
+          __ Mov(jssp, csp);
+        }
       } else {
         // JSSP is the current stack pointer, just use regular Claim().
         __ Claim(count);
         frame_access_state()->IncreaseSPDelta(count);
       }
       break;
     }
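In the kArm64ClaimJSSP case above, jssp is placed exactly count slots below csp, while csp itself drops by the even-rounded amount and therefore stays 16-byte aligned. A standalone sketch of that relationship (not V8 code):

#include <cassert>
#include <cstdint>

int RoundUp(int value, int multiple) {
  return ((value + multiple - 1) / multiple) * multiple;
}

int main() {
  const uint64_t kPointerSize = 8;
  uint64_t csp = 0x20000;        // 16-byte aligned
  int count = 5;                 // odd slot count requested
  int even = RoundUp(count, 2);  // csp moves by 6 slots
  uint64_t jssp = csp - count * kPointerSize;
  csp -= even * kPointerSize;
  assert(csp % 16 == 0);                                // csp stays aligned
  assert(jssp - csp == (even - count) * kPointerSize);  // one padding slot below jssp
  return 0;
}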
     case kArm64PokeCSP:  // fall through
     case kArm64PokeJSSP: {
       Register prev = __ StackPointer();
(...skipping 460 matching lines...)
 void CodeGenerator::AssembleDeoptimizerCall(
     int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       isolate(), deoptimization_id, bailout_type);
   __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
 }


 void CodeGenerator::AssemblePrologue() {
   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
+  if (descriptor->UseNativeStack()) {
+    __ AssertCspAligned();
+  }
+
   frame()->AlignFrame(16);
   int stack_shrink_slots = frame()->GetSpillSlotCount();
   if (frame()->needs_frame()) {
     if (descriptor->IsJSFunctionCall()) {
       DCHECK(!descriptor->UseNativeStack());
       __ SetStackPointer(jssp);
       __ Prologue(this->info()->GeneratePreagedPrologue());
     } else {
       if (descriptor->UseNativeStack() || descriptor->IsCFunctionCall()) {
         __ SetStackPointer(csp);
(...skipping 92 matching lines...)
         pop_count += (pop_count & 1);  // align
       } else {
         __ Mov(jssp, fp);
       }
       __ Pop(fp, lr);
     }
   } else if (descriptor->UseNativeStack()) {
     pop_count += (pop_count & 1);  // align
   }
   __ Drop(pop_count);
+
+  if (descriptor->UseNativeStack()) {
+    __ AssertCspAligned();
+  }
   __ Ret();
 }
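The pop_count parity adjustment in AssembleReturn serves the same purpose on the way out: when the native (csp) stack is in use, dropping an odd number of 8-byte slots would leave csp misaligned, so one padding slot is added before the Drop and the new AssertCspAligned() checks the result. A tiny standalone sketch (not V8 code):

#include <cassert>
#include <cstdint>

int main() {
  const uint64_t kPointerSize = 8;
  uint64_t csp = 0x30000;        // 16-byte aligned before the return sequence
  int pop_count = 7;             // odd
  pop_count += (pop_count & 1);  // align: 7 -> 8
  csp += pop_count * kPointerSize;
  assert(csp % 16 == 0);         // what AssertCspAligned() verifies
  return 0;
}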


 void CodeGenerator::AssembleMove(InstructionOperand* source,
                                  InstructionOperand* destination) {
   Arm64OperandConverter g(this, nullptr);
   // Dispatch on the source and destination operand kinds. Not all
   // combinations are possible.
   if (source->IsRegister()) {
(...skipping 173 matching lines...)
       padding_size -= kInstructionSize;
     }
   }
 }

 #undef __

 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8