Chromium Code Reviews

Diff: src/compiler/arm64/code-generator-arm64.cc

Issue 1494123002: [turbofan, arm64] Fix native stack parameters on arm64. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Minor changes according to comments. Created 5 years ago
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/compiler/code-generator.h"

 #include "src/arm64/frames-arm64.h"
 #include "src/arm64/macro-assembler-arm64.h"
 #include "src/ast/scopes.h"
 #include "src/compiler/code-generator-impl.h"
(...skipping 859 matching lines...)
     case kArm64CompareAndBranch32:
       // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch.
       break;
     case kArm64ClaimForCallArguments: {
       __ Claim(i.InputInt32(0));
       frame_access_state()->IncreaseSPDelta(i.InputInt32(0));
       break;
     }
     case kArm64Poke: {
       Operand operand(i.InputInt32(1) * kPointerSize);
-      __ Poke(i.InputRegister(0), operand);
+      if (instr->InputAt(0)->IsDoubleRegister()) {
+        __ Poke(i.InputFloat64Register(0), operand);
+      } else {
+        __ Poke(i.InputRegister(0), operand);
+      }
       break;
     }
     case kArm64PokePair: {
       int slot = i.InputInt32(2) - 1;
-      __ PokePair(i.InputRegister(1), i.InputRegister(0), slot * kPointerSize);
+      if (instr->InputAt(0)->IsDoubleRegister()) {
+        __ PokePair(i.InputFloat64Register(1), i.InputFloat64Register(0),
+                    slot * kPointerSize);
+      } else {
+        __ PokePair(i.InputRegister(1), i.InputRegister(0),
+                    slot * kPointerSize);
+      }
       break;
     }
     case kArm64Clz:
       __ Clz(i.OutputRegister64(), i.InputRegister64(0));
       break;
     case kArm64Clz32:
       __ Clz(i.OutputRegister32(), i.InputRegister32(0));
       break;
     case kArm64Cmp:
       __ Cmp(i.InputOrZeroRegister64(0), i.InputOperand(1));
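
For context on the hunk above: kArm64ClaimForCallArguments reserves stack slots for the outgoing call arguments, and kArm64Poke / kArm64PokePair then store each argument at slot * kPointerSize from the stack pointer; the new branches only select the FP-register form of Poke/PokePair when the operand holds a double. A minimal standalone sketch of the slot arithmetic (illustrative names, not V8 code):

    #include <cstdio>

    namespace {
    constexpr int kPointerSize = 8;  // one 64-bit stack slot on arm64

    // Byte offset, relative to the stack pointer, of the call-argument slot
    // written by a poke after the slots have been claimed.
    int PokeOffsetBytes(int slot) { return slot * kPointerSize; }
    }  // namespace

    int main() {
      const int claimed_slots = 4;  // e.g. claiming 4 slots reserves 32 bytes
      for (int slot = 0; slot < claimed_slots; ++slot) {
        std::printf("argument %d -> [sp + #%d]\n", slot, PokeOffsetBytes(slot));
      }
      return 0;
    }
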
(...skipping 389 matching lines...)
   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
   if (descriptor->IsCFunctionCall()) {
     __ SetStackPointer(csp);
     __ Push(lr, fp);
     __ Mov(fp, csp);
   } else if (descriptor->IsJSFunctionCall()) {
     CompilationInfo* info = this->info();
     __ SetStackPointer(jssp);
     __ Prologue(info->IsCodePreAgingActive());
   } else if (frame()->needs_frame()) {
-    __ SetStackPointer(jssp);
+    if (descriptor->UseNativeStack()) {
+      __ SetStackPointer(csp);
+    } else {
+      __ SetStackPointer(jssp);
+    }
     __ StubPrologue();
   } else {
+    if (descriptor->UseNativeStack()) {
+      __ SetStackPointer(csp);
+    } else {
+      __ SetStackPointer(jssp);
+    }
     frame()->SetElidedFrameSizeInSlots(0);
   }
   frame_access_state()->SetFrameAccessToDefault();

   int stack_shrink_slots = frame()->GetSpillSlotCount();
   if (info()->is_osr()) {
     // TurboFan OSR-compiled functions cannot be entered directly.
     __ Abort(kShouldNotDirectlyEnterOsrFunction);

     // Unoptimized code jumps directly to this entrypoint while the unoptimized
     // frame is still on the stack. Optimized code uses OSR values directly from
     // the unoptimized frame. Thus, all that needs to be done is to allocate the
     // remaining stack slots.
     if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
     osr_pc_offset_ = __ pc_offset();
     // TODO(titzer): cannot address target function == local #-1
     __ ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
     stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
   }

-  if (csp.Is(masm()->StackPointer())) {
+  // If frame()->needs_frame() is false, then
+  // frame()->AlignSavedCalleeRegisterSlots() is guaranteed to return 0.
+  if (csp.Is(masm()->StackPointer()) && frame()->needs_frame()) {
     // The system stack pointer requires 16-byte alignment at function call
     // boundaries.
+
     stack_shrink_slots += frame()->AlignSavedCalleeRegisterSlots();
   }
   __ Claim(stack_shrink_slots);

   // Save FP registers.
   CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
                                    descriptor->CalleeSavedFPRegisters());
   int saved_count = saves_fp.Count();
   if (saved_count != 0) {
     DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedFP().list());
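
The comment in the hunk above states the constraint behind much of this patch: on arm64 the system stack pointer (csp) must be 16-byte aligned at function call boundaries, so any count of 8-byte slots claimed on csp has to be rounded up to an even number. A small sketch of that rounding, assuming 8-byte slots (illustrative helper, not the V8 implementation):

    #include <cassert>
    #include <cstdio>

    namespace {
    constexpr int kSlotSizeBytes = 8;  // one stack slot on arm64

    // Round a slot count up to an even number so the byte total stays a
    // multiple of 16, as csp requires at call boundaries.
    int AlignSlotsTo16Bytes(int slots) { return slots + (slots & 1); }
    }  // namespace

    int main() {
      for (int slots = 0; slots <= 5; ++slots) {
        const int aligned = AlignSlotsTo16Bytes(slots);
        assert(aligned * kSlotSizeBytes % 16 == 0);
        std::printf("%d slots -> %d slots (%d bytes)\n", slots, aligned,
                    aligned * kSlotSizeBytes);
      }
      return 0;
    }
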
(...skipping 36 matching lines...)
   if (descriptor->IsCFunctionCall()) {
     __ Mov(csp, fp);
     __ Pop(fp, lr);
   } else if (frame()->needs_frame()) {
     // Canonicalize JSFunction return sites for now.
     if (return_label_.is_bound()) {
       __ B(&return_label_);
       return;
     } else {
       __ Bind(&return_label_);
-      __ Mov(jssp, fp);
+      if (descriptor->UseNativeStack()) {
+        __ Mov(csp, fp);
+      } else {
+        __ Mov(jssp, fp);
+      }
       __ Pop(fp, lr);
     }
+  } else if (descriptor->UseNativeStack()) {
+    pop_count += (pop_count & 1);
   }
   __ Drop(pop_count);
   __ Ret();
 }


 void CodeGenerator::AssembleMove(InstructionOperand* source,
                                  InstructionOperand* destination) {
   Arm64OperandConverter g(this, NULL);
   // Dispatch on the source and destination operand kinds. Not all
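
The new else-if arm in the return sequence above applies the same alignment rule on the way out: when no frame was built and the code runs on csp, an odd argument count is rounded up before the final Drop so the stack pointer stays 16-byte aligned. A small model of that decision for the no-frame case (illustrative only, not V8 code):

    #include <cstdio>

    namespace {
    // How many slots end up dropped before the return, mirroring the logic in
    // the hunk above for the case where no frame was built.
    int FinalDropCount(bool needs_frame, bool use_native_stack, int pop_count) {
      if (!needs_frame && use_native_stack) {
        pop_count += (pop_count & 1);  // keep csp 16-byte aligned
      }
      return pop_count;
    }
    }  // namespace

    int main() {
      std::printf("%d\n", FinalDropCount(false, true, 3));   // 4 slots (32 bytes)
      std::printf("%d\n", FinalDropCount(false, false, 3));  // 3 slots (jssp path)
      return 0;
    }
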
(...skipping 175 matching lines...)
       padding_size -= kInstructionSize;
     }
   }
 }

 #undef __

 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
