Chromium Code Reviews

Side by Side Diff: src/arm64/code-stubs-arm64.cc

Issue 246643014: CodeStubs contain their corresponding Isolate* now. (part 1) (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Feedback. Rebased. Created 6 years, 8 months ago
OLD | NEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 516 matching lines...)
527 descriptor->platform_specific_descriptor_ = &default_descriptor; 527 descriptor->platform_specific_descriptor_ = &default_descriptor;
528 } 528 }
529 } 529 }
530 530
531 531
532 #define __ ACCESS_MASM(masm) 532 #define __ ACCESS_MASM(masm)
533 533
534 534
535 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { 535 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
536 // Update the static counter each time a new code stub is generated. 536 // Update the static counter each time a new code stub is generated.
537 Isolate* isolate = masm->isolate(); 537 isolate()->counters()->code_stubs()->Increment();
538 isolate->counters()->code_stubs()->Increment();
539 538
540 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); 539 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate());
541 int param_count = descriptor->register_param_count_; 540 int param_count = descriptor->register_param_count_;
542 { 541 {
543 // Call the runtime system in a fresh internal frame. 542 // Call the runtime system in a fresh internal frame.
544 FrameScope scope(masm, StackFrame::INTERNAL); 543 FrameScope scope(masm, StackFrame::INTERNAL);
545 ASSERT((descriptor->register_param_count_ == 0) || 544 ASSERT((descriptor->register_param_count_ == 0) ||
546 x0.Is(descriptor->register_params_[param_count - 1])); 545 x0.Is(descriptor->register_params_[param_count - 1]));
547 546
548 // Push arguments 547 // Push arguments
549 MacroAssembler::PushPopQueue queue(masm); 548 MacroAssembler::PushPopQueue queue(masm);
550 for (int i = 0; i < param_count; ++i) { 549 for (int i = 0; i < param_count; ++i) {
(...skipping 491 matching lines...)
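
The masm->isolate() to isolate() rewrite in GenerateLightweightMiss above is the core of this patch: every CodeStub now carries its Isolate* from construction. A minimal sketch of the new shape, assuming a simplified base class (the real one also tracks stub keys and code caching):

class CodeStub {
 public:
  explicit CodeStub(Isolate* isolate) : isolate_(isolate) {}
  Isolate* isolate() const { return isolate_; }  // usable from any Generate() body
 private:
  Isolate* isolate_;  // cached at construction; owned by the VM
};

// Call sites shrink from
//   Isolate* isolate = masm->isolate();
//   isolate->counters()->code_stubs()->Increment();
// to
//   isolate()->counters()->code_stubs()->Increment();
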
1042 lhs_type, rhs_type, 1041 lhs_type, rhs_type,
1043 &flat_string_check, &slow); 1042 &flat_string_check, &slow);
1044 } 1043 }
1045 1044
1046 // Check for both being sequential ASCII strings, and inline if that is the 1045 // Check for both being sequential ASCII strings, and inline if that is the
1047 // case. 1046 // case.
1048 __ Bind(&flat_string_check); 1047 __ Bind(&flat_string_check);
1049 __ JumpIfBothInstanceTypesAreNotSequentialAscii(lhs_type, rhs_type, x14, 1048 __ JumpIfBothInstanceTypesAreNotSequentialAscii(lhs_type, rhs_type, x14,
1050 x15, &slow); 1049 x15, &slow);
1051 1050
1052 Isolate* isolate = masm->isolate(); 1051 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, x10,
1053 __ IncrementCounter(isolate->counters()->string_compare_native(), 1, x10,
1054 x11); 1052 x11);
1055 if (cond == eq) { 1053 if (cond == eq) {
1056 StringCompareStub::GenerateFlatAsciiStringEquals(masm, lhs, rhs, 1054 StringCompareStub::GenerateFlatAsciiStringEquals(masm, lhs, rhs,
1057 x10, x11, x12); 1055 x10, x11, x12);
1058 } else { 1056 } else {
1059 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, lhs, rhs, 1057 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, lhs, rhs,
1060 x10, x11, x12, x13); 1058 x10, x11, x12, x13);
1061 } 1059 }
1062 1060
1063 // Never fall through to here. 1061 // Never fall through to here.
(...skipping 41 matching lines...)
1105 // We don't care if MacroAssembler scratch registers are corrupted. 1103 // We don't care if MacroAssembler scratch registers are corrupted.
1106 saved_regs.Remove(*(masm->TmpList())); 1104 saved_regs.Remove(*(masm->TmpList()));
1107 saved_fp_regs.Remove(*(masm->FPTmpList())); 1105 saved_fp_regs.Remove(*(masm->FPTmpList()));
1108 1106
1109 __ PushCPURegList(saved_regs); 1107 __ PushCPURegList(saved_regs);
1110 if (save_doubles_ == kSaveFPRegs) { 1108 if (save_doubles_ == kSaveFPRegs) {
1111 __ PushCPURegList(saved_fp_regs); 1109 __ PushCPURegList(saved_fp_regs);
1112 } 1110 }
1113 1111
1114 AllowExternalCallThatCantCauseGC scope(masm); 1112 AllowExternalCallThatCantCauseGC scope(masm);
1115 __ Mov(x0, ExternalReference::isolate_address(masm->isolate())); 1113 __ Mov(x0, ExternalReference::isolate_address(isolate()));
1116 __ CallCFunction( 1114 __ CallCFunction(
1117 ExternalReference::store_buffer_overflow_function(masm->isolate()), 1115 ExternalReference::store_buffer_overflow_function(isolate()), 1, 0);
1118 1, 0);
1119 1116
1120 if (save_doubles_ == kSaveFPRegs) { 1117 if (save_doubles_ == kSaveFPRegs) {
1121 __ PopCPURegList(saved_fp_regs); 1118 __ PopCPURegList(saved_fp_regs);
1122 } 1119 }
1123 __ PopCPURegList(saved_regs); 1120 __ PopCPURegList(saved_regs);
1124 __ Ret(); 1121 __ Ret();
1125 } 1122 }
1126 1123
1127 1124
1128 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( 1125 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
1129 Isolate* isolate) { 1126 Isolate* isolate) {
1130 StoreBufferOverflowStub stub1(kDontSaveFPRegs); 1127 StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
1131 stub1.GetCode(isolate); 1128 stub1.GetCode(isolate);
1132 StoreBufferOverflowStub stub2(kSaveFPRegs); 1129 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
1133 stub2.GetCode(isolate); 1130 stub2.GetCode(isolate);
1134 } 1131 }
1135 1132
1136 1133
1137 void StoreRegistersStateStub::Generate(MacroAssembler* masm) { 1134 void StoreRegistersStateStub::Generate(MacroAssembler* masm) {
1138 MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm); 1135 MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
1139 UseScratchRegisterScope temps(masm); 1136 UseScratchRegisterScope temps(masm);
1140 Register saved_lr = temps.UnsafeAcquire(to_be_pushed_lr()); 1137 Register saved_lr = temps.UnsafeAcquire(to_be_pushed_lr());
1141 Register return_address = temps.AcquireX(); 1138 Register return_address = temps.AcquireX();
1142 __ Mov(return_address, lr); 1139 __ Mov(return_address, lr);
(...skipping 164 matching lines...)
1307 // Find the inverse for exponents of -0.5. 1304 // Find the inverse for exponents of -0.5.
1308 __ Fmov(scratch0_double, 1.0); 1305 __ Fmov(scratch0_double, 1.0);
1309 __ Fdiv(result_double, scratch0_double, result_double); 1306 __ Fdiv(result_double, scratch0_double, result_double);
1310 __ B(&done); 1307 __ B(&done);
1311 } 1308 }
1312 1309
1313 { 1310 {
1314 AllowExternalCallThatCantCauseGC scope(masm); 1311 AllowExternalCallThatCantCauseGC scope(masm);
1315 __ Mov(saved_lr, lr); 1312 __ Mov(saved_lr, lr);
1316 __ CallCFunction( 1313 __ CallCFunction(
1317 ExternalReference::power_double_double_function(masm->isolate()), 1314 ExternalReference::power_double_double_function(isolate()),
1318 0, 2); 1315 0, 2);
1319 __ Mov(lr, saved_lr); 1316 __ Mov(lr, saved_lr);
1320 __ B(&done); 1317 __ B(&done);
1321 } 1318 }
1322 1319
1323 // Handle SMI exponents. 1320 // Handle SMI exponents.
1324 __ Bind(&exponent_is_smi); 1321 __ Bind(&exponent_is_smi);
1325 // x10 base_tagged The tagged base (input). 1322 // x10 base_tagged The tagged base (input).
1326 // x11 exponent_tagged The tagged exponent (input). 1323 // x11 exponent_tagged The tagged exponent (input).
1327 // d1 base_double The base as a double. 1324 // d1 base_double The base as a double.
(...skipping 62 matching lines...)
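
Both CallCFunction calls in this stub pass (0, 2) as the trailing arguments. Assuming the arm64 MacroAssembler convention, those are the counts of general-register and floating-point-register arguments respectively; the base and exponent travel in d0/d1, so no x registers are needed:

// Hedged reading of the helper (signature as assumed from the MacroAssembler):
// void CallCFunction(ExternalReference function,
//                    int num_reg_arguments,      // 0: nothing passed in x0..x7
//                    int num_double_arguments);  // 2: base and exponent in d0, d1
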
1390 // Put the arguments back on the stack. 1387 // Put the arguments back on the stack.
1391 __ Push(base_tagged, exponent_tagged); 1388 __ Push(base_tagged, exponent_tagged);
1392 __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1); 1389 __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1);
1393 1390
1394 // Return. 1391 // Return.
1395 __ Bind(&done); 1392 __ Bind(&done);
1396 __ AllocateHeapNumber(result_tagged, &call_runtime, scratch0, scratch1, 1393 __ AllocateHeapNumber(result_tagged, &call_runtime, scratch0, scratch1,
1397 result_double); 1394 result_double);
1398 ASSERT(result_tagged.is(x0)); 1395 ASSERT(result_tagged.is(x0));
1399 __ IncrementCounter( 1396 __ IncrementCounter(
1400 masm->isolate()->counters()->math_pow(), 1, scratch0, scratch1); 1397 isolate()->counters()->math_pow(), 1, scratch0, scratch1);
1401 __ Ret(); 1398 __ Ret();
1402 } else { 1399 } else {
1403 AllowExternalCallThatCantCauseGC scope(masm); 1400 AllowExternalCallThatCantCauseGC scope(masm);
1404 __ Mov(saved_lr, lr); 1401 __ Mov(saved_lr, lr);
1405 __ Fmov(base_double, base_double_copy); 1402 __ Fmov(base_double, base_double_copy);
1406 __ Scvtf(exponent_double, exponent_integer); 1403 __ Scvtf(exponent_double, exponent_integer);
1407 __ CallCFunction( 1404 __ CallCFunction(
1408 ExternalReference::power_double_double_function(masm->isolate()), 1405 ExternalReference::power_double_double_function(isolate()),
1409 0, 2); 1406 0, 2);
1410 __ Mov(lr, saved_lr); 1407 __ Mov(lr, saved_lr);
1411 __ Bind(&done); 1408 __ Bind(&done);
1412 __ IncrementCounter( 1409 __ IncrementCounter(
1413 masm->isolate()->counters()->math_pow(), 1, scratch0, scratch1); 1410 isolate()->counters()->math_pow(), 1, scratch0, scratch1);
1414 __ Ret(); 1411 __ Ret();
1415 } 1412 }
1416 } 1413 }
1417 1414
1418 1415
1419 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { 1416 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
1420 // It is important that the following stubs are generated in this order 1417 // It is important that the following stubs are generated in this order
1421 // because pregenerated stubs can only call other pregenerated stubs. 1418 // because pregenerated stubs can only call other pregenerated stubs.
1422 // RecordWriteStub uses StoreBufferOverflowStub, which in turn uses 1419 // RecordWriteStub uses StoreBufferOverflowStub, which in turn uses
1423 // CEntryStub. 1420 // CEntryStub.
1424 CEntryStub::GenerateAheadOfTime(isolate); 1421 CEntryStub::GenerateAheadOfTime(isolate);
1425 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); 1422 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
1426 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); 1423 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
1427 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); 1424 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
1428 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); 1425 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
1429 BinaryOpICStub::GenerateAheadOfTime(isolate); 1426 BinaryOpICStub::GenerateAheadOfTime(isolate);
1430 StoreRegistersStateStub::GenerateAheadOfTime(isolate); 1427 StoreRegistersStateStub::GenerateAheadOfTime(isolate);
1431 RestoreRegistersStateStub::GenerateAheadOfTime(isolate); 1428 RestoreRegistersStateStub::GenerateAheadOfTime(isolate);
1432 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); 1429 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
1433 } 1430 }
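
The ordering comment above is load-bearing: a pregenerated stub may only embed calls to code that already exists when it is compiled. Sketched with the constructor signatures this patch introduces:

// Generated first; depends on no other stub.
CEntryStub stub(isolate, 1, kDontSaveFPRegs);
stub.GetCode(isolate);
// Generated second; its body calls the CEntryStub that now exists.
StoreBufferOverflowStub overflow(isolate, kDontSaveFPRegs);
overflow.GetCode(isolate);
// RecordWriteStub instances (generated elsewhere) call
// StoreBufferOverflowStub, so both of the above must already be in the heap.
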
1434 1431
1435 1432
1436 void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) { 1433 void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
1437 StoreRegistersStateStub stub1(kDontSaveFPRegs); 1434 StoreRegistersStateStub stub1(isolate, kDontSaveFPRegs);
1438 stub1.GetCode(isolate); 1435 stub1.GetCode(isolate);
1439 StoreRegistersStateStub stub2(kSaveFPRegs); 1436 StoreRegistersStateStub stub2(isolate, kSaveFPRegs);
1440 stub2.GetCode(isolate); 1437 stub2.GetCode(isolate);
1441 } 1438 }
1442 1439
1443 1440
1444 void RestoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) { 1441 void RestoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
1445 RestoreRegistersStateStub stub1(kDontSaveFPRegs); 1442 RestoreRegistersStateStub stub1(isolate, kDontSaveFPRegs);
1446 stub1.GetCode(isolate); 1443 stub1.GetCode(isolate);
1447 RestoreRegistersStateStub stub2(kSaveFPRegs); 1444 RestoreRegistersStateStub stub2(isolate, kSaveFPRegs);
1448 stub2.GetCode(isolate); 1445 stub2.GetCode(isolate);
1449 } 1446 }
1450 1447
1451 1448
1452 void CodeStub::GenerateFPStubs(Isolate* isolate) { 1449 void CodeStub::GenerateFPStubs(Isolate* isolate) {
1453 // Floating-point code doesn't get special handling in ARM64, so there's 1450 // Floating-point code doesn't get special handling in ARM64, so there's
1454 // nothing to do here. 1451 // nothing to do here.
1455 USE(isolate); 1452 USE(isolate);
1456 } 1453 }
1457 1454
1458 1455
1459 bool CEntryStub::NeedsImmovableCode() { 1456 bool CEntryStub::NeedsImmovableCode() {
1460 // CEntryStub stores the return address on the stack before calling into 1457 // CEntryStub stores the return address on the stack before calling into
1461 // C++ code. In some cases, the VM accesses this address, but it is not used 1458 // C++ code. In some cases, the VM accesses this address, but it is not used
1462 // when the C++ code returns to the stub because LR holds the return address 1459 // when the C++ code returns to the stub because LR holds the return address
1463 // in AAPCS64. If the stub is moved (perhaps during a GC), we could end up 1460 // in AAPCS64. If the stub is moved (perhaps during a GC), we could end up
1464 // returning to dead code. 1461 // returning to dead code.
1465 // TODO(jbramley): Whilst this is the only analysis that makes sense, I can't 1462 // TODO(jbramley): Whilst this is the only analysis that makes sense, I can't
1466 // find any comment to confirm this, and I don't hit any crashes whatever 1463 // find any comment to confirm this, and I don't hit any crashes whatever
1467 // this function returns. The analysis should be properly confirmed. 1464 // this function returns. The analysis should be properly confirmed.
1468 return true; 1465 return true;
1469 } 1466 }
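
The hazard described above becomes concrete a little further down, where the stub pokes its own return address onto the stack. A sketch of the relevant pair:

__ Adr(x12, &return_location);  // PC-relative: an address inside this Code object
__ Poke(x12, 0);                // written to the stack for the VM to inspect
// If the GC moved this Code object, the poked address would still point into
// the old, dead copy; hence the unconditional "return true" above.
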
1470 1467
1471 1468
1472 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { 1469 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
1473 CEntryStub stub(1, kDontSaveFPRegs); 1470 CEntryStub stub(isolate, 1, kDontSaveFPRegs);
1474 stub.GetCode(isolate); 1471 stub.GetCode(isolate);
1475 CEntryStub stub_fp(1, kSaveFPRegs); 1472 CEntryStub stub_fp(isolate, 1, kSaveFPRegs);
1476 stub_fp.GetCode(isolate); 1473 stub_fp.GetCode(isolate);
1477 } 1474 }
1478 1475
1479 1476
1480 void CEntryStub::Generate(MacroAssembler* masm) { 1477 void CEntryStub::Generate(MacroAssembler* masm) {
1481 // The Abort mechanism relies on CallRuntime, which in turn relies on 1478 // The Abort mechanism relies on CallRuntime, which in turn relies on
1482 // CEntryStub, so until this stub has been generated, we have to use a 1479 // CEntryStub, so until this stub has been generated, we have to use a
1483 // fall-back Abort mechanism. 1480 // fall-back Abort mechanism.
1484 // 1481 //
1485 // Note that this stub must be generated before any use of Abort. 1482 // Note that this stub must be generated before any use of Abort.
(...skipping 89 matching lines...)
1575 // After a successful call, the exit frame, preserved registers (x21-x23) and 1572 // After a successful call, the exit frame, preserved registers (x21-x23) and
1576 // the arguments (including the receiver) are dropped or popped as 1573 // the arguments (including the receiver) are dropped or popped as
1577 // appropriate. The stub then returns. 1574 // appropriate. The stub then returns.
1578 // 1575 //
1579 // After an unsuccessful call, the exit frame and suchlike are left 1576 // After an unsuccessful call, the exit frame and suchlike are left
1580 // untouched, and the stub throws an exception by jumping to 1577 // untouched, and the stub throws an exception by jumping to
1581 // the exception_returned label. 1578 // the exception_returned label.
1582 1579
1583 ASSERT(csp.Is(__ StackPointer())); 1580 ASSERT(csp.Is(__ StackPointer()));
1584 1581
1585 Isolate* isolate = masm->isolate();
1586
1587 // Prepare AAPCS64 arguments to pass to the builtin. 1582 // Prepare AAPCS64 arguments to pass to the builtin.
1588 __ Mov(x0, argc); 1583 __ Mov(x0, argc);
1589 __ Mov(x1, argv); 1584 __ Mov(x1, argv);
1590 __ Mov(x2, ExternalReference::isolate_address(isolate)); 1585 __ Mov(x2, ExternalReference::isolate_address(isolate()));
1591 1586
1592 Label return_location; 1587 Label return_location;
1593 __ Adr(x12, &return_location); 1588 __ Adr(x12, &return_location);
1594 __ Poke(x12, 0); 1589 __ Poke(x12, 0);
1595 1590
1596 if (__ emit_debug_code()) { 1591 if (__ emit_debug_code()) {
1597 // Verify that the slot below fp[kSPOffset]-8 points to the return location 1592 // Verify that the slot below fp[kSPOffset]-8 points to the return location
1598 // (currently in x12). 1593 // (currently in x12).
1599 UseScratchRegisterScope temps(masm); 1594 UseScratchRegisterScope temps(masm);
1600 Register temp = temps.AcquireX(); 1595 Register temp = temps.AcquireX();
(...skipping 39 matching lines...)
1640 1635
1641 // The stack pointer is still csp if we aren't returning, and the frame 1636 // The stack pointer is still csp if we aren't returning, and the frame
1642 // hasn't changed (except for the return address). 1637 // hasn't changed (except for the return address).
1643 __ SetStackPointer(csp); 1638 __ SetStackPointer(csp);
1644 1639
1645 // Handling of exception. 1640 // Handling of exception.
1646 __ Bind(&exception_returned); 1641 __ Bind(&exception_returned);
1647 1642
1648 // Retrieve the pending exception. 1643 // Retrieve the pending exception.
1649 ExternalReference pending_exception_address( 1644 ExternalReference pending_exception_address(
1650 Isolate::kPendingExceptionAddress, isolate); 1645 Isolate::kPendingExceptionAddress, isolate());
1651 const Register& exception = result; 1646 const Register& exception = result;
1652 const Register& exception_address = x11; 1647 const Register& exception_address = x11;
1653 __ Mov(exception_address, Operand(pending_exception_address)); 1648 __ Mov(exception_address, Operand(pending_exception_address));
1654 __ Ldr(exception, MemOperand(exception_address)); 1649 __ Ldr(exception, MemOperand(exception_address));
1655 1650
1656 // Clear the pending exception. 1651 // Clear the pending exception.
1657 __ Mov(x10, Operand(isolate->factory()->the_hole_value())); 1652 __ Mov(x10, Operand(isolate()->factory()->the_hole_value()));
1658 __ Str(x10, MemOperand(exception_address)); 1653 __ Str(x10, MemOperand(exception_address));
1659 1654
1660 // x0 exception The exception descriptor. 1655 // x0 exception The exception descriptor.
1661 // x21 argv 1656 // x21 argv
1662 // x22 argc 1657 // x22 argc
1663 // x23 target 1658 // x23 target
1664 1659
1665 // Special handling of termination exceptions, which are uncatchable by 1660 // Special handling of termination exceptions, which are uncatchable by
1666 // JavaScript code. 1661 // JavaScript code.
1667 Label throw_termination_exception; 1662 Label throw_termination_exception;
1668 __ Cmp(exception, Operand(isolate->factory()->termination_exception())); 1663 __ Cmp(exception, Operand(isolate()->factory()->termination_exception()));
1669 __ B(eq, &throw_termination_exception); 1664 __ B(eq, &throw_termination_exception);
1670 1665
1671 // We didn't execute a return case, so the stack frame hasn't been updated 1666 // We didn't execute a return case, so the stack frame hasn't been updated
1672 // (except for the return address slot). However, we don't need to initialize 1667 // (except for the return address slot). However, we don't need to initialize
1673 // jssp because the throw method will immediately overwrite it when it 1668 // jssp because the throw method will immediately overwrite it when it
1674 // unwinds the stack. 1669 // unwinds the stack.
1675 __ SetStackPointer(jssp); 1670 __ SetStackPointer(jssp);
1676 1671
1677 ASM_LOCATION("Throw normal"); 1672 ASM_LOCATION("Throw normal");
1678 __ Mov(argv, 0); 1673 __ Mov(argv, 0);
(...skipping 39 matching lines...)
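
Restating the exception path above as a hedged C++ sketch (pending_exception_slot, the_hole_value and termination_exception are illustrative stand-ins; the real code works through isolate-internal slots and handles):

Object* exception = *pending_exception_slot;   // Ldr exception, [exception_address]
*pending_exception_slot = the_hole_value;      // clear: the hole means "none pending"
if (exception == termination_exception) {
  // uncatchable by JavaScript code: unwind without running JS handlers
} else {
  // ordinary throw: hand the exception to the handler chain
}
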
1718 __ PushCalleeSavedRegisters(); 1713 __ PushCalleeSavedRegisters();
1719 __ Mov(jssp, csp); 1714 __ Mov(jssp, csp);
1720 __ SetStackPointer(jssp); 1715 __ SetStackPointer(jssp);
1721 1716
1722 ProfileEntryHookStub::MaybeCallEntryHook(masm); 1717 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1723 1718
1724 // Set up the reserved register for 0.0. 1719 // Set up the reserved register for 0.0.
1725 __ Fmov(fp_zero, 0.0); 1720 __ Fmov(fp_zero, 0.0);
1726 1721
1727 // Build an entry frame (see layout below). 1722 // Build an entry frame (see layout below).
1728 Isolate* isolate = masm->isolate();
1729
1730 // Build an entry frame.
1731 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; 1723 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
1732 int64_t bad_frame_pointer = -1L; // Bad frame pointer to fail if it is used. 1724 int64_t bad_frame_pointer = -1L; // Bad frame pointer to fail if it is used.
1733 __ Mov(x13, bad_frame_pointer); 1725 __ Mov(x13, bad_frame_pointer);
1734 __ Mov(x12, Smi::FromInt(marker)); 1726 __ Mov(x12, Smi::FromInt(marker));
1735 __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate)); 1727 __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate()));
1736 __ Ldr(x10, MemOperand(x11)); 1728 __ Ldr(x10, MemOperand(x11));
1737 1729
1738 __ Push(x13, xzr, x12, x10); 1730 __ Push(x13, xzr, x12, x10);
1739 // Set up fp. 1731 // Set up fp.
1740 __ Sub(fp, jssp, EntryFrameConstants::kCallerFPOffset); 1732 __ Sub(fp, jssp, EntryFrameConstants::kCallerFPOffset);
1741 1733
1742 // Push the JS entry frame marker. Also set js_entry_sp if this is the 1734 // Push the JS entry frame marker. Also set js_entry_sp if this is the
1743 // outermost JS call. 1735 // outermost JS call.
1744 Label non_outermost_js, done; 1736 Label non_outermost_js, done;
1745 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate); 1737 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
1746 __ Mov(x10, ExternalReference(js_entry_sp)); 1738 __ Mov(x10, ExternalReference(js_entry_sp));
1747 __ Ldr(x11, MemOperand(x10)); 1739 __ Ldr(x11, MemOperand(x10));
1748 __ Cbnz(x11, &non_outermost_js); 1740 __ Cbnz(x11, &non_outermost_js);
1749 __ Str(fp, MemOperand(x10)); 1741 __ Str(fp, MemOperand(x10));
1750 __ Mov(x12, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); 1742 __ Mov(x12, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
1751 __ Push(x12); 1743 __ Push(x12);
1752 __ B(&done); 1744 __ B(&done);
1753 __ Bind(&non_outermost_js); 1745 __ Bind(&non_outermost_js);
1754 // We spare one instruction by pushing xzr since the marker is 0. 1746 // We spare one instruction by pushing xzr since the marker is 0.
1755 ASSERT(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME) == NULL); 1747 ASSERT(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME) == NULL);
(...skipping 19 matching lines...)
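
The marker dance above reduces to the following sketch (js_entry_sp is the isolate-global slot named above; push() stands in for the stack operations):

if (*js_entry_sp == 0) {                                   // Cbnz x11, &non_outermost_js
  *js_entry_sp = fp;                                       // remember the outermost frame
  push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
} else {
  push(0);  // INNER_JSENTRY_FRAME is Smi 0 (see the ASSERT), so pushing xzr suffices
}
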
1775 // that. 1767 // that.
1776 { 1768 {
1777 Assembler::BlockPoolsScope block_pools(masm); 1769 Assembler::BlockPoolsScope block_pools(masm);
1778 __ bind(&handler_entry); 1770 __ bind(&handler_entry);
1779 handler_offset_ = handler_entry.pos(); 1771 handler_offset_ = handler_entry.pos();
1780 // Caught exception: Store result (exception) in the pending exception 1772 // Caught exception: Store result (exception) in the pending exception
1781 // field in the JSEnv and return a failure sentinel. Coming in here the 1773 // field in the JSEnv and return a failure sentinel. Coming in here the
1782 // fp will be invalid because the PushTryHandler below sets it to 0 to 1774 // fp will be invalid because the PushTryHandler below sets it to 0 to
1783 // signal the existence of the JSEntry frame. 1775 // signal the existence of the JSEntry frame.
1784 __ Mov(x10, Operand(ExternalReference(Isolate::kPendingExceptionAddress, 1776 __ Mov(x10, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1785 isolate))); 1777 isolate())));
1786 } 1778 }
1787 __ Str(code_entry, MemOperand(x10)); 1779 __ Str(code_entry, MemOperand(x10));
1788 __ LoadRoot(x0, Heap::kExceptionRootIndex); 1780 __ LoadRoot(x0, Heap::kExceptionRootIndex);
1789 __ B(&exit); 1781 __ B(&exit);
1790 1782
1791 // Invoke: Link this frame into the handler chain. There's only one 1783 // Invoke: Link this frame into the handler chain. There's only one
1792 // handler block in this code object, so its index is 0. 1784 // handler block in this code object, so its index is 0.
1793 __ Bind(&invoke); 1785 __ Bind(&invoke);
1794 __ PushTryHandler(StackHandler::JS_ENTRY, 0); 1786 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
1795 // If an exception not caught by another handler occurs, this handler 1787 // If an exception not caught by another handler occurs, this handler
1796 // returns control to the code after the B(&invoke) above, which 1788 // returns control to the code after the B(&invoke) above, which
1797 // restores all callee-saved registers (including cp and fp) to their 1789 // restores all callee-saved registers (including cp and fp) to their
1798 // saved values before returning a failure to C. 1790 // saved values before returning a failure to C.
1799 1791
1800 // Clear any pending exceptions. 1792 // Clear any pending exceptions.
1801 __ Mov(x10, Operand(isolate->factory()->the_hole_value())); 1793 __ Mov(x10, Operand(isolate()->factory()->the_hole_value()));
1802 __ Mov(x11, Operand(ExternalReference(Isolate::kPendingExceptionAddress, 1794 __ Mov(x11, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1803 isolate))); 1795 isolate())));
1804 __ Str(x10, MemOperand(x11)); 1796 __ Str(x10, MemOperand(x11));
1805 1797
1806 // Invoke the function by calling through the JS entry trampoline builtin. 1798 // Invoke the function by calling through the JS entry trampoline builtin.
1807 // Notice that we cannot store a reference to the trampoline code directly in 1799 // Notice that we cannot store a reference to the trampoline code directly in
1808 // this stub, because runtime stubs are not traversed when doing GC. 1800 // this stub, because runtime stubs are not traversed when doing GC.
1809 1801
1810 // Expected registers by Builtins::JSEntryTrampoline 1802 // Expected registers by Builtins::JSEntryTrampoline
1811 // x0: code entry. 1803 // x0: code entry.
1812 // x1: function. 1804 // x1: function.
1813 // x2: receiver. 1805 // x2: receiver.
1814 // x3: argc. 1806 // x3: argc.
1815 // x4: argv. 1807 // x4: argv.
1816 ExternalReference entry(is_construct ? Builtins::kJSConstructEntryTrampoline 1808 ExternalReference entry(is_construct ? Builtins::kJSConstructEntryTrampoline
1817 : Builtins::kJSEntryTrampoline, 1809 : Builtins::kJSEntryTrampoline,
1818 isolate); 1810 isolate());
1819 __ Mov(x10, entry); 1811 __ Mov(x10, entry);
1820 1812
1821 // Call the JSEntryTrampoline. 1813 // Call the JSEntryTrampoline.
1822 __ Ldr(x11, MemOperand(x10)); // Dereference the address. 1814 __ Ldr(x11, MemOperand(x10)); // Dereference the address.
1823 __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag); 1815 __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
1824 __ Blr(x12); 1816 __ Blr(x12);
1825 1817
1826 // Unlink this frame from the handler chain. 1818 // Unlink this frame from the handler chain.
1827 __ PopTryHandler(); 1819 __ PopTryHandler();
1828 1820
(...skipping 12 matching lines...)
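
The Ldr/Add/Blr triple above amounts to this sketch, where entry is the ExternalReference built above and call() stands in for Blr (Code::kHeaderSize - kHeapObjectTag skips the Code object's header to reach its first instruction):

Code* code = *reinterpret_cast<Code**>(entry.address());        // Ldr x11, [x10]
Address target = reinterpret_cast<Address>(code)
                 + Code::kHeaderSize - kHeapObjectTag;          // Add x12, x11, ...
call(target);                                                   // Blr x12

The indirection exists because, per the comment above, stub code is not traversed during GC, so the trampoline's address must be fetched from an externally known slot at run time rather than baked into this stub.
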
1841 Label non_outermost_js_2; 1833 Label non_outermost_js_2;
1842 __ Pop(x10); 1834 __ Pop(x10);
1843 __ Cmp(x10, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); 1835 __ Cmp(x10, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
1844 __ B(ne, &non_outermost_js_2); 1836 __ B(ne, &non_outermost_js_2);
1845 __ Mov(x11, ExternalReference(js_entry_sp)); 1837 __ Mov(x11, ExternalReference(js_entry_sp));
1846 __ Str(xzr, MemOperand(x11)); 1838 __ Str(xzr, MemOperand(x11));
1847 __ Bind(&non_outermost_js_2); 1839 __ Bind(&non_outermost_js_2);
1848 1840
1849 // Restore the top frame descriptors from the stack. 1841 // Restore the top frame descriptors from the stack.
1850 __ Pop(x10); 1842 __ Pop(x10);
1851 __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate)); 1843 __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate()));
1852 __ Str(x10, MemOperand(x11)); 1844 __ Str(x10, MemOperand(x11));
1853 1845
1854 // Reset the stack to the callee saved registers. 1846 // Reset the stack to the callee saved registers.
1855 __ Drop(-EntryFrameConstants::kCallerFPOffset, kByteSizeInBytes); 1847 __ Drop(-EntryFrameConstants::kCallerFPOffset, kByteSizeInBytes);
1856 // Restore the callee-saved registers and return. 1848 // Restore the callee-saved registers and return.
1857 ASSERT(jssp.Is(__ StackPointer())); 1849 ASSERT(jssp.Is(__ StackPointer()));
1858 __ Mov(csp, jssp); 1850 __ Mov(csp, jssp);
1859 __ SetStackPointer(csp); 1851 __ SetStackPointer(csp);
1860 __ PopCalleeSavedRegisters(); 1852 __ PopCalleeSavedRegisters();
1861 // After this point, we must not modify jssp because it is a callee-saved 1853 // After this point, we must not modify jssp because it is a callee-saved
1862 // register which we have just restored. 1854 // register which we have just restored.
1863 __ Ret(); 1855 __ Ret();
1864 } 1856 }
1865 1857
1866 1858
1867 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { 1859 void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
1868 Label miss; 1860 Label miss;
1869 Register receiver; 1861 Register receiver;
1870 if (kind() == Code::KEYED_LOAD_IC) { 1862 if (kind() == Code::KEYED_LOAD_IC) {
1871 // ----------- S t a t e ------------- 1863 // ----------- S t a t e -------------
1872 // -- lr : return address 1864 // -- lr : return address
1873 // -- x1 : receiver 1865 // -- x1 : receiver
1874 // -- x0 : key 1866 // -- x0 : key
1875 // ----------------------------------- 1867 // -----------------------------------
1876 Register key = x0; 1868 Register key = x0;
1877 receiver = x1; 1869 receiver = x1;
1878 __ Cmp(key, Operand(masm->isolate()->factory()->prototype_string())); 1870 __ Cmp(key, Operand(isolate()->factory()->prototype_string()));
1879 __ B(ne, &miss); 1871 __ B(ne, &miss);
1880 } else { 1872 } else {
1881 ASSERT(kind() == Code::LOAD_IC); 1873 ASSERT(kind() == Code::LOAD_IC);
1882 // ----------- S t a t e ------------- 1874 // ----------- S t a t e -------------
1883 // -- lr : return address 1875 // -- lr : return address
1884 // -- x2 : name 1876 // -- x2 : name
1885 // -- x0 : receiver 1877 // -- x0 : receiver
1886 // -- sp[0] : receiver 1878 // -- sp[0] : receiver
1887 // ----------------------------------- 1879 // -----------------------------------
1888 receiver = x0; 1880 receiver = x0;
(...skipping 137 matching lines...)
2026 2018
2027 // Before null, smi and string checks, check that the rhs is a function. 2019 // Before null, smi and string checks, check that the rhs is a function.
2028 // For a non-function rhs, an exception must be thrown. 2020 // For a non-function rhs, an exception must be thrown.
2029 __ JumpIfSmi(function, &slow); 2021 __ JumpIfSmi(function, &slow);
2030 __ JumpIfNotObjectType( 2022 __ JumpIfNotObjectType(
2031 function, scratch1, object_type, JS_FUNCTION_TYPE, &slow); 2023 function, scratch1, object_type, JS_FUNCTION_TYPE, &slow);
2032 2024
2033 __ Mov(result, res_false); 2025 __ Mov(result, res_false);
2034 2026
2035 // Null is not instance of anything. 2027 // Null is not instance of anything.
2036 __ Cmp(object_type, Operand(masm->isolate()->factory()->null_value())); 2028 __ Cmp(object_type, Operand(isolate()->factory()->null_value()));
2037 __ B(ne, &object_not_null); 2029 __ B(ne, &object_not_null);
2038 __ Ret(); 2030 __ Ret();
2039 2031
2040 __ Bind(&object_not_null); 2032 __ Bind(&object_not_null);
2041 // Smi values are not instances of anything. 2033 // Smi values are not instances of anything.
2042 __ JumpIfNotSmi(object, &object_not_null_or_smi); 2034 __ JumpIfNotSmi(object, &object_not_null_or_smi);
2043 __ Ret(); 2035 __ Ret();
2044 2036
2045 __ Bind(&object_not_null_or_smi); 2037 __ Bind(&object_not_null_or_smi);
2046 // String values are not instances of anything. 2038 // String values are not instances of anything.
(...skipping 580 matching lines...)
2627 // jssp[40]: previous index 2619 // jssp[40]: previous index
2628 // jssp[48]: subject string 2620 // jssp[48]: subject string
2629 // jssp[56]: JSRegExp object 2621 // jssp[56]: JSRegExp object
2630 2622
2631 const int kLastMatchInfoOffset = 4 * kPointerSize; 2623 const int kLastMatchInfoOffset = 4 * kPointerSize;
2632 const int kPreviousIndexOffset = 5 * kPointerSize; 2624 const int kPreviousIndexOffset = 5 * kPointerSize;
2633 const int kSubjectOffset = 6 * kPointerSize; 2625 const int kSubjectOffset = 6 * kPointerSize;
2634 const int kJSRegExpOffset = 7 * kPointerSize; 2626 const int kJSRegExpOffset = 7 * kPointerSize;
2635 2627
2636 // Ensure that a RegExp stack is allocated. 2628 // Ensure that a RegExp stack is allocated.
2637 Isolate* isolate = masm->isolate();
2638 ExternalReference address_of_regexp_stack_memory_address = 2629 ExternalReference address_of_regexp_stack_memory_address =
2639 ExternalReference::address_of_regexp_stack_memory_address(isolate); 2630 ExternalReference::address_of_regexp_stack_memory_address(isolate());
2640 ExternalReference address_of_regexp_stack_memory_size = 2631 ExternalReference address_of_regexp_stack_memory_size =
2641 ExternalReference::address_of_regexp_stack_memory_size(isolate); 2632 ExternalReference::address_of_regexp_stack_memory_size(isolate());
2642 __ Mov(x10, address_of_regexp_stack_memory_size); 2633 __ Mov(x10, address_of_regexp_stack_memory_size);
2643 __ Ldr(x10, MemOperand(x10)); 2634 __ Ldr(x10, MemOperand(x10));
2644 __ Cbz(x10, &runtime); 2635 __ Cbz(x10, &runtime);
2645 2636
2646 // Check that the first argument is a JSRegExp object. 2637 // Check that the first argument is a JSRegExp object.
2647 ASSERT(jssp.Is(__ StackPointer())); 2638 ASSERT(jssp.Is(__ StackPointer()));
2648 __ Peek(jsregexp_object, kJSRegExpOffset); 2639 __ Peek(jsregexp_object, kJSRegExpOffset);
2649 __ JumpIfSmi(jsregexp_object, &runtime); 2640 __ JumpIfSmi(jsregexp_object, &runtime);
2650 __ JumpIfNotObjectType(jsregexp_object, x10, x10, JS_REGEXP_TYPE, &runtime); 2641 __ JumpIfNotObjectType(jsregexp_object, x10, x10, JS_REGEXP_TYPE, &runtime);
2651 2642
(...skipping 141 matching lines...)
2793 __ Ldr(code_object, FieldMemOperand(x10, JSRegExp::kDataAsciiCodeOffset)); 2784 __ Ldr(code_object, FieldMemOperand(x10, JSRegExp::kDataAsciiCodeOffset));
2794 2785
2795 // (E) Carry on. String handling is done. 2786 // (E) Carry on. String handling is done.
2796 2787
2797 // Check that the irregexp code has been generated for the actual string 2788 // Check that the irregexp code has been generated for the actual string
2798 // encoding. If it has, the field contains a code object; otherwise it contains 2789 // encoding. If it has, the field contains a code object; otherwise it contains
2799 // a smi (code flushing support). 2790 // a smi (code flushing support).
2800 __ JumpIfSmi(code_object, &runtime); 2791 __ JumpIfSmi(code_object, &runtime);
2801 2792
2802 // All checks done. Now push arguments for native regexp code. 2793 // All checks done. Now push arguments for native regexp code.
2803 __ IncrementCounter(isolate->counters()->regexp_entry_native(), 1, 2794 __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1,
2804 x10, 2795 x10,
2805 x11); 2796 x11);
2806 2797
2807 // Isolates: note we add an additional parameter here (isolate pointer). 2798 // Isolates: note we add an additional parameter here (isolate pointer).
2808 __ EnterExitFrame(false, x10, 1); 2799 __ EnterExitFrame(false, x10, 1);
2809 ASSERT(csp.Is(__ StackPointer())); 2800 ASSERT(csp.Is(__ StackPointer()));
2810 2801
2811 // We have 9 arguments to pass to the regexp code; therefore we have to pass 2802 // We have 9 arguments to pass to the regexp code; therefore we have to pass
2812 // one on the stack and the rest as registers. 2803 // one on the stack and the rest as registers.
2813 2804
2814 // Note that the placement of the argument on the stack isn't standard 2805 // Note that the placement of the argument on the stack isn't standard
2815 // AAPCS64: 2806 // AAPCS64:
2816 // csp[0]: Space for the return address placed by DirectCEntryStub. 2807 // csp[0]: Space for the return address placed by DirectCEntryStub.
2817 // csp[8]: Argument 9, the current isolate address. 2808 // csp[8]: Argument 9, the current isolate address.
2818 2809
2819 __ Mov(x10, ExternalReference::isolate_address(isolate)); 2810 __ Mov(x10, ExternalReference::isolate_address(isolate()));
2820 __ Poke(x10, kPointerSize); 2811 __ Poke(x10, kPointerSize);
2821 2812
2822 Register length = w11; 2813 Register length = w11;
2823 Register previous_index_in_bytes = w12; 2814 Register previous_index_in_bytes = w12;
2824 Register start = x13; 2815 Register start = x13;
2825 2816
2826 // Load start of the subject string. 2817 // Load start of the subject string.
2827 __ Add(start, subject, SeqString::kHeaderSize - kHeapObjectTag); 2818 __ Add(start, subject, SeqString::kHeaderSize - kHeapObjectTag);
2828 // Load the length from the original subject string from the previous stack 2819 // Load the length from the original subject string from the previous stack
2829 // frame. Therefore we have to use fp, which points exactly to two pointer 2820 // frame. Therefore we have to use fp, which points exactly to two pointer
(...skipping 28 matching lines...)
2858 // is not sliced). 2849 // is not sliced).
2859 __ Add(w10, previous_index_in_bytes, sliced_string_offset); 2850 __ Add(w10, previous_index_in_bytes, sliced_string_offset);
2860 __ Add(x2, start, Operand(w10, UXTW)); 2851 __ Add(x2, start, Operand(w10, UXTW));
2861 2852
2862 // Argument 4 (x3): 2853 // Argument 4 (x3):
2863 // End of input = start of input + (length of input - previous index) 2854 // End of input = start of input + (length of input - previous index)
2864 __ Sub(w10, length, previous_index_in_bytes); 2855 __ Sub(w10, length, previous_index_in_bytes);
2865 __ Add(x3, x2, Operand(w10, UXTW)); 2856 __ Add(x3, x2, Operand(w10, UXTW));
2866 2857
2867 // Argument 5 (x4): static offsets vector buffer. 2858 // Argument 5 (x4): static offsets vector buffer.
2868 __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate)); 2859 __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate()));
2869 2860
2870 // Argument 6 (x5): Set the number of capture registers to zero to force 2861 // Argument 6 (x5): Set the number of capture registers to zero to force
2871 // global regexps to behave as non-global. This stub is not used for global 2862 // global regexps to behave as non-global. This stub is not used for global
2872 // regexps. 2863 // regexps.
2873 __ Mov(x5, 0); 2864 __ Mov(x5, 0);
2874 2865
2875 // Argument 7 (x6): Start (high end) of backtracking stack memory area. 2866 // Argument 7 (x6): Start (high end) of backtracking stack memory area.
2876 __ Mov(x10, address_of_regexp_stack_memory_address); 2867 __ Mov(x10, address_of_regexp_stack_memory_address);
2877 __ Ldr(x10, MemOperand(x10)); 2868 __ Ldr(x10, MemOperand(x10));
2878 __ Mov(x11, address_of_regexp_stack_memory_size); 2869 __ Mov(x11, address_of_regexp_stack_memory_size);
2879 __ Ldr(x11, MemOperand(x11)); 2870 __ Ldr(x11, MemOperand(x11));
2880 __ Add(x6, x10, x11); 2871 __ Add(x6, x10, x11);
2881 2872
2882 // Argument 8 (x7): Indicate that this is a direct call from JavaScript. 2873 // Argument 8 (x7): Indicate that this is a direct call from JavaScript.
2883 __ Mov(x7, 1); 2874 __ Mov(x7, 1);
2884 2875
2885 // Locate the code entry and call it. 2876 // Locate the code entry and call it.
2886 __ Add(code_object, code_object, Code::kHeaderSize - kHeapObjectTag); 2877 __ Add(code_object, code_object, Code::kHeaderSize - kHeapObjectTag);
2887 DirectCEntryStub stub; 2878 DirectCEntryStub stub(isolate());
2888 stub.GenerateCall(masm, code_object); 2879 stub.GenerateCall(masm, code_object);
2889 2880
2890 __ LeaveExitFrame(false, x10, true); 2881 __ LeaveExitFrame(false, x10, true);
2891 2882
2892 // The generated regexp code returns an int32 in w0. 2883 // The generated regexp code returns an int32 in w0.
2893 Label failure, exception; 2884 Label failure, exception;
2894 __ CompareAndBranch(w0, NativeRegExpMacroAssembler::FAILURE, eq, &failure); 2885 __ CompareAndBranch(w0, NativeRegExpMacroAssembler::FAILURE, eq, &failure);
2895 __ CompareAndBranch(w0, 2886 __ CompareAndBranch(w0,
2896 NativeRegExpMacroAssembler::EXCEPTION, 2887 NativeRegExpMacroAssembler::EXCEPTION,
2897 eq, 2888 eq,
(...skipping 65 matching lines...)
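
The w0 checks above and the Bind()s further down dispatch on the regexp engine's status code. As a sketch (w0_status is illustrative and the success branch name is an assumption; FAILURE and EXCEPTION are visible in this diff):

int result = w0_status;  // int32 returned by the generated regexp code
if (result == NativeRegExpMacroAssembler::FAILURE) {
  // &failure: no match; return the null value to the caller
} else if (result == NativeRegExpMacroAssembler::EXCEPTION) {
  // &exception: rethrow whatever the regexp code left pending, or treat it
  // as a backtrack-stack overflow if nothing is pending
} else {
  // assumed success path: copy the static offsets vector into last-match info
}
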
2963 kLRHasNotBeenSaved, 2954 kLRHasNotBeenSaved,
2964 kDontSaveFPRegs); 2955 kDontSaveFPRegs);
2965 2956
2966 Register last_match_offsets = x13; 2957 Register last_match_offsets = x13;
2967 Register offsets_vector_index = x14; 2958 Register offsets_vector_index = x14;
2968 Register current_offset = x15; 2959 Register current_offset = x15;
2969 2960
2970 // Get the static offsets vector filled by the native regexp code 2961 // Get the static offsets vector filled by the native regexp code
2971 // and fill the last match info. 2962 // and fill the last match info.
2972 ExternalReference address_of_static_offsets_vector = 2963 ExternalReference address_of_static_offsets_vector =
2973 ExternalReference::address_of_static_offsets_vector(isolate); 2964 ExternalReference::address_of_static_offsets_vector(isolate());
2974 __ Mov(offsets_vector_index, address_of_static_offsets_vector); 2965 __ Mov(offsets_vector_index, address_of_static_offsets_vector);
2975 2966
2976 Label next_capture, done; 2967 Label next_capture, done;
2977 // Capture register counter starts from the number of capture registers and 2968 // Capture register counter starts from the number of capture registers and
2978 // iterates down to zero (inclusive). 2969 // iterates down to zero (inclusive).
2979 __ Add(last_match_offsets, 2970 __ Add(last_match_offsets,
2980 last_match_info_elements, 2971 last_match_info_elements,
2981 RegExpImpl::kFirstCaptureOffset - kHeapObjectTag); 2972 RegExpImpl::kFirstCaptureOffset - kHeapObjectTag);
2982 __ Bind(&next_capture); 2973 __ Bind(&next_capture);
2983 __ Subs(number_of_capture_registers, number_of_capture_registers, 2); 2974 __ Subs(number_of_capture_registers, number_of_capture_registers, 2);
(...skipping 18 matching lines...)
3002 __ PopCPURegList(used_callee_saved_registers); 2993 __ PopCPURegList(used_callee_saved_registers);
3003 // Drop the 4 arguments of the stub from the stack. 2994 // Drop the 4 arguments of the stub from the stack.
3004 __ Drop(4); 2995 __ Drop(4);
3005 __ Ret(); 2996 __ Ret();
3006 2997
3007 __ Bind(&exception); 2998 __ Bind(&exception);
3008 Register exception_value = x0; 2999 Register exception_value = x0;
3009 // A stack overflow (on the backtrack stack) may have occurred 3000 // A stack overflow (on the backtrack stack) may have occurred
3010 // in the RegExp code but no exception has been created yet. 3001 // in the RegExp code but no exception has been created yet.
3011 // If there is no pending exception, handle that in the runtime system. 3002 // If there is no pending exception, handle that in the runtime system.
3012 __ Mov(x10, Operand(isolate->factory()->the_hole_value())); 3003 __ Mov(x10, Operand(isolate()->factory()->the_hole_value()));
3013 __ Mov(x11, 3004 __ Mov(x11,
3014 Operand(ExternalReference(Isolate::kPendingExceptionAddress, 3005 Operand(ExternalReference(Isolate::kPendingExceptionAddress,
3015 isolate))); 3006 isolate())));
3016 __ Ldr(exception_value, MemOperand(x11)); 3007 __ Ldr(exception_value, MemOperand(x11));
3017 __ Cmp(x10, exception_value); 3008 __ Cmp(x10, exception_value);
3018 __ B(eq, &runtime); 3009 __ B(eq, &runtime);
3019 3010
3020 __ Str(x10, MemOperand(x11)); // Clear pending exception. 3011 __ Str(x10, MemOperand(x11)); // Clear pending exception.
3021 3012
3022 // Check if the exception is a termination. If so, throw as uncatchable. 3013 // Check if the exception is a termination. If so, throw as uncatchable.
3023 Label termination_exception; 3014 Label termination_exception;
3024 __ JumpIfRoot(exception_value, 3015 __ JumpIfRoot(exception_value,
3025 Heap::kTerminationExceptionRootIndex, 3016 Heap::kTerminationExceptionRootIndex,
3026 &termination_exception); 3017 &termination_exception);
3027 3018
3028 __ Throw(exception_value, x10, x11, x12, x13); 3019 __ Throw(exception_value, x10, x11, x12, x13);
3029 3020
3030 __ Bind(&termination_exception); 3021 __ Bind(&termination_exception);
3031 __ ThrowUncatchable(exception_value, x10, x11, x12, x13); 3022 __ ThrowUncatchable(exception_value, x10, x11, x12, x13);
3032 3023
3033 __ Bind(&failure); 3024 __ Bind(&failure);
3034 __ Mov(x0, Operand(masm->isolate()->factory()->null_value())); 3025 __ Mov(x0, Operand(isolate()->factory()->null_value()));
3035 __ PopCPURegList(used_callee_saved_registers); 3026 __ PopCPURegList(used_callee_saved_registers);
3036 // Drop the 4 arguments of the stub from the stack. 3027 // Drop the 4 arguments of the stub from the stack.
3037 __ Drop(4); 3028 __ Drop(4);
3038 __ Ret(); 3029 __ Ret();
3039 3030
3040 __ Bind(&runtime); 3031 __ Bind(&runtime);
3041 __ PopCPURegList(used_callee_saved_registers); 3032 __ PopCPURegList(used_callee_saved_registers);
3042 __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1); 3033 __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
3043 3034
3044 // Deferred code for string handling. 3035 // Deferred code for string handling.
(...skipping 109 matching lines...)
3154 // Make sure the function is the Array() function 3145 // Make sure the function is the Array() function
3155 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); 3146 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
3156 __ Cmp(function, scratch1); 3147 __ Cmp(function, scratch1);
3157 __ B(ne, &not_array_function); 3148 __ B(ne, &not_array_function);
3158 3149
3159 // The target function is the Array constructor, 3150 // The target function is the Array constructor,
3160 // Create an AllocationSite if we don't already have it, store it in the 3151 // Create an AllocationSite if we don't already have it, store it in the
3161 // slot. 3152 // slot.
3162 { 3153 {
3163 FrameScope scope(masm, StackFrame::INTERNAL); 3154 FrameScope scope(masm, StackFrame::INTERNAL);
3164 CreateAllocationSiteStub create_stub; 3155 CreateAllocationSiteStub create_stub(masm->isolate());
3165 3156
3166 // Arguments register must be smi-tagged to call out. 3157 // Arguments register must be smi-tagged to call out.
3167 __ SmiTag(argc); 3158 __ SmiTag(argc);
3168 __ Push(argc, function, feedback_vector, index); 3159 __ Push(argc, function, feedback_vector, index);
3169 3160
3170 // CreateAllocationSiteStub expects the feedback vector in x2 and the slot 3161 // CreateAllocationSiteStub expects the feedback vector in x2 and the slot
3171 // index in x3. 3162 // index in x3.
3172 ASSERT(feedback_vector.Is(x2) && index.Is(x3)); 3163 ASSERT(feedback_vector.Is(x2) && index.Is(x3));
3173 __ CallStub(&create_stub); 3164 __ CallStub(&create_stub);
3174 3165
(...skipping 83 matching lines...)
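
The SmiTag/Push pair above follows the general INTERNAL-frame rule: everything pushed must be a tagged value the GC can scan, so the raw argument count is smi-tagged first. A sketch of the full pattern (the restore half sits in the elided lines, so it is assumed here):

__ SmiTag(argc);                                  // raw int -> Smi, GC-scannable
__ Push(argc, function, feedback_vector, index);  // keep live values across the call
__ CallStub(&create_stub);                        // may allocate and trigger GC
__ Pop(index, feedback_vector, function, argc);   // assumed restore, reverse order
__ SmiUntag(argc);
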
3258 JUMP_FUNCTION, 3249 JUMP_FUNCTION,
3259 NullCallWrapper()); 3250 NullCallWrapper());
3260 3251
3261 if (NeedsChecks()) { 3252 if (NeedsChecks()) {
3262 // Slow-case: Non-function called. 3253 // Slow-case: Non-function called.
3263 __ Bind(&slow); 3254 __ Bind(&slow);
3264 if (RecordCallTarget()) { 3255 if (RecordCallTarget()) {
3265 // If there is a call target cache, mark it megamorphic in the 3256 // If there is a call target cache, mark it megamorphic in the
3266 // non-function case. MegamorphicSentinel is an immortal immovable object 3257 // non-function case. MegamorphicSentinel is an immortal immovable object
3267 // (megamorphic symbol) so no write barrier is needed. 3258 // (megamorphic symbol) so no write barrier is needed.
3268 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), 3259 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
3269 masm->isolate()->heap()->megamorphic_symbol()); 3260 isolate()->heap()->megamorphic_symbol());
3270 __ Add(x12, cache_cell, Operand::UntagSmiAndScale(slot, 3261 __ Add(x12, cache_cell, Operand::UntagSmiAndScale(slot,
3271 kPointerSizeLog2)); 3262 kPointerSizeLog2));
3272 __ LoadRoot(x11, Heap::kMegamorphicSymbolRootIndex); 3263 __ LoadRoot(x11, Heap::kMegamorphicSymbolRootIndex);
3273 __ Str(x11, FieldMemOperand(x12, FixedArray::kHeaderSize)); 3264 __ Str(x11, FieldMemOperand(x12, FixedArray::kHeaderSize));
3274 } 3265 }
3275 // Check for function proxy. 3266 // Check for function proxy.
3276 // x10 : function type. 3267 // x10 : function type.
3277 __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, &non_function); 3268 __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, &non_function);
3278 __ Push(function); // put proxy as additional argument 3269 __ Push(function); // put proxy as additional argument
3279 __ Mov(x0, argc_ + 1); 3270 __ Mov(x0, argc_ + 1);
3280 __ Mov(x2, 0); 3271 __ Mov(x2, 0);
3281 __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY); 3272 __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
3282 { 3273 {
3283 Handle<Code> adaptor = 3274 Handle<Code> adaptor =
3284 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); 3275 isolate()->builtins()->ArgumentsAdaptorTrampoline();
3285 __ Jump(adaptor, RelocInfo::CODE_TARGET); 3276 __ Jump(adaptor, RelocInfo::CODE_TARGET);
3286 } 3277 }
3287 3278
3288 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 3279 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
3289 // of the original receiver from the call site). 3280 // of the original receiver from the call site).
3290 __ Bind(&non_function); 3281 __ Bind(&non_function);
3291 __ Poke(function, argc_ * kXRegSize); 3282 __ Poke(function, argc_ * kXRegSize);
3292 __ Mov(x0, argc_); // Set up the number of arguments. 3283 __ Mov(x0, argc_); // Set up the number of arguments.
3293 __ Mov(x2, 0); 3284 __ Mov(x2, 0);
3294 __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION); 3285 __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
3295 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 3286 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
3296 RelocInfo::CODE_TARGET); 3287 RelocInfo::CODE_TARGET);
3297 } 3288 }
3298 3289
3299 if (CallAsMethod()) { 3290 if (CallAsMethod()) {
3300 __ Bind(&wrap); 3291 __ Bind(&wrap);
3301 // Wrap the receiver and patch it back onto the stack. 3292 // Wrap the receiver and patch it back onto the stack.
3302 { FrameScope frame_scope(masm, StackFrame::INTERNAL); 3293 { FrameScope frame_scope(masm, StackFrame::INTERNAL);
3303 __ Push(x1, x3); 3294 __ Push(x1, x3);
3304 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 3295 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
3305 __ Pop(x1); 3296 __ Pop(x1);
(...skipping 62 matching lines...)
3368 __ B(ne, &non_function_call); 3359 __ B(ne, &non_function_call);
3369 __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); 3360 __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
3370 __ B(&do_call); 3361 __ B(&do_call);
3371 3362
3372 __ Bind(&non_function_call); 3363 __ Bind(&non_function_call);
3373 __ GetBuiltinFunction(x1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); 3364 __ GetBuiltinFunction(x1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
3374 3365
3375 __ Bind(&do_call); 3366 __ Bind(&do_call);
3376 // Set expected number of arguments to zero (not changing x0). 3367 // Set expected number of arguments to zero (not changing x0).
3377 __ Mov(x2, 0); 3368 __ Mov(x2, 0);
3378 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 3369 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
3379 RelocInfo::CODE_TARGET); 3370 RelocInfo::CODE_TARGET);
3380 } 3371 }
3381 3372
3382 3373
3383 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { 3374 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
3384 // If the receiver is a smi trigger the non-string case. 3375 // If the receiver is a smi trigger the non-string case.
3385 __ JumpIfSmi(object_, receiver_not_string_); 3376 __ JumpIfSmi(object_, receiver_not_string_);
3386 3377
3387 // Fetch the instance type of the receiver into result register. 3378 // Fetch the instance type of the receiver into result register.
3388 __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); 3379 __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
(...skipping 169 matching lines...)
3558 3549
3559 __ Bind(&values_in_d_regs); 3550 __ Bind(&values_in_d_regs);
3560 __ Fcmp(lhs_d, rhs_d); 3551 __ Fcmp(lhs_d, rhs_d);
3561 __ B(vs, &unordered); // Overflow flag set if either is NaN. 3552 __ B(vs, &unordered); // Overflow flag set if either is NaN.
3562 STATIC_ASSERT((LESS == -1) && (EQUAL == 0) && (GREATER == 1)); 3553 STATIC_ASSERT((LESS == -1) && (EQUAL == 0) && (GREATER == 1));
3563 __ Cset(result, gt); // gt => 1, otherwise (lt, eq) => 0 (EQUAL). 3554 __ Cset(result, gt); // gt => 1, otherwise (lt, eq) => 0 (EQUAL).
3564 __ Csinv(result, result, xzr, ge); // lt => -1, gt => 1, eq => 0. 3555 __ Csinv(result, result, xzr, ge); // lt => -1, gt => 1, eq => 0.
3565 __ Ret(); 3556 __ Ret();
3566 3557
3567 __ Bind(&unordered); 3558 __ Bind(&unordered);
3568 ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC, 3559 ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
3569 CompareIC::GENERIC); 3560 CompareIC::GENERIC);
3570 __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); 3561 __ Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
3571 3562
3572 __ Bind(&maybe_undefined1); 3563 __ Bind(&maybe_undefined1);
3573 if (Token::IsOrderedRelationalCompareOp(op_)) { 3564 if (Token::IsOrderedRelationalCompareOp(op_)) {
3574 __ JumpIfNotRoot(rhs, Heap::kUndefinedValueRootIndex, &miss); 3565 __ JumpIfNotRoot(rhs, Heap::kUndefinedValueRootIndex, &miss);
3575 __ JumpIfSmi(lhs, &unordered); 3566 __ JumpIfSmi(lhs, &unordered);
3576 __ JumpIfNotObjectType(lhs, x10, x10, HEAP_NUMBER_TYPE, &maybe_undefined2); 3567 __ JumpIfNotObjectType(lhs, x10, x10, HEAP_NUMBER_TYPE, &maybe_undefined2);
3577 __ B(&unordered); 3568 __ B(&unordered);
3578 } 3569 }
3579 3570
3580 __ Bind(&maybe_undefined2); 3571 __ Bind(&maybe_undefined2);
(...skipping 219 matching lines...)
3800 // This method handles the case where a compare stub had the wrong 3791 // This method handles the case where a compare stub had the wrong
3801 // implementation. It calls a miss handler, which rewrites the stub. All other 3792 // implementation. It calls a miss handler, which rewrites the stub. All other
3802 // ICCompareStub::Generate* methods should fall back into this one if their 3793 // ICCompareStub::Generate* methods should fall back into this one if their
3803 // operands were not the expected types. 3794 // operands were not the expected types.
3804 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { 3795 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
3805 ASM_LOCATION("ICCompareStub[Miss]"); 3796 ASM_LOCATION("ICCompareStub[Miss]");
3806 3797
3807 Register stub_entry = x11; 3798 Register stub_entry = x11;
3808 { 3799 {
3809 ExternalReference miss = 3800 ExternalReference miss =
3810 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); 3801 ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
3811 3802
3812 FrameScope scope(masm, StackFrame::INTERNAL); 3803 FrameScope scope(masm, StackFrame::INTERNAL);
3813 Register op = x10; 3804 Register op = x10;
3814 Register left = x1; 3805 Register left = x1;
3815 Register right = x0; 3806 Register right = x0;
3816 // Preserve some caller-saved registers. 3807 // Preserve some caller-saved registers.
3817 __ Push(x1, x0, lr); 3808 __ Push(x1, x0, lr);
3818 // Push the arguments. 3809 // Push the arguments.
3819 __ Mov(op, Smi::FromInt(op_)); 3810 __ Mov(op, Smi::FromInt(op_));
3820 __ Push(left, right, op); 3811 __ Push(left, right, op);
(...skipping 293 matching lines...)
4114 4105
4115 // Locate first character of result. 4106 // Locate first character of result.
4116 __ Add(result_char0, result_string, 4107 __ Add(result_char0, result_string,
4117 SeqTwoByteString::kHeaderSize - kHeapObjectTag); 4108 SeqTwoByteString::kHeaderSize - kHeapObjectTag);
4118 4109
4119 STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0); 4110 STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
4120 __ Add(result_length, result_length, result_length); 4111 __ Add(result_length, result_length, result_length);
4121 __ CopyBytes(result_char0, substring_char0, result_length, x3, kCopyLong); 4112 __ CopyBytes(result_char0, substring_char0, result_length, x3, kCopyLong);
4122 4113
4123 __ Bind(&return_x0); 4114 __ Bind(&return_x0);
4124 Counters* counters = masm->isolate()->counters(); 4115 Counters* counters = isolate()->counters();
4125 __ IncrementCounter(counters->sub_string_native(), 1, x3, x4); 4116 __ IncrementCounter(counters->sub_string_native(), 1, x3, x4);
4126 __ Drop(3); 4117 __ Drop(3);
4127 __ Ret(); 4118 __ Ret();
4128 4119
4129 __ Bind(&runtime); 4120 __ Bind(&runtime);
4130 __ TailCallRuntime(Runtime::kHiddenSubString, 3, 1); 4121 __ TailCallRuntime(Runtime::kHiddenSubString, 3, 1);
4131 4122
4132 __ bind(&single_char); 4123 __ bind(&single_char);
4133 // x1: result_length 4124 // x1: result_length
4134 // x10: input_string 4125 // x10: input_string
(...skipping 126 matching lines...)
4261 __ Cmp(scratch1, scratch2); 4252 __ Cmp(scratch1, scratch2);
4262 __ B(ne, chars_not_equal); 4253 __ B(ne, chars_not_equal);
4263 __ Add(index, index, 1); 4254 __ Add(index, index, 1);
4264 __ Cbnz(index, &loop); 4255 __ Cbnz(index, &loop);
4265 } 4256 }
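The loop above runs a negative index up toward zero, so one Cbnz both advances the loop and detects termination, while the characters stay addressable from fixed one-past-the-end base registers. The same shape in C++, as a sketch (the base-pointer setup is in the lines skipped above):

  // Compare 'length' characters addressed from one-past-the-end
  // pointers, mirroring the load/Cmp/Add/Cbnz loop.
  bool CharsEqual(const unsigned char* left_end,
                  const unsigned char* right_end, long length) {
    for (long index = -length; index != 0; ++index) {  // __ Cbnz(index, &loop)
      if (left_end[index] != right_end[index]) return false;
    }
    return true;
  }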
4266 4257
4267 4258
4268 void StringCompareStub::Generate(MacroAssembler* masm) { 4259 void StringCompareStub::Generate(MacroAssembler* masm) {
4269 Label runtime; 4260 Label runtime;
4270 4261
4271 Counters* counters = masm->isolate()->counters(); 4262 Counters* counters = isolate()->counters();
4272 4263
4273 // Stack frame on entry. 4264 // Stack frame on entry.
4274 // sp[0]: right string 4265 // sp[0]: right string
4275 // sp[8]: left string 4266 // sp[8]: left string
4276 Register right = x10; 4267 Register right = x10;
4277 Register left = x11; 4268 Register left = x11;
4278 Register result = x0; 4269 Register result = x0;
4279 __ Pop(right, left); 4270 __ Pop(right, left);
4280 4271
4281 Label not_same; 4272 Label not_same;
(...skipping 25 matching lines...)
4307 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1); 4298 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
4308 } 4299 }
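The stub's visible fast path short-circuits on pointer identity (a string always compares EQUAL to itself) before trying the flat-ASCII comparison and, failing that, the runtime. The control flow, roughly, with the elided helpers stubbed out as declarations:

  struct String;
  bool BothFlatAscii(String* a, String* b);           // elided type checks
  int CompareFlatAsciiStrings(String* a, String* b);  // inline fast path
  int RuntimeStringCompare(String* a, String* b);     // kHiddenStringCompare

  int CompareStrings(String* left, String* right) {
    if (left == right) return 0;  // EQUAL; the &not_same branch not taken
    if (BothFlatAscii(left, right))
      return CompareFlatAsciiStrings(left, right);
    return RuntimeStringCompare(left, right);  // &runtime tail call
  }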
4309 4300
4310 4301
4311 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { 4302 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
4312 // ----------- S t a t e ------------- 4303 // ----------- S t a t e -------------
4313 // -- x1 : left 4304 // -- x1 : left
4314 // -- x0 : right 4305 // -- x0 : right
4315 // -- lr : return address 4306 // -- lr : return address
4316 // ----------------------------------- 4307 // -----------------------------------
4317 Isolate* isolate = masm->isolate();
4318 4308
4319 // Load x2 with the allocation site. We stick an undefined dummy value here 4309 // Load x2 with the allocation site. We stick an undefined dummy value here
4320 // and replace it with the real allocation site later when we instantiate this 4310 // and replace it with the real allocation site later when we instantiate this
4321 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate(). 4311 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
4322 __ LoadObject(x2, handle(isolate->heap()->undefined_value())); 4312 __ LoadObject(x2, handle(isolate()->heap()->undefined_value()));
4323 4313
4324 // Make sure that we actually patched the allocation site. 4314 // Make sure that we actually patched the allocation site.
4325 if (FLAG_debug_code) { 4315 if (FLAG_debug_code) {
4326 __ AssertNotSmi(x2, kExpectedAllocationSite); 4316 __ AssertNotSmi(x2, kExpectedAllocationSite);
4327 __ Ldr(x10, FieldMemOperand(x2, HeapObject::kMapOffset)); 4317 __ Ldr(x10, FieldMemOperand(x2, HeapObject::kMapOffset));
4328 __ AssertRegisterIsRoot(x10, Heap::kAllocationSiteMapRootIndex, 4318 __ AssertRegisterIsRoot(x10, Heap::kAllocationSiteMapRootIndex,
4329 kExpectedAllocationSite); 4319 kExpectedAllocationSite);
4330 } 4320 }
4331 4321
4332 // Tail call into the stub that handles binary operations with allocation 4322 // Tail call into the stub that handles binary operations with allocation
4333 // sites. 4323 // sites.
4334 BinaryOpWithAllocationSiteStub stub(state_); 4324 BinaryOpWithAllocationSiteStub stub(isolate(), state_);
4335 __ TailCallStub(&stub); 4325 __ TailCallStub(&stub);
4336 } 4326 }
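The undefined dummy in x2 is the template-stub idiom: the code is generated once against a placeholder constant, and per the comment above, GetCodeCopyFromTemplate() later instantiates copies with a concrete AllocationSite. A hedged sketch of that instantiation step (helper names are hypothetical, not the real API):

  struct Code; struct AllocationSiteObj;
  Code* CloneCode(Code* template_code);
  const void* UndefinedSentinel();
  void ReplaceEmbeddedConstant(Code* code, const void* from, const void* to);

  Code* InstantiateWithSite(Code* template_code, AllocationSiteObj* site) {
    Code* copy = CloneCode(template_code);  // copy the template stub
    // Rewrite the constant that LoadObject embedded for x2.
    ReplaceEmbeddedConstant(copy, UndefinedSentinel(), site);
    return copy;
  }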
4337 4327
4338 4328
4339 bool CodeStub::CanUseFPRegisters() { 4329 bool CodeStub::CanUseFPRegisters() {
4340 // FP registers always available on ARM64. 4330 // FP registers always available on ARM64.
4341 return true; 4331 return true;
4342 } 4332 }
4343 4333
4344 4334
(...skipping 40 matching lines...)
4385 4375
4386 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { 4376 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
4387 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); 4377 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
4388 Register address = 4378 Register address =
4389 x0.Is(regs_.address()) ? regs_.scratch0() : regs_.address(); 4379 x0.Is(regs_.address()) ? regs_.scratch0() : regs_.address();
4390 ASSERT(!address.Is(regs_.object())); 4380 ASSERT(!address.Is(regs_.object()));
4391 ASSERT(!address.Is(x0)); 4381 ASSERT(!address.Is(x0));
4392 __ Mov(address, regs_.address()); 4382 __ Mov(address, regs_.address());
4393 __ Mov(x0, regs_.object()); 4383 __ Mov(x0, regs_.object());
4394 __ Mov(x1, address); 4384 __ Mov(x1, address);
4395 __ Mov(x2, ExternalReference::isolate_address(masm->isolate())); 4385 __ Mov(x2, ExternalReference::isolate_address(isolate()));
4396 4386
4397 AllowExternalCallThatCantCauseGC scope(masm); 4387 AllowExternalCallThatCantCauseGC scope(masm);
4398 ExternalReference function = 4388 ExternalReference function =
4399 ExternalReference::incremental_marking_record_write_function( 4389 ExternalReference::incremental_marking_record_write_function(
4400 masm->isolate()); 4390 isolate());
4401 __ CallCFunction(function, 3, 0); 4391 __ CallCFunction(function, 3, 0);
4402 4392
4403 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); 4393 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
4404 } 4394 }
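The three Movs load the AAPCS64 argument registers for a plain C call (x0 = object, x1 = slot address, x2 = isolate), matching the arity in CallCFunction(function, 3, 0); caller-saved registers are spilled around it because the C code may clobber them. The entry point implied by that setup plausibly has this shape (inferred from the registers, not quoted from the GC sources):

  struct HeapObj; struct Obj; struct IsolateT;
  // x0: object written to; x1: updated slot; x2: isolate.
  extern "C" void RecordWriteFromCode(HeapObj* object, Obj** slot,
                                      IsolateT* isolate) {
    // Tell the incremental marker to re-visit 'object' so concurrent
    // marking cannot miss the new reference stored in *slot.
  }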
4405 4395
4406 4396
4407 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( 4397 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
4408 MacroAssembler* masm, 4398 MacroAssembler* masm,
4409 OnNoNeedToInformIncrementalMarker on_no_need, 4399 OnNoNeedToInformIncrementalMarker on_no_need,
4410 Mode mode) { 4400 Mode mode) {
(...skipping 172 matching lines...)
4583 4573
4584 __ Bind(&double_elements); 4574 __ Bind(&double_elements);
4585 __ Ldr(x10, FieldMemOperand(array, JSObject::kElementsOffset)); 4575 __ Ldr(x10, FieldMemOperand(array, JSObject::kElementsOffset));
4586 __ StoreNumberToDoubleElements(value, index_smi, x10, x11, d0, d1, 4576 __ StoreNumberToDoubleElements(value, index_smi, x10, x11, d0, d1,
4587 &slow_elements); 4577 &slow_elements);
4588 __ Ret(); 4578 __ Ret();
4589 } 4579 }
4590 4580
4591 4581
4592 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { 4582 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
4593 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); 4583 CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
4594 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); 4584 __ Call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
4595 int parameter_count_offset = 4585 int parameter_count_offset =
4596 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; 4586 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
4597 __ Ldr(x1, MemOperand(fp, parameter_count_offset)); 4587 __ Ldr(x1, MemOperand(fp, parameter_count_offset));
4598 if (function_mode_ == JS_FUNCTION_STUB_MODE) { 4588 if (function_mode_ == JS_FUNCTION_STUB_MODE) {
4599 __ Add(x1, x1, 1); 4589 __ Add(x1, x1, 1);
4600 } 4590 }
4601 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); 4591 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
4602 __ Drop(x1); 4592 __ Drop(x1);
4603 // Return to IC Miss stub, continuation still on stack. 4593 // Return to IC Miss stub, continuation still on stack.
4604 __ Ret(); 4594 __ Ret();
4605 } 4595 }
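The trampoline reloads the caller's stack parameter count from the stub-failure frame and drops that many slots once the frame is gone; the extra increment in JS_FUNCTION_STUB_MODE presumably covers the implicit receiver. Modelled in C++:

  // x1 after the Ldr/Add above.
  int SlotsToDrop(int caller_stack_param_count, bool js_function_stub_mode) {
    return caller_stack_param_count +
           (js_function_stub_mode ? 1 : 0);  // __ Add(x1, x1, 1)
  }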
4606 4596
4607 4597
4608 // The entry hook is a "BumpSystemStackPointer" instruction (sub), followed by 4598 // The entry hook is a "BumpSystemStackPointer" instruction (sub), followed by
4609 // a "Push lr" instruction, followed by a call. 4599 // a "Push lr" instruction, followed by a call.
4610 static const unsigned int kProfileEntryHookCallSize = 4600 static const unsigned int kProfileEntryHookCallSize =
4611 Assembler::kCallSizeWithRelocation + (2 * kInstructionSize); 4601 Assembler::kCallSizeWithRelocation + (2 * kInstructionSize);
4612 4602
4613 4603
4614 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { 4604 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4615 if (masm->isolate()->function_entry_hook() != NULL) { 4605 if (masm->isolate()->function_entry_hook() != NULL) {
4616 ProfileEntryHookStub stub; 4606 ProfileEntryHookStub stub(masm->isolate());
4617 Assembler::BlockConstPoolScope no_const_pools(masm); 4607 Assembler::BlockConstPoolScope no_const_pools(masm);
4618 Label entry_hook_call_start; 4608 Label entry_hook_call_start;
4619 __ Bind(&entry_hook_call_start); 4609 __ Bind(&entry_hook_call_start);
4620 __ Push(lr); 4610 __ Push(lr);
4621 __ CallStub(&stub); 4611 __ CallStub(&stub);
4622 ASSERT(masm->SizeOfCodeGeneratedSince(&entry_hook_call_start) == 4612 ASSERT(masm->SizeOfCodeGeneratedSince(&entry_hook_call_start) ==
4623 kProfileEntryHookCallSize); 4613 kProfileEntryHookCallSize);
4624 4614
4625 __ Pop(lr); 4615 __ Pop(lr);
4626 } 4616 }
4627 } 4617 }
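BlockConstPoolScope is load-bearing here: it keeps the emitted sub/push/call sequence at the fixed size asserted against kProfileEntryHookCallSize, which Generate() below relies on to recover the function's entry address as lr - kProfileEntryHookCallSize. For reference, the embedder API declares the hook along these lines (paraphrased from include/v8.h of this era):

  #include <stdint.h>
  // First parameter: address of the function being entered; second:
  // location where its return address is stored while it executes.
  typedef void (*FunctionEntryHook)(uintptr_t function,
                                    uintptr_t return_addr_location);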
4628 4618
4629 4619
4630 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { 4620 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4631 MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm); 4621 MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
4632 4622
4633 // Save all kCallerSaved registers (including lr), since this can be called 4623 // Save all kCallerSaved registers (including lr), since this can be called
4634 // from anywhere. 4624 // from anywhere.
4635 // TODO(jbramley): What about FP registers? 4625 // TODO(jbramley): What about FP registers?
4636 __ PushCPURegList(kCallerSaved); 4626 __ PushCPURegList(kCallerSaved);
4637 ASSERT(kCallerSaved.IncludesAliasOf(lr)); 4627 ASSERT(kCallerSaved.IncludesAliasOf(lr));
4638 const int kNumSavedRegs = kCallerSaved.Count(); 4628 const int kNumSavedRegs = kCallerSaved.Count();
4639 4629
4640 // Compute the function's address as the first argument. 4630 // Compute the function's address as the first argument.
4641 __ Sub(x0, lr, kProfileEntryHookCallSize); 4631 __ Sub(x0, lr, kProfileEntryHookCallSize);
4642 4632
4643 #if V8_HOST_ARCH_ARM64 4633 #if V8_HOST_ARCH_ARM64
4644 uintptr_t entry_hook = 4634 uintptr_t entry_hook =
4645 reinterpret_cast<uintptr_t>(masm->isolate()->function_entry_hook()); 4635 reinterpret_cast<uintptr_t>(isolate()->function_entry_hook());
4646 __ Mov(x10, entry_hook); 4636 __ Mov(x10, entry_hook);
4647 #else 4637 #else
4648 // Under the simulator we need to indirect the entry hook through a trampoline 4638 // Under the simulator we need to indirect the entry hook through a trampoline
4649 // function at a known address. 4639 // function at a known address.
4650 ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline)); 4640 ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
4651 __ Mov(x10, Operand(ExternalReference(&dispatcher, 4641 __ Mov(x10, Operand(ExternalReference(&dispatcher,
4652 ExternalReference::BUILTIN_CALL, 4642 ExternalReference::BUILTIN_CALL,
4653 masm->isolate()))); 4643 isolate())));
4654 // It additionally takes an isolate as a third parameter. 4644 // It additionally takes an isolate as a third parameter.
4655 __ Mov(x2, ExternalReference::isolate_address(masm->isolate())); 4645 __ Mov(x2, ExternalReference::isolate_address(isolate()));
4656 #endif 4646 #endif
4657 4647
4658 // The caller's return address is above the saved temporaries. 4648 // The caller's return address is above the saved temporaries.
4659 // Grab its location for the second argument to the hook. 4649 // Grab its location for the second argument to the hook.
4660 __ Add(x1, __ StackPointer(), kNumSavedRegs * kPointerSize); 4650 __ Add(x1, __ StackPointer(), kNumSavedRegs * kPointerSize);
4661 4651
4662 { 4652 {
4663 // Create a dummy frame, as CallCFunction requires this. 4653 // Create a dummy frame, as CallCFunction requires this.
4664 FrameScope frame(masm, StackFrame::MANUAL); 4654 FrameScope frame(masm, StackFrame::MANUAL);
4665 __ CallCFunction(x10, 2, 0); 4655 __ CallCFunction(x10, 2, 0);
(...skipping 24 matching lines...)
4690 __ SetStackPointer(old_stack_pointer); 4680 __ SetStackPointer(old_stack_pointer);
4691 } 4681 }
4692 4682
4693 void DirectCEntryStub::GenerateCall(MacroAssembler* masm, 4683 void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
4694 Register target) { 4684 Register target) {
4695 // Make sure the caller configured the stack pointer (see comment in 4685 // Make sure the caller configured the stack pointer (see comment in
4696 // DirectCEntryStub::Generate). 4686 // DirectCEntryStub::Generate).
4697 ASSERT(csp.Is(__ StackPointer())); 4687 ASSERT(csp.Is(__ StackPointer()));
4698 4688
4699 intptr_t code = 4689 intptr_t code =
4700 reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location()); 4690 reinterpret_cast<intptr_t>(GetCode(isolate()).location());
4701 __ Mov(lr, Operand(code, RelocInfo::CODE_TARGET)); 4691 __ Mov(lr, Operand(code, RelocInfo::CODE_TARGET));
4702 __ Mov(x10, target); 4692 __ Mov(x10, target);
4703 // Branch to the stub. 4693 // Branch to the stub.
4704 __ Blr(lr); 4694 __ Blr(lr);
4705 } 4695 }
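GenerateCall never branches to the C target directly: lr is pointed at this stub's own code and the real target travels in x10, so Blr(lr) enters the stub body (elided above), which presumably performs the actual Blr(x10) with csp configured and with a return address the GC can safely treat as code it owns. A pseudo-assembly sketch of the pairing (illustrative, not the exact elided body):

  // Call site (GenerateCall):      Stub body (DirectCEntryStub::Generate):
  //   lr  = stub code entry          ... stack pointer already csp ...
  //   x10 = C function address       blr x10        // the real C call
  //   blr lr  // enter the stub      ... restore and return to caller ...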
4706 4696
4707 4697
4708 // Probe the name dictionary in the 'elements' register. 4698 // Probe the name dictionary in the 'elements' register.
4709 // Jump to the 'done' label if a property with the given name is found. 4699 // Jump to the 'done' label if a property with the given name is found.
4710 // Jump to the 'miss' label otherwise. 4700 // Jump to the 'miss' label otherwise.
(...skipping 58 matching lines...)
4769 if (name.is(x0)) { 4759 if (name.is(x0)) {
4770 ASSERT(!elements.is(x1)); 4760 ASSERT(!elements.is(x1));
4771 __ Mov(x1, name); 4761 __ Mov(x1, name);
4772 __ Mov(x0, elements); 4762 __ Mov(x0, elements);
4773 } else { 4763 } else {
4774 __ Mov(x0, elements); 4764 __ Mov(x0, elements);
4775 __ Mov(x1, name); 4765 __ Mov(x1, name);
4776 } 4766 }
4777 4767
4778 Label not_found; 4768 Label not_found;
4779 NameDictionaryLookupStub stub(POSITIVE_LOOKUP); 4769 NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
4780 __ CallStub(&stub); 4770 __ CallStub(&stub);
4781 __ Cbz(x0, &not_found); 4771 __ Cbz(x0, &not_found);
4782 __ Mov(scratch2, x2); // Move entry index into scratch2. 4772 __ Mov(scratch2, x2); // Move entry index into scratch2.
4783 __ PopCPURegList(spill_list); 4773 __ PopCPURegList(spill_list);
4784 __ B(done); 4774 __ B(done);
4785 4775
4786 __ Bind(&not_found); 4776 __ Bind(&not_found);
4787 __ PopCPURegList(spill_list); 4777 __ PopCPURegList(spill_list);
4788 __ B(miss); 4778 __ B(miss);
4789 } 4779 }
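The if/else shuffle of x0/x1 above is a two-register parallel move resolved by ordering: when name already occupies x0, it must be saved into x1 before x0 is overwritten with elements; otherwise the straightforward order is safe. Modelled with plain slots:

  // Goal: *slot0 = *elements, *slot1 = *name, where 'name' may
  // currently live in *slot0 (name.is(x0) in the stub).
  void MoveIntoCallRegisters(int* slot0, int* slot1,
                             const int* elements, const int* name) {
    if (name == slot0) {
      *slot1 = *name;       // save name before clobbering its home
      *slot0 = *elements;
    } else {
      *slot0 = *elements;
      *slot1 = *name;
    }
  }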
(...skipping 50 matching lines...)
4840 } 4830 }
4841 4831
4842 CPURegList spill_list(CPURegister::kRegister, kXRegSizeInBits, 0, 6); 4832 CPURegList spill_list(CPURegister::kRegister, kXRegSizeInBits, 0, 6);
4843 spill_list.Combine(lr); 4833 spill_list.Combine(lr);
4844 spill_list.Remove(scratch0); // Scratch registers don't need to be preserved. 4834 spill_list.Remove(scratch0); // Scratch registers don't need to be preserved.
4845 4835
4846 __ PushCPURegList(spill_list); 4836 __ PushCPURegList(spill_list);
4847 4837
4848 __ Ldr(x0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); 4838 __ Ldr(x0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
4849 __ Mov(x1, Operand(name)); 4839 __ Mov(x1, Operand(name));
4850 NameDictionaryLookupStub stub(NEGATIVE_LOOKUP); 4840 NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
4851 __ CallStub(&stub); 4841 __ CallStub(&stub);
4852 // Move stub return value to scratch0. Note that scratch0 is not included in 4842 // Move stub return value to scratch0. Note that scratch0 is not included in
4853 // spill_list and won't be clobbered by PopCPURegList. 4843 // spill_list and won't be clobbered by PopCPURegList.
4854 __ Mov(scratch0, x0); 4844 __ Mov(scratch0, x0);
4855 __ PopCPURegList(spill_list); 4845 __ PopCPURegList(spill_list);
4856 4846
4857 __ Cbz(scratch0, done); 4847 __ Cbz(scratch0, done);
4858 __ B(miss); 4848 __ B(miss);
4859 } 4849 }
4860 4850
(...skipping 82 matching lines...)
4943 __ Mov(result, 0); 4933 __ Mov(result, 0);
4944 __ Ret(); 4934 __ Ret();
4945 } 4935 }
4946 4936
4947 4937
4948 template<class T> 4938 template<class T>
4949 static void CreateArrayDispatch(MacroAssembler* masm, 4939 static void CreateArrayDispatch(MacroAssembler* masm,
4950 AllocationSiteOverrideMode mode) { 4940 AllocationSiteOverrideMode mode) {
4951 ASM_LOCATION("CreateArrayDispatch"); 4941 ASM_LOCATION("CreateArrayDispatch");
4952 if (mode == DISABLE_ALLOCATION_SITES) { 4942 if (mode == DISABLE_ALLOCATION_SITES) {
4953 T stub(GetInitialFastElementsKind(), mode); 4943 T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
4954 __ TailCallStub(&stub); 4944 __ TailCallStub(&stub);
4955 4945
4956 } else if (mode == DONT_OVERRIDE) { 4946 } else if (mode == DONT_OVERRIDE) {
4957 Register kind = x3; 4947 Register kind = x3;
4958 int last_index = 4948 int last_index =
4959 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); 4949 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
4960 for (int i = 0; i <= last_index; ++i) { 4950 for (int i = 0; i <= last_index; ++i) {
4961 Label next; 4951 Label next;
4962 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i); 4952 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i);
4963 // TODO(jbramley): Is this the best way to handle this? Can we make the 4953 // TODO(jbramley): Is this the best way to handle this? Can we make the
4964 // tail calls conditional, rather than hopping over each one? 4954 // tail calls conditional, rather than hopping over each one?
4965 __ CompareAndBranch(kind, candidate_kind, ne, &next); 4955 __ CompareAndBranch(kind, candidate_kind, ne, &next);
4966 T stub(candidate_kind); 4956 T stub(masm->isolate(), candidate_kind);
4967 __ TailCallStub(&stub); 4957 __ TailCallStub(&stub);
4968 __ Bind(&next); 4958 __ Bind(&next);
4969 } 4959 }
4970 4960
4971 // If we reached this point, there is a problem. 4961 // If we reached this point, there is a problem.
4972 __ Abort(kUnexpectedElementsKindInArrayConstructor); 4962 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4973 4963
4974 } else { 4964 } else {
4975 UNREACHABLE(); 4965 UNREACHABLE();
4976 } 4966 }
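CreateArrayDispatch lowers what is logically a switch over ElementsKind into a linear chain of CompareAndBranch plus per-kind tail calls (exactly the shape the TODO above questions). A compact, self-contained C++ analogue of the DONT_OVERRIDE arm (the real code maps a sequence index to a kind first):

  #include <cstdio>
  using Handler = void (*)();
  // handlers[i] stands in for the stub pre-generated for kind i.
  void DispatchOnKind(int kind, Handler* handlers, int last_index) {
    for (int i = 0; i <= last_index; ++i) {
      if (kind == i) {   // __ CompareAndBranch(kind, candidate_kind, ne, &next)
        handlers[i]();   // __ TailCallStub(&stub)
        return;
      }
    }
    std::puts("unexpected elements kind");  // __ Abort(...)
  }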
(...skipping 29 matching lines...)
5006 4996
5007 // Look at the last argument. 4997 // Look at the last argument.
5008 // TODO(jbramley): What does a 0 argument represent? 4998 // TODO(jbramley): What does a 0 argument represent?
5009 __ Peek(x10, 0); 4999 __ Peek(x10, 0);
5010 __ Cbz(x10, &normal_sequence); 5000 __ Cbz(x10, &normal_sequence);
5011 5001
5012 if (mode == DISABLE_ALLOCATION_SITES) { 5002 if (mode == DISABLE_ALLOCATION_SITES) {
5013 ElementsKind initial = GetInitialFastElementsKind(); 5003 ElementsKind initial = GetInitialFastElementsKind();
5014 ElementsKind holey_initial = GetHoleyElementsKind(initial); 5004 ElementsKind holey_initial = GetHoleyElementsKind(initial);
5015 5005
5016 ArraySingleArgumentConstructorStub stub_holey(holey_initial, 5006 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
5007 holey_initial,
5017 DISABLE_ALLOCATION_SITES); 5008 DISABLE_ALLOCATION_SITES);
5018 __ TailCallStub(&stub_holey); 5009 __ TailCallStub(&stub_holey);
5019 5010
5020 __ Bind(&normal_sequence); 5011 __ Bind(&normal_sequence);
5021 ArraySingleArgumentConstructorStub stub(initial, 5012 ArraySingleArgumentConstructorStub stub(masm->isolate(),
5013 initial,
5022 DISABLE_ALLOCATION_SITES); 5014 DISABLE_ALLOCATION_SITES);
5023 __ TailCallStub(&stub); 5015 __ TailCallStub(&stub);
5024 } else if (mode == DONT_OVERRIDE) { 5016 } else if (mode == DONT_OVERRIDE) {
5025 // We are going to create a holey array, but our kind is non-holey. 5017 // We are going to create a holey array, but our kind is non-holey.
5026 // Fix kind and retry (only if we have an allocation site in the slot). 5018 // Fix kind and retry (only if we have an allocation site in the slot).
5027 __ Orr(kind, kind, 1); 5019 __ Orr(kind, kind, 1);
5028 5020
5029 if (FLAG_debug_code) { 5021 if (FLAG_debug_code) {
5030 __ Ldr(x10, FieldMemOperand(allocation_site, 0)); 5022 __ Ldr(x10, FieldMemOperand(allocation_site, 0));
5031 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex, 5023 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex,
(...skipping 11 matching lines...)
5043 __ Str(x11, FieldMemOperand(allocation_site, 5035 __ Str(x11, FieldMemOperand(allocation_site,
5044 AllocationSite::kTransitionInfoOffset)); 5036 AllocationSite::kTransitionInfoOffset));
5045 5037
5046 __ Bind(&normal_sequence); 5038 __ Bind(&normal_sequence);
5047 int last_index = 5039 int last_index =
5048 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); 5040 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
5049 for (int i = 0; i <= last_index; ++i) { 5041 for (int i = 0; i <= last_index; ++i) {
5050 Label next; 5042 Label next;
5051 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i); 5043 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i);
5052 __ CompareAndBranch(kind, candidate_kind, ne, &next); 5044 __ CompareAndBranch(kind, candidate_kind, ne, &next);
5053 ArraySingleArgumentConstructorStub stub(candidate_kind); 5045 ArraySingleArgumentConstructorStub stub(masm->isolate(), candidate_kind);
5054 __ TailCallStub(&stub); 5046 __ TailCallStub(&stub);
5055 __ Bind(&next); 5047 __ Bind(&next);
5056 } 5048 }
5057 5049
5058 // If we reached this point, there is a problem. 5050 // If we reached this point, there is a problem.
5059 __ Abort(kUnexpectedElementsKindInArrayConstructor); 5051 __ Abort(kUnexpectedElementsKindInArrayConstructor);
5060 } else { 5052 } else {
5061 UNREACHABLE(); 5053 UNREACHABLE();
5062 } 5054 }
5063 } 5055 }
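The packed-to-holey fix-up earlier in this function is the single Orr(kind, kind, 1); it works because the fast ElementsKind numbering appears to pair each packed kind (even) with its holey variant at the next odd value, e.g. FAST_SMI_ELEMENTS == 0 and FAST_HOLEY_SMI_ELEMENTS == 1. As a one-liner:

  // Packed kinds sit at even values, holey variants at packed|1,
  // so setting bit 0 is the packed -> holey transition.
  inline int ToHoleyKind(int packed_kind) {
    return packed_kind | 1;  // __ Orr(kind, kind, 1)
  }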
5064 5056
5065 5057
5066 template<class T> 5058 template<class T>
5067 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { 5059 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
5068 int to_index = GetSequenceIndexFromFastElementsKind( 5060 int to_index = GetSequenceIndexFromFastElementsKind(
5069 TERMINAL_FAST_ELEMENTS_KIND); 5061 TERMINAL_FAST_ELEMENTS_KIND);
5070 for (int i = 0; i <= to_index; ++i) { 5062 for (int i = 0; i <= to_index; ++i) {
5071 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 5063 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
5072 T stub(kind); 5064 T stub(isolate, kind);
5073 stub.GetCode(isolate); 5065 stub.GetCode(isolate);
5074 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { 5066 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
5075 T stub1(kind, DISABLE_ALLOCATION_SITES); 5067 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
5076 stub1.GetCode(isolate); 5068 stub1.GetCode(isolate);
5077 } 5069 }
5078 } 5070 }
5079 } 5071 }
5080 5072
5081 5073
5082 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { 5074 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
5083 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( 5075 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
5084 isolate); 5076 isolate);
5085 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( 5077 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
5086 isolate); 5078 isolate);
5087 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( 5079 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
5088 isolate); 5080 isolate);
5089 } 5081 }
5090 5082
5091 5083
5092 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( 5084 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
5093 Isolate* isolate) { 5085 Isolate* isolate) {
5094 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; 5086 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
5095 for (int i = 0; i < 2; i++) { 5087 for (int i = 0; i < 2; i++) {
5096 // For internal arrays we only need a few things. 5088 // For internal arrays we only need a few things.
5097 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); 5089 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
5098 stubh1.GetCode(isolate); 5090 stubh1.GetCode(isolate);
5099 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); 5091 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
5100 stubh2.GetCode(isolate); 5092 stubh2.GetCode(isolate);
5101 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); 5093 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
5102 stubh3.GetCode(isolate); 5094 stubh3.GetCode(isolate);
5103 } 5095 }
5104 } 5096 }
5105 5097
5106 5098
5107 void ArrayConstructorStub::GenerateDispatchToArrayStub( 5099 void ArrayConstructorStub::GenerateDispatchToArrayStub(
5108 MacroAssembler* masm, 5100 MacroAssembler* masm,
5109 AllocationSiteOverrideMode mode) { 5101 AllocationSiteOverrideMode mode) {
5110 Register argc = x0; 5102 Register argc = x0;
5111 if (argument_count_ == ANY) { 5103 if (argument_count_ == ANY) {
(...skipping 83 matching lines...)
5195 5187
5196 // One argument. 5188 // One argument.
5197 if (IsFastPackedElementsKind(kind)) { 5189 if (IsFastPackedElementsKind(kind)) {
5198 Label packed_case; 5190 Label packed_case;
5199 5191
5200 // We might need to create a holey array; look at the first argument. 5192 // We might need to create a holey array; look at the first argument.
5201 __ Peek(x10, 0); 5193 __ Peek(x10, 0);
5202 __ Cbz(x10, &packed_case); 5194 __ Cbz(x10, &packed_case);
5203 5195
5204 InternalArraySingleArgumentConstructorStub 5196 InternalArraySingleArgumentConstructorStub
5205 stub1_holey(GetHoleyElementsKind(kind)); 5197 stub1_holey(isolate(), GetHoleyElementsKind(kind));
5206 __ TailCallStub(&stub1_holey); 5198 __ TailCallStub(&stub1_holey);
5207 5199
5208 __ Bind(&packed_case); 5200 __ Bind(&packed_case);
5209 } 5201 }
5210 InternalArraySingleArgumentConstructorStub stub1(kind); 5202 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
5211 __ TailCallStub(&stub1); 5203 __ TailCallStub(&stub1);
5212 5204
5213 __ Bind(&zero_case); 5205 __ Bind(&zero_case);
5214 // No arguments. 5206 // No arguments.
5215 InternalArrayNoArgumentConstructorStub stub0(kind); 5207 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
5216 __ TailCallStub(&stub0); 5208 __ TailCallStub(&stub0);
5217 5209
5218 __ Bind(&n_case); 5210 __ Bind(&n_case);
5219 // N arguments. 5211 // N arguments.
5220 InternalArrayNArgumentsConstructorStub stubN(kind); 5212 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
5221 __ TailCallStub(&stubN); 5213 __ TailCallStub(&stubN);
5222 } 5214 }
5223 5215
5224 5216
5225 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { 5217 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
5226 // ----------- S t a t e ------------- 5218 // ----------- S t a t e -------------
5227 // -- x0 : argc 5219 // -- x0 : argc
5228 // -- x1 : constructor 5220 // -- x1 : constructor
5229 // -- sp[0] : return address 5221 // -- sp[0] : return address
5230 // -- sp[4] : last argument 5222 // -- sp[4] : last argument
5231 // ----------------------------------- 5223 // -----------------------------------
5232 Handle<Object> undefined_sentinel(
5233 masm->isolate()->heap()->undefined_value(), masm->isolate());
5234 5224
5235 Register constructor = x1; 5225 Register constructor = x1;
5236 5226
5237 if (FLAG_debug_code) { 5227 if (FLAG_debug_code) {
5238 // The array construct code is only set for the global and natives 5228 // The array construct code is only set for the global and natives
5239 // builtin Array functions, which always have maps. 5229 // builtin Array functions, which always have maps.
5240 5230
5241 Label unexpected_map, map_ok; 5231 Label unexpected_map, map_ok;
5242 // Initial map for the builtin Array function should be a map. 5232 // Initial map for the builtin Array function should be a map.
5243 __ Ldr(x10, FieldMemOperand(constructor, 5233 __ Ldr(x10, FieldMemOperand(constructor,
(...skipping 58 matching lines...)
5302 5292
5303 STATIC_ASSERT(FCA::kContextSaveIndex == 6); 5293 STATIC_ASSERT(FCA::kContextSaveIndex == 6);
5304 STATIC_ASSERT(FCA::kCalleeIndex == 5); 5294 STATIC_ASSERT(FCA::kCalleeIndex == 5);
5305 STATIC_ASSERT(FCA::kDataIndex == 4); 5295 STATIC_ASSERT(FCA::kDataIndex == 4);
5306 STATIC_ASSERT(FCA::kReturnValueOffset == 3); 5296 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
5307 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2); 5297 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
5308 STATIC_ASSERT(FCA::kIsolateIndex == 1); 5298 STATIC_ASSERT(FCA::kIsolateIndex == 1);
5309 STATIC_ASSERT(FCA::kHolderIndex == 0); 5299 STATIC_ASSERT(FCA::kHolderIndex == 0);
5310 STATIC_ASSERT(FCA::kArgsLength == 7); 5300 STATIC_ASSERT(FCA::kArgsLength == 7);
5311 5301
5312 Isolate* isolate = masm->isolate();
5313
5314 // FunctionCallbackArguments: context, callee and call data. 5302 // FunctionCallbackArguments: context, callee and call data.
5315 __ Push(context, callee, call_data); 5303 __ Push(context, callee, call_data);
5316 5304
5317 // Load context from callee. 5305 // Load context from callee.
5318 __ Ldr(context, FieldMemOperand(callee, JSFunction::kContextOffset)); 5306 __ Ldr(context, FieldMemOperand(callee, JSFunction::kContextOffset));
5319 5307
5320 if (!call_data_undefined) { 5308 if (!call_data_undefined) {
5321 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex); 5309 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
5322 } 5310 }
5323 Register isolate_reg = x5; 5311 Register isolate_reg = x5;
5324 __ Mov(isolate_reg, ExternalReference::isolate_address(isolate)); 5312 __ Mov(isolate_reg, ExternalReference::isolate_address(isolate()));
5325 5313
5326 // FunctionCallbackArguments: 5314 // FunctionCallbackArguments:
5327 // return value, return value default, isolate, holder. 5315 // return value, return value default, isolate, holder.
5328 __ Push(call_data, call_data, isolate_reg, holder); 5316 __ Push(call_data, call_data, isolate_reg, holder);
5329 5317
5330 // Prepare arguments. 5318 // Prepare arguments.
5331 Register args = x6; 5319 Register args = x6;
5332 __ Mov(args, masm->StackPointer()); 5320 __ Mov(args, masm->StackPointer());
5333 5321
5334 // Allocate the v8::Arguments structure in the arguments' space, since it's 5322 // Allocate the v8::Arguments structure in the arguments' space, since it's
(...skipping 17 matching lines...)
5352 // FunctionCallbackInfo::length_ = argc and 5340 // FunctionCallbackInfo::length_ = argc and
5353 // FunctionCallbackInfo::is_construct_call = 0 5341 // FunctionCallbackInfo::is_construct_call = 0
5354 __ Mov(x10, argc); 5342 __ Mov(x10, argc);
5355 __ Stp(x10, xzr, MemOperand(x0, 2 * kPointerSize)); 5343 __ Stp(x10, xzr, MemOperand(x0, 2 * kPointerSize));
5356 5344
5357 const int kStackUnwindSpace = argc + FCA::kArgsLength + 1; 5345 const int kStackUnwindSpace = argc + FCA::kArgsLength + 1;
5358 Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback); 5346 Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
5359 ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL; 5347 ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
5360 ApiFunction thunk_fun(thunk_address); 5348 ApiFunction thunk_fun(thunk_address);
5361 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type, 5349 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
5362 masm->isolate()); 5350 isolate());
5363 5351
5364 AllowExternalCallThatCantCauseGC scope(masm); 5352 AllowExternalCallThatCantCauseGC scope(masm);
5365 MemOperand context_restore_operand( 5353 MemOperand context_restore_operand(
5366 fp, (2 + FCA::kContextSaveIndex) * kPointerSize); 5354 fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
5367 // Stores return the first JS argument. 5355 // Stores return the first JS argument.
5368 int return_value_offset = 0; 5356 int return_value_offset = 0;
5369 if (is_store) { 5357 if (is_store) {
5370 return_value_offset = 2 + FCA::kArgsLength; 5358 return_value_offset = 2 + FCA::kArgsLength;
5371 } else { 5359 } else {
5372 return_value_offset = 2 + FCA::kReturnValueOffset; 5360 return_value_offset = 2 + FCA::kReturnValueOffset;
(...skipping 37 matching lines...)
5410 __ Poke(x1, 1 * kPointerSize); 5398 __ Poke(x1, 1 * kPointerSize);
5411 __ Add(x1, masm->StackPointer(), 1 * kPointerSize); // x1 = AccessorInfo& 5399 __ Add(x1, masm->StackPointer(), 1 * kPointerSize); // x1 = AccessorInfo&
5412 5400
5413 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1; 5401 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
5414 5402
5415 Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); 5403 Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
5416 ExternalReference::Type thunk_type = 5404 ExternalReference::Type thunk_type =
5417 ExternalReference::PROFILING_GETTER_CALL; 5405 ExternalReference::PROFILING_GETTER_CALL;
5418 ApiFunction thunk_fun(thunk_address); 5406 ApiFunction thunk_fun(thunk_address);
5419 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type, 5407 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
5420 masm->isolate()); 5408 isolate());
5421 5409
5422 const int spill_offset = 1 + kApiStackSpace; 5410 const int spill_offset = 1 + kApiStackSpace;
5423 __ CallApiFunctionAndReturn(api_function_address, 5411 __ CallApiFunctionAndReturn(api_function_address,
5424 thunk_ref, 5412 thunk_ref,
5425 kStackUnwindSpace, 5413 kStackUnwindSpace,
5426 spill_offset, 5414 spill_offset,
5427 MemOperand(fp, 6 * kPointerSize), 5415 MemOperand(fp, 6 * kPointerSize),
5428 NULL); 5416 NULL);
5429 } 5417 }
5430 5418
5431 5419
5432 #undef __ 5420 #undef __
5433 5421
5434 } } // namespace v8::internal 5422 } } // namespace v8::internal
5435 5423
5436 #endif // V8_TARGET_ARCH_ARM64 5424 #endif // V8_TARGET_ARCH_ARM64