Chromium Code Reviews

Unified Diff: src/ppc/code-stubs-ppc.cc

Issue 901083004: Contribution of PowerPC port (continuation of 422063005) - PPC dir update (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Contribution of PowerPC port (continuation of 422063005) - PPC dir update - comments and rebase. Created 5 years, 10 months ago.
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/v8.h"
 
 #if V8_TARGET_ARCH_PPC
 
 #include "src/base/bits.h"
 #include "src/bootstrapper.h"
(...skipping 139 matching lines...)
 
     // Do fast-path convert from double to int.
     __ ConvertDoubleToInt64(double_scratch,
 #if !V8_TARGET_ARCH_PPC64
                             scratch,
 #endif
                             result_reg, d0);
 
     // Test for overflow
 #if V8_TARGET_ARCH_PPC64
-    __ TestIfInt32(result_reg, scratch, r0);
+    __ TestIfInt32(result_reg, r0);
 #else
     __ TestIfInt32(scratch, result_reg, r0);
 #endif
     __ beq(&fastpath_done);
   }
 
   __ Push(scratch_high, scratch_low);
   // Account for saved regs if input is sp.
   if (input_reg.is(sp)) double_offset += 2 * kPointerSize;
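Note on the hunk above: on PPC64 the conversion leaves a single 64-bit result, so the overflow test drops the extra scratch register that the 32-bit path still needs. A minimal standalone sketch of the check TestIfInt32 is assumed to perform here (plain C++, not the V8 macro-assembler API):

    #include <cstdint>

    // A 64-bit conversion result is a valid int32 iff sign-extending its
    // low 32 bits reproduces the full value (sketch of the PPC64 check).
    bool FitsInInt32(int64_t result) {
      return result == static_cast<int32_t>(result);
    }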
(...skipping 150 matching lines...)
     __ slwi(r5, r5, Operand(HeapNumber::kNonMantissaBitsInTopWord));
     // Or with all low-bits of mantissa.
     __ lwz(r6, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
     __ orx(r3, r6, r5);
     __ cmpi(r3, Operand::Zero());
     // For equal we already have the right value in r3: Return zero (equal)
     // if all bits in mantissa are zero (it's an Infinity) and non-zero if
     // not (it's a NaN). For <= and >= we need to load r0 with the failing
     // value if it's a NaN.
     if (cond != eq) {
-      Label not_equal;
-      __ bne(&not_equal);
-      // All-zero means Infinity means equal.
-      __ Ret();
-      __ bind(&not_equal);
-      if (cond == le) {
-        __ li(r3, Operand(GREATER));  // NaN <= NaN should fail.
-      } else {
-        __ li(r3, Operand(LESS));  // NaN >= NaN should fail.
+      if (CpuFeatures::IsSupported(ISELECT)) {
+        __ li(r4, Operand((cond == le) ? GREATER : LESS));
+        __ isel(eq, r3, r3, r4);
+      } else {
+        Label not_equal;
+        __ bne(&not_equal);
+        // All-zero means Infinity means equal.
+        __ Ret();
+        __ bind(&not_equal);
+        if (cond == le) {
+          __ li(r3, Operand(GREATER));  // NaN <= NaN should fail.
+        } else {
+          __ li(r3, Operand(LESS));  // NaN >= NaN should fail.
+        }
       }
     }
     __ Ret();
   }
   // No fall through here.
 
   __ bind(&not_identical);
 }
 
 
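The ISELECT fast path above (and the several similar hunks below) replaces compare-branch-load sequences with the Power ISA integer-select instruction: isel rT, rA, rB, BC writes rA to rT when CR bit BC is set and rB otherwise, and an rA field of 0 reads as the constant zero rather than the contents of r0. In C++ terms, the two idioms this patch uses amount to the following sketch (V8's LESS/EQUAL/GREATER constants taken as -1/0/1, consistent with the DCHECK(EQUAL == 0) that appears below):

    // Branchless select: what a single isel computes.
    int Isel(bool cr_bit, int ra, int rb) { return cr_bit ? ra : rb; }

    // The two-isel idiom used to materialize -1/0/1 after a compare:
    //   __ li(r4, Operand(GREATER));
    //   __ li(r5, Operand(LESS));
    //   __ isel(eq, r3, r0, r4);   // rA field 0 reads as constant zero
    //   __ isel(lt, r3, r5, r3);
    int ThreeWayResult(bool eq, bool lt) {
      const int LESS = -1, GREATER = 1;
      int r3 = eq ? 0 : GREATER;  // zero comes from the r0-encoding, not r0
      r3 = lt ? LESS : r3;        // overrides with LESS when lt is set
      return r3;
    }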
(...skipping 214 matching lines...)
   // NOTICE! This code is only reached after a smi-fast-case check, so
   // it is certain that at least one operand isn't a smi.
 
   // Handle the case where the objects are identical. Either returns the answer
   // or goes to slow. Only falls through if the objects were not identical.
   EmitIdenticalObjectComparison(masm, &slow, cc);
 
   // If either is a Smi (we know that not both are), then they can only
   // be strictly equal if the other is a HeapNumber.
   STATIC_ASSERT(kSmiTag == 0);
-  DCHECK_EQ(0, Smi::FromInt(0));
+  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
   __ and_(r5, lhs, rhs);
   __ JumpIfNotSmi(r5, &not_smis);
   // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
   // 1) Return the answer.
   // 2) Go to slow.
   // 3) Fall through to both_loaded_as_doubles.
   // 4) Jump to lhs_not_nan.
   // In cases 3 and 4 we have found out we were dealing with a number-number
   // comparison. The double values of the numbers have been loaded
   // into d7 and d6.
   EmitSmiNonsmiComparison(masm, lhs, rhs, &lhs_not_nan, &slow, strict());
 
   __ bind(&both_loaded_as_doubles);
   // The arguments have been converted to doubles and stored in d6 and d7
   __ bind(&lhs_not_nan);
   Label no_nan;
   __ fcmpu(d7, d6);
 
   Label nan, equal, less_than;
   __ bunordered(&nan);
-  __ beq(&equal);
-  __ blt(&less_than);
-  __ li(r3, Operand(GREATER));
-  __ Ret();
-  __ bind(&equal);
-  __ li(r3, Operand(EQUAL));
-  __ Ret();
-  __ bind(&less_than);
-  __ li(r3, Operand(LESS));
-  __ Ret();
+  if (CpuFeatures::IsSupported(ISELECT)) {
+    DCHECK(EQUAL == 0);
+    __ li(r4, Operand(GREATER));
+    __ li(r5, Operand(LESS));
+    __ isel(eq, r3, r0, r4);
+    __ isel(lt, r3, r5, r3);
+    __ Ret();
+  } else {
+    __ beq(&equal);
+    __ blt(&less_than);
+    __ li(r3, Operand(GREATER));
+    __ Ret();
+    __ bind(&equal);
+    __ li(r3, Operand(EQUAL));
+    __ Ret();
+    __ bind(&less_than);
+    __ li(r3, Operand(LESS));
+    __ Ret();
+  }
 
   __ bind(&nan);
   // If one of the sides was a NaN then the v flag is set. Load r3 with
   // whatever it takes to make the comparison fail, since comparisons with NaN
   // always fail.
   if (cc == lt || cc == le) {
     __ li(r3, Operand(GREATER));
   } else {
     __ li(r3, Operand(LESS));
   }
(...skipping 240 matching lines...)
     __ mr(scratch, exponent);
   } else {
     // Exponent has previously been stored into scratch as untagged integer.
     __ mr(exponent, scratch);
   }
   __ fmr(double_scratch, double_base);  // Back up base.
   __ li(scratch2, Operand(1));
   __ ConvertIntToDouble(scratch2, double_result);
 
   // Get absolute value of exponent.
-  Label positive_exponent;
   __ cmpi(scratch, Operand::Zero());
-  __ bge(&positive_exponent);
-  __ neg(scratch, scratch);
-  __ bind(&positive_exponent);
+  if (CpuFeatures::IsSupported(ISELECT)) {
+    __ neg(scratch2, scratch);
+    __ isel(lt, scratch, scratch2, scratch);
+  } else {
+    Label positive_exponent;
+    __ bge(&positive_exponent);
+    __ neg(scratch, scratch);
+    __ bind(&positive_exponent);
+  }
 
   Label while_true, no_carry, loop_end;
   __ bind(&while_true);
   __ andi(scratch2, scratch, Operand(1));
   __ beq(&no_carry, cr0);
   __ fmul(double_result, double_result, double_scratch);
   __ bind(&no_carry);
   __ ShiftRightArithImm(scratch, scratch, 1, SetRC);
   __ beq(&loop_end, cr0);
   __ fmul(double_scratch, double_scratch, double_scratch);
(...skipping 50 matching lines...)
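The while_true loop above is binary exponentiation (square-and-multiply): each iteration consumes one bit of the absolute exponent, multiplying into the accumulated result when the bit is set and squaring the base between bits. A standalone C++ equivalent, assuming the stub's later code (elided by the skip) handles negative exponents by taking the reciprocal:

    #include <cstdlib>

    double PowBySquaring(double base, int exponent) {
      int e = std::abs(exponent);   // mirrors the isel/branch abs above
      double result = 1.0;          // mirrors ConvertIntToDouble(1)
      while (e != 0) {
        if (e & 1) result *= base;  // low bit set: multiply into result
        e >>= 1;                    // ShiftRightArithImm(scratch, scratch, 1)
        if (e != 0) base *= base;   // square the base for the next bit
      }
      return result;
    }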
     __ Ret();
   }
 }
 
 
 bool CEntryStub::NeedsImmovableCode() { return true; }
 
 
 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
-  // WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
+  CreateWeakCellStub::GenerateAheadOfTime(isolate);
   BinaryOpICStub::GenerateAheadOfTime(isolate);
   StoreRegistersStateStub::GenerateAheadOfTime(isolate);
   RestoreRegistersStateStub::GenerateAheadOfTime(isolate);
   BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
 }
 
 
 void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
   StoreRegistersStateStub stub(isolate);
   stub.GetCode();
(...skipping 38 matching lines...)
   __ add(r4, r4, sp);
   __ subi(r4, r4, Operand(kPointerSize));
 
   // Enter the exit frame that transitions from JavaScript to C++.
   FrameScope scope(masm, StackFrame::MANUAL);
 
   // Need at least one extra slot for return address location.
   int arg_stack_space = 1;
 
   // PPC LINUX ABI:
-#if V8_TARGET_ARCH_PPC64 && !ABI_RETURNS_OBJECT_PAIRS_IN_REGS
+#if !ABI_RETURNS_OBJECT_PAIRS_IN_REGS
   // Pass buffer for return value on stack if necessary
   if (result_size() > 1) {
     DCHECK_EQ(2, result_size());
     arg_stack_space += 2;
   }
 #endif
 
   __ EnterExitFrame(save_doubles(), arg_stack_space);
 
   // Store a copy of argc in callee-saved registers for later.
   __ mr(r14, r3);
 
   // r3, r14: number of arguments including receiver (C callee-saved)
   // r4: pointer to the first argument
   // r15: pointer to builtin function (C callee-saved)
 
   // Result returned in registers or stack, depending on result size and ABI.
 
   Register isolate_reg = r5;
-#if V8_TARGET_ARCH_PPC64 && !ABI_RETURNS_OBJECT_PAIRS_IN_REGS
+#if !ABI_RETURNS_OBJECT_PAIRS_IN_REGS
   if (result_size() > 1) {
     // The return value is 16-byte non-scalar value.
     // Use frame storage reserved by calling function to pass return
     // buffer as implicit first argument.
     __ mr(r5, r4);
     __ mr(r4, r3);
     __ addi(r3, sp, Operand((kStackFrameExtraParamSlot + 1) * kPointerSize));
     isolate_reg = r6;
   }
 #endif
(...skipping 27 matching lines...)
   __ bind(&here);
   __ mflr(r8);
 
   // Constant used below is dependent on size of Call() macro instructions
   __ addi(r0, r8, Operand(20));
 
   __ StoreP(r0, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize));
   __ Call(target);
 }
 
-#if V8_TARGET_ARCH_PPC64 && !ABI_RETURNS_OBJECT_PAIRS_IN_REGS
+#if !ABI_RETURNS_OBJECT_PAIRS_IN_REGS
   // If return value is on the stack, pop it to registers.
   if (result_size() > 1) {
     __ LoadP(r4, MemOperand(r3, kPointerSize));
     __ LoadP(r3, MemOperand(r3));
   }
 #endif
 
   // Runtime functions should not return 'the hole'. Allowing it to escape may
   // lead to crashes in the IC code later.
   if (FLAG_debug_code) {
(...skipping 410 matching lines...)
     if (HasArgsInRegisters()) {
       __ Push(r3, r4);
     }
     __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
   } else {
     {
       FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
       __ Push(r3, r4);
       __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
     }
-    Label true_value, done;
-    __ cmpi(r3, Operand::Zero());
-    __ beq(&true_value);
-
-    __ LoadRoot(r3, Heap::kFalseValueRootIndex);
-    __ b(&done);
-
-    __ bind(&true_value);
-    __ LoadRoot(r3, Heap::kTrueValueRootIndex);
-
-    __ bind(&done);
+    if (CpuFeatures::IsSupported(ISELECT)) {
+      __ cmpi(r3, Operand::Zero());
+      __ LoadRoot(r3, Heap::kTrueValueRootIndex);
+      __ LoadRoot(r4, Heap::kFalseValueRootIndex);
+      __ isel(eq, r3, r3, r4);
+    } else {
+      Label true_value, done;
+      __ cmpi(r3, Operand::Zero());
+      __ beq(&true_value);
+
+      __ LoadRoot(r3, Heap::kFalseValueRootIndex);
+      __ b(&done);
+
+      __ bind(&true_value);
+      __ LoadRoot(r3, Heap::kTrueValueRootIndex);
+
+      __ bind(&done);
+    }
     __ Ret(HasArgsInRegisters() ? 0 : 2);
   }
 }
 
 
 void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
   Label miss;
   Register receiver = LoadDescriptor::ReceiverRegister();
+  // Ensure that the vector and slot registers won't be clobbered before
+  // calling the miss handler.
+  DCHECK(!FLAG_vector_ics ||
+         !AreAliased(r7, r8, VectorLoadICDescriptor::VectorRegister(),
+                     VectorLoadICDescriptor::SlotRegister()));
 
-  NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r6,
-                                                          r7, &miss);
+  NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r7,
+                                                          r8, &miss);
   __ bind(&miss);
   PropertyAccessCompiler::TailCallBuiltin(
       masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
 }
 
 
 void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
   // Return address is in lr.
   Label miss;
 
   Register receiver = LoadDescriptor::ReceiverRegister();
   Register index = LoadDescriptor::NameRegister();
-  Register scratch = r6;
+  Register scratch = r8;
   Register result = r3;
   DCHECK(!scratch.is(receiver) && !scratch.is(index));
+  DCHECK(!FLAG_vector_ics ||
+         (!scratch.is(VectorLoadICDescriptor::VectorRegister()) &&
+          result.is(VectorLoadICDescriptor::SlotRegister())));
 
+  // StringCharAtGenerator doesn't use the result register until it's passed
+  // the different miss possibilities. If it did, we would have a conflict
+  // when FLAG_vector_ics is true.
   StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                           &miss,  // When not a string.
                                           &miss,  // When not a number.
                                           &miss,  // When index out of range.
                                           STRING_INDEX_IS_ARRAY_INDEX,
                                           RECEIVER_IS_STRING);
   char_at_generator.GenerateFast(masm);
   __ Ret();
 
   StubRuntimeCallHelper call_helper;
(...skipping 116 matching lines...)
   __ bind(&adaptor_frame);
   __ LoadP(r5, MemOperand(r6, ArgumentsAdaptorFrameConstants::kLengthOffset));
   __ SmiToPtrArrayOffset(r7, r5);
   __ add(r6, r6, r7);
   __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset));
   __ StoreP(r6, MemOperand(sp, 1 * kPointerSize));
 
   // r4 = parameter count (tagged)
   // r5 = argument count (tagged)
   // Compute the mapped parameter count = min(r4, r5) in r4.
-  Label skip;
   __ cmp(r4, r5);
-  __ blt(&skip);
-  __ mr(r4, r5);
-  __ bind(&skip);
+  if (CpuFeatures::IsSupported(ISELECT)) {
+    __ isel(lt, r4, r4, r5);
+  } else {
+    Label skip;
+    __ blt(&skip);
+    __ mr(r4, r5);
+    __ bind(&skip);
+  }
 
   __ bind(&try_allocate);
 
   // Compute the sizes of backing store, parameter map, and arguments object.
   // 1. Parameter map, has 2 extra words containing context and backing store.
   const int kParameterMapHeaderSize =
       FixedArray::kHeaderSize + 2 * kPointerSize;
   // If there are no mapped parameters, we do not need the parameter_map.
-  Label skip2, skip3;
   __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
-  __ bne(&skip2);
-  __ li(r11, Operand::Zero());
-  __ b(&skip3);
-  __ bind(&skip2);
-  __ SmiToPtrArrayOffset(r11, r4);
-  __ addi(r11, r11, Operand(kParameterMapHeaderSize));
-  __ bind(&skip3);
+  if (CpuFeatures::IsSupported(ISELECT)) {
+    __ SmiToPtrArrayOffset(r11, r4);
+    __ addi(r11, r11, Operand(kParameterMapHeaderSize));
+    __ isel(eq, r11, r0, r11);
+  } else {
+    Label skip2, skip3;
+    __ bne(&skip2);
+    __ li(r11, Operand::Zero());
+    __ b(&skip3);
+    __ bind(&skip2);
+    __ SmiToPtrArrayOffset(r11, r4);
+    __ addi(r11, r11, Operand(kParameterMapHeaderSize));
+    __ bind(&skip3);
+  }
 
   // 2. Backing store.
   __ SmiToPtrArrayOffset(r7, r5);
   __ add(r11, r11, r7);
   __ addi(r11, r11, Operand(FixedArray::kHeaderSize));
 
   // 3. Arguments object.
   __ addi(r11, r11, Operand(Heap::kSloppyArgumentsObjectSize));
 
   // Do the allocation of all three objects in one go.
   __ Allocate(r11, r3, r6, r7, &runtime, TAG_OBJECT);
 
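For reference, the byte count that steps 1-3 accumulate in r11 before the single Allocate() call, written out as plain C++ (constants are parameterized; this is a sketch of the arithmetic, not V8 API):

    int SloppyArgumentsAllocationSize(int mapped_count, int arg_count,
                                      int kPointerSize,
                                      int kFixedArrayHeaderSize,
                                      int kSloppyArgumentsObjectSize) {
      // 1. Parameter map: header plus two extra words (context and backing
      //    store), needed only if any parameters are mapped.
      const int kParameterMapHeaderSize =
          kFixedArrayHeaderSize + 2 * kPointerSize;
      int size = (mapped_count == 0)
                     ? 0
                     : kParameterMapHeaderSize + mapped_count * kPointerSize;
      // 2. Backing store for the arguments themselves.
      size += kFixedArrayHeaderSize + arg_count * kPointerSize;
      // 3. The arguments object itself.
      size += kSloppyArgumentsObjectSize;
      return size;
    }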
   // r3 = address of new object(s) (tagged)
   // r5 = argument count (smi-tagged)
   // Get the arguments boilerplate from the current native context into r4.
   const int kNormalOffset =
       Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
   const int kAliasedOffset =
       Context::SlotOffset(Context::ALIASED_ARGUMENTS_MAP_INDEX);
 
   __ LoadP(r7,
            MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ LoadP(r7, FieldMemOperand(r7, GlobalObject::kNativeContextOffset));
-  Label skip4, skip5;
   __ cmpi(r4, Operand::Zero());
-  __ bne(&skip4);
-  __ LoadP(r7, MemOperand(r7, kNormalOffset));
-  __ b(&skip5);
-  __ bind(&skip4);
-  __ LoadP(r7, MemOperand(r7, kAliasedOffset));
-  __ bind(&skip5);
+  if (CpuFeatures::IsSupported(ISELECT)) {
+    __ LoadP(r11, MemOperand(r7, kNormalOffset));
+    __ LoadP(r7, MemOperand(r7, kAliasedOffset));
+    __ isel(eq, r7, r11, r7);
+  } else {
+    Label skip4, skip5;
+    __ bne(&skip4);
+    __ LoadP(r7, MemOperand(r7, kNormalOffset));
+    __ b(&skip5);
+    __ bind(&skip4);
+    __ LoadP(r7, MemOperand(r7, kAliasedOffset));
+    __ bind(&skip5);
+  }
 
   // r3 = address of new object (tagged)
   // r4 = mapped parameter count (tagged)
   // r5 = argument count (smi-tagged)
   // r7 = address of arguments map (tagged)
   __ StoreP(r7, FieldMemOperand(r3, JSObject::kMapOffset), r0);
   __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
   __ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
   __ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
 
(...skipping 16 matching lines...)
   // If we allocated a parameter map, r7 will point there, otherwise
   // it will point to the backing store.
   __ addi(r7, r3, Operand(Heap::kSloppyArgumentsObjectSize));
   __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
 
   // r3 = address of new object (tagged)
   // r4 = mapped parameter count (tagged)
   // r5 = argument count (tagged)
   // r7 = address of parameter map or backing store (tagged)
   // Initialize parameter map. If there are no mapped arguments, we're done.
-  Label skip_parameter_map, skip6;
+  Label skip_parameter_map;
   __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
-  __ bne(&skip6);
-  // Move backing store address to r6, because it is
-  // expected there when filling in the unmapped arguments.
-  __ mr(r6, r7);
-  __ b(&skip_parameter_map);
-  __ bind(&skip6);
+  if (CpuFeatures::IsSupported(ISELECT)) {
+    __ isel(eq, r6, r7, r6);
+    __ beq(&skip_parameter_map);
+  } else {
+    Label skip6;
+    __ bne(&skip6);
+    // Move backing store address to r6, because it is
+    // expected there when filling in the unmapped arguments.
+    __ mr(r6, r7);
+    __ b(&skip_parameter_map);
+    __ bind(&skip6);
+  }
 
   __ LoadRoot(r9, Heap::kSloppyArgumentsElementsMapRootIndex);
   __ StoreP(r9, FieldMemOperand(r7, FixedArray::kMapOffset), r0);
   __ AddSmiLiteral(r9, r4, Smi::FromInt(2), r0);
   __ StoreP(r9, FieldMemOperand(r7, FixedArray::kLengthOffset), r0);
   __ StoreP(cp, FieldMemOperand(r7, FixedArray::kHeaderSize + 0 * kPointerSize),
             r0);
   __ SmiToPtrArrayOffset(r9, r4);
   __ add(r9, r7, r9);
   __ addi(r9, r9, Operand(kParameterMapHeaderSize));
(...skipping 892 matching lines...)
     GenerateRecordCallTarget(masm);
 
     __ SmiToPtrArrayOffset(r8, r6);
     __ add(r8, r5, r8);
     if (FLAG_pretenuring_call_new) {
       // Put the AllocationSite from the feedback vector into r5.
       // By adding kPointerSize we encode that we know the AllocationSite
       // entry is at the feedback vector slot given by r6 + 1.
       __ LoadP(r5, FieldMemOperand(r8, FixedArray::kHeaderSize + kPointerSize));
     } else {
-      Label feedback_register_initialized;
       // Put the AllocationSite from the feedback vector into r5, or undefined.
       __ LoadP(r5, FieldMemOperand(r8, FixedArray::kHeaderSize));
       __ LoadP(r8, FieldMemOperand(r5, AllocationSite::kMapOffset));
       __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex);
-      __ beq(&feedback_register_initialized);
-      __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
-      __ bind(&feedback_register_initialized);
+      if (CpuFeatures::IsSupported(ISELECT)) {
+        __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
+        __ isel(eq, r5, r5, r8);
+      } else {
+        Label feedback_register_initialized;
+        __ beq(&feedback_register_initialized);
+        __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
+        __ bind(&feedback_register_initialized);
+      }
     }
 
     __ AssertUndefinedOrAllocationSite(r5, r8);
   }
 
+  // Pass function as original constructor.
+  __ mr(r6, r4);
+
   // Jump to the function-specific construct stub.
   Register jmp_reg = r7;
   __ LoadP(jmp_reg, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
   __ LoadP(jmp_reg,
            FieldMemOperand(jmp_reg, SharedFunctionInfo::kConstructStubOffset));
   __ addi(ip, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
   __ JumpToJSEntry(ip);
 
   // r3: number of arguments
   // r4: called object
(...skipping 21 matching lines...)
   __ LoadP(vector,
            FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset));
   __ LoadP(vector,
            FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
 }
 
 
 void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
   // r4 - function
   // r6 - slot id
+  // r5 - vector
   Label miss;
   int argc = arg_count();
   ParameterCount actual(argc);
 
-  EmitLoadTypeFeedbackVector(masm, r5);
-
   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r7);
   __ cmp(r4, r7);
   __ bne(&miss);
 
   __ mov(r3, Operand(arg_count()));
   __ SmiToPtrArrayOffset(r7, r6);
   __ add(r7, r5, r7);
   __ LoadP(r7, FieldMemOperand(r7, FixedArray::kHeaderSize));
 
   // Verify that r7 contains an AllocationSite
(...skipping 12 matching lines...)
   CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod());
 
   // Unreachable.
   __ stop("Unexpected code address");
 }
 
 
 void CallICStub::Generate(MacroAssembler* masm) {
   // r4 - function
   // r6 - slot id (Smi)
+  // r5 - vector
+  const int with_types_offset =
+      FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex);
+  const int generic_offset =
+      FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
   Label extra_checks_or_miss, slow_start;
   Label slow, non_function, wrap, cont;
   Label have_js_function;
   int argc = arg_count();
   ParameterCount actual(argc);
 
-  EmitLoadTypeFeedbackVector(masm, r5);
-
   // The checks. First, does r4 match the recorded monomorphic target?
   __ SmiToPtrArrayOffset(r7, r6);
   __ add(r7, r5, r7);
   __ LoadP(r7, FieldMemOperand(r7, FixedArray::kHeaderSize));
-  __ cmp(r4, r7);
+
+  // We don't know that we have a weak cell. We might have a private symbol
+  // or an AllocationSite, but the memory is safe to examine.
+  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
+  // FixedArray.
+  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
+  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
+  // computed, meaning that it can't appear to be a pointer. If the low bit is
+  // 0, then hash is computed, but the 0 bit prevents the field from appearing
+  // to be a pointer.
+  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
+  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
+                    WeakCell::kValueOffset &&
+                WeakCell::kValueOffset == Symbol::kHashFieldSlot);
+
+  __ LoadP(r8, FieldMemOperand(r7, WeakCell::kValueOffset));
+  __ cmp(r4, r8);
   __ bne(&extra_checks_or_miss);
 
+  // The compare above could have been a SMI/SMI comparison. Guard against this
+  // convincing us that we have a monomorphic JSFunction.
+  __ JumpIfSmi(r4, &extra_checks_or_miss);
+
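The new monomorphic check reads the word at WeakCell::kValueOffset without first proving the feedback really is a WeakCell; the STATIC_ASSERTs are what make that unconditional load safe for the other feedback shapes. A self-contained model of the resulting check (tagging simplified to V8's kSmiTag == 0 convention; names are illustrative, not V8 API):

    #include <cstdint>

    using TaggedWord = intptr_t;  // stand-in for one V8 object word
    inline bool IsSmi(TaggedWord w) { return (w & 1) == 0; }

    // "value" models LoadP(r8, FieldMemOperand(r7, WeakCell::kValueOffset)):
    // the same offset is read whether the feedback is a WeakCell, an
    // AllocationSite, or a Symbol.
    bool IsMonomorphicHit(TaggedWord value, TaggedWord function) {
      if (value != function) return false;
      // A cleared WeakCell holds Smi(0); if "function" were itself a Smi the
      // equality could hold spuriously, hence the JumpIfSmi guard above.
      return !IsSmi(function);
    }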
   __ bind(&have_js_function);
   if (CallAsMethod()) {
     EmitContinueIfStrictOrNative(masm, &cont);
     // Compute the receiver in sloppy mode.
     __ LoadP(r6, MemOperand(sp, argc * kPointerSize), r0);
 
     __ JumpIfSmi(r6, &wrap);
     __ CompareObjectType(r6, r7, r7, FIRST_SPEC_OBJECT_TYPE);
     __ blt(&wrap);
 
     __ bind(&cont);
   }
 
   __ InvokeFunction(r4, actual, JUMP_FUNCTION, NullCallWrapper());
 
   __ bind(&slow);
   EmitSlowCase(masm, argc, &non_function);
 
   if (CallAsMethod()) {
     __ bind(&wrap);
     EmitWrapCase(masm, argc, &cont);
   }
 
   __ bind(&extra_checks_or_miss);
-  Label miss;
+  Label uninitialized, miss;
 
   __ CompareRoot(r7, Heap::kmegamorphic_symbolRootIndex);
   __ beq(&slow_start);
+
+  // The following cases attempt to handle MISS cases without going to the
+  // runtime.
+  if (FLAG_trace_ic) {
+    __ b(&miss);
+  }
+
   __ CompareRoot(r7, Heap::kuninitialized_symbolRootIndex);
-  __ beq(&miss);
-
-  if (!FLAG_trace_ic) {
-    // We are going megamorphic. If the feedback is a JSFunction, it is fine
-    // to handle it here. More complex cases are dealt with in the runtime.
-    __ AssertNotSmi(r7);
-    __ CompareObjectType(r7, r8, r8, JS_FUNCTION_TYPE);
-    __ bne(&miss);
-    __ SmiToPtrArrayOffset(r7, r6);
-    __ add(r7, r5, r7);
-    __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
-    __ StoreP(ip, FieldMemOperand(r7, FixedArray::kHeaderSize), r0);
-    // We have to update statistics for runtime profiling.
-    const int with_types_offset =
-        FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex);
-    __ LoadP(r7, FieldMemOperand(r5, with_types_offset));
-    __ SubSmiLiteral(r7, r7, Smi::FromInt(1), r0);
-    __ StoreP(r7, FieldMemOperand(r5, with_types_offset), r0);
-    const int generic_offset =
-        FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
-    __ LoadP(r7, FieldMemOperand(r5, generic_offset));
-    __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0);
-    __ StoreP(r7, FieldMemOperand(r5, generic_offset), r0);
-    __ jmp(&slow_start);
-  }
-
-  // We are here because tracing is on or we are going monomorphic.
+  __ beq(&uninitialized);
+
+  // We are going megamorphic. If the feedback is a JSFunction, it is fine
+  // to handle it here. More complex cases are dealt with in the runtime.
+  __ AssertNotSmi(r7);
+  __ CompareObjectType(r7, r8, r8, JS_FUNCTION_TYPE);
+  __ bne(&miss);
+  __ SmiToPtrArrayOffset(r7, r6);
+  __ add(r7, r5, r7);
+  __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
+  __ StoreP(ip, FieldMemOperand(r7, FixedArray::kHeaderSize), r0);
+  // We have to update statistics for runtime profiling.
+  __ LoadP(r7, FieldMemOperand(r5, with_types_offset));
+  __ SubSmiLiteral(r7, r7, Smi::FromInt(1), r0);
+  __ StoreP(r7, FieldMemOperand(r5, with_types_offset), r0);
+  __ LoadP(r7, FieldMemOperand(r5, generic_offset));
+  __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0);
+  __ StoreP(r7, FieldMemOperand(r5, generic_offset), r0);
+  __ b(&slow_start);
+
+  __ bind(&uninitialized);
+
+  // We are going monomorphic, provided we actually have a JSFunction.
+  __ JumpIfSmi(r4, &miss);
+
+  // Goto miss case if we do not have a function.
+  __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE);
+  __ bne(&miss);
+
+  // Make sure the function is not the Array() function, which requires special
+  // behavior on MISS.
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r7);
+  __ cmp(r4, r7);
+  __ beq(&miss);
+
+  // Update stats.
+  __ LoadP(r7, FieldMemOperand(r5, with_types_offset));
+  __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0);
+  __ StoreP(r7, FieldMemOperand(r5, with_types_offset), r0);
+
+  // Store the function. Use a stub since we need a frame for allocation.
+  // r5 - vector
+  // r6 - slot
+  // r4 - function
+  {
+    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+    CreateWeakCellStub create_stub(masm->isolate());
+    __ Push(r4);
+    __ CallStub(&create_stub);
+    __ Pop(r4);
+  }
+
+  __ b(&have_js_function);
+
+  // We are here because tracing is on or we encountered a MISS case we can't
+  // handle here.
   __ bind(&miss);
   GenerateMiss(masm);
 
   // the slow case
   __ bind(&slow_start);
   // Check that the function is really a JavaScript function.
   // r4: pushed function (to be verified)
   __ JumpIfSmi(r4, &non_function);
 
   // Goto slow case if we do not have a function.
   __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE);
   __ bne(&slow);
   __ b(&have_js_function);
 }
 
 
 void CallICStub::GenerateMiss(MacroAssembler* masm) {
-  // Get the receiver of the function from the stack; 1 ~ return address.
-  __ LoadP(r7, MemOperand(sp, (arg_count() + 1) * kPointerSize), r0);
-
-  {
-    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
-
-    // Push the receiver and the function and feedback info.
-    __ Push(r7, r4, r5, r6);
-
-    // Call the entry.
-    IC::UtilityId id = GetICState() == DEFAULT ? IC::kCallIC_Miss
-                                               : IC::kCallIC_Customization_Miss;
-
-    ExternalReference miss = ExternalReference(IC_Utility(id), masm->isolate());
-    __ CallExternalReference(miss, 4);
-
-    // Move result to r4 and exit the internal frame.
-    __ mr(r4, r3);
-  }
+  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+
+  // Push the function and feedback info.
+  __ Push(r4, r5, r6);
+
+  // Call the entry.
+  IC::UtilityId id = GetICState() == DEFAULT ? IC::kCallIC_Miss
+                                             : IC::kCallIC_Customization_Miss;
+
+  ExternalReference miss = ExternalReference(IC_Utility(id), masm->isolate());
+  __ CallExternalReference(miss, 3);
+
+  // Move result to r4 and exit the internal frame.
+  __ mr(r4, r3);
 }
 
 
 // StringCharCodeAtGenerator
 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   // If the receiver is a smi trigger the non-string case.
   if (check_mode_ == RECEIVER_IS_UNKNOWN) {
     __ JumpIfSmi(object_, receiver_not_string_);
 
     // Fetch the instance type of the receiver into result register.
(...skipping 367 matching lines...)
   __ SmiTag(r6, r6);
   StringCharAtGenerator generator(r3, r6, r5, r3, &runtime, &runtime, &runtime,
                                   STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING);
   generator.GenerateFast(masm);
   __ Drop(3);
   __ Ret();
   generator.SkipSlow(masm, &runtime);
 }
 
 
+void ToNumberStub::Generate(MacroAssembler* masm) {
+  // The ToNumber stub takes one argument in r3.
+  Label not_smi;
+  __ JumpIfNotSmi(r3, &not_smi);
+  __ blr();
+  __ bind(&not_smi);
+
+  Label not_heap_number;
+  __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
+  __ lbz(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
+  // r3: object
+  // r4: instance type.
+  __ cmpi(r4, Operand(HEAP_NUMBER_TYPE));
+  __ bne(&not_heap_number);
+  __ blr();
+  __ bind(&not_heap_number);
+
+  Label not_string, slow_string;
+  __ cmpli(r4, Operand(FIRST_NONSTRING_TYPE));
+  __ bge(&not_string);
+  // Check if string has a cached array index.
+  __ lwz(r5, FieldMemOperand(r3, String::kHashFieldOffset));
+  __ And(r0, r5, Operand(String::kContainsCachedArrayIndexMask), SetRC);
+  __ bne(&slow_string, cr0);
+  __ IndexFromHash(r5, r3);
+  __ blr();
+  __ bind(&slow_string);
+  __ push(r3);  // Push argument.
+  __ TailCallRuntime(Runtime::kStringToNumber, 1, 1);
+  __ bind(&not_string);
+
+  Label not_oddball;
+  __ cmpi(r4, Operand(ODDBALL_TYPE));
+  __ bne(&not_oddball);
+  __ LoadP(r3, FieldMemOperand(r3, Oddball::kToNumberOffset));
+  __ blr();
+  __ bind(&not_oddball);
+
+  __ push(r3);  // Push argument.
+  __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
+}
+
+
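The new ToNumberStub dispatches on the argument's type and only falls back to the runtime for hard cases. The same dispatch in self-contained C++ (a model over a simplified value representation, not the V8 object API; std::stod stands in for Runtime::kStringToNumber):

    #include <cstdint>
    #include <string>

    struct Value {
      enum Kind { kSmi, kHeapNumber, kString, kOddball } kind;
      double number;          // Smi / HeapNumber payload, or the oddball's
                              // cached to_number value
      std::string string;     // kString payload
      bool has_cached_index;  // String::kContainsCachedArrayIndexMask
      uint32_t cached_index;
    };

    double ToNumber(const Value& v) {
      switch (v.kind) {
        case Value::kSmi:
        case Value::kHeapNumber:
          return v.number;        // already a number: returned unchanged
        case Value::kString:
          if (v.has_cached_index) return v.cached_index;  // IndexFromHash
          return std::stod(v.string);  // slow string path
        case Value::kOddball:
          return v.number;        // Oddball::kToNumberOffset field
      }
      return 0.0;  // unreachable; the stub's catch-all is Builtins::TO_NUMBER
    }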
 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                                    Register left,
                                                    Register right,
                                                    Register scratch1,
                                                    Register scratch2) {
   Register length = scratch1;
 
   // Compare lengths.
   Label strings_not_equal, check_zero_length;
   __ LoadP(length, FieldMemOperand(left, String::kLengthOffset));
(...skipping 20 matching lines...)
 
   // Characters are equal.
   __ LoadSmiLiteral(r3, Smi::FromInt(EQUAL));
   __ Ret();
 }
 
 
 void StringHelper::GenerateCompareFlatOneByteStrings(
     MacroAssembler* masm, Register left, Register right, Register scratch1,
     Register scratch2, Register scratch3) {
-  Label skip, result_not_equal, compare_lengths;
+  Label result_not_equal, compare_lengths;
   // Find minimum length and length difference.
   __ LoadP(scratch1, FieldMemOperand(left, String::kLengthOffset));
   __ LoadP(scratch2, FieldMemOperand(right, String::kLengthOffset));
   __ sub(scratch3, scratch1, scratch2, LeaveOE, SetRC);
   Register length_delta = scratch3;
-  __ ble(&skip, cr0);
-  __ mr(scratch1, scratch2);
-  __ bind(&skip);
+  if (CpuFeatures::IsSupported(ISELECT)) {
+    __ isel(gt, scratch1, scratch2, scratch1, cr0);
+  } else {
+    Label skip;
+    __ ble(&skip, cr0);
+    __ mr(scratch1, scratch2);
+    __ bind(&skip);
+  }
   Register min_length = scratch1;
   STATIC_ASSERT(kSmiTag == 0);
   __ cmpi(min_length, Operand::Zero());
   __ beq(&compare_lengths);
 
   // Compare loop.
   GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
                                   &result_not_equal);
 
   // Compare lengths - strings up to min-length are equal.
   __ bind(&compare_lengths);
   DCHECK(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
   // Use length_delta as result if it's zero.
   __ mr(r3, length_delta);
   __ cmpi(r3, Operand::Zero());
   __ bind(&result_not_equal);
   // Conditionally update the result based either on length_delta or
   // the last comparison performed in the loop above.
-  Label less_equal, equal;
-  __ ble(&less_equal);
-  __ LoadSmiLiteral(r3, Smi::FromInt(GREATER));
-  __ Ret();
-  __ bind(&less_equal);
-  __ beq(&equal);
-  __ LoadSmiLiteral(r3, Smi::FromInt(LESS));
-  __ bind(&equal);
-  __ Ret();
+  if (CpuFeatures::IsSupported(ISELECT)) {
+    __ li(r4, Operand(GREATER));
+    __ li(r5, Operand(LESS));
+    __ isel(eq, r3, r0, r4);
+    __ isel(lt, r3, r5, r3);
+    __ Ret();
+  } else {
+    Label less_equal, equal;
+    __ ble(&less_equal);
+    __ LoadSmiLiteral(r3, Smi::FromInt(GREATER));
+    __ Ret();
+    __ bind(&less_equal);
+    __ beq(&equal);
+    __ LoadSmiLiteral(r3, Smi::FromInt(LESS));
+    __ bind(&equal);
+    __ Ret();
+  }
 }
 
 
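GenerateCompareFlatOneByteStrings implements the usual flat-string ordering: compare up to the shorter length, and if no character differs, let the length difference decide. A standalone equivalent (LESS/EQUAL/GREATER mirroring V8's -1/0/1 constants):

    #include <cstdint>

    enum CompareResult { LESS = -1, EQUAL = 0, GREATER = 1 };

    CompareResult CompareFlatOneByte(const uint8_t* left, int llen,
                                     const uint8_t* right, int rlen) {
      int min_length = llen < rlen ? llen : rlen;  // the isel/branch above
      for (int i = 0; i < min_length; i++) {
        if (left[i] != right[i])                   // first differing byte wins
          return left[i] < right[i] ? LESS : GREATER;
      }
      if (llen == rlen) return EQUAL;              // length_delta == 0
      return llen < rlen ? LESS : GREATER;         // shorter string sorts first
    }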
 void StringHelper::GenerateOneByteCharsCompareLoop(
     MacroAssembler* masm, Register left, Register right, Register length,
     Register scratch1, Label* chars_not_equal) {
   // Change index to run from -length to -1 by adding length to string
   // start. This means that loop ends when index reaches zero, which
   // doesn't need an additional compare.
   __ SmiUntag(length);
(...skipping 147 matching lines...)
 
   __ bind(&done);
 
   // Compare operands
   __ fcmpu(d0, d1);
 
   // Don't base result on status bits when a NaN is involved.
   __ bunordered(&unordered);
 
   // Return a result of -1, 0, or 1, based on status bits.
-  __ beq(&equal);
-  __ blt(&less_than);
-  // assume greater than
-  __ li(r3, Operand(GREATER));
-  __ Ret();
-  __ bind(&equal);
-  __ li(r3, Operand(EQUAL));
-  __ Ret();
-  __ bind(&less_than);
-  __ li(r3, Operand(LESS));
-  __ Ret();
+  if (CpuFeatures::IsSupported(ISELECT)) {
+    DCHECK(EQUAL == 0);
+    __ li(r4, Operand(GREATER));
+    __ li(r5, Operand(LESS));
+    __ isel(eq, r3, r0, r4);
+    __ isel(lt, r3, r5, r3);
+    __ Ret();
+  } else {
+    __ beq(&equal);
+    __ blt(&less_than);
+    // assume greater than
+    __ li(r3, Operand(GREATER));
+    __ Ret();
+    __ bind(&equal);
+    __ li(r3, Operand(EQUAL));
+    __ Ret();
+    __ bind(&less_than);
+    __ li(r3, Operand(LESS));
+    __ Ret();
+  }
 
   __ bind(&unordered);
   __ bind(&generic_stub);
   CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
                      CompareICState::GENERIC, CompareICState::GENERIC);
   __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 
   __ bind(&maybe_undefined1);
   if (Token::IsOrderedRelationalCompareOp(op())) {
     __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
(...skipping 193 matching lines...)
   __ sub(r3, r3, r4);
   __ Ret();
 
   __ bind(&miss);
   GenerateMiss(masm);
 }
 
 
 void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
   Label miss;
+  Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
   __ and_(r5, r4, r3);
   __ JumpIfSmi(r5, &miss);
+  __ GetWeakValue(r7, cell);
   __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
   __ LoadP(r6, FieldMemOperand(r4, HeapObject::kMapOffset));
-  __ Cmpi(r5, Operand(known_map_), r0);
+  __ cmp(r5, r7);
   __ bne(&miss);
-  __ Cmpi(r6, Operand(known_map_), r0);
+  __ cmp(r6, r7);
   __ bne(&miss);
 
   __ sub(r3, r3, r4);
   __ Ret();
 
   __ bind(&miss);
   GenerateMiss(masm);
 }
 
 
(...skipping 63 matching lines...)
3818 // Compute the masked index: (hash + i + i * i) & mask. 4005 // Compute the masked index: (hash + i + i * i) & mask.
3819 Register index = scratch0; 4006 Register index = scratch0;
3820 // Capacity is smi 2^n. 4007 // Capacity is smi 2^n.
3821 __ LoadP(index, FieldMemOperand(properties, kCapacityOffset)); 4008 __ LoadP(index, FieldMemOperand(properties, kCapacityOffset));
3822 __ subi(index, index, Operand(1)); 4009 __ subi(index, index, Operand(1));
3823 __ LoadSmiLiteral( 4010 __ LoadSmiLiteral(
3824 ip, Smi::FromInt(name->Hash() + NameDictionary::GetProbeOffset(i))); 4011 ip, Smi::FromInt(name->Hash() + NameDictionary::GetProbeOffset(i)));
3825 __ and_(index, index, ip); 4012 __ and_(index, index, ip);
3826 4013
3827 // Scale the index by multiplying by the entry size. 4014 // Scale the index by multiplying by the entry size.
3828 DCHECK(NameDictionary::kEntrySize == 3); 4015 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
3829 __ ShiftLeftImm(ip, index, Operand(1)); 4016 __ ShiftLeftImm(ip, index, Operand(1));
3830 __ add(index, index, ip); // index *= 3. 4017 __ add(index, index, ip); // index *= 3.
3831 4018
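Both dictionary probes in this file scale the masked index by NameDictionary::kEntrySize (3) without a multiply: the one-bit left shift yields index * 2 and the add yields index * 3. A quick standalone check of the identity:

    #include <cassert>
    #include <cstdint>

    // Mirrors: ShiftLeftImm(ip, index, Operand(1)); add(index, index, ip).
    static uint64_t TimesEntrySize(uint64_t index) {
      return index + (index << 1);  // index * 3
    }

    int main() {
      for (uint64_t i = 0; i < 1000; ++i) assert(TimesEntrySize(i) == 3 * i);
      return 0;
    }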
3832 Register entity_name = scratch0; 4019 Register entity_name = scratch0;
3833 // Having undefined at this place means the name is not contained. 4020 // Having undefined at this place means the name is not contained.
3834 Register tmp = properties; 4021 Register tmp = properties;
3835 __ SmiToPtrArrayOffset(ip, index); 4022 __ SmiToPtrArrayOffset(ip, index);
3836 __ add(tmp, properties, ip); 4023 __ add(tmp, properties, ip);
3837 __ LoadP(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); 4024 __ LoadP(entity_name, FieldMemOperand(tmp, kElementsStartOffset));
3838 4025
(...skipping 168 matching lines...)
4007 __ mr(index, hash); 4194 __ mr(index, hash);
4008 } 4195 }
4009 __ srwi(r0, index, Operand(Name::kHashShift)); 4196 __ srwi(r0, index, Operand(Name::kHashShift));
4010 __ and_(index, mask, r0); 4197 __ and_(index, mask, r0);
4011 4198
4012 // Scale the index by multiplying by the entry size. 4199 // Scale the index by multiplying by the entry size.
4013 DCHECK(NameDictionary::kEntrySize == 3); 4200 DCHECK(NameDictionary::kEntrySize == 3);
4014 __ ShiftLeftImm(scratch, index, Operand(1)); 4201 __ ShiftLeftImm(scratch, index, Operand(1));
4015 __ add(index, index, scratch); // index *= 3. 4202 __ add(index, index, scratch); // index *= 3.
4016 4203
4017 DCHECK_EQ(kSmiTagSize, 1);
4018 __ ShiftLeftImm(scratch, index, Operand(kPointerSizeLog2)); 4204 __ ShiftLeftImm(scratch, index, Operand(kPointerSizeLog2));
4019 __ add(index, dictionary, scratch); 4205 __ add(index, dictionary, scratch);
4020 __ LoadP(entry_key, FieldMemOperand(index, kElementsStartOffset)); 4206 __ LoadP(entry_key, FieldMemOperand(index, kElementsStartOffset));
4021 4207
4022 // Having undefined at this place means the name is not contained. 4208 // Having undefined at this place means the name is not contained.
4023 __ cmp(entry_key, undefined); 4209 __ cmp(entry_key, undefined);
4024 __ beq(&not_in_dictionary); 4210 __ beq(&not_in_dictionary);
4025 4211
4026 // Stop if found the property. 4212 // Stop if found the property.
4027 __ cmp(entry_key, key); 4213 __ cmp(entry_key, key);
(...skipping 300 matching lines...)
4328 } 4514 }
4329 4515
4330 4516
4331 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { 4517 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
4332 EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister()); 4518 EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
4333 VectorKeyedLoadStub stub(isolate()); 4519 VectorKeyedLoadStub stub(isolate());
4334 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); 4520 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
4335 } 4521 }
4336 4522
4337 4523
4524 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
4525 EmitLoadTypeFeedbackVector(masm, r5);
4526 CallICStub stub(isolate(), state());
4527 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
4528 }
4529
4530
4531 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
4532 EmitLoadTypeFeedbackVector(masm, r5);
4533 CallIC_ArrayStub stub(isolate(), state());
4534 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
4535 }
4536
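The two new trampolines follow the pattern of KeyedLoadICTrampolineStub above: materialize the implicit type-feedback-vector argument (r5 here) and tail-jump to the full stub, so the trampoline adds no frame of its own. As a loose C++ analogy (hypothetical types, sketch only):

    // "FeedbackVector" and the handler signature are stand-ins, not V8's
    // real declarations.
    struct FeedbackVector;
    using Handler = void (*)(FeedbackVector*);

    static void Trampoline(Handler real_stub, FeedbackVector* vector) {
      real_stub(vector);  // ideally a tail call, like __ Jump above
    }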
4537
4338 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { 4538 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4339 if (masm->isolate()->function_entry_hook() != NULL) { 4539 if (masm->isolate()->function_entry_hook() != NULL) {
4340 PredictableCodeSizeScope predictable(masm, 4540 PredictableCodeSizeScope predictable(masm,
4341 #if V8_TARGET_ARCH_PPC64 4541 #if V8_TARGET_ARCH_PPC64
4342 14 * Assembler::kInstrSize); 4542 14 * Assembler::kInstrSize);
4343 #else 4543 #else
4344 11 * Assembler::kInstrSize); 4544 11 * Assembler::kInstrSize);
4345 #endif 4545 #endif
4346 ProfileEntryHookStub stub(masm->isolate()); 4546 ProfileEntryHookStub stub(masm->isolate());
4347 __ mflr(r0); 4547 __ mflr(r0);
(...skipping 356 matching lines...)
4704 Label fast_elements_case; 4904 Label fast_elements_case;
4705 __ cmpi(r6, Operand(FAST_ELEMENTS)); 4905 __ cmpi(r6, Operand(FAST_ELEMENTS));
4706 __ beq(&fast_elements_case); 4906 __ beq(&fast_elements_case);
4707 GenerateCase(masm, FAST_HOLEY_ELEMENTS); 4907 GenerateCase(masm, FAST_HOLEY_ELEMENTS);
4708 4908
4709 __ bind(&fast_elements_case); 4909 __ bind(&fast_elements_case);
4710 GenerateCase(masm, FAST_ELEMENTS); 4910 GenerateCase(masm, FAST_ELEMENTS);
4711 } 4911 }
4712 4912
4713 4913
4714 void CallApiFunctionStub::Generate(MacroAssembler* masm) { 4914 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
4915 return ref0.address() - ref1.address();
4916 }
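AddressOffset is plain pointer subtraction: the handle-scope next/limit/level fields live at fixed distances within the isolate, so the code below keeps one base register (r17 = next_address) plus small byte offsets rather than three absolute addresses. A sketch under that layout assumption:

    #include <cstddef>
    #include <cstdint>

    struct HandleScopeData {  // toy stand-in for the isolate-owned fields
      uint8_t* next;          // offset 0, i.e. kNextOffset
      uint8_t* limit;         // kLimitOffset
      int level;              // kLevelOffset
    };

    static std::ptrdiff_t AddressOffset(const void* ref0, const void* ref1) {
      return static_cast<const uint8_t*>(ref0) -
             static_cast<const uint8_t*>(ref1);
    }

    // With base = reinterpret_cast<uint8_t*>(&d.next), the expression
    // base + AddressOffset(&d.limit, &d.next) addresses d.limit, matching
    // MemOperand(r17, kLimitOffset).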
4917
4918
4919 // Calls an API function. Allocates HandleScope, extracts returned value
4920 // from handle and propagates exceptions. Restores context. stack_space
4921 // - space to be unwound on exit (includes the call JS arguments space and
4922 // the additional space allocated for the fast call).
4923 static void CallApiFunctionAndReturn(MacroAssembler* masm,
4924 Register function_address,
4925 ExternalReference thunk_ref,
4926 int stack_space,
4927 MemOperand* stack_space_operand,
4928 MemOperand return_value_operand,
4929 MemOperand* context_restore_operand) {
4930 Isolate* isolate = masm->isolate();
4931 ExternalReference next_address =
4932 ExternalReference::handle_scope_next_address(isolate);
4933 const int kNextOffset = 0;
4934 const int kLimitOffset = AddressOffset(
4935 ExternalReference::handle_scope_limit_address(isolate), next_address);
4936 const int kLevelOffset = AddressOffset(
4937 ExternalReference::handle_scope_level_address(isolate), next_address);
4938
4939 // Additional parameter is the address of the actual callback.
4940 DCHECK(function_address.is(r4) || function_address.is(r5));
4941 Register scratch = r6;
4942
4943 __ mov(scratch, Operand(ExternalReference::is_profiling_address(isolate)));
4944 __ lbz(scratch, MemOperand(scratch, 0));
4945 __ cmpi(scratch, Operand::Zero());
4946
4947 if (CpuFeatures::IsSupported(ISELECT)) {
4948 __ mov(scratch, Operand(thunk_ref));
4949 __ isel(eq, scratch, function_address, scratch);
4950 } else {
4951 Label profiler_disabled;
4952 Label end_profiler_check;
4953 __ beq(&profiler_disabled);
4954 __ mov(scratch, Operand(thunk_ref));
4955 __ b(&end_profiler_check);
4956 __ bind(&profiler_disabled);
4957 __ mr(scratch, function_address);
4958 __ bind(&end_profiler_check);
4959 }
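The profiler check above selects the call target branchlessly on ISELECT hardware: when is_profiling is zero the direct function address is used, otherwise the call is routed through the instrumentation thunk. Paraphrased:

    // Mirrors: cmpi(scratch, 0); isel(eq, scratch, function_address, scratch)
    // with scratch preloaded with thunk_ref (the non-ISELECT path branches
    // to the same effect).
    using ApiTarget = void (*)();

    static ApiTarget SelectCallTarget(bool is_profiling, ApiTarget thunk,
                                      ApiTarget function_address) {
      return is_profiling ? thunk : function_address;
    }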
4960
4961 // Allocate HandleScope in callee-save registers.
4962 // r17 - next_address
4963 // r14 - next_address->kNextOffset
4964 // r15 - next_address->kLimitOffset
4965 // r16 - next_address->kLevelOffset
4966 __ mov(r17, Operand(next_address));
4967 __ LoadP(r14, MemOperand(r17, kNextOffset));
4968 __ LoadP(r15, MemOperand(r17, kLimitOffset));
4969 __ lwz(r16, MemOperand(r17, kLevelOffset));
4970 __ addi(r16, r16, Operand(1));
4971 __ stw(r16, MemOperand(r17, kLevelOffset));
4972
4973 if (FLAG_log_timer_events) {
4974 FrameScope frame(masm, StackFrame::MANUAL);
4975 __ PushSafepointRegisters();
4976 __ PrepareCallCFunction(1, r3);
4977 __ mov(r3, Operand(ExternalReference::isolate_address(isolate)));
4978 __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
4979 1);
4980 __ PopSafepointRegisters();
4981 }
4982
4983 // Native call returns to the DirectCEntry stub which redirects to the
4984 // return address pushed on stack (could have moved after GC).
4985 // DirectCEntry stub itself is generated early and never moves.
4986 DirectCEntryStub stub(isolate);
4987 stub.GenerateCall(masm, scratch);
4988
4989 if (FLAG_log_timer_events) {
4990 FrameScope frame(masm, StackFrame::MANUAL);
4991 __ PushSafepointRegisters();
4992 __ PrepareCallCFunction(1, r3);
4993 __ mov(r3, Operand(ExternalReference::isolate_address(isolate)));
4994 __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
4995 1);
4996 __ PopSafepointRegisters();
4997 }
4998
4999 Label promote_scheduled_exception;
5000 Label exception_handled;
5001 Label delete_allocated_handles;
5002 Label leave_exit_frame;
5003 Label return_value_loaded;
5004
5005 // load value from ReturnValue
5006 __ LoadP(r3, return_value_operand);
5007 __ bind(&return_value_loaded);
5008 // No more valid handles (the result handle was the last one). Restore
5009 // previous handle scope.
5010 __ StoreP(r14, MemOperand(r17, kNextOffset));
5011 if (__ emit_debug_code()) {
5012 __ lwz(r4, MemOperand(r17, kLevelOffset));
5013 __ cmp(r4, r16);
5014 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
5015 }
5016 __ subi(r16, r16, Operand(1));
5017 __ stw(r16, MemOperand(r17, kLevelOffset));
5018 __ LoadP(r0, MemOperand(r17, kLimitOffset));
5019 __ cmp(r15, r0);
5020 __ bne(&delete_allocated_handles);
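The code on either side of the call hand-rolls HandleScope open/close: the prologue saves next and limit and bumps level; the epilogue restores next, decrements level (checked under debug code), and if limit moved while the callback ran, scope extensions were allocated and must be torn down via delete_allocated_handles. A hedged C++ paraphrase of the protocol:

    struct HandleScopeData {  // toy stand-in, as before
      void** next;
      void** limit;
      int level;
    };
    struct SavedScope { void** next; void** limit; };

    static SavedScope OpenScope(HandleScopeData* d) {  // the prologue
      SavedScope s{d->next, d->limit};
      ++d->level;
      return s;
    }

    // Returns true when extensions must be deleted (the limit changed).
    static bool CloseScope(HandleScopeData* d, const SavedScope& s) {
      d->next = s.next;            // StoreP(r14, MemOperand(r17, kNextOffset))
      --d->level;                  // subi + stw on kLevelOffset
      return d->limit != s.limit;  // cmp(r15, r0); bne(&delete_...)
    }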
5021
5022 // Check if the function scheduled an exception.
5023 __ bind(&leave_exit_frame);
5024 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
5025 __ mov(r15, Operand(ExternalReference::scheduled_exception_address(isolate)));
5026 __ LoadP(r15, MemOperand(r15));
5027 __ cmp(r14, r15);
5028 __ bne(&promote_scheduled_exception);
5029 __ bind(&exception_handled);
5030
5031 bool restore_context = context_restore_operand != NULL;
5032 if (restore_context) {
5033 __ LoadP(cp, *context_restore_operand);
5034 }
5035 // LeaveExitFrame expects unwind space to be in a register.
5036 if (stack_space_operand != NULL) {
5037 __ lwz(r14, *stack_space_operand);
5038 } else {
5039 __ mov(r14, Operand(stack_space));
5040 }
5041 __ LeaveExitFrame(false, r14, !restore_context, stack_space_operand != NULL);
5042 __ blr();
5043
5044 __ bind(&promote_scheduled_exception);
5045 {
5046 FrameScope frame(masm, StackFrame::INTERNAL);
5047 __ CallExternalReference(
5048 ExternalReference(Runtime::kPromoteScheduledException, isolate), 0);
5049 }
5050 __ jmp(&exception_handled);
5051
5052 // HandleScope limit has changed. Delete allocated extensions.
5053 __ bind(&delete_allocated_handles);
5054 __ StoreP(r15, MemOperand(r17, kLimitOffset));
5055 __ mr(r14, r3);
5056 __ PrepareCallCFunction(1, r15);
5057 __ mov(r3, Operand(ExternalReference::isolate_address(isolate)));
5058 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
5059 1);
5060 __ mr(r3, r14);
5061 __ b(&leave_exit_frame);
5062 }
5063
5064
5065 static void CallApiFunctionStubHelper(MacroAssembler* masm,
5066 const ParameterCount& argc,
5067 bool return_first_arg,
5068 bool call_data_undefined) {
4715 // ----------- S t a t e ------------- 5069 // ----------- S t a t e -------------
4716 // -- r3 : callee 5070 // -- r3 : callee
4717 // -- r7 : call_data 5071 // -- r7 : call_data
4718 // -- r5 : holder 5072 // -- r5 : holder
4719 // -- r4 : api_function_address 5073 // -- r4 : api_function_address
5074 // -- r6 : number of arguments if argc is a register
4720 // -- cp : context 5075 // -- cp : context
4721 // -- 5076 // --
4722 // -- sp[0] : last argument 5077 // -- sp[0] : last argument
4723 // -- ... 5078 // -- ...
4724 // -- sp[(argc - 1)* 4] : first argument 5079 // -- sp[(argc - 1)* 4] : first argument
4725 // -- sp[argc * 4] : receiver 5080 // -- sp[argc * 4] : receiver
4726 // ----------------------------------- 5081 // -----------------------------------
4727 5082
4728 Register callee = r3; 5083 Register callee = r3;
4729 Register call_data = r7; 5084 Register call_data = r7;
4730 Register holder = r5; 5085 Register holder = r5;
4731 Register api_function_address = r4; 5086 Register api_function_address = r4;
4732 Register context = cp; 5087 Register context = cp;
4733 5088
4734 int argc = this->argc();
4735 bool is_store = this->is_store();
4736 bool call_data_undefined = this->call_data_undefined();
4737
4738 typedef FunctionCallbackArguments FCA; 5089 typedef FunctionCallbackArguments FCA;
4739 5090
4740 STATIC_ASSERT(FCA::kContextSaveIndex == 6); 5091 STATIC_ASSERT(FCA::kContextSaveIndex == 6);
4741 STATIC_ASSERT(FCA::kCalleeIndex == 5); 5092 STATIC_ASSERT(FCA::kCalleeIndex == 5);
4742 STATIC_ASSERT(FCA::kDataIndex == 4); 5093 STATIC_ASSERT(FCA::kDataIndex == 4);
4743 STATIC_ASSERT(FCA::kReturnValueOffset == 3); 5094 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
4744 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2); 5095 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
4745 STATIC_ASSERT(FCA::kIsolateIndex == 1); 5096 STATIC_ASSERT(FCA::kIsolateIndex == 1);
4746 STATIC_ASSERT(FCA::kHolderIndex == 0); 5097 STATIC_ASSERT(FCA::kHolderIndex == 0);
4747 STATIC_ASSERT(FCA::kArgsLength == 7); 5098 STATIC_ASSERT(FCA::kArgsLength == 7);
4748 5099
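The STATIC_ASSERTs pin down the layout that the seven pushes below construct. Once they complete, the implicit FunctionCallbackArguments block sits on the stack as follows (slot indices in pointers above sp, matching the asserted values):

    sp[6] : context save          (kContextSaveIndex)
    sp[5] : callee                (kCalleeIndex)
    sp[4] : call data             (kDataIndex)
    sp[3] : return value          (kReturnValueOffset)
    sp[2] : return value default  (kReturnValueDefaultValueIndex)
    sp[1] : isolate               (kIsolateIndex)
    sp[0] : holder                (kHolderIndex)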
5100 DCHECK(argc.is_immediate() || r3.is(argc.reg()));
5101
4749 // context save 5102 // context save
4750 __ push(context); 5103 __ push(context);
4751 // load context from callee 5104 // load context from callee
4752 __ LoadP(context, FieldMemOperand(callee, JSFunction::kContextOffset)); 5105 __ LoadP(context, FieldMemOperand(callee, JSFunction::kContextOffset));
4753 5106
4754 // callee 5107 // callee
4755 __ push(callee); 5108 __ push(callee);
4756 5109
4757 // call data 5110 // call data
4758 __ push(call_data); 5111 __ push(call_data);
4759 5112
4760 Register scratch = call_data; 5113 Register scratch = call_data;
4761 if (!call_data_undefined) { 5114 if (!call_data_undefined) {
4762 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); 5115 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
4763 } 5116 }
4764 // return value 5117 // return value
4765 __ push(scratch); 5118 __ push(scratch);
4766 // return value default 5119 // return value default
4767 __ push(scratch); 5120 __ push(scratch);
4768 // isolate 5121 // isolate
4769 __ mov(scratch, Operand(ExternalReference::isolate_address(isolate()))); 5122 __ mov(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
4770 __ push(scratch); 5123 __ push(scratch);
4771 // holder 5124 // holder
4772 __ push(holder); 5125 __ push(holder);
4773 5126
4774 // Prepare arguments. 5127 // Prepare arguments.
4775 __ mr(scratch, sp); 5128 __ mr(scratch, sp);
4776 5129
4777 // Allocate the v8::Arguments structure in the arguments' space since 5130 // Allocate the v8::Arguments structure in the arguments' space since
4778 // it's not controlled by GC. 5131 // it's not controlled by GC.
4779 // PPC LINUX ABI: 5132 // PPC LINUX ABI:
4780 // 5133 //
4781 // Create 5 extra slots on stack: 5134 // Create 5 extra slots on stack:
4782 // [0] space for DirectCEntryStub's LR save 5135 // [0] space for DirectCEntryStub's LR save
4783 // [1-4] FunctionCallbackInfo 5136 // [1-4] FunctionCallbackInfo
4784 const int kApiStackSpace = 5; 5137 const int kApiStackSpace = 5;
5138 const int kFunctionCallbackInfoOffset =
5139 (kStackFrameExtraParamSlot + 1) * kPointerSize;
4785 5140
4786 FrameScope frame_scope(masm, StackFrame::MANUAL); 5141 FrameScope frame_scope(masm, StackFrame::MANUAL);
4787 __ EnterExitFrame(false, kApiStackSpace); 5142 __ EnterExitFrame(false, kApiStackSpace);
4788 5143
4789 DCHECK(!api_function_address.is(r3) && !scratch.is(r3)); 5144 DCHECK(!api_function_address.is(r3) && !scratch.is(r3));
4790 // r3 = FunctionCallbackInfo& 5145 // r3 = FunctionCallbackInfo&
4791 // Arguments is after the return address. 5146 // Arguments is after the return address.
4792 __ addi(r3, sp, Operand((kStackFrameExtraParamSlot + 1) * kPointerSize)); 5147 __ addi(r3, sp, Operand(kFunctionCallbackInfoOffset));
4793 // FunctionCallbackInfo::implicit_args_ 5148 // FunctionCallbackInfo::implicit_args_
4794 __ StoreP(scratch, MemOperand(r3, 0 * kPointerSize)); 5149 __ StoreP(scratch, MemOperand(r3, 0 * kPointerSize));
4795 // FunctionCallbackInfo::values_ 5150 if (argc.is_immediate()) {
4796 __ addi(ip, scratch, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize)); 5151 // FunctionCallbackInfo::values_
4797 __ StoreP(ip, MemOperand(r3, 1 * kPointerSize)); 5152 __ addi(ip, scratch,
4798 // FunctionCallbackInfo::length_ = argc 5153 Operand((FCA::kArgsLength - 1 + argc.immediate()) * kPointerSize));
4799 __ li(ip, Operand(argc)); 5154 __ StoreP(ip, MemOperand(r3, 1 * kPointerSize));
4800 __ stw(ip, MemOperand(r3, 2 * kPointerSize)); 5155 // FunctionCallbackInfo::length_ = argc
4801 // FunctionCallbackInfo::is_construct_call = 0 5156 __ li(ip, Operand(argc.immediate()));
4802 __ li(ip, Operand::Zero()); 5157 __ stw(ip, MemOperand(r3, 2 * kPointerSize));
4803 __ stw(ip, MemOperand(r3, 2 * kPointerSize + kIntSize)); 5158 // FunctionCallbackInfo::is_construct_call_ = 0
5159 __ li(ip, Operand::Zero());
5160 __ stw(ip, MemOperand(r3, 2 * kPointerSize + kIntSize));
5161 } else {
5162 __ ShiftLeftImm(ip, argc.reg(), Operand(kPointerSizeLog2));
5163 __ addi(ip, ip, Operand((FCA::kArgsLength - 1) * kPointerSize));
5164 // FunctionCallbackInfo::values_
5165 __ add(r0, scratch, ip);
5166 __ StoreP(r0, MemOperand(r3, 1 * kPointerSize));
5167 // FunctionCallbackInfo::length_ = argc
5168 __ stw(argc.reg(), MemOperand(r3, 2 * kPointerSize));
5169 // FunctionCallbackInfo::is_construct_call_
5170 __ stw(ip, MemOperand(r3, 2 * kPointerSize + kIntSize));
5171 }
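Both arms compute the same values_ address: the first JS argument sits kArgsLength - 1 + argc pointers above implicit_args_ (holder occupies slot 0, and the JS arguments were pushed before the implicit block). A standalone check that the immediate and register paths agree, assuming kPointerSizeLog2 == log2(sizeof(void*)):

    #include <cassert>
    #include <cstdint>

    int main() {
      const intptr_t kArgsLength = 7;  // FCA::kArgsLength
      const intptr_t kPointerSize = sizeof(void*);
      const intptr_t base = 0x1000;    // scratch, i.e. implicit_args_
      for (intptr_t argc = 0; argc <= 8; ++argc) {
        // Immediate path: addi(ip, scratch,
        //                      (kArgsLength - 1 + argc) * kPointerSize).
        const intptr_t imm = base + (kArgsLength - 1 + argc) * kPointerSize;
        // Register path: ip = argc << kPointerSizeLog2;
        //                ip += (kArgsLength - 1) * kPointerSize; base + ip.
        const intptr_t ip =
            argc * kPointerSize + (kArgsLength - 1) * kPointerSize;
        assert(base + ip == imm);
      }
      return 0;
    }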
4804 5172
4805 const int kStackUnwindSpace = argc + FCA::kArgsLength + 1;
4806 ExternalReference thunk_ref = 5173 ExternalReference thunk_ref =
4807 ExternalReference::invoke_function_callback(isolate()); 5174 ExternalReference::invoke_function_callback(masm->isolate());
4808 5175
4809 AllowExternalCallThatCantCauseGC scope(masm); 5176 AllowExternalCallThatCantCauseGC scope(masm);
4810 MemOperand context_restore_operand( 5177 MemOperand context_restore_operand(
4811 fp, (2 + FCA::kContextSaveIndex) * kPointerSize); 5178 fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
4812 // Stores return the first js argument 5179 // Stores return the first js argument
4813 int return_value_offset = 0; 5180 int return_value_offset = 0;
4814 if (is_store) { 5181 if (return_first_arg) {
4815 return_value_offset = 2 + FCA::kArgsLength; 5182 return_value_offset = 2 + FCA::kArgsLength;
4816 } else { 5183 } else {
4817 return_value_offset = 2 + FCA::kReturnValueOffset; 5184 return_value_offset = 2 + FCA::kReturnValueOffset;
4818 } 5185 }
4819 MemOperand return_value_operand(fp, return_value_offset * kPointerSize); 5186 MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
4820 5187 int stack_space = 0;
4821 __ CallApiFunctionAndReturn(api_function_address, thunk_ref, 5188 MemOperand is_construct_call_operand =
4822 kStackUnwindSpace, return_value_operand, 5189 MemOperand(sp, kFunctionCallbackInfoOffset + 2 * kPointerSize + kIntSize);
4823 &context_restore_operand); 5190 MemOperand* stack_space_operand = &is_construct_call_operand;
5191 if (argc.is_immediate()) {
5192 stack_space = argc.immediate() + FCA::kArgsLength + 1;
5193 stack_space_operand = NULL;
5194 }
5195 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, stack_space,
5196 stack_space_operand, return_value_operand,
5197 &context_restore_operand);
4824 } 5198 }
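Two details of the helper's tail: the result is read fp-relative, where the + 2 appears to skip the exit frame's two header slots (judging from the context_restore_operand computation), and the unwind amount is a compile-time constant only when argc is an immediate; with a register argc, CallApiFunctionAndReturn reads the size back out of the frame at runtime via stack_space_operand. A sketch of the slot selection:

    // Pointer-sized slots above fp; constant values taken from the
    // STATIC_ASSERTs earlier. The "+ 2" is an assumption noted above.
    static int ReturnValueSlot(bool return_first_arg) {
      const int kArgsLength = 7, kReturnValueOffset = 3;  // FCA constants
      return 2 + (return_first_arg ? kArgsLength : kReturnValueOffset);
    }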
4825 5199
4826 5200
5201 void CallApiFunctionStub::Generate(MacroAssembler* masm) {
5202 bool call_data_undefined = this->call_data_undefined();
5203 CallApiFunctionStubHelper(masm, ParameterCount(r6), false,
5204 call_data_undefined);
5205 }
5206
5207
5208 void CallApiAccessorStub::Generate(MacroAssembler* masm) {
5209 bool is_store = this->is_store();
5210 int argc = this->argc();
5211 bool call_data_undefined = this->call_data_undefined();
5212 CallApiFunctionStubHelper(masm, ParameterCount(argc), is_store,
5213 call_data_undefined);
5214 }
5215
5216
4827 void CallApiGetterStub::Generate(MacroAssembler* masm) { 5217 void CallApiGetterStub::Generate(MacroAssembler* masm) {
4828 // ----------- S t a t e ------------- 5218 // ----------- S t a t e -------------
4829 // -- sp[0] : name 5219 // -- sp[0] : name
4830 // -- sp[4 - kArgsLength*4] : PropertyCallbackArguments object 5220 // -- sp[4 - kArgsLength*4] : PropertyCallbackArguments object
4831 // -- ... 5221 // -- ...
4832 // -- r5 : api_function_address 5222 // -- r5 : api_function_address
4833 // ----------------------------------- 5223 // -----------------------------------
4834 5224
4835 Register api_function_address = ApiGetterDescriptor::function_address(); 5225 Register api_function_address = ApiGetterDescriptor::function_address();
4836 DCHECK(api_function_address.is(r5)); 5226 DCHECK(api_function_address.is(r5));
(...skipping 34 matching lines...)
4871 // Create PropertyAccessorInfo instance on the stack above the exit frame with 5261 // Create PropertyAccessorInfo instance on the stack above the exit frame with
4872 // r4 (internal::Object** args_) as the data. 5262 // r4 (internal::Object** args_) as the data.
4873 __ StoreP(r4, MemOperand(sp, kAccessorInfoSlot * kPointerSize)); 5263 __ StoreP(r4, MemOperand(sp, kAccessorInfoSlot * kPointerSize));
4874 // r4 = AccessorInfo& 5264 // r4 = AccessorInfo&
4875 __ addi(r4, sp, Operand(kAccessorInfoSlot * kPointerSize)); 5265 __ addi(r4, sp, Operand(kAccessorInfoSlot * kPointerSize));
4876 5266
4877 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1; 5267 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
4878 5268
4879 ExternalReference thunk_ref = 5269 ExternalReference thunk_ref =
4880 ExternalReference::invoke_accessor_getter_callback(isolate()); 5270 ExternalReference::invoke_accessor_getter_callback(isolate());
4881 __ CallApiFunctionAndReturn(api_function_address, thunk_ref, 5271 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
4882 kStackUnwindSpace, 5272 kStackUnwindSpace, NULL,
4883 MemOperand(fp, 6 * kPointerSize), NULL); 5273 MemOperand(fp, 6 * kPointerSize), NULL);
4884 } 5274 }
4885 5275
4886 5276
4887 #undef __ 5277 #undef __
4888 } 5278 }
4889 } // namespace v8::internal 5279 } // namespace v8::internal
4890 5280
4891 #endif // V8_TARGET_ARCH_PPC 5281 #endif // V8_TARGET_ARCH_PPC