OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 612 matching lines...)
623 if (destination == kCoreRegisters) { | 623 if (destination == kCoreRegisters) { |
624 __ vmov(r2, r3, d7); | 624 __ vmov(r2, r3, d7); |
625 __ vmov(r0, r1, d6); | 625 __ vmov(r0, r1, d6); |
626 } | 626 } |
627 } else { | 627 } else { |
628 ASSERT(destination == kCoreRegisters); | 628 ASSERT(destination == kCoreRegisters); |
629 // Write Smi from r0 to r3 and r2 in double format. | 629 // Write Smi from r0 to r3 and r2 in double format. |
630 __ mov(scratch1, Operand(r0)); | 630 __ mov(scratch1, Operand(r0)); |
631 ConvertToDoubleStub stub1(r3, r2, scratch1, scratch2); | 631 ConvertToDoubleStub stub1(r3, r2, scratch1, scratch2); |
632 __ push(lr); | 632 __ push(lr); |
633 __ Call(stub1.GetCode()); | 633 __ Call(stub1.GetCode(masm->isolate())); |
634 // Write Smi from r1 to r1 and r0 in double format. | 634 // Write Smi from r1 to r1 and r0 in double format. |
635 __ mov(scratch1, Operand(r1)); | 635 __ mov(scratch1, Operand(r1)); |
636 ConvertToDoubleStub stub2(r1, r0, scratch1, scratch2); | 636 ConvertToDoubleStub stub2(r1, r0, scratch1, scratch2); |
637 __ Call(stub2.GetCode()); | 637 __ Call(stub2.GetCode(masm->isolate())); |
638 __ pop(lr); | 638 __ pop(lr); |
639 } | 639 } |
640 } | 640 } |
641 | 641 |
642 | 642 |
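Note: every hunk in this file applies the same mechanical change, so a hedged sketch of the underlying API move may help when reading the rest of the diff. The declaration below is an assumed shape for illustration, not a quote from code-stubs.h:

    // Assumed shape of the refactor this patch applies at every call site:
    // CodeStub::GetCode() used to look up the current isolate implicitly;
    // it now takes the isolate as an explicit argument.
    class CodeStub {
     public:
      Handle<Code> GetCode(Isolate* isolate);  // was: Handle<Code> GetCode();
    };

    // Inside generated-code helpers the isolate comes from the assembler:
    //   __ Call(stub.GetCode(masm->isolate()));

In the ahead-of-time generators further down, the isolate is instead threaded in as a function parameter.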
643 void FloatingPointHelper::LoadNumber(MacroAssembler* masm, | 643 void FloatingPointHelper::LoadNumber(MacroAssembler* masm, |
644 Destination destination, | 644 Destination destination, |
645 Register object, | 645 Register object, |
646 DwVfpRegister dst, | 646 DwVfpRegister dst, |
647 Register dst1, | 647 Register dst1, |
(...skipping 37 matching lines...)
685 if (destination == kCoreRegisters) { | 685 if (destination == kCoreRegisters) { |
686 // Load the converted smi to dst1 and dst2 in double format. | 686 // Load the converted smi to dst1 and dst2 in double format. |
687 __ vmov(dst1, dst2, dst); | 687 __ vmov(dst1, dst2, dst); |
688 } | 688 } |
689 } else { | 689 } else { |
690 ASSERT(destination == kCoreRegisters); | 690 ASSERT(destination == kCoreRegisters); |
691 // Write smi to dst1 and dst2 in double format. | 691 // Write smi to dst1 and dst2 in double format. |
692 __ mov(scratch1, Operand(object)); | 692 __ mov(scratch1, Operand(object)); |
693 ConvertToDoubleStub stub(dst2, dst1, scratch1, scratch2); | 693 ConvertToDoubleStub stub(dst2, dst1, scratch1, scratch2); |
694 __ push(lr); | 694 __ push(lr); |
695 __ Call(stub.GetCode()); | 695 __ Call(stub.GetCode(masm->isolate())); |
696 __ pop(lr); | 696 __ pop(lr); |
697 } | 697 } |
698 | 698 |
699 __ bind(&done); | 699 __ bind(&done); |
700 } | 700 } |
701 | 701 |
702 | 702 |
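Note: both smi-to-double slow paths above bracket the nested call with push(lr)/pop(lr). A minimal sketch of why, stated as the usual ARM calling convention rather than anything this patch changes:

    // On ARM, __ Call drops the return address into lr, clobbering the lr
    // of the stub currently being generated. Saving it keeps the outer
    // stub able to __ Ret() to its own caller afterwards.
    __ push(lr);                              // preserve our return address
    __ Call(stub.GetCode(masm->isolate()));   // nested call overwrites lr
    __ pop(lr);                               // restore before our own Ret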
703 void FloatingPointHelper::ConvertNumberToInt32(MacroAssembler* masm, | 703 void FloatingPointHelper::ConvertNumberToInt32(MacroAssembler* masm, |
704 Register object, | 704 Register object, |
705 Register dst, | 705 Register dst, |
(...skipping 407 matching lines...)
1113 if (the_int_.is(r2) && the_heap_number_.is(r0) && scratch_.is(r3)) { | 1113 if (the_int_.is(r2) && the_heap_number_.is(r0) && scratch_.is(r3)) { |
1114 return true; | 1114 return true; |
1115 } | 1115 } |
1116 // Other register combinations are generated as and when they are needed, | 1116 // Other register combinations are generated as and when they are needed, |
1117 // so it is unsafe to call them from stubs (we can't generate a stub while | 1117 // so it is unsafe to call them from stubs (we can't generate a stub while |
1118 // we are generating a stub). | 1118 // we are generating a stub). |
1119 return false; | 1119 return false; |
1120 } | 1120 } |
1121 | 1121 |
1122 | 1122 |
1123 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime() { | 1123 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime( |
| 1124 Isolate* isolate) { |
1124 WriteInt32ToHeapNumberStub stub1(r1, r0, r2); | 1125 WriteInt32ToHeapNumberStub stub1(r1, r0, r2); |
1125 WriteInt32ToHeapNumberStub stub2(r2, r0, r3); | 1126 WriteInt32ToHeapNumberStub stub2(r2, r0, r3); |
1126 stub1.GetCode()->set_is_pregenerated(true); | 1127 stub1.GetCode(isolate)->set_is_pregenerated(true); |
1127 stub2.GetCode()->set_is_pregenerated(true); | 1128 stub2.GetCode(isolate)->set_is_pregenerated(true); |
1128 } | 1129 } |
1129 | 1130 |
1130 | 1131 |
1131 // See comment for class. | 1132 // See comment for class. |
1132 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) { | 1133 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) { |
1133 Label max_negative_int; | 1134 Label max_negative_int; |
1134 // the_int_ has the answer which is a signed int32 but not a Smi. | 1135 // the_int_ has the answer which is a signed int32 but not a Smi. |
1135 // We test for the special value that has a different exponent. This test | 1136 // We test for the special value that has a different exponent. This test |
1136 // has the neat side effect of setting the flags according to the sign. | 1137 // has the neat side effect of setting the flags according to the sign. |
1137 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u); | 1138 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u); |
(...skipping 171 matching lines...)
1309 CpuFeatures::Scope scope(VFP2); | 1310 CpuFeatures::Scope scope(VFP2); |
1310 __ SmiToDoubleVFPRegister(lhs, d7, r7, s15); | 1311 __ SmiToDoubleVFPRegister(lhs, d7, r7, s15); |
1311 // Load the double from rhs, tagged HeapNumber r0, to d6. | 1312 // Load the double from rhs, tagged HeapNumber r0, to d6. |
1312 __ sub(r7, rhs, Operand(kHeapObjectTag)); | 1313 __ sub(r7, rhs, Operand(kHeapObjectTag)); |
1313 __ vldr(d6, r7, HeapNumber::kValueOffset); | 1314 __ vldr(d6, r7, HeapNumber::kValueOffset); |
1314 } else { | 1315 } else { |
1315 __ push(lr); | 1316 __ push(lr); |
1316 // Convert lhs to a double in r2, r3. | 1317 // Convert lhs to a double in r2, r3. |
1317 __ mov(r7, Operand(lhs)); | 1318 __ mov(r7, Operand(lhs)); |
1318 ConvertToDoubleStub stub1(r3, r2, r7, r6); | 1319 ConvertToDoubleStub stub1(r3, r2, r7, r6); |
1319 __ Call(stub1.GetCode()); | 1320 __ Call(stub1.GetCode(masm->isolate())); |
1320 // Load rhs to a double in r0, r1. | 1321 // Load rhs to a double in r0, r1. |
1321 __ Ldrd(r0, r1, FieldMemOperand(rhs, HeapNumber::kValueOffset)); | 1322 __ Ldrd(r0, r1, FieldMemOperand(rhs, HeapNumber::kValueOffset)); |
1322 __ pop(lr); | 1323 __ pop(lr); |
1323 } | 1324 } |
1324 | 1325 |
1325 // We now have both loaded as doubles but we can skip the lhs nan check | 1326 // We now have both loaded as doubles but we can skip the lhs nan check |
1326 // since it's a smi. | 1327 // since it's a smi. |
1327 __ jmp(lhs_not_nan); | 1328 __ jmp(lhs_not_nan); |
1328 | 1329 |
1329 __ bind(&rhs_is_smi); | 1330 __ bind(&rhs_is_smi); |
(...skipping 21 matching lines...)
1351 __ vldr(d7, r7, HeapNumber::kValueOffset); | 1352 __ vldr(d7, r7, HeapNumber::kValueOffset); |
1352 // Convert rhs to a double in d6. | 1353 // Convert rhs to a double in d6. |
1353 __ SmiToDoubleVFPRegister(rhs, d6, r7, s13); | 1354 __ SmiToDoubleVFPRegister(rhs, d6, r7, s13); |
1354 } else { | 1355 } else { |
1355 __ push(lr); | 1356 __ push(lr); |
1356 // Load lhs to a double in r2, r3. | 1357 // Load lhs to a double in r2, r3. |
1357 __ Ldrd(r2, r3, FieldMemOperand(lhs, HeapNumber::kValueOffset)); | 1358 __ Ldrd(r2, r3, FieldMemOperand(lhs, HeapNumber::kValueOffset)); |
1358 // Convert rhs to a double in r0, r1. | 1359 // Convert rhs to a double in r0, r1. |
1359 __ mov(r7, Operand(rhs)); | 1360 __ mov(r7, Operand(rhs)); |
1360 ConvertToDoubleStub stub2(r1, r0, r7, r6); | 1361 ConvertToDoubleStub stub2(r1, r0, r7, r6); |
1361 __ Call(stub2.GetCode()); | 1362 __ Call(stub2.GetCode(masm->isolate())); |
1362 __ pop(lr); | 1363 __ pop(lr); |
1363 } | 1364 } |
1364 // Fall through to both_loaded_as_doubles. | 1365 // Fall through to both_loaded_as_doubles. |
1365 } | 1366 } |
1366 | 1367 |
1367 | 1368 |
1368 void EmitNanCheck(MacroAssembler* masm, Label* lhs_not_nan, Condition cond) { | 1369 void EmitNanCheck(MacroAssembler* masm, Label* lhs_not_nan, Condition cond) { |
1369 bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset); | 1370 bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset); |
1370 Register rhs_exponent = exp_first ? r0 : r1; | 1371 Register rhs_exponent = exp_first ? r0 : r1; |
1371 Register lhs_exponent = exp_first ? r2 : r3; | 1372 Register lhs_exponent = exp_first ? r2 : r3; |
(...skipping 939 matching lines...)
2311 CpuFeatures::Scope scope(VFP2); | 2312 CpuFeatures::Scope scope(VFP2); |
2312 __ vmov(s0, r1); | 2313 __ vmov(s0, r1); |
2313 __ vcvt_f64_s32(d0, s0); | 2314 __ vcvt_f64_s32(d0, s0); |
2314 __ sub(r2, r0, Operand(kHeapObjectTag)); | 2315 __ sub(r2, r0, Operand(kHeapObjectTag)); |
2315 __ vstr(d0, r2, HeapNumber::kValueOffset); | 2316 __ vstr(d0, r2, HeapNumber::kValueOffset); |
2316 __ Ret(); | 2317 __ Ret(); |
2317 } else { | 2318 } else { |
2318 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not | 2319 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not |
2319 // have to set up a frame. | 2320 // have to set up a frame. |
2320 WriteInt32ToHeapNumberStub stub(r1, r0, r2); | 2321 WriteInt32ToHeapNumberStub stub(r1, r0, r2); |
2321 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 2322 __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); |
2322 } | 2323 } |
2323 | 2324 |
2324 __ bind(&impossible); | 2325 __ bind(&impossible); |
2325 if (FLAG_debug_code) { | 2326 if (FLAG_debug_code) { |
2326 __ stop("Incorrect assumption in bit-not stub"); | 2327 __ stop("Incorrect assumption in bit-not stub"); |
2327 } | 2328 } |
2328 } | 2329 } |
2329 | 2330 |
2330 | 2331 |
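Note: the bit-not slow path above uses __ Jump rather than __ Call because WriteInt32ToHeapNumberStub cannot trigger GC, so no frame is needed and the stub can return directly to our caller. A sketch of the two call shapes (the Call variant mirrors the ConvertToDoubleStub sites earlier in this file):

    // Tail call: no frame, the callee returns straight to our caller.
    WriteInt32ToHeapNumberStub stub(r1, r0, r2);
    __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);

    // Nested call: needs lr saved around it, as in the earlier helpers.
    // __ push(lr); __ Call(stub.GetCode(masm->isolate())); __ pop(lr);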
2331 // TODO(svenpanne): Use virtual functions instead of switch. | 2332 // TODO(svenpanne): Use virtual functions instead of switch. |
(...skipping 1535 matching lines...)
3867 return true; | 3868 return true; |
3868 } | 3869 } |
3869 | 3870 |
3870 | 3871 |
3871 bool CEntryStub::IsPregenerated() { | 3872 bool CEntryStub::IsPregenerated() { |
3872 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) && | 3873 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) && |
3873 result_size_ == 1; | 3874 result_size_ == 1; |
3874 } | 3875 } |
3875 | 3876 |
3876 | 3877 |
3877 void CodeStub::GenerateStubsAheadOfTime() { | 3878 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
3878 CEntryStub::GenerateAheadOfTime(); | 3879 CEntryStub::GenerateAheadOfTime(isolate); |
3879 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(); | 3880 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate); |
3880 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(); | 3881 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
3881 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(); | 3882 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); |
3882 } | 3883 } |
3883 | 3884 |
3884 | 3885 |
3885 void CodeStub::GenerateFPStubs() { | 3886 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
3886 SaveFPRegsMode mode = CpuFeatures::IsSupported(VFP2) | 3887 SaveFPRegsMode mode = CpuFeatures::IsSupported(VFP2) |
3887 ? kSaveFPRegs | 3888 ? kSaveFPRegs |
3888 : kDontSaveFPRegs; | 3889 : kDontSaveFPRegs; |
3889 CEntryStub save_doubles(1, mode); | 3890 CEntryStub save_doubles(1, mode); |
3890 StoreBufferOverflowStub stub(mode); | 3891 StoreBufferOverflowStub stub(mode); |
3891 // These stubs might already be in the snapshot; detect that and don't | 3892 // These stubs might already be in the snapshot; detect that and don't |
3892 // regenerate, which would lead to code stub initialization state being messed | 3893 // regenerate, which would lead to code stub initialization state being messed |
3893 // up. | 3894 // up. |
3894 Code* save_doubles_code = NULL; | 3895 Code* save_doubles_code = NULL; |
3895 Code* store_buffer_overflow_code = NULL; | 3896 Code* store_buffer_overflow_code = NULL; |
3896 if (!save_doubles.FindCodeInCache(&save_doubles_code, ISOLATE)) { | 3897 if (!save_doubles.FindCodeInCache(&save_doubles_code, ISOLATE)) { |
3897 if (CpuFeatures::IsSupported(VFP2)) { | 3898 if (CpuFeatures::IsSupported(VFP2)) { |
3898 CpuFeatures::Scope scope2(VFP2); | 3899 CpuFeatures::Scope scope2(VFP2); |
3899 save_doubles_code = *save_doubles.GetCode(); | 3900 save_doubles_code = *save_doubles.GetCode(isolate); |
3900 store_buffer_overflow_code = *stub.GetCode(); | 3901 store_buffer_overflow_code = *stub.GetCode(isolate); |
3901 } else { | 3902 } else { |
3902 save_doubles_code = *save_doubles.GetCode(); | 3903 save_doubles_code = *save_doubles.GetCode(isolate); |
3903 store_buffer_overflow_code = *stub.GetCode(); | 3904 store_buffer_overflow_code = *stub.GetCode(isolate); |
3904 } | 3905 } |
3905 save_doubles_code->set_is_pregenerated(true); | 3906 save_doubles_code->set_is_pregenerated(true); |
3906 store_buffer_overflow_code->set_is_pregenerated(true); | 3907 store_buffer_overflow_code->set_is_pregenerated(true); |
3907 } | 3908 } |
3908 ISOLATE->set_fp_stubs_generated(true); | 3909 ISOLATE->set_fp_stubs_generated(true); |
3909 } | 3910 } |
3910 | 3911 |
3911 | 3912 |
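Note: GenerateFPStubs above only generates when the cache lookup misses, so stubs already baked into the snapshot are left untouched. A reduced sketch of that guard, with the duplicated VFP2/non-VFP2 branches collapsed (names from the diff, control flow simplified):

    Code* code = NULL;
    if (!stub.FindCodeInCache(&code, ISOLATE)) {  // miss: not in the snapshot
      code = *stub.GetCode(isolate);              // generate it now
      code->set_is_pregenerated(true);            // pin it for serialization
    }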
3912 void CEntryStub::GenerateAheadOfTime() { | 3913 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { |
3913 CEntryStub stub(1, kDontSaveFPRegs); | 3914 CEntryStub stub(1, kDontSaveFPRegs); |
3914 Handle<Code> code = stub.GetCode(); | 3915 Handle<Code> code = stub.GetCode(isolate); |
3915 code->set_is_pregenerated(true); | 3916 code->set_is_pregenerated(true); |
3916 } | 3917 } |
3917 | 3918 |
3918 | 3919 |
3919 static void JumpIfOOM(MacroAssembler* masm, | 3920 static void JumpIfOOM(MacroAssembler* masm, |
3920 Register value, | 3921 Register value, |
3921 Register scratch, | 3922 Register scratch, |
3922 Label* oom_label) { | 3923 Label* oom_label) { |
3923 STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3); | 3924 STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3); |
3924 STATIC_ASSERT(kFailureTag == 3); | 3925 STATIC_ASSERT(kFailureTag == 3); |
(...skipping 3072 matching lines...)
6997 __ mov(r0, Operand(EQUAL), LeaveCC, eq); | 6998 __ mov(r0, Operand(EQUAL), LeaveCC, eq); |
6998 __ mov(r0, Operand(LESS), LeaveCC, lt); | 6999 __ mov(r0, Operand(LESS), LeaveCC, lt); |
6999 __ mov(r0, Operand(GREATER), LeaveCC, gt); | 7000 __ mov(r0, Operand(GREATER), LeaveCC, gt); |
7000 __ Ret(); | 7001 __ Ret(); |
7001 } | 7002 } |
7002 | 7003 |
7003 __ bind(&unordered); | 7004 __ bind(&unordered); |
7004 __ bind(&generic_stub); | 7005 __ bind(&generic_stub); |
7005 ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC, | 7006 ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC, |
7006 CompareIC::GENERIC); | 7007 CompareIC::GENERIC); |
7007 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 7008 __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); |
7008 | 7009 |
7009 __ bind(&maybe_undefined1); | 7010 __ bind(&maybe_undefined1); |
7010 if (Token::IsOrderedRelationalCompareOp(op_)) { | 7011 if (Token::IsOrderedRelationalCompareOp(op_)) { |
7011 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); | 7012 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); |
7012 __ b(ne, &miss); | 7013 __ b(ne, &miss); |
7013 __ JumpIfSmi(r1, &unordered); | 7014 __ JumpIfSmi(r1, &unordered); |
7014 __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE); | 7015 __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE); |
7015 __ b(ne, &maybe_undefined2); | 7016 __ b(ne, &maybe_undefined2); |
7016 __ jmp(&unordered); | 7017 __ jmp(&unordered); |
7017 } | 7018 } |
(...skipping 197 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
7215 | 7216 |
7216 void DirectCEntryStub::GenerateCall(MacroAssembler* masm, | 7217 void DirectCEntryStub::GenerateCall(MacroAssembler* masm, |
7217 ExternalReference function) { | 7218 ExternalReference function) { |
7218 __ mov(r2, Operand(function)); | 7219 __ mov(r2, Operand(function)); |
7219 GenerateCall(masm, r2); | 7220 GenerateCall(masm, r2); |
7220 } | 7221 } |
7221 | 7222 |
7222 | 7223 |
7223 void DirectCEntryStub::GenerateCall(MacroAssembler* masm, | 7224 void DirectCEntryStub::GenerateCall(MacroAssembler* masm, |
7224 Register target) { | 7225 Register target) { |
7225 __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()), | 7226 intptr_t code = |
7226 RelocInfo::CODE_TARGET)); | 7227 reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location()); |
| 7228 __ mov(lr, Operand(code, RelocInfo::CODE_TARGET)); |
7227 | 7229 |
7228 // Prevent literal pool emission during calculation of return address. | 7230 // Prevent literal pool emission during calculation of return address. |
7229 Assembler::BlockConstPoolScope block_const_pool(masm); | 7231 Assembler::BlockConstPoolScope block_const_pool(masm); |
7230 | 7232 |
7231 // Push return address (accessible to GC through exit frame pc). | 7233 // Push return address (accessible to GC through exit frame pc). |
7232 // Note that using pc with str is deprecated. | 7234 // Note that using pc with str is deprecated. |
7233 Label start; | 7235 Label start; |
7234 __ bind(&start); | 7236 __ bind(&start); |
7235 __ add(ip, pc, Operand(Assembler::kInstrSize)); | 7237 __ add(ip, pc, Operand(Assembler::kInstrSize)); |
7236 __ str(ip, MemOperand(sp, 0)); | 7238 __ str(ip, MemOperand(sp, 0)); |
(...skipping 312 matching lines...)
7549 } | 7551 } |
7550 return false; | 7552 return false; |
7551 } | 7553 } |
7552 | 7554 |
7553 | 7555 |
7554 bool StoreBufferOverflowStub::IsPregenerated() { | 7556 bool StoreBufferOverflowStub::IsPregenerated() { |
7555 return save_doubles_ == kDontSaveFPRegs || ISOLATE->fp_stubs_generated(); | 7557 return save_doubles_ == kDontSaveFPRegs || ISOLATE->fp_stubs_generated(); |
7556 } | 7558 } |
7557 | 7559 |
7558 | 7560 |
7559 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() { | 7561 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( |
| 7562 Isolate* isolate) { |
7560 StoreBufferOverflowStub stub1(kDontSaveFPRegs); | 7563 StoreBufferOverflowStub stub1(kDontSaveFPRegs); |
7561 stub1.GetCode()->set_is_pregenerated(true); | 7564 stub1.GetCode(isolate)->set_is_pregenerated(true); |
7562 } | 7565 } |
7563 | 7566 |
7564 | 7567 |
7565 void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() { | 7568 void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) { |
7566 for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; | 7569 for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; |
7567 !entry->object.is(no_reg); | 7570 !entry->object.is(no_reg); |
7568 entry++) { | 7571 entry++) { |
7569 RecordWriteStub stub(entry->object, | 7572 RecordWriteStub stub(entry->object, |
7570 entry->value, | 7573 entry->value, |
7571 entry->address, | 7574 entry->address, |
7572 entry->action, | 7575 entry->action, |
7573 kDontSaveFPRegs); | 7576 kDontSaveFPRegs); |
7574 stub.GetCode()->set_is_pregenerated(true); | 7577 stub.GetCode(isolate)->set_is_pregenerated(true); |
7575 } | 7578 } |
7576 } | 7579 } |
7577 | 7580 |
7578 | 7581 |
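Note: the RecordWriteStub generator above walks a statically initialized table terminated by a no_reg sentinel instead of carrying an explicit length. The struct shape below is inferred from the loop body for illustration, not quoted from a header:

    // Inferred shape: one row per fixed register combination, with a
    // terminator row whose object register is no_reg.
    struct AheadOfTimeWriteBarrierStubList {
      Register object, value, address;
      RememberedSetAction action;
    };

    for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
         !entry->object.is(no_reg);  // stop at the sentinel row
         entry++) {
      // ... pregenerate one RecordWriteStub per row ...
    }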
7579 bool CodeStub::CanUseFPRegisters() { | 7582 bool CodeStub::CanUseFPRegisters() { |
7580 return CpuFeatures::IsSupported(VFP2); | 7583 return CpuFeatures::IsSupported(VFP2); |
7581 } | 7584 } |
7582 | 7585 |
7583 | 7586 |
7584 // Takes the input in 3 registers: address_, value_, and object_. A pointer to | 7587 // Takes the input in 3 registers: address_, value_, and object_. A pointer to |
(...skipping 254 matching lines...)
7839 r5, r6, r7, r9, r2, | 7842 r5, r6, r7, r9, r2, |
7840 &slow_elements); | 7843 &slow_elements); |
7841 __ Ret(); | 7844 __ Ret(); |
7842 } | 7845 } |
7843 | 7846 |
7844 | 7847 |
7845 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { | 7848 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { |
7846 ASSERT(!Serializer::enabled()); | 7849 ASSERT(!Serializer::enabled()); |
7847 bool save_fp_regs = CpuFeatures::IsSupported(VFP2); | 7850 bool save_fp_regs = CpuFeatures::IsSupported(VFP2); |
7848 CEntryStub ces(1, save_fp_regs ? kSaveFPRegs : kDontSaveFPRegs); | 7851 CEntryStub ces(1, save_fp_regs ? kSaveFPRegs : kDontSaveFPRegs); |
7849 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); | 7852 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); |
7850 int parameter_count_offset = | 7853 int parameter_count_offset = |
7851 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; | 7854 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; |
7852 __ ldr(r1, MemOperand(fp, parameter_count_offset)); | 7855 __ ldr(r1, MemOperand(fp, parameter_count_offset)); |
7853 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 7856 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
7854 __ mov(r1, Operand(r1, LSL, kPointerSizeLog2)); | 7857 __ mov(r1, Operand(r1, LSL, kPointerSizeLog2)); |
7855 __ add(sp, sp, r1); | 7858 __ add(sp, sp, r1); |
7856 __ Ret(); | 7859 __ Ret(); |
7857 } | 7860 } |
7858 | 7861 |
7859 | 7862 |
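Note: the trampoline epilogue above converts the caller's parameter count into a byte offset before popping it. On 32-bit ARM, kPointerSizeLog2 is 2, so the shift is a multiply by the 4-byte stack slot size:

    __ mov(r1, Operand(r1, LSL, kPointerSizeLog2));  // count << 2 == count * 4 bytes
    __ add(sp, sp, r1);                              // drop the caller's argument slots
    __ Ret();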
(...skipping 54 matching lines...)
7914 | 7917 |
7915 __ Pop(lr, r5, r1); | 7918 __ Pop(lr, r5, r1); |
7916 __ Ret(); | 7919 __ Ret(); |
7917 } | 7920 } |
7918 | 7921 |
7919 #undef __ | 7922 #undef __ |
7920 | 7923 |
7921 } } // namespace v8::internal | 7924 } } // namespace v8::internal |
7922 | 7925 |
7923 #endif // V8_TARGET_ARCH_ARM | 7926 #endif // V8_TARGET_ARCH_ARM |