OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 3433 matching lines...) |
3444 HandleArrayCases(masm, feedback, receiver_map, scratch1, a7, false, &miss); | 3444 HandleArrayCases(masm, feedback, receiver_map, scratch1, a7, false, &miss); |
3445 | 3445 |
3446 __ bind(&miss); | 3446 __ bind(&miss); |
3447 KeyedLoadIC::GenerateMiss(masm); | 3447 KeyedLoadIC::GenerateMiss(masm); |
3448 | 3448 |
3449 __ bind(&load_smi_map); | 3449 __ bind(&load_smi_map); |
3450 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | 3450 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
3451 __ Branch(&compare_map); | 3451 __ Branch(&compare_map); |
3452 } | 3452 } |
3453 | 3453 |
3454 void StoreICTrampolineStub::Generate(MacroAssembler* masm) { | |
3455 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); | |
3456 StoreICStub stub(isolate(), state()); | |
3457 stub.GenerateForTrampoline(masm); | |
3458 } | |
3459 | |
3460 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { | 3454 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { |
3461 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); | 3455 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); |
3462 KeyedStoreICStub stub(isolate(), state()); | 3456 KeyedStoreICStub stub(isolate(), state()); |
3463 stub.GenerateForTrampoline(masm); | 3457 stub.GenerateForTrampoline(masm); |
3464 } | 3458 } |
3465 | 3459 |
3466 void StoreICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } | |
3467 | |
3468 void StoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | |
3469 GenerateImpl(masm, true); | |
3470 } | |
3471 | |
3472 void StoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | |
3473 Register receiver = StoreWithVectorDescriptor::ReceiverRegister(); // a1 | |
3474 Register key = StoreWithVectorDescriptor::NameRegister(); // a2 | |
3475 Register vector = StoreWithVectorDescriptor::VectorRegister(); // a3 | |
3476 Register slot = StoreWithVectorDescriptor::SlotRegister(); // a4 | |
3477 DCHECK(StoreWithVectorDescriptor::ValueRegister().is(a0)); // a0 | |
3478 Register feedback = a5; | |
3479 Register receiver_map = a6; | |
3480 Register scratch1 = a7; | |
3481 | |
3482 __ SmiScale(scratch1, slot, kPointerSizeLog2); | |
3483 __ Daddu(feedback, vector, Operand(scratch1)); | |
3484 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | |
3485 | |
3486 // Try to quickly handle the monomorphic case without knowing for sure | |
3487 // if we have a weak cell in feedback. We do know it's safe to look | |
3488 // at WeakCell::kValueOffset. | |
3489 Label try_array, load_smi_map, compare_map; | |
3490 Label not_array, miss; | |
3491 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | |
3492 scratch1, &compare_map, &load_smi_map, &try_array); | |
3493 | |
3494 // Is it a fixed array? | |
3495 __ bind(&try_array); | |
3496 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
3497 __ Branch(¬_array, ne, scratch1, Heap::kFixedArrayMapRootIndex); | |
3498 | |
3499 Register scratch2 = t0; | |
3500 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, true, | |
3501 &miss); | |
3502 | |
3503 __ bind(¬_array); | |
3504 __ Branch(&miss, ne, feedback, Heap::kmegamorphic_symbolRootIndex); | |
3505 masm->isolate()->store_stub_cache()->GenerateProbe( | |
3506 masm, receiver, key, feedback, receiver_map, scratch1, scratch2); | |
3507 | |
3508 __ bind(&miss); | |
3509 StoreIC::GenerateMiss(masm); | |
3510 | |
3511 __ bind(&load_smi_map); | |
3512 __ Branch(USE_DELAY_SLOT, &compare_map); | |
3513 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot. | |
3514 } | |
3515 | |
3516 void KeyedStoreICStub::Generate(MacroAssembler* masm) { | 3460 void KeyedStoreICStub::Generate(MacroAssembler* masm) { |
3517 GenerateImpl(masm, false); | 3461 GenerateImpl(masm, false); |
3518 } | 3462 } |
3519 | 3463 |
3520 void KeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 3464 void KeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { |
3521 GenerateImpl(masm, true); | 3465 GenerateImpl(masm, true); |
3522 } | 3466 } |
3523 | 3467 |
3524 | 3468 |
3525 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback, | 3469 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback, |
(...skipping 1553 matching lines...) |
5079 kStackUnwindSpace, kInvalidStackOffset, | 5023 kStackUnwindSpace, kInvalidStackOffset, |
5080 return_value_operand, NULL); | 5024 return_value_operand, NULL); |
5081 } | 5025 } |
5082 | 5026 |
5083 #undef __ | 5027 #undef __ |
5084 | 5028 |
5085 } // namespace internal | 5029 } // namespace internal |
5086 } // namespace v8 | 5030 } // namespace v8 |
5087 | 5031 |
5088 #endif // V8_TARGET_ARCH_MIPS64 | 5032 #endif // V8_TARGET_ARCH_MIPS64 |