| OLD | NEW | 
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. | 
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be | 
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. | 
| 4 | 4 | 
| 5 #if V8_TARGET_ARCH_S390 | 5 #if V8_TARGET_ARCH_S390 | 
| 6 | 6 | 
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" | 
| 8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" | 
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" | 
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" | 
| (...skipping 3651 matching lines...) |
| 3662 | 3662 | 
| 3663   __ bind(&miss); | 3663   __ bind(&miss); | 
| 3664   KeyedLoadIC::GenerateMiss(masm); | 3664   KeyedLoadIC::GenerateMiss(masm); | 
| 3665 | 3665 | 
| 3666   __ bind(&load_smi_map); | 3666   __ bind(&load_smi_map); | 
| 3667   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | 3667   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | 
| 3668   __ b(&compare_map); | 3668   __ b(&compare_map); | 
| 3669 } | 3669 } | 
| 3670 | 3670 | 
| 3671 void StoreICTrampolineStub::Generate(MacroAssembler* masm) { | 3671 void StoreICTrampolineStub::Generate(MacroAssembler* masm) { | 
| 3672   __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister()); | 3672   __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); | 
| 3673   StoreICStub stub(isolate(), state()); | 3673   StoreICStub stub(isolate(), state()); | 
| 3674   stub.GenerateForTrampoline(masm); | 3674   stub.GenerateForTrampoline(masm); | 
| 3675 } | 3675 } | 
| 3676 | 3676 | 
| 3677 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { | 3677 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { | 
| 3678   __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister()); | 3678   __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); | 
| 3679   KeyedStoreICStub stub(isolate(), state()); | 3679   KeyedStoreICStub stub(isolate(), state()); | 
| 3680   stub.GenerateForTrampoline(masm); | 3680   stub.GenerateForTrampoline(masm); | 
| 3681 } | 3681 } | 
| 3682 | 3682 | 
| 3683 void StoreICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } | 3683 void StoreICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } | 
| 3684 | 3684 | 
| 3685 void StoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 3685 void StoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | 
| 3686   GenerateImpl(masm, true); | 3686   GenerateImpl(masm, true); | 
| 3687 } | 3687 } | 
| 3688 | 3688 | 
| 3689 void StoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 3689 void StoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 
| 3690   Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // r3 | 3690   Register receiver = StoreWithVectorDescriptor::ReceiverRegister();  // r3 | 
| 3691   Register key = VectorStoreICDescriptor::NameRegister();           // r4 | 3691   Register key = StoreWithVectorDescriptor::NameRegister();           // r4 | 
| 3692   Register vector = VectorStoreICDescriptor::VectorRegister();      // r5 | 3692   Register vector = StoreWithVectorDescriptor::VectorRegister();      // r5 | 
| 3693   Register slot = VectorStoreICDescriptor::SlotRegister();          // r6 | 3693   Register slot = StoreWithVectorDescriptor::SlotRegister();          // r6 | 
| 3694   DCHECK(VectorStoreICDescriptor::ValueRegister().is(r2));          // r2 | 3694   DCHECK(StoreWithVectorDescriptor::ValueRegister().is(r2));          // r2 | 
| 3695   Register feedback = r7; | 3695   Register feedback = r7; | 
| 3696   Register receiver_map = r8; | 3696   Register receiver_map = r8; | 
| 3697   Register scratch1 = r9; | 3697   Register scratch1 = r9; | 
| 3698 | 3698 | 
| 3699   __ SmiToPtrArrayOffset(r0, slot); | 3699   __ SmiToPtrArrayOffset(r0, slot); | 
| 3700   __ AddP(feedback, vector, r0); | 3700   __ AddP(feedback, vector, r0); | 
| 3701   __ LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 3701   __ LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 
| 3702 | 3702 | 
| 3703   // Try to quickly handle the monomorphic case without knowing for sure | 3703   // Try to quickly handle the monomorphic case without knowing for sure | 
| 3704   // if we have a weak cell in feedback. We do know it's safe to look | 3704   // if we have a weak cell in feedback. We do know it's safe to look | 
| (...skipping 93 matching lines...) |
| 3798   __ bind(&prepare_next); | 3798   __ bind(&prepare_next); | 
| 3799   __ AddP(pointer_reg, pointer_reg, Operand(kPointerSize * 3)); | 3799   __ AddP(pointer_reg, pointer_reg, Operand(kPointerSize * 3)); | 
| 3800   __ CmpLogicalP(pointer_reg, too_far); | 3800   __ CmpLogicalP(pointer_reg, too_far); | 
| 3801   __ blt(&next_loop); | 3801   __ blt(&next_loop); | 
| 3802 | 3802 | 
| 3803   // We exhausted our array of map handler pairs. | 3803   // We exhausted our array of map handler pairs. | 
| 3804   __ b(miss); | 3804   __ b(miss); | 
| 3805 } | 3805 } | 
| 3806 | 3806 | 
| 3807 void KeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 3807 void KeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 
| 3808   Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // r3 | 3808   Register receiver = StoreWithVectorDescriptor::ReceiverRegister();  // r3 | 
| 3809   Register key = VectorStoreICDescriptor::NameRegister();           // r4 | 3809   Register key = StoreWithVectorDescriptor::NameRegister();           // r4 | 
| 3810   Register vector = VectorStoreICDescriptor::VectorRegister();      // r5 | 3810   Register vector = StoreWithVectorDescriptor::VectorRegister();      // r5 | 
| 3811   Register slot = VectorStoreICDescriptor::SlotRegister();          // r6 | 3811   Register slot = StoreWithVectorDescriptor::SlotRegister();          // r6 | 
| 3812   DCHECK(VectorStoreICDescriptor::ValueRegister().is(r2));          // r2 | 3812   DCHECK(StoreWithVectorDescriptor::ValueRegister().is(r2));          // r2 | 
| 3813   Register feedback = r7; | 3813   Register feedback = r7; | 
| 3814   Register receiver_map = r8; | 3814   Register receiver_map = r8; | 
| 3815   Register scratch1 = r9; | 3815   Register scratch1 = r9; | 
| 3816 | 3816 | 
| 3817   __ SmiToPtrArrayOffset(r0, slot); | 3817   __ SmiToPtrArrayOffset(r0, slot); | 
| 3818   __ AddP(feedback, vector, r0); | 3818   __ AddP(feedback, vector, r0); | 
| 3819   __ LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 3819   __ LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 
| 3820 | 3820 | 
| 3821   // Try to quickly handle the monomorphic case without knowing for sure | 3821   // Try to quickly handle the monomorphic case without knowing for sure | 
| 3822   // if we have a weak cell in feedback. We do know it's safe to look | 3822   // if we have a weak cell in feedback. We do know it's safe to look | 
| (...skipping 1568 matching lines...) |
| 5391   CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 5391   CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 
| 5392                            kStackUnwindSpace, NULL, return_value_operand, NULL); | 5392                            kStackUnwindSpace, NULL, return_value_operand, NULL); | 
| 5393 } | 5393 } | 
| 5394 | 5394 | 
| 5395 #undef __ | 5395 #undef __ | 
| 5396 | 5396 | 
| 5397 }  // namespace internal | 5397 }  // namespace internal | 
| 5398 }  // namespace v8 | 5398 }  // namespace v8 | 
| 5399 | 5399 | 
| 5400 #endif  // V8_TARGET_ARCH_S390 | 5400 #endif  // V8_TARGET_ARCH_S390 | 
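
The store-IC stubs in this diff all open with the same feedback lookup: `SmiToPtrArrayOffset(r0, slot)`, `AddP(feedback, vector, r0)`, then `LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize))`, which together read the handler entry for `slot` out of the type feedback vector. A minimal C++ sketch of the equivalent untagged pointer arithmetic, assuming the usual 64-bit V8 tagging constants (Smi payload in the upper 32 bits, `kHeapObjectTag == 1`, a two-word FixedArray header) and using illustrative free functions rather than real V8 declarations:

```cpp
#include <cstdint>
#include <cstring>

// Illustrative constants: they mirror V8's 64-bit defaults, restated here as
// assumptions rather than taken from this file.
constexpr intptr_t kPointerSize = 8;
constexpr intptr_t kHeapObjectTag = 1;                        // tagged heap pointer
constexpr intptr_t kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length fields

// SmiToPtrArrayOffset: with the Smi value stored in the upper 32 bits,
// shifting right by (32 - log2(kPointerSize)) yields index * kPointerSize.
inline intptr_t SmiToPtrArrayOffset(intptr_t smi_slot) {
  return smi_slot >> (32 - 3);
}

// FieldMemOperand(obj, offset) addresses obj + offset - kHeapObjectTag, so the
// LoadP in the stub is effectively feedback_vector->get(slot_index).
inline intptr_t LoadFeedbackEntry(const uint8_t* vector_tagged, intptr_t smi_slot) {
  const uint8_t* entry = vector_tagged + SmiToPtrArrayOffset(smi_slot);
  intptr_t value;
  std::memcpy(&value, entry + kFixedArrayHeaderSize - kHeapObjectTag, sizeof(value));
  return value;
}
```

The stubs then compare the loaded entry against the receiver map for the monomorphic fast path; on failure, the loop around line 3798 walks the array of map/handler entries three pointers at a time until it finds a match or exhausts the array and branches to `miss`.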