Index: src/arm64/code-stubs-arm64.cc
diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc
index f9290a7850406418f41750503499d6a13546509b..c0d700ce0d8c4ae4bcb9082a74a126df52e527e7 100644
--- a/src/arm64/code-stubs-arm64.cc
+++ b/src/arm64/code-stubs-arm64.cc
@@ -2974,14 +2974,6 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
   __ Ret();
 }
 
-
-void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
-  KeyedLoadICStub stub(isolate());
-  stub.GenerateForTrampoline(masm);
-}
-
-
 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
   __ EmitLoadTypeFeedbackVector(x2);
   CallICStub stub(isolate(), state());
@@ -3079,74 +3071,6 @@ static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
   __ Jump(handler);
 }
 
-
-void KeyedLoadICStub::Generate(MacroAssembler* masm) {
-  GenerateImpl(masm, false);
-}
-
-
-void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
-  GenerateImpl(masm, true);
-}
-
-
-void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
-  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // x1
-  Register key = LoadWithVectorDescriptor::NameRegister();           // x2
-  Register vector = LoadWithVectorDescriptor::VectorRegister();      // x3
-  Register slot = LoadWithVectorDescriptor::SlotRegister();          // x0
-  Register feedback = x4;
-  Register receiver_map = x5;
-  Register scratch1 = x6;
-
-  __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2));
-  __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
-
-  // Try to quickly handle the monomorphic case without knowing for sure
-  // if we have a weak cell in feedback. We do know it's safe to look
-  // at WeakCell::kValueOffset.
-  Label try_array, load_smi_map, compare_map;
-  Label not_array, miss;
-  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
-                        scratch1, &compare_map, &load_smi_map, &try_array);
-
-  __ Bind(&try_array);
-  // Is it a fixed array?
-  __ Ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
-  __ JumpIfNotRoot(scratch1, Heap::kFixedArrayMapRootIndex, &not_array);
-
-  // We have a polymorphic element handler.
-  Label polymorphic, try_poly_name;
-  __ Bind(&polymorphic);
-  HandleArrayCases(masm, feedback, receiver_map, scratch1, x7, true, &miss);
-
-  __ Bind(&not_array);
-  // Is it generic?
-  __ JumpIfNotRoot(feedback, Heap::kmegamorphic_symbolRootIndex,
-                   &try_poly_name);
-  Handle<Code> megamorphic_stub =
-      KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
-  __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);
-
-  __ Bind(&try_poly_name);
-  // We might have a name in feedback, and a fixed array in the next slot.
-  __ Cmp(key, feedback);
-  __ B(ne, &miss);
-  // If the name comparison succeeded, we know we have a fixed array with
-  // at least one map/handler pair.
-  __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2));
-  __ Ldr(feedback,
-         FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
-  HandleArrayCases(masm, feedback, receiver_map, scratch1, x7, false, &miss);
-
-  __ Bind(&miss);
-  KeyedLoadIC::GenerateMiss(masm);
-
-  __ Bind(&load_smi_map);
-  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
-  __ jmp(&compare_map);
-}
-
 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
   __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister());
   KeyedStoreICStub stub(isolate(), state());