OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 3242 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3253 __ lw(a1, MemOperand(fp, parameter_count_offset)); | 3253 __ lw(a1, MemOperand(fp, parameter_count_offset)); |
3254 if (function_mode() == JS_FUNCTION_STUB_MODE) { | 3254 if (function_mode() == JS_FUNCTION_STUB_MODE) { |
3255 __ Addu(a1, a1, Operand(1)); | 3255 __ Addu(a1, a1, Operand(1)); |
3256 } | 3256 } |
3257 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 3257 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
3258 __ sll(a1, a1, kPointerSizeLog2); | 3258 __ sll(a1, a1, kPointerSizeLog2); |
3259 __ Ret(USE_DELAY_SLOT); | 3259 __ Ret(USE_DELAY_SLOT); |
3260 __ Addu(sp, sp, a1); | 3260 __ Addu(sp, sp, a1); |
3261 } | 3261 } |
3262 | 3262 |
3263 | |
// Trampoline entry point for keyed loads. The trampoline's only job is to
// materialize the type feedback vector into the register the
// LoadWithVectorDescriptor expects (a3 on MIPS, per GenerateImpl's register
// map), then emit the full KeyedLoadICStub body inline.
void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  KeyedLoadICStub stub(isolate());
  // GenerateForTrampoline emits GenerateImpl with in_frame == true, i.e. the
  // stub body is inlined here rather than reached through a code-object jump.
  stub.GenerateForTrampoline(masm);
}
3269 | |
3270 | |
// Trampoline entry point for call ICs: loads the type feedback vector into
// a2 (the register CallICStub expects it in), then tail-calls the compiled
// CallICStub code object. Control does not return here after the Jump.
void CallICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(a2);
  CallICStub stub(isolate(), state());
  __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}
3276 | 3268 |
3277 | 3269 |
3278 static void HandleArrayCases(MacroAssembler* masm, Register feedback, | 3270 static void HandleArrayCases(MacroAssembler* masm, Register feedback, |
3279 Register receiver_map, Register scratch1, | 3271 Register receiver_map, Register scratch1, |
3280 Register scratch2, bool is_polymorphic, | 3272 Register scratch2, bool is_polymorphic, |
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3354 __ Branch(try_array, ne, cached_map, Operand(receiver_map)); | 3346 __ Branch(try_array, ne, cached_map, Operand(receiver_map)); |
3355 Register handler = feedback; | 3347 Register handler = feedback; |
3356 | 3348 |
3357 __ Lsa(handler, vector, slot, kPointerSizeLog2 - kSmiTagSize); | 3349 __ Lsa(handler, vector, slot, kPointerSizeLog2 - kSmiTagSize); |
3358 __ lw(handler, | 3350 __ lw(handler, |
3359 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); | 3351 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); |
3360 __ Addu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | 3352 __ Addu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); |
3361 __ Jump(t9); | 3353 __ Jump(t9); |
3362 } | 3354 } |
3363 | 3355 |
3364 | |
// Standalone (non-trampoline) entry point: emits the implementation with
// in_frame == false. The caller is responsible for any frame/vector setup.
void KeyedLoadICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}
3368 | |
3369 | |
// Entry point used by KeyedLoadICTrampolineStub: emits the same
// implementation but with in_frame == true (inlined into the trampoline).
void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}
3373 | |
3374 | |
// Emits the keyed-load inline cache dispatch, driven by the feedback slot:
//   1. Monomorphic: a WeakCell holding the receiver map -> jump to the
//      cached handler (via HandleMonomorphicCase).
//   2. Polymorphic elements: a FixedArray of (map, handler) pairs, entered
//      only for Smi keys (element loads).
//   3. Megamorphic: the megamorphic sentinel symbol -> generic stub.
//   4. Polymorphic-by-name: the feedback holds the property name itself and
//      the (map, handler) array lives in the following vector slot.
// Anything else falls through to the runtime miss handler.
//
// NOTE(review): in_frame is unused in this (MIPS) body; presumably it only
// matters on ports that need extra framing for the trampoline case — confirm.
void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  // Register assignments fixed by the calling convention descriptor.
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // a1
  Register key = LoadWithVectorDescriptor::NameRegister();           // a2
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // a3
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // a0
  Register feedback = t0;
  Register receiver_map = t1;
  Register scratch1 = t4;

  // feedback = vector[slot]. slot is a Smi, so the scale is
  // kPointerSizeLog2 - kSmiTagSize to undo the Smi tag shift.
  __ Lsa(feedback, vector, slot, kPointerSizeLog2 - kSmiTagSize);
  __ lw(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
                        scratch1, &compare_map, &load_smi_map, &try_array);

  __ bind(&try_array);
  // Is it a fixed array?
  __ lw(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
  __ Branch(&not_array, ne, at, Operand(scratch1));
  // We have a polymorphic element handler. Only Smi keys (element indices)
  // are handled here; everything else misses.
  __ JumpIfNotSmi(key, &miss);

  Label polymorphic, try_poly_name;
  __ bind(&polymorphic);
  HandleArrayCases(masm, feedback, receiver_map, scratch1, t5, true, &miss);

  __ bind(&not_array);
  // Is it generic? The megamorphic sentinel symbol means "give up on
  // per-map handlers" and tail-call the generic megamorphic stub.
  __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
  __ Branch(&try_poly_name, ne, at, Operand(feedback));
  Handle<Code> megamorphic_stub =
      KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ Branch(&miss, ne, key, Operand(feedback));
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair. Reload the vector entry at slot + 1
  // (hence the extra kPointerSize in the field offset).
  __ Lsa(feedback, vector, slot, kPointerSizeLog2 - kSmiTagSize);
  __ lw(feedback,
        FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, scratch1, t5, false, &miss);

  __ bind(&miss);
  KeyedLoadIC::GenerateMiss(masm);

  // Smi receivers have no map of their own: substitute the heap number map
  // for the comparison, then rejoin the monomorphic map-check path.
  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
3432 | |
// Trampoline entry point for keyed stores: loads the type feedback vector
// into the register the StoreWithVectorDescriptor expects, then emits the
// full KeyedStoreICStub body inline (GenerateForTrampoline path).
void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister());
  KeyedStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}
3438 | 3361 |
// Standalone (non-trampoline) entry point: emits the implementation with
// in_frame == false, mirroring KeyedLoadICStub::Generate above.
void KeyedStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}
3442 | 3365 |
(...skipping 1400 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4843 kStackUnwindSpace, kInvalidStackOffset, | 4766 kStackUnwindSpace, kInvalidStackOffset, |
4844 return_value_operand, NULL); | 4767 return_value_operand, NULL); |
4845 } | 4768 } |
4846 | 4769 |
4847 #undef __ | 4770 #undef __ |
4848 | 4771 |
4849 } // namespace internal | 4772 } // namespace internal |
4850 } // namespace v8 | 4773 } // namespace v8 |
4851 | 4774 |
4852 #endif // V8_TARGET_ARCH_MIPS | 4775 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |