OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 3259 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3270 if (function_mode() == JS_FUNCTION_STUB_MODE) { | 3270 if (function_mode() == JS_FUNCTION_STUB_MODE) { |
3271 __ Addu(a1, a1, Operand(1)); | 3271 __ Addu(a1, a1, Operand(1)); |
3272 } | 3272 } |
3273 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 3273 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
3274 __ sll(a1, a1, kPointerSizeLog2); | 3274 __ sll(a1, a1, kPointerSizeLog2); |
3275 __ Ret(USE_DELAY_SLOT); | 3275 __ Ret(USE_DELAY_SLOT); |
3276 __ Addu(sp, sp, a1); | 3276 __ Addu(sp, sp, a1); |
3277 } | 3277 } |
3278 | 3278 |
3279 | 3279 |
// Trampoline entry point for the vector-based LoadIC: materializes the type
// feedback vector into the register the LoadWithVector calling convention
// expects, then emits the full stub body inline (GenerateForTrampoline).
void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  LoadICStub stub(isolate());
  stub.GenerateForTrampoline(masm);
}
3285 | |
3286 | |
// Trampoline entry point for the vector-based KeyedLoadIC: loads the type
// feedback vector into the descriptor's vector register, then emits the full
// keyed-load stub body inline (GenerateForTrampoline).
void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  KeyedLoadICStub stub(isolate());
  stub.GenerateForTrampoline(masm);
}
3292 | 3285 |
3293 | 3286 |
// Trampoline entry point for CallIC: loads the type feedback vector into a2
// (the register the CallIC calling convention uses on this port) and then
// tail-jumps to the CallICStub's compiled code rather than inlining it.
void CallICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(a2);
  CallICStub stub(isolate(), state());
  __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}
3299 | 3292 |
3300 | 3293 |
// Non-trampoline entry: the feedback vector is already in its register.
void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
3302 | |
3303 | |
// Trampoline entry: emitted right after the vector load performed by
// LoadICTrampolineStub::Generate; `true` flags the trampoline context.
void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}
3307 | |
3308 | |
3309 static void HandleArrayCases(MacroAssembler* masm, Register feedback, | 3294 static void HandleArrayCases(MacroAssembler* masm, Register feedback, |
3310 Register receiver_map, Register scratch1, | 3295 Register receiver_map, Register scratch1, |
3311 Register scratch2, bool is_polymorphic, | 3296 Register scratch2, bool is_polymorphic, |
3312 Label* miss) { | 3297 Label* miss) { |
3313 // feedback initially contains the feedback array | 3298 // feedback initially contains the feedback array |
3314 Label next_loop, prepare_next; | 3299 Label next_loop, prepare_next; |
3315 Label start_polymorphic; | 3300 Label start_polymorphic; |
3316 | 3301 |
3317 Register cached_map = scratch1; | 3302 Register cached_map = scratch1; |
3318 | 3303 |
(...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3386 Register handler = feedback; | 3371 Register handler = feedback; |
3387 | 3372 |
3388 __ Lsa(handler, vector, slot, kPointerSizeLog2 - kSmiTagSize); | 3373 __ Lsa(handler, vector, slot, kPointerSizeLog2 - kSmiTagSize); |
3389 __ lw(handler, | 3374 __ lw(handler, |
3390 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); | 3375 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); |
3391 __ Addu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | 3376 __ Addu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); |
3392 __ Jump(t9); | 3377 __ Jump(t9); |
3393 } | 3378 } |
3394 | 3379 |
3395 | 3380 |
// Vector-based LoadIC dispatch. Inspects the feedback slot and handles, in
// order: a monomorphic (weak cell) entry, a polymorphic handler FixedArray,
// and the megamorphic sentinel (which probes the global stub cache); any
// other state falls through to the runtime miss handler.
// NOTE(review): `in_frame` is not referenced in this body — presumably kept
// for signature parity with other architecture ports; confirm.
void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // a1
  Register name = LoadWithVectorDescriptor::NameRegister();          // a2
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // a3
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // a0
  Register feedback = t0;
  Register receiver_map = t1;
  Register scratch1 = t4;

  // feedback = vector[slot]. `slot` holds a Smi, so the scale factor is
  // kPointerSizeLog2 minus the Smi tag shift.
  __ Lsa(feedback, vector, slot, kPointerSizeLog2 - kSmiTagSize);
  __ lw(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
                        scratch1, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array? If so, the slot holds a polymorphic handler list.
  __ bind(&try_array);
  __ lw(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
  __ Branch(&not_array, ne, at, Operand(scratch1));
  HandleArrayCases(masm, feedback, receiver_map, scratch1, t5, true, &miss);

  // Not an array: the only remaining cacheable state is the megamorphic
  // sentinel, which routes the lookup through the global load stub cache.
  __ bind(&not_array);
  __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
  __ Branch(&miss, ne, at, Operand(feedback));
  masm->isolate()->load_stub_cache()->GenerateProbe(
      masm, receiver, name, feedback, receiver_map, scratch1, t5);

  // Uninitialized / unrecognized feedback: defer to the runtime.
  __ bind(&miss);
  LoadIC::GenerateMiss(masm);

  // Smi receiver: presumably Smis are tracked in feedback under the heap
  // number map — NOTE(review): confirm against HandleMonomorphicCase.
  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
3436 | |
3437 | |
// Non-trampoline entry: the feedback vector is already in its register.
void KeyedLoadICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}
3441 | 3384 |
3442 | 3385 |
// Trampoline entry: emitted right after the vector load performed by
// KeyedLoadICTrampolineStub::Generate; `true` flags the trampoline context.
void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}
3446 | 3389 |
3447 | 3390 |
(...skipping 1662 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5110 kStackUnwindSpace, kInvalidStackOffset, | 5053 kStackUnwindSpace, kInvalidStackOffset, |
5111 return_value_operand, NULL); | 5054 return_value_operand, NULL); |
5112 } | 5055 } |
5113 | 5056 |
5114 #undef __ | 5057 #undef __ |
5115 | 5058 |
5116 } // namespace internal | 5059 } // namespace internal |
5117 } // namespace v8 | 5060 } // namespace v8 |
5118 | 5061 |
5119 #endif // V8_TARGET_ARCH_MIPS | 5062 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |