OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 3262 matching lines...)
3273 if (function_mode() == JS_FUNCTION_STUB_MODE) { | 3273 if (function_mode() == JS_FUNCTION_STUB_MODE) { |
3274 __ Daddu(a1, a1, Operand(1)); | 3274 __ Daddu(a1, a1, Operand(1)); |
3275 } | 3275 } |
3276 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 3276 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
3277 __ dsll(a1, a1, kPointerSizeLog2); | 3277 __ dsll(a1, a1, kPointerSizeLog2); |
3278 __ Ret(USE_DELAY_SLOT); | 3278 __ Ret(USE_DELAY_SLOT); |
3279 __ Daddu(sp, sp, a1); | 3279 __ Daddu(sp, sp, a1); |
3280 } | 3280 } |
3281 | 3281 |
3282 | 3282 |
3283 void LoadICTrampolineStub::Generate(MacroAssembler* masm) { | |
3284 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); | |
3285 LoadICStub stub(isolate()); | |
3286 stub.GenerateForTrampoline(masm); | |
3287 } | |
3288 | |
3289 | |
3290 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { | 3283 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { |
3291 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); | 3284 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); |
3292 KeyedLoadICStub stub(isolate()); | 3285 KeyedLoadICStub stub(isolate()); |
3293 stub.GenerateForTrampoline(masm); | 3286 stub.GenerateForTrampoline(masm); |
3294 } | 3287 } |
3295 | 3288 |
3296 | 3289 |
3297 void CallICTrampolineStub::Generate(MacroAssembler* masm) { | 3290 void CallICTrampolineStub::Generate(MacroAssembler* masm) { |
3298 __ EmitLoadTypeFeedbackVector(a2); | 3291 __ EmitLoadTypeFeedbackVector(a2); |
3299 CallICStub stub(isolate(), state()); | 3292 CallICStub stub(isolate(), state()); |
3300 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 3293 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
3301 } | 3294 } |
3302 | 3295 |
3303 | 3296 |
3304 void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } | |
3305 | |
3306 | |
3307 void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) { | |
3308 GenerateImpl(masm, true); | |
3309 } | |
3310 | |
3311 | |
3312 static void HandleArrayCases(MacroAssembler* masm, Register feedback, | 3297 static void HandleArrayCases(MacroAssembler* masm, Register feedback, |
3313 Register receiver_map, Register scratch1, | 3298 Register receiver_map, Register scratch1, |
3314 Register scratch2, bool is_polymorphic, | 3299 Register scratch2, bool is_polymorphic, |
3315 Label* miss) { | 3300 Label* miss) { |
3316 // feedback initially contains the feedback array | 3301 // feedback initially contains the feedback array |
3317 Label next_loop, prepare_next; | 3302 Label next_loop, prepare_next; |
3318 Label start_polymorphic; | 3303 Label start_polymorphic; |
3319 | 3304 |
3320 Register cached_map = scratch1; | 3305 Register cached_map = scratch1; |
3321 | 3306 |
(...skipping 67 matching lines...)
3389 Register handler = feedback; | 3374 Register handler = feedback; |
3390 __ SmiScale(handler, slot, kPointerSizeLog2); | 3375 __ SmiScale(handler, slot, kPointerSizeLog2); |
3391 __ Daddu(handler, vector, Operand(handler)); | 3376 __ Daddu(handler, vector, Operand(handler)); |
3392 __ ld(handler, | 3377 __ ld(handler, |
3393 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); | 3378 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); |
3394 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); | 3379 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); |
3395 __ Jump(t9); | 3380 __ Jump(t9); |
3396 } | 3381 } |
3397 | 3382 |
3398 | 3383 |
3399 void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | |
3400 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // a1 | |
3401 Register name = LoadWithVectorDescriptor::NameRegister(); // a2 | |
3402 Register vector = LoadWithVectorDescriptor::VectorRegister(); // a3 | |
3403 Register slot = LoadWithVectorDescriptor::SlotRegister(); // a0 | |
3404 Register feedback = a4; | |
3405 Register receiver_map = a5; | |
3406 Register scratch1 = a6; | |
3407 | |
3408 __ SmiScale(feedback, slot, kPointerSizeLog2); | |
3409 __ Daddu(feedback, vector, Operand(feedback)); | |
3410 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | |
3411 | |
3412 // Try to quickly handle the monomorphic case without knowing for sure | |
3413 // if we have a weak cell in feedback. We do know it's safe to look | |
3414 // at WeakCell::kValueOffset. | |
3415 Label try_array, load_smi_map, compare_map; | |
3416 Label not_array, miss; | |
3417 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | |
3418 scratch1, &compare_map, &load_smi_map, &try_array); | |
3419 | |
3420 // Is it a fixed array? | |
3421 __ bind(&try_array); | |
3422 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
3423 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); | |
3424 __ Branch(¬_array, ne, scratch1, Operand(at)); | |
3425 HandleArrayCases(masm, feedback, receiver_map, scratch1, a7, true, &miss); | |
3426 | |
3427 __ bind(¬_array); | |
3428 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | |
3429 __ Branch(&miss, ne, feedback, Operand(at)); | |
3430 masm->isolate()->load_stub_cache()->GenerateProbe( | |
3431 masm, receiver, name, feedback, receiver_map, scratch1, a7); | |
3432 | |
3433 __ bind(&miss); | |
3434 LoadIC::GenerateMiss(masm); | |
3435 | |
3436 __ bind(&load_smi_map); | |
3437 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | |
3438 __ Branch(&compare_map); | |
3439 } | |
3440 | |
3441 | |
3442 void KeyedLoadICStub::Generate(MacroAssembler* masm) { | 3384 void KeyedLoadICStub::Generate(MacroAssembler* masm) { |
3443 GenerateImpl(masm, false); | 3385 GenerateImpl(masm, false); |
3444 } | 3386 } |
3445 | 3387 |
3446 | 3388 |
3447 void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) { | 3389 void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) { |
3448 GenerateImpl(masm, true); | 3390 GenerateImpl(masm, true); |
3449 } | 3391 } |
3450 | 3392 |
3451 | 3393 |
(...skipping 1685 matching lines...)
5137 kStackUnwindSpace, kInvalidStackOffset, | 5079 kStackUnwindSpace, kInvalidStackOffset, |
5138 return_value_operand, NULL); | 5080 return_value_operand, NULL); |
5139 } | 5081 } |
5140 | 5082 |
5141 #undef __ | 5083 #undef __ |
5142 | 5084 |
5143 } // namespace internal | 5085 } // namespace internal |
5144 } // namespace v8 | 5086 } // namespace v8 |
5145 | 5087 |
5146 #endif // V8_TARGET_ARCH_MIPS64 | 5088 #endif // V8_TARGET_ARCH_MIPS64 |