OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 3251 matching lines...)
3262 __ Ret(USE_DELAY_SLOT); | 3262 __ Ret(USE_DELAY_SLOT); |
3263 __ Daddu(sp, sp, a1); | 3263 __ Daddu(sp, sp, a1); |
3264 } | 3264 } |
3265 | 3265 |
3266 void CallICTrampolineStub::Generate(MacroAssembler* masm) { | 3266 void CallICTrampolineStub::Generate(MacroAssembler* masm) { |
3267 __ EmitLoadTypeFeedbackVector(a2); | 3267 __ EmitLoadTypeFeedbackVector(a2); |
3268 CallICStub stub(isolate(), state()); | 3268 CallICStub stub(isolate(), state()); |
3269 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 3269 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
3270 } | 3270 } |
3271 | 3271 |
3272 | |
3273 static void HandleArrayCases(MacroAssembler* masm, Register feedback, | |
3274 Register receiver_map, Register scratch1, | |
3275 Register scratch2, bool is_polymorphic, | |
3276 Label* miss) { | |
3277 // feedback initially contains the feedback array | |
3278 Label next_loop, prepare_next; | |
3279 Label start_polymorphic; | |
3280 | |
3281 Register cached_map = scratch1; | |
3282 | |
3283 __ ld(cached_map, | |
3284 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0))); | |
3285 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | |
3286 __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map)); | |
3287 // found, now call handler. | |
3288 Register handler = feedback; | |
3289 __ ld(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1))); | |
3290 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
3291 __ Jump(t9); | |
3292 | |
3293 Register length = scratch2; | |
3294 __ bind(&start_polymorphic); | |
3295 __ ld(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); | |
3296 if (!is_polymorphic) { | |
3297 // If the IC could be monomorphic we have to make sure we don't go past the | |
3298 // end of the feedback array. | |
3299 __ Branch(miss, eq, length, Operand(Smi::FromInt(2))); | |
3300 } | |
3301 | |
3302 Register too_far = length; | |
3303 Register pointer_reg = feedback; | |
3304 | |
3305 // +-----+------+------+-----+-----+ ... ----+ | |
3306 // | map | len | wm0 | h0 | wm1 | hN | | |
3307 // +-----+------+------+-----+-----+ ... ----+ | |
3308 // 0 1 2 len-1 | |
3309 // ^ ^ | |
3310 // | | | |
3311 // pointer_reg too_far | |
3312 // aka feedback scratch2 | |
3313 // also need receiver_map | |
3314 // use cached_map (scratch1) to look in the weak map values. | |
3315 __ SmiScale(too_far, length, kPointerSizeLog2); | |
3316 __ Daddu(too_far, feedback, Operand(too_far)); | |
3317 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
3318 __ Daddu(pointer_reg, feedback, | |
3319 Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag)); | |
3320 | |
3321 __ bind(&next_loop); | |
3322 __ ld(cached_map, MemOperand(pointer_reg)); | |
3323 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | |
3324 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); | |
3325 __ ld(handler, MemOperand(pointer_reg, kPointerSize)); | |
3326 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
3327 __ Jump(t9); | |
3328 | |
3329 __ bind(&prepare_next); | |
3330 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 2)); | |
3331 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); | |
3332 | |
3333 // We exhausted our array of map handler pairs. | |
3334 __ Branch(miss); | |
3335 } | |
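For orientation, the map/handler scan that HandleArrayCases emits above can be expressed in plain C++ roughly as below. This is a minimal illustrative sketch under assumed types: Map, Handler and FeedbackEntry are hypothetical stand-ins for this example, not V8 declarations.

  #include <cstddef>

  struct Map;      // stand-in for a v8 Map
  struct Handler;  // stand-in for a handler Code object

  // One (weak map, handler) pair from the feedback array; pairs start at
  // element 2, after the generic map and length words shown in the diagram.
  struct FeedbackEntry {
    Map* weak_map;  // nullptr once the weak cell has been cleared
    Handler* handler;
  };

  // Returns the handler whose cached map matches receiver_map, or nullptr
  // (the "miss" path) when the pairs are exhausted.
  Handler* ScanFeedback(FeedbackEntry* entries, std::size_t count,
                        Map* receiver_map) {
    for (std::size_t i = 0; i < count; i++) {
      if (entries[i].weak_map == receiver_map) return entries[i].handler;
    }
    return nullptr;  // miss: fall back to the runtime
  }

The generated assembly performs the same walk, but with pointer_reg/too_far instead of an index, and a cleared weak cell simply fails the map comparison and is skipped.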
3336 | |
3337 | |
3338 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver, | |
3339 Register receiver_map, Register feedback, | |
3340 Register vector, Register slot, | |
3341 Register scratch, Label* compare_map, | |
3342 Label* load_smi_map, Label* try_array) { | |
3343 __ JumpIfSmi(receiver, load_smi_map); | |
3344 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
3345 __ bind(compare_map); | |
3346 Register cached_map = scratch; | |
3347 // Move the weak map into the weak_cell register. | |
3348 __ ld(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset)); | |
3349 __ Branch(try_array, ne, cached_map, Operand(receiver_map)); | |
3350 Register handler = feedback; | |
3351 __ SmiScale(handler, slot, kPointerSizeLog2); | |
3352 __ Daddu(handler, vector, Operand(handler)); | |
3353 __ ld(handler, | |
3354 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); | |
3355 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); | |
3356 __ Jump(t9); | |
3357 } | |
3358 | |
3359 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { | |
3360 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); | |
3361 KeyedStoreICStub stub(isolate(), state()); | |
3362 stub.GenerateForTrampoline(masm); | |
3363 } | |
3364 | |
3365 void KeyedStoreICStub::Generate(MacroAssembler* masm) { | |
3366 GenerateImpl(masm, false); | |
3367 } | |
3368 | |
3369 void KeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | |
3370 GenerateImpl(masm, true); | |
3371 } | |
3372 | |
3373 | |
3374 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback, | |
3375 Register receiver_map, Register scratch1, | |
3376 Register scratch2, Label* miss) { | |
3377 // feedback initially contains the feedback array | |
3378 Label next_loop, prepare_next; | |
3379 Label start_polymorphic; | |
3380 Label transition_call; | |
3381 | |
3382 Register cached_map = scratch1; | |
3383 Register too_far = scratch2; | |
3384 Register pointer_reg = feedback; | |
3385 | |
3386 __ ld(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset)); | |
3387 | |
3388 // +-----+------+------+-----+-----+-----+ ... ----+ | |
3389 // | map | len | wm0 | wt0 | h0 | wm1 | hN | | |
3390 // +-----+------+------+-----+-----+ ----+ ... ----+ | |
3391 // 0 1 2 len-1 | |
3392 // ^ ^ | |
3393 // | | | |
3394 // pointer_reg too_far | |
3395 // aka feedback scratch2 | |
3396 // also need receiver_map | |
3397 // use cached_map (scratch1) to look in the weak map values. | |
3398 __ SmiScale(too_far, too_far, kPointerSizeLog2); | |
3399 __ Daddu(too_far, feedback, Operand(too_far)); | |
3400 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
3401 __ Daddu(pointer_reg, feedback, | |
3402 Operand(FixedArray::OffsetOfElementAt(0) - kHeapObjectTag)); | |
3403 | |
3404 __ bind(&next_loop); | |
3405 __ ld(cached_map, MemOperand(pointer_reg)); | |
3406 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | |
3407 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); | |
3408 // Is it a transitioning store? | |
3409 __ ld(too_far, MemOperand(pointer_reg, kPointerSize)); | |
3410 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | |
3411 __ Branch(&transition_call, ne, too_far, Operand(at)); | |
3412 | |
3413 __ ld(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2)); | |
3414 __ Daddu(t9, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
3415 __ Jump(t9); | |
3416 | |
3417 __ bind(&transition_call); | |
3418 __ ld(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset)); | |
3419 __ JumpIfSmi(too_far, miss); | |
3420 | |
3421 __ ld(receiver_map, MemOperand(pointer_reg, kPointerSize * 2)); | |
3422 // Load the map into the correct register. | |
3423 DCHECK(feedback.is(StoreTransitionDescriptor::MapRegister())); | |
3424 __ Move(feedback, too_far); | |
3425 __ Daddu(t9, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
3426 __ Jump(t9); | |
3427 | |
3428 __ bind(&prepare_next); | |
3429 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 3)); | |
3430 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); | |
3431 | |
3432 // We exhausted our array of map handler pairs. | |
3433 __ Branch(miss); | |
3434 } | |
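The store variant above scans triples rather than pairs; here is a sketch under the same illustrative assumptions (StoreFeedbackEntry and the nullptr-for-undefined convention are inventions for this example, not V8 API).

  #include <cstddef>

  struct Map;
  struct Handler;

  // One (weak map, weak transition map or undefined, handler) triple.
  struct StoreFeedbackEntry {
    Map* weak_map;        // nullptr once the weak cell has been cleared
    Map* transition_map;  // nullptr models the "undefined" (non-transitioning) entry
    Handler* handler;
  };

  // Returns the matching handler, reporting any transition map through
  // out_transition, or nullptr on a miss.
  Handler* ScanStoreFeedback(StoreFeedbackEntry* entries, std::size_t count,
                             Map* receiver_map, Map** out_transition) {
    for (std::size_t i = 0; i < count; i++) {
      if (entries[i].weak_map != receiver_map) continue;
      *out_transition = entries[i].transition_map;  // may be nullptr
      return entries[i].handler;
    }
    return nullptr;  // miss: fall back to the runtime
  }

In the generated code, when a transition map is present it is moved into the StoreTransitionDescriptor map register before jumping to the handler, which is what the transition_call path above does.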
3435 | |
3436 void KeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | |
3437 Register receiver = StoreWithVectorDescriptor::ReceiverRegister(); // a1 | |
3438 Register key = StoreWithVectorDescriptor::NameRegister(); // a2 | |
3439 Register vector = StoreWithVectorDescriptor::VectorRegister(); // a3 | |
3440 Register slot = StoreWithVectorDescriptor::SlotRegister(); // a4 | |
3441 DCHECK(StoreWithVectorDescriptor::ValueRegister().is(a0)); // a0 | |
3442 Register feedback = a5; | |
3443 Register receiver_map = a6; | |
3444 Register scratch1 = a7; | |
3445 | |
3446 __ SmiScale(scratch1, slot, kPointerSizeLog2); | |
3447 __ Daddu(feedback, vector, Operand(scratch1)); | |
3448 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | |
3449 | |
3450 // Try to quickly handle the monomorphic case without knowing for sure | |
3451 // if we have a weak cell in feedback. We do know it's safe to look | |
3452 // at WeakCell::kValueOffset. | |
3453 Label try_array, load_smi_map, compare_map; | |
3454 Label not_array, miss; | |
3455 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | |
3456 scratch1, &compare_map, &load_smi_map, &try_array); | |
3457 | |
3458 __ bind(&try_array); | |
3459 // Is it a fixed array? | |
3460 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
3461 __ Branch(¬_array, ne, scratch1, Heap::kFixedArrayMapRootIndex); | |
3462 | |
3463 // We have a polymorphic element handler. | |
3464 Label try_poly_name; | |
3465 | |
3466 Register scratch2 = t0; | |
3467 | |
3468 HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, scratch2, | |
3469 &miss); | |
3470 | |
3471 __ bind(¬_array); | |
3472 // Is it generic? | |
3473 __ Branch(&try_poly_name, ne, feedback, Heap::kmegamorphic_symbolRootIndex); | |
3474 Handle<Code> megamorphic_stub = | |
3475 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); | |
3476 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); | |
3477 | |
3478 __ bind(&try_poly_name); | |
3479 // We might have a name in feedback, and a fixed array in the next slot. | |
3480 __ Branch(&miss, ne, key, Operand(feedback)); | |
3481 // If the name comparison succeeded, we know we have a fixed array with | |
3482 // at least one map/handler pair. | |
3483 __ SmiScale(scratch1, slot, kPointerSizeLog2); | |
3484 __ Daddu(feedback, vector, Operand(scratch1)); | |
3485 __ ld(feedback, | |
3486 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); | |
3487 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, false, | |
3488 &miss); | |
3489 | |
3490 __ bind(&miss); | |
3491 KeyedStoreIC::GenerateMiss(masm); | |
3492 | |
3493 __ bind(&load_smi_map); | |
3494 __ Branch(USE_DELAY_SLOT, &compare_map); | |
3495 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot. | |
3496 } | |
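Taken together, GenerateImpl tries the possible states of the feedback slot in a fixed order. A hedged summary of that order in plain C++ (the enum and Classify helper below are illustrative only, not part of V8):

  // Order of checks emitted by KeyedStoreICStub::GenerateImpl for one slot.
  enum class StoreDispatch {
    kMonomorphicHandler,    // slot holds a WeakCell: one map compare, then jump to handler
    kPolymorphicScan,       // slot holds a FixedArray: scan (map, transition, handler) triples
    kMegamorphicStub,       // slot holds the megamorphic sentinel: tail-call the generic stub
    kNamedPolymorphicScan,  // slot holds the key name: scan pairs stored in the next slot
    kMiss                   // nothing matched: KeyedStoreIC::GenerateMiss
  };

  StoreDispatch Classify(bool matches_cached_map, bool is_fixed_array,
                         bool is_megamorphic_symbol, bool matches_key_name) {
    if (matches_cached_map) return StoreDispatch::kMonomorphicHandler;
    if (is_fixed_array) return StoreDispatch::kPolymorphicScan;
    if (is_megamorphic_symbol) return StoreDispatch::kMegamorphicStub;
    if (matches_key_name) return StoreDispatch::kNamedPolymorphicScan;
    return StoreDispatch::kMiss;
  }

Note that, as the comment at the top of GenerateImpl says, the monomorphic check is performed speculatively: the code compares against WeakCell::kValueOffset without first proving the slot holds a weak cell, which is safe because that offset is valid to read for the other slot contents as well.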
3497 | |
3498 | |
3499 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 3272 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
3500 if (masm->isolate()->function_entry_hook() != NULL) { | 3273 if (masm->isolate()->function_entry_hook() != NULL) { |
3501 ProfileEntryHookStub stub(masm->isolate()); | 3274 ProfileEntryHookStub stub(masm->isolate()); |
3502 __ push(ra); | 3275 __ push(ra); |
3503 __ CallStub(&stub); | 3276 __ CallStub(&stub); |
3504 __ pop(ra); | 3277 __ pop(ra); |
3505 } | 3278 } |
3506 } | 3279 } |
3507 | 3280 |
3508 | 3281 |
(...skipping 1282 matching lines...)
4791 kStackUnwindSpace, kInvalidStackOffset, | 4564 kStackUnwindSpace, kInvalidStackOffset, |
4792 return_value_operand, NULL); | 4565 return_value_operand, NULL); |
4793 } | 4566 } |
4794 | 4567 |
4795 #undef __ | 4568 #undef __ |
4796 | 4569 |
4797 } // namespace internal | 4570 } // namespace internal |
4798 } // namespace v8 | 4571 } // namespace v8 |
4799 | 4572 |
4800 #endif // V8_TARGET_ARCH_MIPS64 | 4573 #endif // V8_TARGET_ARCH_MIPS64 |