OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 3184 matching lines...)
3195 __ add(sp, sp, r4); | 3195 __ add(sp, sp, r4); |
3196 __ Ret(); | 3196 __ Ret(); |
3197 } | 3197 } |
3198 | 3198 |
3199 void CallICTrampolineStub::Generate(MacroAssembler* masm) { | 3199 void CallICTrampolineStub::Generate(MacroAssembler* masm) { |
3200 __ EmitLoadTypeFeedbackVector(r5); | 3200 __ EmitLoadTypeFeedbackVector(r5); |
3201 CallICStub stub(isolate(), state()); | 3201 CallICStub stub(isolate(), state()); |
3202 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 3202 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
3203 } | 3203 } |
3204 | 3204 |
3205 | |
3206 static void HandleArrayCases(MacroAssembler* masm, Register feedback, | |
3207 Register receiver_map, Register scratch1, | |
3208 Register scratch2, bool is_polymorphic, | |
3209 Label* miss) { | |
3210 // feedback initially contains the feedback array | |
3211 Label next_loop, prepare_next; | |
3212 Label start_polymorphic; | |
3213 | |
3214 Register cached_map = scratch1; | |
3215 | |
3216 __ LoadP(cached_map, | |
3217 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0))); | |
3218 __ LoadP(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | |
3219 __ cmp(receiver_map, cached_map); | |
3220 __ bne(&start_polymorphic); | |
3221 // found, now call handler. | |
3222 Register handler = feedback; | |
3223 __ LoadP(handler, | |
3224 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1))); | |
3225 __ addi(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
3226 __ Jump(ip); | |
3227 | |
3228 | |
3229 Register length = scratch2; | |
3230 __ bind(&start_polymorphic); | |
3231 __ LoadP(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); | |
3232 if (!is_polymorphic) { | |
3233 // If the IC could be monomorphic we have to make sure we don't go past the | |
3234 // end of the feedback array. | |
3235 __ CmpSmiLiteral(length, Smi::FromInt(2), r0); | |
3236 __ beq(miss); | |
3237 } | |
3238 | |
3239 Register too_far = length; | |
3240 Register pointer_reg = feedback; | |
3241 | |
3242 // +-----+------+------+-----+-----+ ... ----+ | |
3243 // | map | len | wm0 | h0 | wm1 | hN | | |
3244 // +-----+------+------+-----+-----+ ... ----+ | |
3245 // 0 1 2 len-1 | |
3246 // ^ ^ | |
3247 // | | | |
3248 // pointer_reg too_far | |
3249 // aka feedback scratch2 | |
3250 // also need receiver_map | |
3251 // use cached_map (scratch1) to look in the weak map values. | |
3252 __ SmiToPtrArrayOffset(r0, length); | |
3253 __ add(too_far, feedback, r0); | |
3254 __ addi(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
3255 __ addi(pointer_reg, feedback, | |
3256 Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag)); | |
3257 | |
3258 __ bind(&next_loop); | |
3259 __ LoadP(cached_map, MemOperand(pointer_reg)); | |
3260 __ LoadP(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | |
3261 __ cmp(receiver_map, cached_map); | |
3262 __ bne(&prepare_next); | |
3263 __ LoadP(handler, MemOperand(pointer_reg, kPointerSize)); | |
3264 __ addi(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
3265 __ Jump(ip); | |
3266 | |
3267 __ bind(&prepare_next); | |
3268 __ addi(pointer_reg, pointer_reg, Operand(kPointerSize * 2)); | |
3269 __ cmp(pointer_reg, too_far); | |
3270 __ blt(&next_loop); | |
3271 | |
3272 // We exhausted our array of map handler pairs. | |
3273 __ b(miss); | |
3274 } | |
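
The HandleArrayCases helper above walks a polymorphic feedback array laid out as weak-cell/handler pairs (wm0, h0, wm1, h1, ...), with a fast check of the first pair before the start_polymorphic loop. The following is a minimal C++ sketch of that walk for orientation only; the types and the FindHandler name are illustrative stand-ins, not the V8 classes the stub operates on.

    #include <cstddef>
    #include <vector>

    // Illustrative stand-ins; not the V8 types used by the stub.
    struct WeakCell { const void* value; };
    struct MapHandlerPair { WeakCell map; const void* handler; };

    // Returns the handler for receiver_map, or nullptr for the "miss" label.
    inline const void* FindHandler(const std::vector<MapHandlerPair>& feedback,
                                   const void* receiver_map, bool is_polymorphic) {
      // Fast check of pair 0 (elements 0 and 1 of the feedback array).
      if (!feedback.empty() && feedback[0].map.value == receiver_map) {
        return feedback[0].handler;
      }
      // start_polymorphic: if the IC could still be monomorphic, a single pair
      // means we are already past the end (the CmpSmiLiteral(length, 2) guard).
      if (!is_polymorphic && feedback.size() == 1) {
        return nullptr;
      }
      // next_loop: scan the remaining pairs (pointer_reg starts at element 2,
      // too_far marks the end of the array), two elements at a time.
      for (size_t i = 1; i < feedback.size(); ++i) {
        if (feedback[i].map.value == receiver_map) return feedback[i].handler;
      }
      return nullptr;  // exhausted the map/handler pairs: branch to miss.
    }
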
3275 | |
3276 | |
3277 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver, | |
3278 Register receiver_map, Register feedback, | |
3279 Register vector, Register slot, | |
3280 Register scratch, Label* compare_map, | |
3281 Label* load_smi_map, Label* try_array) { | |
3282 __ JumpIfSmi(receiver, load_smi_map); | |
3283 __ LoadP(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
3284 __ bind(compare_map); | |
3285 Register cached_map = scratch; | |
3286 // Move the weak map into the weak_cell register. | |
3287 __ LoadP(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset)); | |
3288 __ cmp(cached_map, receiver_map); | |
3289 __ bne(try_array); | |
3290 Register handler = feedback; | |
3291 __ SmiToPtrArrayOffset(r0, slot); | |
3292 __ add(handler, vector, r0); | |
3293 __ LoadP(handler, | |
3294 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); | |
3295 __ addi(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
3296 __ Jump(ip); | |
3297 } | |
3298 | |
3299 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { | |
3300 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); | |
3301 KeyedStoreICStub stub(isolate(), state()); | |
3302 stub.GenerateForTrampoline(masm); | |
3303 } | |
3304 | |
3305 void KeyedStoreICStub::Generate(MacroAssembler* masm) { | |
3306 GenerateImpl(masm, false); | |
3307 } | |
3308 | |
3309 void KeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { | |
3310 GenerateImpl(masm, true); | |
3311 } | |
3312 | |
3313 | |
3314 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback, | |
3315 Register receiver_map, Register scratch1, | |
3316 Register scratch2, Label* miss) { | |
3317 // feedback initially contains the feedback array | |
3318 Label next_loop, prepare_next; | |
3319 Label start_polymorphic; | |
3320 Label transition_call; | |
3321 | |
3322 Register cached_map = scratch1; | |
3323 Register too_far = scratch2; | |
3324 Register pointer_reg = feedback; | |
3325 __ LoadP(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset)); | |
3326 | |
3327 // +-----+------+------+-----+-----+-----+ ... ----+ | |
3328 // | map | len | wm0 | wt0 | h0 | wm1 | hN | | |
3329 // +-----+------+------+-----+-----+ ----+ ... ----+ | |
3330 // 0 1 2 len-1 | |
3331 // ^ ^ | |
3332 // | | | |
3333 // pointer_reg too_far | |
3334 // aka feedback scratch2 | |
3335 // also need receiver_map | |
3336 // use cached_map (scratch1) to look in the weak map values. | |
3337 __ SmiToPtrArrayOffset(r0, too_far); | |
3338 __ add(too_far, feedback, r0); | |
3339 __ addi(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
3340 __ addi(pointer_reg, feedback, | |
3341 Operand(FixedArray::OffsetOfElementAt(0) - kHeapObjectTag)); | |
3342 | |
3343 __ bind(&next_loop); | |
3344 __ LoadP(cached_map, MemOperand(pointer_reg)); | |
3345 __ LoadP(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); | |
3346 __ cmp(receiver_map, cached_map); | |
3347 __ bne(&prepare_next); | |
3348 // Is it a transitioning store? | |
3349 __ LoadP(too_far, MemOperand(pointer_reg, kPointerSize)); | |
3350 __ CompareRoot(too_far, Heap::kUndefinedValueRootIndex); | |
3351 __ bne(&transition_call); | |
3352 __ LoadP(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2)); | |
3353 __ addi(ip, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
3354 __ Jump(ip); | |
3355 | |
3356 __ bind(&transition_call); | |
3357 __ LoadP(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset)); | |
3358 __ JumpIfSmi(too_far, miss); | |
3359 | |
3360 __ LoadP(receiver_map, MemOperand(pointer_reg, kPointerSize * 2)); | |
3361 | |
3362 // Load the map into the correct register. | |
3363 DCHECK(feedback.is(StoreTransitionDescriptor::MapRegister())); | |
3364 __ mr(feedback, too_far); | |
3365 | |
3366 __ addi(ip, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
3367 __ Jump(ip); | |
3368 | |
3369 __ bind(&prepare_next); | |
3370 __ addi(pointer_reg, pointer_reg, Operand(kPointerSize * 3)); | |
3371 __ cmpl(pointer_reg, too_far); | |
3372 __ blt(&next_loop); | |
3373 | |
3374 // We exhausted our array of map handler pairs. | |
3375 __ b(miss); | |
3376 } | |
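
HandlePolymorphicStoreCase above walks triples of (weak map, transition-map weak cell or undefined, handler): an undefined middle element means a plain store handler, anything else is a transitioning store whose target map is handed to the handler in the map register. A rough C++ sketch of that walk follows, again with stand-in types and names rather than V8's own.

    #include <vector>

    // Illustrative stand-ins; not the V8 types used by the stub.
    struct StoreEntry {
      const void* weak_map;     // map the receiver must currently have
      const void* transition;   // nullptr plays the role of undefined (no transition)
      const void* handler;      // code object to tail-call
    };

    struct StoreDispatch {
      const void* handler;      // nullptr => miss
      const void* new_map;      // transition target, if this is a transitioning store
    };

    inline StoreDispatch FindStoreHandler(const std::vector<StoreEntry>& feedback,
                                          const void* receiver_map) {
      // next_loop: pointer_reg advances three elements per iteration up to too_far.
      for (const StoreEntry& entry : feedback) {
        if (entry.weak_map != receiver_map) continue;   // prepare_next
        if (entry.transition == nullptr) {
          return {entry.handler, nullptr};              // plain (non-transitioning) store
        }
        // transition_call: in the generated code a cleared transition weak cell
        // (a Smi after the load) branches to miss instead.
        return {entry.handler, entry.transition};
      }
      return {nullptr, nullptr};                        // exhausted the triples: miss.
    }
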
3377 | |
3378 void KeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | |
3379 Register receiver = StoreWithVectorDescriptor::ReceiverRegister(); // r4 | |
3380 Register key = StoreWithVectorDescriptor::NameRegister(); // r5 | |
3381 Register vector = StoreWithVectorDescriptor::VectorRegister(); // r6 | |
3382 Register slot = StoreWithVectorDescriptor::SlotRegister(); // r7 | |
3383 DCHECK(StoreWithVectorDescriptor::ValueRegister().is(r3)); // r3 | |
3384 Register feedback = r8; | |
3385 Register receiver_map = r9; | |
3386 Register scratch1 = r10; | |
3387 | |
3388 __ SmiToPtrArrayOffset(r0, slot); | |
3389 __ add(feedback, vector, r0); | |
3390 __ LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | |
3391 | |
3392 // Try to quickly handle the monomorphic case without knowing for sure | |
3393 // if we have a weak cell in feedback. We do know it's safe to look | |
3394 // at WeakCell::kValueOffset. | |
3395 Label try_array, load_smi_map, compare_map; | |
3396 Label not_array, miss; | |
3397 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | |
3398 scratch1, &compare_map, &load_smi_map, &try_array); | |
3399 | |
3400 __ bind(&try_array); | |
3401 // Is it a fixed array? | |
3402 __ LoadP(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
3403 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex); | |
3404 __ bne(¬_array); | |
3405 | |
3406 // We have a polymorphic element handler. | |
3407 Label polymorphic, try_poly_name; | |
3408 __ bind(&polymorphic); | |
3409 | |
3410 Register scratch2 = r11; | |
3411 | |
3412 HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, scratch2, | |
3413 &miss); | |
3414 | |
3415 __ bind(¬_array); | |
3416 // Is it generic? | |
3417 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); | |
3418 __ bne(&try_poly_name); | |
3419 Handle<Code> megamorphic_stub = | |
3420 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); | |
3421 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); | |
3422 | |
3423 __ bind(&try_poly_name); | |
3424 // We might have a name in feedback, and a fixed array in the next slot. | |
3425 __ cmp(key, feedback); | |
3426 __ bne(&miss); | |
3427 // If the name comparison succeeded, we know we have a fixed array with | |
3428 // at least one map/handler pair. | |
3429 __ SmiToPtrArrayOffset(r0, slot); | |
3430 __ add(feedback, vector, r0); | |
3431 __ LoadP(feedback, | |
3432 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); | |
3433 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, false, | |
3434 &miss); | |
3435 | |
3436 __ bind(&miss); | |
3437 KeyedStoreIC::GenerateMiss(masm); | |
3438 | |
3439 __ bind(&load_smi_map); | |
3440 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | |
3441 __ b(&compare_map); | |
3442 } | |
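
KeyedStoreICStub::GenerateImpl above probes the feedback slot in a fixed order: monomorphic weak cell, then a fixed array of store triples, then the megamorphic sentinel, then a cached property name whose map/handler array lives in the following slot, and finally the runtime miss. A compact C++ sketch of that probe order, with illustrative stand-in types only:

    // Illustrative stand-ins; not the V8 types used by the stub.
    struct FeedbackSlot {
      const void* weak_map;     // set if the slot holds a weak cell (monomorphic)
      bool is_fixed_array;      // slot holds an array of store triples (polymorphic)
      bool is_megamorphic;      // slot holds the megamorphic sentinel symbol
      const void* cached_name;  // slot holds a property name (keyed poly-name case)
    };

    enum class Probe { kMonomorphic, kPolymorphic, kMegamorphic, kPolyName, kMiss };

    inline Probe ProbeKeyedStore(const FeedbackSlot& slot, const void* receiver_map,
                                 const void* key) {
      if (slot.weak_map == receiver_map) return Probe::kMonomorphic;  // HandleMonomorphicCase
      if (slot.is_fixed_array) return Probe::kPolymorphic;            // HandlePolymorphicStoreCase
      if (slot.is_megamorphic) return Probe::kMegamorphic;            // megamorphic stub
      if (slot.cached_name == key) return Probe::kPolyName;           // HandleArrayCases on slot+1
      return Probe::kMiss;                                            // KeyedStoreIC::GenerateMiss
    }

The load_smi_map tail at the end of GenerateImpl loads the heap number map for Smi receivers and rejoins at compare_map, so the same map comparisons cover Smis as well.
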
3443 | |
3444 | |
3445 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 3205 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
3446 if (masm->isolate()->function_entry_hook() != NULL) { | 3206 if (masm->isolate()->function_entry_hook() != NULL) { |
3447 PredictableCodeSizeScope predictable(masm, | 3207 PredictableCodeSizeScope predictable(masm, |
3448 #if V8_TARGET_ARCH_PPC64 | 3208 #if V8_TARGET_ARCH_PPC64 |
3449 14 * Assembler::kInstrSize); | 3209 14 * Assembler::kInstrSize); |
3450 #else | 3210 #else |
3451 11 * Assembler::kInstrSize); | 3211 11 * Assembler::kInstrSize); |
3452 #endif | 3212 #endif |
3453 ProfileEntryHookStub stub(masm->isolate()); | 3213 ProfileEntryHookStub stub(masm->isolate()); |
3454 __ mflr(r0); | 3214 __ mflr(r0); |
(...skipping 1370 matching lines...)
4825 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); | 4585 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); |
4826 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 4586 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, |
4827 kStackUnwindSpace, NULL, return_value_operand, NULL); | 4587 kStackUnwindSpace, NULL, return_value_operand, NULL); |
4828 } | 4588 } |
4829 | 4589 |
4830 #undef __ | 4590 #undef __ |
4831 } // namespace internal | 4591 } // namespace internal |
4832 } // namespace v8 | 4592 } // namespace v8 |
4833 | 4593 |
4834 #endif // V8_TARGET_ARCH_PPC | 4594 #endif // V8_TARGET_ARCH_PPC |