OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 3178 matching lines...) |
3189 __ LoadP(r4, MemOperand(fp, parameter_count_offset)); | 3189 __ LoadP(r4, MemOperand(fp, parameter_count_offset)); |
3190 if (function_mode() == JS_FUNCTION_STUB_MODE) { | 3190 if (function_mode() == JS_FUNCTION_STUB_MODE) { |
3191 __ addi(r4, r4, Operand(1)); | 3191 __ addi(r4, r4, Operand(1)); |
3192 } | 3192 } |
3193 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 3193 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
3194 __ slwi(r4, r4, Operand(kPointerSizeLog2)); | 3194 __ slwi(r4, r4, Operand(kPointerSizeLog2)); |
3195 __ add(sp, sp, r4); | 3195 __ add(sp, sp, r4); |
3196 __ Ret(); | 3196 __ Ret(); |
3197 } | 3197 } |
3198 | 3198 |
3199 | |
3200 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { | |
3201 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); | |
3202 KeyedLoadICStub stub(isolate()); | |
3203 stub.GenerateForTrampoline(masm); | |
3204 } | |
3205 | |
3206 | |
3207 void CallICTrampolineStub::Generate(MacroAssembler* masm) { | 3199 void CallICTrampolineStub::Generate(MacroAssembler* masm) { |
3208 __ EmitLoadTypeFeedbackVector(r5); | 3200 __ EmitLoadTypeFeedbackVector(r5); |
3209 CallICStub stub(isolate(), state()); | 3201 CallICStub stub(isolate(), state()); |
3210 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 3202 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
3211 } | 3203 } |
3212 | 3204 |
3213 | 3205 |
3214 static void HandleArrayCases(MacroAssembler* masm, Register feedback, | 3206 static void HandleArrayCases(MacroAssembler* masm, Register feedback, |
3215 Register receiver_map, Register scratch1, | 3207 Register receiver_map, Register scratch1, |
3216 Register scratch2, bool is_polymorphic, | 3208 Register scratch2, bool is_polymorphic, |
(...skipping 80 matching lines...) |
3297 __ bne(try_array); | 3289 __ bne(try_array); |
3298 Register handler = feedback; | 3290 Register handler = feedback; |
3299 __ SmiToPtrArrayOffset(r0, slot); | 3291 __ SmiToPtrArrayOffset(r0, slot); |
3300 __ add(handler, vector, r0); | 3292 __ add(handler, vector, r0); |
3301 __ LoadP(handler, | 3293 __ LoadP(handler, |
3302 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); | 3294 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); |
3303 __ addi(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | 3295 __ addi(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); |
3304 __ Jump(ip); | 3296 __ Jump(ip); |
3305 } | 3297 } |
3306 | 3298 |
3307 | |
3308 void KeyedLoadICStub::Generate(MacroAssembler* masm) { | |
3309 GenerateImpl(masm, false); | |
3310 } | |
3311 | |
3312 | |
3313 void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) { | |
3314 GenerateImpl(masm, true); | |
3315 } | |
3316 | |
3317 | |
3318 void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | |
3319 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // r4 | |
3320 Register key = LoadWithVectorDescriptor::NameRegister(); // r5 | |
3321 Register vector = LoadWithVectorDescriptor::VectorRegister(); // r6 | |
3322 Register slot = LoadWithVectorDescriptor::SlotRegister(); // r3 | |
3323 Register feedback = r7; | |
3324 Register receiver_map = r8; | |
3325 Register scratch1 = r9; | |
3326 | |
3327 __ SmiToPtrArrayOffset(r0, slot); | |
3328 __ add(feedback, vector, r0); | |
3329 __ LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | |
3330 | |
3331 // Try to quickly handle the monomorphic case without knowing for sure | |
3332 // if we have a weak cell in feedback. We do know it's safe to look | |
3333 // at WeakCell::kValueOffset. | |
3334 Label try_array, load_smi_map, compare_map; | |
3335 Label not_array, miss; | |
3336 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | |
3337 scratch1, &compare_map, &load_smi_map, &try_array); | |
3338 | |
3339 __ bind(&try_array); | |
3340 // Is it a fixed array? | |
3341 __ LoadP(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
3342 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex); | |
3343 __ bne(¬_array); | |
3344 | |
3345 // We have a polymorphic element handler. | |
3346 Label polymorphic, try_poly_name; | |
3347 __ bind(&polymorphic); | |
3348 HandleArrayCases(masm, feedback, receiver_map, scratch1, r10, true, &miss); | |
3349 | |
3350 __ bind(¬_array); | |
3351 // Is it generic? | |
3352 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); | |
3353 __ bne(&try_poly_name); | |
3354 Handle<Code> megamorphic_stub = | |
3355 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); | |
3356 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); | |
3357 | |
3358 __ bind(&try_poly_name); | |
3359 // We might have a name in feedback, and a fixed array in the next slot. | |
3360 __ cmp(key, feedback); | |
3361 __ bne(&miss); | |
3362 // If the name comparison succeeded, we know we have a fixed array with | |
3363 // at least one map/handler pair. | |
3364 __ SmiToPtrArrayOffset(r0, slot); | |
3365 __ add(feedback, vector, r0); | |
3366 __ LoadP(feedback, | |
3367 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize)); | |
3368 HandleArrayCases(masm, feedback, receiver_map, scratch1, r10, false, &miss); | |
3369 | |
3370 __ bind(&miss); | |
3371 KeyedLoadIC::GenerateMiss(masm); | |
3372 | |
3373 __ bind(&load_smi_map); | |
3374 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | |
3375 __ b(&compare_map); | |
3376 } | |
3377 | |
3378 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { | 3299 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { |
3379 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); | 3300 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); |
3380 KeyedStoreICStub stub(isolate(), state()); | 3301 KeyedStoreICStub stub(isolate(), state()); |
3381 stub.GenerateForTrampoline(masm); | 3302 stub.GenerateForTrampoline(masm); |
3382 } | 3303 } |
3383 | 3304 |
3384 void KeyedStoreICStub::Generate(MacroAssembler* masm) { | 3305 void KeyedStoreICStub::Generate(MacroAssembler* masm) { |
3385 GenerateImpl(masm, false); | 3306 GenerateImpl(masm, false); |
3386 } | 3307 } |
3387 | 3308 |
(...skipping 1516 matching lines...) |
4904 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); | 4825 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); |
4905 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 4826 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, |
4906 kStackUnwindSpace, NULL, return_value_operand, NULL); | 4827 kStackUnwindSpace, NULL, return_value_operand, NULL); |
4907 } | 4828 } |
4908 | 4829 |
4909 #undef __ | 4830 #undef __ |
4910 } // namespace internal | 4831 } // namespace internal |
4911 } // namespace v8 | 4832 } // namespace v8 |
4912 | 4833 |
4913 #endif // V8_TARGET_ARCH_PPC | 4834 #endif // V8_TARGET_ARCH_PPC |