| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 3178 matching lines...) | |
| 3189 if (function_mode() == JS_FUNCTION_STUB_MODE) { | 3189 if (function_mode() == JS_FUNCTION_STUB_MODE) { |
| 3190 __ addi(r4, r4, Operand(1)); | 3190 __ addi(r4, r4, Operand(1)); |
| 3191 } | 3191 } |
| 3192 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 3192 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
| 3193 __ slwi(r4, r4, Operand(kPointerSizeLog2)); | 3193 __ slwi(r4, r4, Operand(kPointerSizeLog2)); |
| 3194 __ add(sp, sp, r4); | 3194 __ add(sp, sp, r4); |
| 3195 __ Ret(); | 3195 __ Ret(); |
| 3196 } | 3196 } |
| 3197 | 3197 |
| 3198 | 3198 |
| 3199 void LoadICTrampolineStub::Generate(MacroAssembler* masm) { | |
| 3200 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); | |
| 3201 LoadICStub stub(isolate()); | |
| 3202 stub.GenerateForTrampoline(masm); | |
| 3203 } | |
| 3204 | |
| 3205 | |
| 3206 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { | 3199 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { |
| 3207 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); | 3200 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); |
| 3208 KeyedLoadICStub stub(isolate()); | 3201 KeyedLoadICStub stub(isolate()); |
| 3209 stub.GenerateForTrampoline(masm); | 3202 stub.GenerateForTrampoline(masm); |
| 3210 } | 3203 } |
| 3211 | 3204 |
| 3212 | 3205 |
| 3213 void CallICTrampolineStub::Generate(MacroAssembler* masm) { | 3206 void CallICTrampolineStub::Generate(MacroAssembler* masm) { |
| 3214 __ EmitLoadTypeFeedbackVector(r5); | 3207 __ EmitLoadTypeFeedbackVector(r5); |
| 3215 CallICStub stub(isolate(), state()); | 3208 CallICStub stub(isolate(), state()); |
| 3216 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 3209 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 3217 } | 3210 } |
| 3218 | 3211 |
| 3219 | 3212 |
| 3220 void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } | |
| 3221 | |
| 3222 | |
| 3223 void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) { | |
| 3224 GenerateImpl(masm, true); | |
| 3225 } | |
| 3226 | |
| 3227 | |
| 3228 static void HandleArrayCases(MacroAssembler* masm, Register feedback, | 3213 static void HandleArrayCases(MacroAssembler* masm, Register feedback, |
| 3229 Register receiver_map, Register scratch1, | 3214 Register receiver_map, Register scratch1, |
| 3230 Register scratch2, bool is_polymorphic, | 3215 Register scratch2, bool is_polymorphic, |
| 3231 Label* miss) { | 3216 Label* miss) { |
| 3232 // feedback initially contains the feedback array | 3217 // feedback initially contains the feedback array |
| 3233 Label next_loop, prepare_next; | 3218 Label next_loop, prepare_next; |
| 3234 Label start_polymorphic; | 3219 Label start_polymorphic; |
| 3235 | 3220 |
| 3236 Register cached_map = scratch1; | 3221 Register cached_map = scratch1; |
| 3237 | 3222 |
| (...skipping 74 matching lines...) | |
| 3312 Register handler = feedback; | 3297 Register handler = feedback; |
| 3313 __ SmiToPtrArrayOffset(r0, slot); | 3298 __ SmiToPtrArrayOffset(r0, slot); |
| 3314 __ add(handler, vector, r0); | 3299 __ add(handler, vector, r0); |
| 3315 __ LoadP(handler, | 3300 __ LoadP(handler, |
| 3316 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); | 3301 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); |
| 3317 __ addi(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | 3302 __ addi(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 3318 __ Jump(ip); | 3303 __ Jump(ip); |
| 3319 } | 3304 } |
| 3320 | 3305 |
| 3321 | 3306 |
| 3322 void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | |
| 3323 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // r4 | |
| 3324 Register name = LoadWithVectorDescriptor::NameRegister(); // r5 | |
| 3325 Register vector = LoadWithVectorDescriptor::VectorRegister(); // r6 | |
| 3326 Register slot = LoadWithVectorDescriptor::SlotRegister(); // r3 | |
| 3327 Register feedback = r7; | |
| 3328 Register receiver_map = r8; | |
| 3329 Register scratch1 = r9; | |
| 3330 | |
| 3331 __ SmiToPtrArrayOffset(r0, slot); | |
| 3332 __ add(feedback, vector, r0); | |
| 3333 __ LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | |
| 3334 | |
| 3335 // Try to quickly handle the monomorphic case without knowing for sure | |
| 3336 // if we have a weak cell in feedback. We do know it's safe to look | |
| 3337 // at WeakCell::kValueOffset. | |
| 3338 Label try_array, load_smi_map, compare_map; | |
| 3339 Label not_array, miss; | |
| 3340 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | |
| 3341 scratch1, &compare_map, &load_smi_map, &try_array); | |
| 3342 | |
| 3343 // Is it a fixed array? | |
| 3344 __ bind(&try_array); | |
| 3345 __ LoadP(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
| 3346 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex); | |
| 3347 __ bne(¬_array); | |
| 3348 HandleArrayCases(masm, feedback, receiver_map, scratch1, r10, true, &miss); | |
| 3349 | |
| 3350 __ bind(¬_array); | |
| 3351 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); | |
| 3352 __ bne(&miss); | |
| 3353 masm->isolate()->load_stub_cache()->GenerateProbe( | |
| 3354 masm, receiver, name, feedback, receiver_map, scratch1, r10); | |
| 3355 | |
| 3356 __ bind(&miss); | |
| 3357 LoadIC::GenerateMiss(masm); | |
| 3358 | |
| 3359 __ bind(&load_smi_map); | |
| 3360 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | |
| 3361 __ b(&compare_map); | |
| 3362 } | |
| 3363 | |
| 3364 | |
| 3365 void KeyedLoadICStub::Generate(MacroAssembler* masm) { | 3307 void KeyedLoadICStub::Generate(MacroAssembler* masm) { |
| 3366 GenerateImpl(masm, false); | 3308 GenerateImpl(masm, false); |
| 3367 } | 3309 } |
| 3368 | 3310 |
| 3369 | 3311 |
| 3370 void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) { | 3312 void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) { |
| 3371 GenerateImpl(masm, true); | 3313 GenerateImpl(masm, true); |
| 3372 } | 3314 } |
| 3373 | 3315 |
| 3374 | 3316 |
| (...skipping 1797 matching lines...) | |
| 5172 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); | 5114 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); |
| 5173 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 5115 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, |
| 5174 kStackUnwindSpace, NULL, return_value_operand, NULL); | 5116 kStackUnwindSpace, NULL, return_value_operand, NULL); |
| 5175 } | 5117 } |
| 5176 | 5118 |
| 5177 #undef __ | 5119 #undef __ |
| 5178 } // namespace internal | 5120 } // namespace internal |
| 5179 } // namespace v8 | 5121 } // namespace v8 |
| 5180 | 5122 |
| 5181 #endif // V8_TARGET_ARCH_PPC | 5123 #endif // V8_TARGET_ARCH_PPC |