| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_S390 | 5 #if V8_TARGET_ARCH_S390 |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 3121 matching lines...) |
| 3132 __ LoadP(r3, MemOperand(fp, parameter_count_offset)); | 3132 __ LoadP(r3, MemOperand(fp, parameter_count_offset)); |
| 3133 if (function_mode() == JS_FUNCTION_STUB_MODE) { | 3133 if (function_mode() == JS_FUNCTION_STUB_MODE) { |
| 3134 __ AddP(r3, Operand(1)); | 3134 __ AddP(r3, Operand(1)); |
| 3135 } | 3135 } |
| 3136 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 3136 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
| 3137 __ ShiftLeftP(r3, r3, Operand(kPointerSizeLog2)); | 3137 __ ShiftLeftP(r3, r3, Operand(kPointerSizeLog2)); |
| 3138 __ la(sp, MemOperand(r3, sp)); | 3138 __ la(sp, MemOperand(r3, sp)); |
| 3139 __ Ret(); | 3139 __ Ret(); |
| 3140 } | 3140 } |
| 3141 | 3141 |
| 3142 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { | |
| 3143 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); | |
| 3144 KeyedLoadICStub stub(isolate()); | |
| 3145 stub.GenerateForTrampoline(masm); | |
| 3146 } | |
| 3147 | |
| 3148 void CallICTrampolineStub::Generate(MacroAssembler* masm) { | 3142 void CallICTrampolineStub::Generate(MacroAssembler* masm) { |
| 3149 __ EmitLoadTypeFeedbackVector(r4); | 3143 __ EmitLoadTypeFeedbackVector(r4); |
| 3150 CallICStub stub(isolate(), state()); | 3144 CallICStub stub(isolate(), state()); |
| 3151 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 3145 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 3152 } | 3146 } |
| 3153 | 3147 |
| 3154 static void HandleArrayCases(MacroAssembler* masm, Register feedback, | 3148 static void HandleArrayCases(MacroAssembler* masm, Register feedback, |
| 3155 Register receiver_map, Register scratch1, | 3149 Register receiver_map, Register scratch1, |
| 3156 Register scratch2, bool is_polymorphic, | 3150 Register scratch2, bool is_polymorphic, |
| 3157 Label* miss) { | 3151 Label* miss) { |
| (...skipping 76 matching lines...) |
| 3234 __ CmpP(cached_map, receiver_map); | 3228 __ CmpP(cached_map, receiver_map); |
| 3235 __ bne(try_array); | 3229 __ bne(try_array); |
| 3236 Register handler = feedback; | 3230 Register handler = feedback; |
| 3237 __ SmiToPtrArrayOffset(r1, slot); | 3231 __ SmiToPtrArrayOffset(r1, slot); |
| 3238 __ LoadP(handler, | 3232 __ LoadP(handler, |
| 3239 FieldMemOperand(r1, vector, FixedArray::kHeaderSize + kPointerSize)); | 3233 FieldMemOperand(r1, vector, FixedArray::kHeaderSize + kPointerSize)); |
| 3240 __ AddP(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); | 3234 __ AddP(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 3241 __ Jump(ip); | 3235 __ Jump(ip); |
| 3242 } | 3236 } |
| 3243 | 3237 |
| 3244 void KeyedLoadICStub::Generate(MacroAssembler* masm) { | |
| 3245 GenerateImpl(masm, false); | |
| 3246 } | |
| 3247 | |
| 3248 void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) { | |
| 3249 GenerateImpl(masm, true); | |
| 3250 } | |
| 3251 | |
| 3252 void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | |
| 3253 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // r3 | |
| 3254 Register key = LoadWithVectorDescriptor::NameRegister(); // r4 | |
| 3255 Register vector = LoadWithVectorDescriptor::VectorRegister(); // r5 | |
| 3256 Register slot = LoadWithVectorDescriptor::SlotRegister(); // r2 | |
| 3257 Register feedback = r6; | |
| 3258 Register receiver_map = r7; | |
| 3259 Register scratch1 = r8; | |
| 3260 | |
| 3261 __ SmiToPtrArrayOffset(r1, slot); | |
| 3262 __ LoadP(feedback, FieldMemOperand(r1, vector, FixedArray::kHeaderSize)); | |
| 3263 | |
| 3264 // Try to quickly handle the monomorphic case without knowing for sure | |
| 3265 // if we have a weak cell in feedback. We do know it's safe to look | |
| 3266 // at WeakCell::kValueOffset. | |
| 3267 Label try_array, load_smi_map, compare_map; | |
| 3268 Label not_array, miss; | |
| 3269 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot, | |
| 3270 scratch1, &compare_map, &load_smi_map, &try_array); | |
| 3271 | |
| 3272 __ bind(&try_array); | |
| 3273 // Is it a fixed array? | |
| 3274 __ LoadP(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); | |
| 3275 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex); | |
| 3276 __ bne(¬_array); | |
| 3277 | |
| 3278 // We have a polymorphic element handler. | |
| 3279 Label polymorphic, try_poly_name; | |
| 3280 __ bind(&polymorphic); | |
| 3281 HandleArrayCases(masm, feedback, receiver_map, scratch1, r9, true, &miss); | |
| 3282 | |
| 3283 __ bind(¬_array); | |
| 3284 // Is it generic? | |
| 3285 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); | |
| 3286 __ bne(&try_poly_name); | |
| 3287 Handle<Code> megamorphic_stub = | |
| 3288 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); | |
| 3289 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); | |
| 3290 | |
| 3291 __ bind(&try_poly_name); | |
| 3292 // We might have a name in feedback, and a fixed array in the next slot. | |
| 3293 __ CmpP(key, feedback); | |
| 3294 __ bne(&miss); | |
| 3295 // If the name comparison succeeded, we know we have a fixed array with | |
| 3296 // at least one map/handler pair. | |
| 3297 __ SmiToPtrArrayOffset(r1, slot); | |
| 3298 __ LoadP(feedback, | |
| 3299 FieldMemOperand(r1, vector, FixedArray::kHeaderSize + kPointerSize)); | |
| 3300 HandleArrayCases(masm, feedback, receiver_map, scratch1, r9, false, &miss); | |
| 3301 | |
| 3302 __ bind(&miss); | |
| 3303 KeyedLoadIC::GenerateMiss(masm); | |
| 3304 | |
| 3305 __ bind(&load_smi_map); | |
| 3306 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | |
| 3307 __ b(&compare_map); | |
| 3308 } | |
| 3309 | |
| 3310 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { | 3238 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { |
| 3311 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); | 3239 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); |
| 3312 KeyedStoreICStub stub(isolate(), state()); | 3240 KeyedStoreICStub stub(isolate(), state()); |
| 3313 stub.GenerateForTrampoline(masm); | 3241 stub.GenerateForTrampoline(masm); |
| 3314 } | 3242 } |
| 3315 | 3243 |
| 3316 void KeyedStoreICStub::Generate(MacroAssembler* masm) { | 3244 void KeyedStoreICStub::Generate(MacroAssembler* masm) { |
| 3317 GenerateImpl(masm, false); | 3245 GenerateImpl(masm, false); |
| 3318 } | 3246 } |
| 3319 | 3247 |
| (...skipping 1500 matching lines...) |
| 4820 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 4748 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, |
| 4821 kStackUnwindSpace, NULL, return_value_operand, NULL); | 4749 kStackUnwindSpace, NULL, return_value_operand, NULL); |
| 4822 } | 4750 } |
| 4823 | 4751 |
| 4824 #undef __ | 4752 #undef __ |
| 4825 | 4753 |
| 4826 } // namespace internal | 4754 } // namespace internal |
| 4827 } // namespace v8 | 4755 } // namespace v8 |
| 4828 | 4756 |
| 4829 #endif // V8_TARGET_ARCH_S390 | 4757 #endif // V8_TARGET_ARCH_S390 |