OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 1431 matching lines...) |
1442 // register which we have just restored. | 1442 // register which we have just restored. |
1443 __ Ret(); | 1443 __ Ret(); |
1444 } | 1444 } |
1445 | 1445 |
1446 | 1446 |
1447 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { | 1447 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { |
1448 Label miss; | 1448 Label miss; |
1449 Register receiver = LoadDescriptor::ReceiverRegister(); | 1449 Register receiver = LoadDescriptor::ReceiverRegister(); |
1450 // Ensure that the vector and slot registers won't be clobbered before | 1450 // Ensure that the vector and slot registers won't be clobbered before |
1451 // calling the miss handler. | 1451 // calling the miss handler. |
1452 DCHECK(!AreAliased(x10, x11, VectorLoadICDescriptor::VectorRegister(), | 1452 DCHECK(!AreAliased(x10, x11, LoadWithVectorDescriptor::VectorRegister(), |
1453 VectorLoadICDescriptor::SlotRegister())); | 1453 LoadWithVectorDescriptor::SlotRegister())); |
1454 | 1454 |
1455 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, x10, | 1455 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, x10, |
1456 x11, &miss); | 1456 x11, &miss); |
1457 | 1457 |
1458 __ Bind(&miss); | 1458 __ Bind(&miss); |
1459 PropertyAccessCompiler::TailCallBuiltin( | 1459 PropertyAccessCompiler::TailCallBuiltin( |
1460 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC)); | 1460 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC)); |
1461 } | 1461 } |
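The DCHECK here is the substantive part of this hunk besides the descriptor rename: x10 and x11 are about to be used as scratch registers, so they must not alias the vector and slot registers the miss handler still needs. A minimal sketch of what an AreAliased-style predicate checks, assuming registers compare by numeric code (an illustration only, not V8's implementation, which also has to tolerate NoReg):

    #include <initializer_list>

    // Returns true if any two register codes in the list name the same
    // physical register -- the condition the DCHECK asserts never holds.
    static bool AreAliasedSketch(std::initializer_list<int> reg_codes) {
      for (auto i = reg_codes.begin(); i != reg_codes.end(); ++i) {
        for (auto j = i + 1; j != reg_codes.end(); ++j) {
          if (*i == *j) return true;
        }
      }
      return false;
    }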
1462 | 1462 |
1463 | 1463 |
1464 void LoadIndexedStringStub::Generate(MacroAssembler* masm) { | 1464 void LoadIndexedStringStub::Generate(MacroAssembler* masm) { |
1465 // Return address is in lr. | 1465 // Return address is in lr. |
1466 Label miss; | 1466 Label miss; |
1467 | 1467 |
1468 Register receiver = LoadDescriptor::ReceiverRegister(); | 1468 Register receiver = LoadDescriptor::ReceiverRegister(); |
1469 Register index = LoadDescriptor::NameRegister(); | 1469 Register index = LoadDescriptor::NameRegister(); |
1470 Register result = x0; | 1470 Register result = x0; |
1471 Register scratch = x10; | 1471 Register scratch = x10; |
1472 DCHECK(!scratch.is(receiver) && !scratch.is(index)); | 1472 DCHECK(!scratch.is(receiver) && !scratch.is(index)); |
1473 DCHECK(!scratch.is(VectorLoadICDescriptor::VectorRegister()) && | 1473 DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) && |
1474 result.is(VectorLoadICDescriptor::SlotRegister())); | 1474 result.is(LoadWithVectorDescriptor::SlotRegister())); |
1475 | 1475 |
1476 // StringCharAtGenerator doesn't use the result register until it's passed | 1476 // StringCharAtGenerator doesn't use the result register until it's passed |
1477 // the different miss possibilities. If it did, we would have a conflict | 1477 // the different miss possibilities. If it did, we would have a conflict |
1478 // when FLAG_vector_ics is true. | 1478 // when FLAG_vector_ics is true. |
1479 StringCharAtGenerator char_at_generator(receiver, index, scratch, result, | 1479 StringCharAtGenerator char_at_generator(receiver, index, scratch, result, |
1480 &miss, // When not a string. | 1480 &miss, // When not a string. |
1481 &miss, // When not a number. | 1481 &miss, // When not a number. |
1482 &miss, // When index out of range. | 1482 &miss, // When index out of range. |
1483 STRING_INDEX_IS_ARRAY_INDEX, | 1483 STRING_INDEX_IS_ARRAY_INDEX, |
1484 RECEIVER_IS_STRING); | 1484 RECEIVER_IS_STRING); |
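All three failure routes of StringCharAtGenerator are deliberately wired to the same label here: receiver not a string, index not a number, and index out of range all fall through to one miss path. A trivial illustration with hypothetical names (the real generator distinguishes the cases via three separate Label* parameters, all bound to &miss above):

    // Hypothetical enum modeling the three Label* parameters.
    enum class CharAtFailureSketch { kNonString, kNonNumber, kOutOfRange };

    static const char* RouteSketch(CharAtFailureSketch failure) {
      switch (failure) {
        case CharAtFailureSketch::kNonString:   // receiver not a string
        case CharAtFailureSketch::kNonNumber:   // index not a number
        case CharAtFailureSketch::kOutOfRange:  // index out of bounds
          return "miss";                        // one shared miss label
      }
      return "unreachable";  // keeps -Wreturn-type quiet
    }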
(...skipping 1861 matching lines...) |
3346 void StringCharCodeAtGenerator::GenerateSlow( | 3346 void StringCharCodeAtGenerator::GenerateSlow( |
3347 MacroAssembler* masm, EmbedMode embed_mode, | 3347 MacroAssembler* masm, EmbedMode embed_mode, |
3348 const RuntimeCallHelper& call_helper) { | 3348 const RuntimeCallHelper& call_helper) { |
3349 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); | 3349 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); |
3350 | 3350 |
3351 __ Bind(&index_not_smi_); | 3351 __ Bind(&index_not_smi_); |
3352 // If index is a heap number, try converting it to an integer. | 3352 // If index is a heap number, try converting it to an integer. |
3353 __ JumpIfNotHeapNumber(index_, index_not_number_); | 3353 __ JumpIfNotHeapNumber(index_, index_not_number_); |
3354 call_helper.BeforeCall(masm); | 3354 call_helper.BeforeCall(masm); |
3355 if (embed_mode == PART_OF_IC_HANDLER) { | 3355 if (embed_mode == PART_OF_IC_HANDLER) { |
3356 __ Push(VectorLoadICDescriptor::VectorRegister(), | 3356 __ Push(LoadWithVectorDescriptor::VectorRegister(), |
3357 VectorLoadICDescriptor::SlotRegister(), object_, index_); | 3357 LoadWithVectorDescriptor::SlotRegister(), object_, index_); |
3358 } else { | 3358 } else { |
3359 // Save object_ on the stack and pass index_ as argument for runtime call. | 3359 // Save object_ on the stack and pass index_ as argument for runtime call. |
3360 __ Push(object_, index_); | 3360 __ Push(object_, index_); |
3361 } | 3361 } |
3362 if (index_flags_ == STRING_INDEX_IS_NUMBER) { | 3362 if (index_flags_ == STRING_INDEX_IS_NUMBER) { |
3363 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); | 3363 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); |
3364 } else { | 3364 } else { |
3365 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); | 3365 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); |
3366 // NumberToSmi discards numbers that are not exact integers. | 3366 // NumberToSmi discards numbers that are not exact integers. |
3367 __ CallRuntime(Runtime::kNumberToSmi, 1); | 3367 __ CallRuntime(Runtime::kNumberToSmi, 1); |
3368 } | 3368 } |
3369 // Save the conversion result before the pop instructions below | 3369 // Save the conversion result before the pop instructions below |
3370 // have a chance to overwrite it. | 3370 // have a chance to overwrite it. |
3371 __ Mov(index_, x0); | 3371 __ Mov(index_, x0); |
3372 if (embed_mode == PART_OF_IC_HANDLER) { | 3372 if (embed_mode == PART_OF_IC_HANDLER) { |
3373 __ Pop(object_, VectorLoadICDescriptor::SlotRegister(), | 3373 __ Pop(object_, LoadWithVectorDescriptor::SlotRegister(), |
3374 VectorLoadICDescriptor::VectorRegister()); | 3374 LoadWithVectorDescriptor::VectorRegister()); |
3375 } else { | 3375 } else { |
3376 __ Pop(object_); | 3376 __ Pop(object_); |
3377 } | 3377 } |
3378 // Reload the instance type. | 3378 // Reload the instance type. |
3379 __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); | 3379 __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); |
3380 __ Ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); | 3380 __ Ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); |
3381 call_helper.AfterCall(masm); | 3381 call_helper.AfterCall(masm); |
3382 | 3382 |
3383 // If index is still not a smi, it must be out of range. | 3383 // If index is still not a smi, it must be out of range. |
3384 __ JumpIfNotSmi(index_, index_out_of_range_); | 3384 __ JumpIfNotSmi(index_, index_out_of_range_); |
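The conversion rule in the NumberToSmi branch above is worth pinning down: the runtime call succeeds only for heap numbers holding an exact integer in Smi range; every other value leaves the index as a non-Smi, which the JumpIfNotSmi here then routes to the out-of-range path. A hedged C++ model of that rule (the 32-bit Smi payload range is an assumption matching 64-bit V8 of this vintage):

    #include <cmath>
    #include <cstdint>
    #include <optional>

    // Models Runtime::kNumberToSmi: exact, Smi-range integers convert;
    // everything else (2.5, NaN, infinities, out-of-range values) fails
    // and ends up at index_out_of_range_.
    static std::optional<int32_t> NumberToSmiSketch(double value) {
      if (std::trunc(value) != value) return std::nullopt;  // 2.5, NaN
      if (value < INT32_MIN || value > INT32_MAX) return std::nullopt;
      return static_cast<int32_t>(value);                   // 2.0 -> 2
    }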
(...skipping 1103 matching lines...) |
4488 __ Add(x1, x1, 1); | 4488 __ Add(x1, x1, 1); |
4489 } | 4489 } |
4490 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 4490 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
4491 __ Drop(x1); | 4491 __ Drop(x1); |
4492 // Return to IC Miss stub, continuation still on stack. | 4492 // Return to IC Miss stub, continuation still on stack. |
4493 __ Ret(); | 4493 __ Ret(); |
4494 } | 4494 } |
4495 | 4495 |
4496 | 4496 |
4497 void LoadICTrampolineStub::Generate(MacroAssembler* masm) { | 4497 void LoadICTrampolineStub::Generate(MacroAssembler* masm) { |
4498 EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister()); | 4498 EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister()); |
4499 VectorRawLoadStub stub(isolate(), state()); | 4499 LoadICStub stub(isolate(), state()); |
4500 stub.GenerateForTrampoline(masm); | 4500 stub.GenerateForTrampoline(masm); |
4501 } | 4501 } |
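The trampoline variants exist for call sites that arrive without the feedback vector in a register: they materialize it, then emit the very same stub body via GenerateForTrampoline. A minimal sketch of that split, with hypothetical names and an assumed field layout (the real EmitLoadTypeFeedbackVector walks from the JSFunction to its vector):

    // Hypothetical stand-ins; the field layout is assumed for illustration.
    struct TypeFeedbackVectorSketch {};
    struct JSFunctionSketch { TypeFeedbackVectorSketch* feedback_vector; };

    // Shared body, as in LoadICStub::GenerateImpl(masm, in_frame).
    static void StubBodySketch(TypeFeedbackVectorSketch* vector,
                               bool in_frame) {
      (void)vector;
      (void)in_frame;  // dispatch logic elided; see the stubs below
    }

    // Trampoline: fetch the vector first, then run the same body with
    // in_frame == true, mirroring GenerateForTrampoline.
    static void TrampolineSketch(JSFunctionSketch* fn) {
      StubBodySketch(fn->feedback_vector, /*in_frame=*/true);
    }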
4502 | 4502 |
4503 | 4503 |
4504 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { | 4504 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { |
4505 EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister()); | 4505 EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister()); |
4506 VectorRawKeyedLoadStub stub(isolate()); | 4506 KeyedLoadICStub stub(isolate()); |
4507 stub.GenerateForTrampoline(masm); | 4507 stub.GenerateForTrampoline(masm); |
4508 } | 4508 } |
4509 | 4509 |
4510 | 4510 |
4511 void CallICTrampolineStub::Generate(MacroAssembler* masm) { | 4511 void CallICTrampolineStub::Generate(MacroAssembler* masm) { |
4512 EmitLoadTypeFeedbackVector(masm, x2); | 4512 EmitLoadTypeFeedbackVector(masm, x2); |
4513 CallICStub stub(isolate(), state()); | 4513 CallICStub stub(isolate(), state()); |
4514 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 4514 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
4515 } | 4515 } |
4516 | 4516 |
4517 | 4517 |
4518 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) { | 4518 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) { |
4519 EmitLoadTypeFeedbackVector(masm, x2); | 4519 EmitLoadTypeFeedbackVector(masm, x2); |
4520 CallIC_ArrayStub stub(isolate(), state()); | 4520 CallIC_ArrayStub stub(isolate(), state()); |
4521 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 4521 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
4522 } | 4522 } |
4523 | 4523 |
4524 | 4524 |
4525 void VectorRawLoadStub::Generate(MacroAssembler* masm) { | 4525 void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } |
4526 GenerateImpl(masm, false); | |
4527 } | |
4528 | 4526 |
4529 | 4527 |
4530 void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) { | 4528 void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) { |
4531 GenerateImpl(masm, true); | 4529 GenerateImpl(masm, true); |
4532 } | 4530 } |
4533 | 4531 |
4534 | 4532 |
4535 static void HandleArrayCases(MacroAssembler* masm, Register receiver, | 4533 static void HandleArrayCases(MacroAssembler* masm, Register receiver, |
4536 Register key, Register vector, Register slot, | 4534 Register key, Register vector, Register slot, |
4537 Register feedback, Register receiver_map, | 4535 Register feedback, Register receiver_map, |
4538 Register scratch1, Register scratch2, | 4536 Register scratch1, Register scratch2, |
4539 bool is_polymorphic, Label* miss) { | 4537 bool is_polymorphic, Label* miss) { |
4540 // feedback initially contains the feedback array | 4538 // feedback initially contains the feedback array |
(...skipping 76 matching lines...) |
4617 | 4615 |
4618 Register handler = feedback; | 4616 Register handler = feedback; |
4619 __ Add(handler, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); | 4617 __ Add(handler, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); |
4620 __ Ldr(handler, | 4618 __ Ldr(handler, |
4621 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); | 4619 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize)); |
4622 __ Add(handler, handler, Code::kHeaderSize - kHeapObjectTag); | 4620 __ Add(handler, handler, Code::kHeaderSize - kHeapObjectTag); |
4623 __ Jump(handler); | 4621 __ Jump(handler); |
4624 } | 4622 } |
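The handler load just above packs three address tricks into two instructions: untag the Smi slot index while scaling it to bytes, skip the FixedArray header plus one slot (the handler is stored after the cached map), and account for the heap-object tag. A constexpr model with constants that match 64-bit V8 of this era (treat them as assumptions):

    #include <cstdint>

    constexpr intptr_t kPointerSizeSketch = 8;    // 64-bit pointers
    constexpr intptr_t kHeapObjectTagSketch = 1;  // tagged heap pointers
    constexpr intptr_t kFixedArrayHeaderSketch =  // map + length words
        2 * kPointerSizeSketch;

    // Byte offset of the handler for feedback slot `slot`, relative to the
    // tagged vector pointer -- what UntagSmiAndScale plus FieldMemOperand
    // compute together above.
    constexpr intptr_t HandlerFieldOffset(intptr_t slot) {
      return slot * kPointerSizeSketch   // Smi index -> byte offset
             + kFixedArrayHeaderSketch   // skip map and length
             + kPointerSizeSketch        // handler sits one slot later
             - kHeapObjectTagSketch;     // strip the tag bit
    }

    static_assert(HandlerFieldOffset(0) == 23, "slot 0 handler byte offset");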
4625 | 4623 |
4626 | 4624 |
4627 void VectorRawLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4625 void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4628 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // x1 | 4626 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // x1 |
4629 Register name = VectorLoadICDescriptor::NameRegister(); // x2 | 4627 Register name = LoadWithVectorDescriptor::NameRegister(); // x2 |
4630 Register vector = VectorLoadICDescriptor::VectorRegister(); // x3 | 4628 Register vector = LoadWithVectorDescriptor::VectorRegister(); // x3 |
4631 Register slot = VectorLoadICDescriptor::SlotRegister(); // x0 | 4629 Register slot = LoadWithVectorDescriptor::SlotRegister(); // x0 |
4632 Register feedback = x4; | 4630 Register feedback = x4; |
4633 Register receiver_map = x5; | 4631 Register receiver_map = x5; |
4634 Register scratch1 = x6; | 4632 Register scratch1 = x6; |
4635 | 4633 |
4636 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); | 4634 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); |
4637 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 4635 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
4638 | 4636 |
4639 // Try to quickly handle the monomorphic case without knowing for sure | 4637 // Try to quickly handle the monomorphic case without knowing for sure |
4640 // if we have a weak cell in feedback. We do know it's safe to look | 4638 // if we have a weak cell in feedback. We do know it's safe to look |
4641 // at WeakCell::kValueOffset. | 4639 // at WeakCell::kValueOffset. |
(...skipping 19 matching lines...) |
4661 | 4659 |
4662 __ Bind(&miss); | 4660 __ Bind(&miss); |
4663 LoadIC::GenerateMiss(masm); | 4661 LoadIC::GenerateMiss(masm); |
4664 | 4662 |
4665 __ Bind(&load_smi_map); | 4663 __ Bind(&load_smi_map); |
4666 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | 4664 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
4667 __ jmp(&compare_map); | 4665 __ jmp(&compare_map); |
4668 } | 4666 } |
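Both GenerateImpl bodies implement the same monomorphic fast path: peek at the feedback slot as if it were a weak cell, compare its value against the receiver map, and jump to the cached handler on a hit. Note also the Smi detour: a Smi receiver borrows the heap-number map (the LoadRoot under load_smi_map), so numbers share handlers regardless of representation. A compact model with hypothetical types, not V8's API:

    // Hypothetical stand-ins for the objects the stub touches.
    struct WeakCellSketch { const void* value; };   // held map, or cleared
    struct FeedbackSlotSketch {
      WeakCellSketch* cell;   // slot i: weak cell in the monomorphic case
      const void* handler;    // slot i+1: cached handler code
    };

    enum class OutcomeSketch { kHandler, kMiss };

    static OutcomeSketch DispatchSketch(const void* receiver_map,
                                        const FeedbackSlotSketch& slot) {
      // Per the source comment, the stub peeks before knowing for sure the
      // slot holds a weak cell: reading WeakCell::kValueOffset is safe
      // either way, and a non-matching read just falls through to miss.
      if (slot.cell->value == receiver_map) return OutcomeSketch::kHandler;
      return OutcomeSketch::kMiss;  // polymorphic/megamorphic paths elided
    }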
4669 | 4667 |
4670 | 4668 |
4671 void VectorRawKeyedLoadStub::Generate(MacroAssembler* masm) { | 4669 void KeyedLoadICStub::Generate(MacroAssembler* masm) { |
4672 GenerateImpl(masm, false); | 4670 GenerateImpl(masm, false); |
4673 } | 4671 } |
4674 | 4672 |
4675 | 4673 |
4676 void VectorRawKeyedLoadStub::GenerateForTrampoline(MacroAssembler* masm) { | 4674 void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) { |
4677 GenerateImpl(masm, true); | 4675 GenerateImpl(masm, true); |
4678 } | 4676 } |
4679 | 4677 |
4680 | 4678 |
4681 void VectorRawKeyedLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4679 void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
4682 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // x1 | 4680 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // x1 |
4683 Register key = VectorLoadICDescriptor::NameRegister(); // x2 | 4681 Register key = LoadWithVectorDescriptor::NameRegister(); // x2 |
4684 Register vector = VectorLoadICDescriptor::VectorRegister(); // x3 | 4682 Register vector = LoadWithVectorDescriptor::VectorRegister(); // x3 |
4685 Register slot = VectorLoadICDescriptor::SlotRegister(); // x0 | 4683 Register slot = LoadWithVectorDescriptor::SlotRegister(); // x0 |
4686 Register feedback = x4; | 4684 Register feedback = x4; |
4687 Register receiver_map = x5; | 4685 Register receiver_map = x5; |
4688 Register scratch1 = x6; | 4686 Register scratch1 = x6; |
4689 | 4687 |
4690 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); | 4688 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); |
4691 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); | 4689 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
4692 | 4690 |
4693 // Try to quickly handle the monomorphic case without knowing for sure | 4691 // Try to quickly handle the monomorphic case without knowing for sure |
4694 // if we have a weak cell in feedback. We do know it's safe to look | 4692 // if we have a weak cell in feedback. We do know it's safe to look |
4695 // at WeakCell::kValueOffset. | 4693 // at WeakCell::kValueOffset. |
(...skipping 1079 matching lines...) |
5775 kStackUnwindSpace, NULL, spill_offset, | 5773 kStackUnwindSpace, NULL, spill_offset, |
5776 MemOperand(fp, 6 * kPointerSize), NULL); | 5774 MemOperand(fp, 6 * kPointerSize), NULL); |
5777 } | 5775 } |
5778 | 5776 |
5779 | 5777 |
5780 #undef __ | 5778 #undef __ |
5781 | 5779 |
5782 } } // namespace v8::internal | 5780 } } // namespace v8::internal |
5783 | 5781 |
5784 #endif // V8_TARGET_ARCH_ARM64 | 5782 #endif // V8_TARGET_ARCH_ARM64 |