| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
| 8 | 8 |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 509 matching lines...) |
| 520 __ ret(0); | 520 __ ret(0); |
| 521 } | 521 } |
| 522 } | 522 } |
| 523 | 523 |
| 524 | 524 |
| 525 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { | 525 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { |
| 526 Label miss; | 526 Label miss; |
| 527 Register receiver = LoadDescriptor::ReceiverRegister(); | 527 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 528 // Ensure that the vector and slot registers won't be clobbered before | 528 // Ensure that the vector and slot registers won't be clobbered before |
| 529 // calling the miss handler. | 529 // calling the miss handler. |
| 530 DCHECK(!AreAliased(r8, r9, VectorLoadICDescriptor::VectorRegister(), | 530 DCHECK(!AreAliased(r8, r9, LoadWithVectorDescriptor::VectorRegister(), |
| 531 VectorLoadICDescriptor::SlotRegister())); | 531 LoadDescriptor::SlotRegister())); |
| 532 | 532 |
| 533 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8, | 533 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8, |
| 534 r9, &miss); | 534 r9, &miss); |
| 535 __ bind(&miss); | 535 __ bind(&miss); |
| 536 PropertyAccessCompiler::TailCallBuiltin( | 536 PropertyAccessCompiler::TailCallBuiltin( |
| 537 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC)); | 537 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC)); |
| 538 } | 538 } |
| 539 | 539 |
| 540 | 540 |
| 541 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { | 541 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { |
| (...skipping 363 matching lines...) |
| 905 | 905 |
| 906 void LoadIndexedStringStub::Generate(MacroAssembler* masm) { | 906 void LoadIndexedStringStub::Generate(MacroAssembler* masm) { |
| 907 // Return address is on the stack. | 907 // Return address is on the stack. |
| 908 Label miss; | 908 Label miss; |
| 909 | 909 |
| 910 Register receiver = LoadDescriptor::ReceiverRegister(); | 910 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 911 Register index = LoadDescriptor::NameRegister(); | 911 Register index = LoadDescriptor::NameRegister(); |
| 912 Register scratch = rdi; | 912 Register scratch = rdi; |
| 913 Register result = rax; | 913 Register result = rax; |
| 914 DCHECK(!scratch.is(receiver) && !scratch.is(index)); | 914 DCHECK(!scratch.is(receiver) && !scratch.is(index)); |
| 915 DCHECK(!scratch.is(VectorLoadICDescriptor::VectorRegister()) && | 915 DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) && |
| 916 result.is(VectorLoadICDescriptor::SlotRegister())); | 916 result.is(LoadDescriptor::SlotRegister())); |
| 917 | 917 |
| 918 // StringCharAtGenerator doesn't use the result register until it's passed | 918 // StringCharAtGenerator doesn't use the result register until it's passed |
| 919 // the different miss possibilities. If it did, we would have a conflict | 919 // the different miss possibilities. If it did, we would have a conflict |
| 920 // when FLAG_vector_ics is true. | 920 // when FLAG_vector_ics is true. |
| 921 StringCharAtGenerator char_at_generator(receiver, index, scratch, result, | 921 StringCharAtGenerator char_at_generator(receiver, index, scratch, result, |
| 922 &miss, // When not a string. | 922 &miss, // When not a string. |
| 923 &miss, // When not a number. | 923 &miss, // When not a number. |
| 924 &miss, // When index out of range. | 924 &miss, // When index out of range. |
| 925 STRING_INDEX_IS_ARRAY_INDEX, | 925 STRING_INDEX_IS_ARRAY_INDEX, |
| 926 RECEIVER_IS_STRING); | 926 RECEIVER_IS_STRING); |
| (...skipping 2006 matching lines...) |
| 2933 Factory* factory = masm->isolate()->factory(); | 2933 Factory* factory = masm->isolate()->factory(); |
| 2934 // Index is not a smi. | 2934 // Index is not a smi. |
| 2935 __ bind(&index_not_smi_); | 2935 __ bind(&index_not_smi_); |
| 2936 // If index is a heap number, try converting it to an integer. | 2936 // If index is a heap number, try converting it to an integer. |
| 2937 __ CheckMap(index_, | 2937 __ CheckMap(index_, |
| 2938 factory->heap_number_map(), | 2938 factory->heap_number_map(), |
| 2939 index_not_number_, | 2939 index_not_number_, |
| 2940 DONT_DO_SMI_CHECK); | 2940 DONT_DO_SMI_CHECK); |
| 2941 call_helper.BeforeCall(masm); | 2941 call_helper.BeforeCall(masm); |
| 2942 if (embed_mode == PART_OF_IC_HANDLER) { | 2942 if (embed_mode == PART_OF_IC_HANDLER) { |
| 2943 __ Push(VectorLoadICDescriptor::VectorRegister()); | 2943 __ Push(LoadWithVectorDescriptor::VectorRegister()); |
| 2944 __ Push(VectorLoadICDescriptor::SlotRegister()); | 2944 __ Push(LoadDescriptor::SlotRegister()); |
| 2945 } | 2945 } |
| 2946 __ Push(object_); | 2946 __ Push(object_); |
| 2947 __ Push(index_); // Consumed by runtime conversion function. | 2947 __ Push(index_); // Consumed by runtime conversion function. |
| 2948 if (index_flags_ == STRING_INDEX_IS_NUMBER) { | 2948 if (index_flags_ == STRING_INDEX_IS_NUMBER) { |
| 2949 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); | 2949 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); |
| 2950 } else { | 2950 } else { |
| 2951 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); | 2951 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); |
| 2952 // NumberToSmi discards numbers that are not exact integers. | 2952 // NumberToSmi discards numbers that are not exact integers. |
| 2953 __ CallRuntime(Runtime::kNumberToSmi, 1); | 2953 __ CallRuntime(Runtime::kNumberToSmi, 1); |
| 2954 } | 2954 } |
| 2955 if (!index_.is(rax)) { | 2955 if (!index_.is(rax)) { |
| 2956 // Save the conversion result before the pop instructions below | 2956 // Save the conversion result before the pop instructions below |
| 2957 // have a chance to overwrite it. | 2957 // have a chance to overwrite it. |
| 2958 __ movp(index_, rax); | 2958 __ movp(index_, rax); |
| 2959 } | 2959 } |
| 2960 __ Pop(object_); | 2960 __ Pop(object_); |
| 2961 if (embed_mode == PART_OF_IC_HANDLER) { | 2961 if (embed_mode == PART_OF_IC_HANDLER) { |
| 2962 __ Pop(VectorLoadICDescriptor::SlotRegister()); | 2962 __ Pop(LoadDescriptor::SlotRegister()); |
| 2963 __ Pop(VectorLoadICDescriptor::VectorRegister()); | 2963 __ Pop(LoadWithVectorDescriptor::VectorRegister()); |
| 2964 } | 2964 } |
| 2965 // Reload the instance type. | 2965 // Reload the instance type. |
| 2966 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); | 2966 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); |
| 2967 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); | 2967 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); |
| 2968 call_helper.AfterCall(masm); | 2968 call_helper.AfterCall(masm); |
| 2969 // If index is still not a smi, it must be out of range. | 2969 // If index is still not a smi, it must be out of range. |
| 2970 __ JumpIfNotSmi(index_, index_out_of_range_); | 2970 __ JumpIfNotSmi(index_, index_out_of_range_); |
| 2971 // Otherwise, return to the fast path. | 2971 // Otherwise, return to the fast path. |
| 2972 __ jmp(&got_smi_index_); | 2972 __ jmp(&got_smi_index_); |
| 2973 | 2973 |
| (...skipping 1384 matching lines...) |
| 4358 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 4358 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
| 4359 __ PopReturnAddressTo(rcx); | 4359 __ PopReturnAddressTo(rcx); |
| 4360 int additional_offset = | 4360 int additional_offset = |
| 4361 function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0; | 4361 function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0; |
| 4362 __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); | 4362 __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); |
| 4363 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. | 4363 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. |
| 4364 } | 4364 } |
| 4365 | 4365 |
| 4366 | 4366 |
| 4367 void LoadICTrampolineStub::Generate(MacroAssembler* masm) { | 4367 void LoadICTrampolineStub::Generate(MacroAssembler* masm) { |
| 4368 EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister()); | 4368 EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister()); |
| 4369 VectorRawLoadStub stub(isolate(), state()); | 4369 LoadICStub stub(isolate(), state()); |
| 4370 stub.GenerateForTrampoline(masm); | 4370 stub.GenerateForTrampoline(masm); |
| 4371 } | 4371 } |
| 4372 | 4372 |
| 4373 | 4373 |
| 4374 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { | 4374 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { |
| 4375 EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister()); | 4375 EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister()); |
| 4376 VectorRawKeyedLoadStub stub(isolate()); | 4376 KeyedLoadICStub stub(isolate()); |
| 4377 stub.GenerateForTrampoline(masm); | 4377 stub.GenerateForTrampoline(masm); |
| 4378 } | 4378 } |
| 4379 | 4379 |
| 4380 | 4380 |
| 4381 static void HandleArrayCases(MacroAssembler* masm, Register receiver, | 4381 static void HandleArrayCases(MacroAssembler* masm, Register receiver, |
| 4382 Register key, Register vector, Register slot, | 4382 Register key, Register vector, Register slot, |
| 4383 Register feedback, Register receiver_map, | 4383 Register feedback, Register receiver_map, |
| 4384 Register scratch1, Register scratch2, | 4384 Register scratch1, Register scratch2, |
| 4385 Register scratch3, bool is_polymorphic, | 4385 Register scratch3, bool is_polymorphic, |
| 4386 Label* miss) { | 4386 Label* miss) { |
| (...skipping 58 matching lines...) |
| 4445 __ cmpp(receiver_map, FieldOperand(feedback, WeakCell::kValueOffset)); | 4445 __ cmpp(receiver_map, FieldOperand(feedback, WeakCell::kValueOffset)); |
| 4446 __ j(not_equal, try_array); | 4446 __ j(not_equal, try_array); |
| 4447 Register handler = feedback; | 4447 Register handler = feedback; |
| 4448 __ movp(handler, FieldOperand(vector, integer_slot, times_pointer_size, | 4448 __ movp(handler, FieldOperand(vector, integer_slot, times_pointer_size, |
| 4449 FixedArray::kHeaderSize + kPointerSize)); | 4449 FixedArray::kHeaderSize + kPointerSize)); |
| 4450 __ leap(handler, FieldOperand(handler, Code::kHeaderSize)); | 4450 __ leap(handler, FieldOperand(handler, Code::kHeaderSize)); |
| 4451 __ jmp(handler); | 4451 __ jmp(handler); |
| 4452 } | 4452 } |
| 4453 | 4453 |
| 4454 | 4454 |
| 4455 void VectorRawLoadStub::Generate(MacroAssembler* masm) { | 4455 void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } |
| 4456 GenerateImpl(masm, false); | |
| 4457 } | |
| 4458 | 4456 |
| 4459 | 4457 |
| 4460 void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) { | 4458 void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) { |
| 4461 GenerateImpl(masm, true); | 4459 GenerateImpl(masm, true); |
| 4462 } | 4460 } |
| 4463 | 4461 |
| 4464 | 4462 |
| 4465 void VectorRawLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4463 void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
| 4466 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // rdx | 4464 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // rdx |
| 4467 Register name = VectorLoadICDescriptor::NameRegister(); // rcx | 4465 Register name = LoadWithVectorDescriptor::NameRegister(); // rcx |
| 4468 Register vector = VectorLoadICDescriptor::VectorRegister(); // rbx | 4466 Register vector = LoadWithVectorDescriptor::VectorRegister(); // rbx |
| 4469 Register slot = VectorLoadICDescriptor::SlotRegister(); // rax | 4467 Register slot = LoadWithVectorDescriptor::SlotRegister(); // rax |
| 4470 Register feedback = rdi; | 4468 Register feedback = rdi; |
| 4471 Register integer_slot = r8; | 4469 Register integer_slot = r8; |
| 4472 Register receiver_map = r9; | 4470 Register receiver_map = r9; |
| 4473 | 4471 |
| 4474 __ SmiToInteger32(integer_slot, slot); | 4472 __ SmiToInteger32(integer_slot, slot); |
| 4475 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size, | 4473 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size, |
| 4476 FixedArray::kHeaderSize)); | 4474 FixedArray::kHeaderSize)); |
| 4477 | 4475 |
| 4478 // Try to quickly handle the monomorphic case without knowing for sure | 4476 // Try to quickly handle the monomorphic case without knowing for sure |
| 4479 // if we have a weak cell in feedback. We do know it's safe to look | 4477 // if we have a weak cell in feedback. We do know it's safe to look |
| (...skipping 20 matching lines...) |
| 4500 | 4498 |
| 4501 __ bind(&miss); | 4499 __ bind(&miss); |
| 4502 LoadIC::GenerateMiss(masm); | 4500 LoadIC::GenerateMiss(masm); |
| 4503 | 4501 |
| 4504 __ bind(&load_smi_map); | 4502 __ bind(&load_smi_map); |
| 4505 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); | 4503 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); |
| 4506 __ jmp(&compare_map); | 4504 __ jmp(&compare_map); |
| 4507 } | 4505 } |
| 4508 | 4506 |
| 4509 | 4507 |
| 4510 void VectorRawKeyedLoadStub::Generate(MacroAssembler* masm) { | 4508 void KeyedLoadICStub::Generate(MacroAssembler* masm) { |
| 4511 GenerateImpl(masm, false); | 4509 GenerateImpl(masm, false); |
| 4512 } | 4510 } |
| 4513 | 4511 |
| 4514 | 4512 |
| 4515 void VectorRawKeyedLoadStub::GenerateForTrampoline(MacroAssembler* masm) { | 4513 void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) { |
| 4516 GenerateImpl(masm, true); | 4514 GenerateImpl(masm, true); |
| 4517 } | 4515 } |
| 4518 | 4516 |
| 4519 | 4517 |
| 4520 void VectorRawKeyedLoadStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { | 4518 void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { |
| 4521 Register receiver = VectorLoadICDescriptor::ReceiverRegister(); // rdx | 4519 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // rdx |
| 4522 Register key = VectorLoadICDescriptor::NameRegister(); // rcx | 4520 Register key = LoadWithVectorDescriptor::NameRegister(); // rcx |
| 4523 Register vector = VectorLoadICDescriptor::VectorRegister(); // rbx | 4521 Register vector = LoadWithVectorDescriptor::VectorRegister(); // rbx |
| 4524 Register slot = VectorLoadICDescriptor::SlotRegister(); // rax | 4522 Register slot = LoadWithVectorDescriptor::SlotRegister(); // rax |
| 4525 Register feedback = rdi; | 4523 Register feedback = rdi; |
| 4526 Register integer_slot = r8; | 4524 Register integer_slot = r8; |
| 4527 Register receiver_map = r9; | 4525 Register receiver_map = r9; |
| 4528 | 4526 |
| 4529 __ SmiToInteger32(integer_slot, slot); | 4527 __ SmiToInteger32(integer_slot, slot); |
| 4530 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size, | 4528 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size, |
| 4531 FixedArray::kHeaderSize)); | 4529 FixedArray::kHeaderSize)); |
| 4532 | 4530 |
| 4533 // Try to quickly handle the monomorphic case without knowing for sure | 4531 // Try to quickly handle the monomorphic case without knowing for sure |
| 4534 // if we have a weak cell in feedback. We do know it's safe to look | 4532 // if we have a weak cell in feedback. We do know it's safe to look |
| (...skipping 838 matching lines...) |
| 5373 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg, | 5371 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg, |
| 5374 kStackSpace, nullptr, return_value_operand, NULL); | 5372 kStackSpace, nullptr, return_value_operand, NULL); |
| 5375 } | 5373 } |
| 5376 | 5374 |
| 5377 | 5375 |
| 5378 #undef __ | 5376 #undef __ |
| 5379 | 5377 |
| 5380 } } // namespace v8::internal | 5378 } } // namespace v8::internal |
| 5381 | 5379 |
| 5382 #endif // V8_TARGET_ARCH_X64 | 5380 #endif // V8_TARGET_ARCH_X64 |