| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 245 matching lines...) | |
| 256 Label* allocation_memento_found) { | 256 Label* allocation_memento_found) { |
| 257 // ----------- S t a t e ------------- | 257 // ----------- S t a t e ------------- |
| 258 // -- rax : value | 258 // -- rax : value |
| 259 // -- rbx : target map | 259 // -- rbx : target map |
| 260 // -- rcx : key | 260 // -- rcx : key |
| 261 // -- rdx : receiver | 261 // -- rdx : receiver |
| 262 // -- rsp[0] : return address | 262 // -- rsp[0] : return address |
| 263 // ----------------------------------- | 263 // ----------------------------------- |
| 264 if (mode == TRACK_ALLOCATION_SITE) { | 264 if (mode == TRACK_ALLOCATION_SITE) { |
| 265 ASSERT(allocation_memento_found != NULL); | 265 ASSERT(allocation_memento_found != NULL); |
| 266 __ TestJSArrayForAllocationMemento(rdx, rdi); | 266 __ JumpIfJSArrayHasAllocationMemento(rdx, rdi, allocation_memento_found); |
| 267 __ j(equal, allocation_memento_found); | |
| 268 } | 267 } |
| 269 | 268 |
| 270 // Set transitioned map. | 269 // Set transitioned map. |
| 271 __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx); | 270 __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx); |
| 272 __ RecordWriteField(rdx, | 271 __ RecordWriteField(rdx, |
| 273 HeapObject::kMapOffset, | 272 HeapObject::kMapOffset, |
| 274 rbx, | 273 rbx, |
| 275 rdi, | 274 rdi, |
| 276 kDontSaveFPRegs, | 275 kDontSaveFPRegs, |
| 277 EMIT_REMEMBERED_SET, | 276 EMIT_REMEMBERED_SET, |
| 278 OMIT_SMI_CHECK); | 277 OMIT_SMI_CHECK); |
| 279 } | 278 } |
| 280 | 279 |
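The three transition generators in this file all receive the same refactor: the two-instruction pattern `TestJSArrayForAllocationMemento` + `j(equal, label)` is folded into a single `JumpIfJSArrayHasAllocationMemento` call. A minimal sketch of what the new MacroAssembler helper presumably looks like; the body is inferred from the call sites in this diff and is not shown in it:

```cpp
// Hypothetical definition (macro-assembler-x64), inferred from the call
// sites: same two registers as before, plus the label the callers used to
// jump to explicitly.
void MacroAssembler::JumpIfJSArrayHasAllocationMemento(
    Register receiver_reg, Register scratch_reg, Label* memento_found) {
  // Sets the zero flag when an AllocationMemento trails the array's elements.
  TestJSArrayForAllocationMemento(receiver_reg, scratch_reg);
  j(equal, memento_found);
}
```

Folding the jump into the helper keeps each call site to one line and guarantees every caller branches on the same condition.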
| 281 | 280 |
| 282 void ElementsTransitionGenerator::GenerateSmiToDouble( | 281 void ElementsTransitionGenerator::GenerateSmiToDouble( |
| 283 MacroAssembler* masm, AllocationSiteMode mode, Label* fail) { | 282 MacroAssembler* masm, AllocationSiteMode mode, Label* fail) { |
| 284 // ----------- S t a t e ------------- | 283 // ----------- S t a t e ------------- |
| 285 // -- rax : value | 284 // -- rax : value |
| 286 // -- rbx : target map | 285 // -- rbx : target map |
| 287 // -- rcx : key | 286 // -- rcx : key |
| 288 // -- rdx : receiver | 287 // -- rdx : receiver |
| 289 // -- rsp[0] : return address | 288 // -- rsp[0] : return address |
| 290 // ----------------------------------- | 289 // ----------------------------------- |
| 291 // The fail label is not actually used since we do not allocate. | 290 // The fail label is not actually used since we do not allocate. |
| 292 Label allocated, new_backing_store, only_change_map, done; | 291 Label allocated, new_backing_store, only_change_map, done; |
| 293 | 292 |
| 294 if (mode == TRACK_ALLOCATION_SITE) { | 293 if (mode == TRACK_ALLOCATION_SITE) { |
| 295 __ TestJSArrayForAllocationMemento(rdx, rdi); | 294 __ JumpIfJSArrayHasAllocationMemento(rdx, rdi, fail); |
| 296 __ j(equal, fail); | |
| 297 } | 295 } |
| 298 | 296 |
| 299 // Check for empty arrays, which only require a map transition and no changes | 297 // Check for empty arrays, which only require a map transition and no changes |
| 300 // to the backing store. | 298 // to the backing store. |
| 301 __ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset)); | 299 __ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset)); |
| 302 __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex); | 300 __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex); |
| 303 __ j(equal, &only_change_map); | 301 __ j(equal, &only_change_map); |
| 304 | 302 |
| 305 // Check backing store for COW-ness. For COW arrays we have to | 303 // Check backing store for COW-ness. For COW arrays we have to |
| 306 // allocate a new backing store. | 304 // allocate a new backing store. |
| (...skipping 104 matching lines...) | |
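The elided lines continue GenerateSmiToDouble, including the COW check the comment above introduces: a copy-on-write backing store is shared, so it cannot be converted in place. For orientation only, a hedged sketch of the usual x64 shape of such a check; the register, root name, and target label are assumptions, not taken from this diff:

```cpp
// Assumed shape of the COW check: a shared copy-on-write FixedArray carries
// the dedicated COW map, so compare the backing store's map word against
// that root and take the slow path that allocates a fresh store.
__ CompareRoot(FieldOperand(r8, HeapObject::kMapOffset),
               Heap::kFixedCOWArrayMapRootIndex);
__ j(equal, &new_backing_store);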
| 411 // ----------- S t a t e ------------- | 409 // ----------- S t a t e ------------- |
| 412 // -- rax : value | 410 // -- rax : value |
| 413 // -- rbx : target map | 411 // -- rbx : target map |
| 414 // -- rcx : key | 412 // -- rcx : key |
| 415 // -- rdx : receiver | 413 // -- rdx : receiver |
| 416 // -- rsp[0] : return address | 414 // -- rsp[0] : return address |
| 417 // ----------------------------------- | 415 // ----------------------------------- |
| 418 Label loop, entry, convert_hole, gc_required, only_change_map; | 416 Label loop, entry, convert_hole, gc_required, only_change_map; |
| 419 | 417 |
| 420 if (mode == TRACK_ALLOCATION_SITE) { | 418 if (mode == TRACK_ALLOCATION_SITE) { |
| 421 __ TestJSArrayForAllocationMemento(rdx, rdi); | 419 __ JumpIfJSArrayHasAllocationMemento(rdx, rdi, fail); |
| 422 __ j(equal, fail); | |
| 423 } | 420 } |
| 424 | 421 |
| 425 // Check for empty arrays, which only require a map transition and no changes | 422 // Check for empty arrays, which only require a map transition and no changes |
| 426 // to the backing store. | 423 // to the backing store. |
| 427 __ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset)); | 424 __ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset)); |
| 428 __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex); | 425 __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex); |
| 429 __ j(equal, &only_change_map); | 426 __ j(equal, &only_change_map); |
| 430 | 427 |
| 431 __ push(rax); | 428 __ push(rax); |
| 432 | 429 |
| (...skipping 238 matching lines...) | |
| 671 __ subsd(result, double_scratch); | 668 __ subsd(result, double_scratch); |
| 672 __ addsd(result, Operand(kScratchRegister, 8 * kDoubleSize)); | 669 __ addsd(result, Operand(kScratchRegister, 8 * kDoubleSize)); |
| 673 __ mulsd(result, input); | 670 __ mulsd(result, input); |
| 674 | 671 |
| 675 __ bind(&done); | 672 __ bind(&done); |
| 676 } | 673 } |
| 677 | 674 |
| 678 #undef __ | 675 #undef __ |
| 679 | 676 |
| 680 | 677 |
| 681 static const int kNoCodeAgeSequenceLength = 6; | |
| 682 | |
| 683 static byte* GetNoCodeAgeSequence(uint32_t* length) { | 678 static byte* GetNoCodeAgeSequence(uint32_t* length) { |
| 684 static bool initialized = false; | 679 static bool initialized = false; |
| 685 static byte sequence[kNoCodeAgeSequenceLength]; | 680 static byte sequence[kNoCodeAgeSequenceLength]; |
| 686 *length = kNoCodeAgeSequenceLength; | 681 *length = kNoCodeAgeSequenceLength; |
| 687 if (!initialized) { | 682 if (!initialized) { |
| 688 // The sequence of instructions that is patched out for aging code is the | 683 // The sequence of instructions that is patched out for aging code is the |
| 689 // following boilerplate stack-building prologue that is found both in | 684 // following boilerplate stack-building prologue that is found both in |
| 690 // FUNCTION and OPTIMIZED_FUNCTION code: | 685 // FUNCTION and OPTIMIZED_FUNCTION code: |
| 691 CodePatcher patcher(sequence, kNoCodeAgeSequenceLength); | 686 CodePatcher patcher(sequence, kNoCodeAgeSequenceLength); |
| 692 patcher.masm()->push(rbp); | 687 patcher.masm()->push(rbp); |
| (...skipping 11 matching lines...) | |
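The elided patcher lines emit the rest of the young-code prologue. Given the 6-byte `kNoCodeAgeSequenceLength` (the constant removed above), one plausible completion is the standard x64 frame-building sequence; only `push(rbp)` is visible in this diff, and the remaining three instructions are an assumption:

```cpp
// Presumed full 6-byte young sequence, with per-instruction encodings:
patcher.masm()->push(rbp);       // 0x55           : 1 byte
patcher.masm()->movq(rbp, rsp);  // 0x48 0x89 0xE5 : 3 bytes
patcher.masm()->push(rsi);       // 0x56           : 1 byte (context register)
patcher.masm()->push(rdi);       // 0x57           : 1 byte (function register)
```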
| 704 byte* young_sequence = GetNoCodeAgeSequence(&young_length); | 699 byte* young_sequence = GetNoCodeAgeSequence(&young_length); |
| 705 bool result = (!memcmp(sequence, young_sequence, young_length)); | 700 bool result = (!memcmp(sequence, young_sequence, young_length)); |
| 706 ASSERT(result || *sequence == kCallOpcode); | 701 ASSERT(result || *sequence == kCallOpcode); |
| 707 return result; | 702 return result; |
| 708 } | 703 } |
| 709 | 704 |
| 710 | 705 |
| 711 void Code::GetCodeAgeAndParity(byte* sequence, Age* age, | 706 void Code::GetCodeAgeAndParity(byte* sequence, Age* age, |
| 712 MarkingParity* parity) { | 707 MarkingParity* parity) { |
| 713 if (IsYoungSequence(sequence)) { | 708 if (IsYoungSequence(sequence)) { |
| 714 *age = kNoAge; | 709 *age = kNoAgeCodeAge; |
| 715 *parity = NO_MARKING_PARITY; | 710 *parity = NO_MARKING_PARITY; |
| 716 } else { | 711 } else { |
| 717 sequence++; // Skip the kCallOpcode byte | 712 sequence++; // Skip the kCallOpcode byte |
| 718 Address target_address = sequence + *reinterpret_cast<int*>(sequence) + | 713 Address target_address = sequence + *reinterpret_cast<int*>(sequence) + |
| 719 Assembler::kCallTargetAddressOffset; | 714 Assembler::kCallTargetAddressOffset; |
| 720 Code* stub = GetCodeFromTargetAddress(target_address); | 715 Code* stub = GetCodeFromTargetAddress(target_address); |
| 721 GetCodeAgeAndParity(stub, age, parity); | 716 GetCodeAgeAndParity(stub, age, parity); |
| 722 } | 717 } |
| 723 } | 718 } |
| 724 | 719 |
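In the aged branch above, `sequence` has been advanced past the 0xE8 opcode, so it points at the rel32 field of a near call; the displacement is relative to the address of the next instruction. Assuming `Assembler::kCallTargetAddressOffset` is 4 on x64 (the size of the rel32 field), the expression reduces to the standard decode, shown here as a standalone, compilable illustration with a hypothetical function name:

```cpp
#include <cstdint>
#include <cstring>

// Decode the target of an x86/x64 near call: opcode 0xE8 followed by a
// 32-bit displacement measured from the address of the *next* instruction.
uint8_t* DecodeNearCallTarget(uint8_t* call_instruction) {
  uint8_t* rel32_field = call_instruction + 1;  // skip the 0xE8 opcode byte
  int32_t displacement;
  std::memcpy(&displacement, rel32_field, sizeof(displacement));  // unaligned-safe
  return rel32_field + sizeof(displacement) + displacement;
}
```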
| 725 | 720 |
| 726 void Code::PatchPlatformCodeAge(Isolate* isolate, | 721 void Code::PatchPlatformCodeAge(Isolate* isolate, |
| 727 byte* sequence, | 722 byte* sequence, |
| 728 Code::Age age, | 723 Code::Age age, |
| 729 MarkingParity parity) { | 724 MarkingParity parity) { |
| 730 uint32_t young_length; | 725 uint32_t young_length; |
| 731 byte* young_sequence = GetNoCodeAgeSequence(&young_length); | 726 byte* young_sequence = GetNoCodeAgeSequence(&young_length); |
| 732 if (age == kNoAge) { | 727 if (age == kNoAgeCodeAge) { |
| 733 CopyBytes(sequence, young_sequence, young_length); | 728 CopyBytes(sequence, young_sequence, young_length); |
| 734 CPU::FlushICache(sequence, young_length); | 729 CPU::FlushICache(sequence, young_length); |
| 735 } else { | 730 } else { |
| 736 Code* stub = GetCodeAgeStub(isolate, age, parity); | 731 Code* stub = GetCodeAgeStub(isolate, age, parity); |
| 737 CodePatcher patcher(sequence, young_length); | 732 CodePatcher patcher(sequence, young_length); |
| 738 patcher.masm()->call(stub->instruction_start()); | 733 patcher.masm()->call(stub->instruction_start()); |
| 739 for (int i = 0; | 734 patcher.masm()->Nop( |
| 740 i < kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength; | 735 kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength); |
| 741 i++) { | |
| 742 patcher.masm()->nop(); | |
| 743 } | |
| 744 } | 736 } |
| 745 } | 737 } |
| 746 | 738 |
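The loop-to-`Nop(n)` change above preserves the patched length: the young sequence is 6 bytes (per the `kNoCodeAgeSequenceLength` removed earlier in this diff) and the emitted call is `Assembler::kShortCallInstructionLength` bytes, presumably 5 (0xE8 plus a 4-byte rel32), leaving the remainder as padding. A self-contained sanity check of that arithmetic, with the 5 treated as an assumption:

```cpp
// Constants as read from this diff (6) and inferred from x64 encoding (5):
const int kNoCodeAgeSequenceLength = 6;     // bytes in the young prologue
const int kShortCallInstructionLength = 5;  // assumed: 0xE8 + 4-byte rel32
const int kPadding = kNoCodeAgeSequenceLength - kShortCallInstructionLength;
// Old form: emit kPadding single-byte 0x90 NOPs, one per loop iteration.
// New form: Nop(kPadding) emits the same number of bytes in one call and is
// free to use wider NOP encodings when kPadding > 1.
static_assert(kPadding == 1, "one trailing padding byte after the call");
```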
| 747 | 739 |
| 748 Operand StackArgumentsAccessor::GetArgumentOperand(int index) { | 740 Operand StackArgumentsAccessor::GetArgumentOperand(int index) { |
| 749 ASSERT(index >= 0); | 741 ASSERT(index >= 0); |
| 750 int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0; | 742 int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0; |
| 751 int displacement_to_last_argument = base_reg_.is(rsp) ? | 743 int displacement_to_last_argument = base_reg_.is(rsp) ? |
| 752 kPCOnStackSize : kFPOnStackSize + kPCOnStackSize; | 744 kPCOnStackSize : kFPOnStackSize + kPCOnStackSize; |
| 753 displacement_to_last_argument += extra_displacement_to_last_argument_; | 745 displacement_to_last_argument += extra_displacement_to_last_argument_; |
| 754 if (argument_count_reg_.is(no_reg)) { | 746 if (argument_count_reg_.is(no_reg)) { |
| 755 // argument[0] is at base_reg_ + displacement_to_last_argument + | 747 // argument[0] is at base_reg_ + displacement_to_last_argument + |
| 756 // (argument_count_immediate_ + receiver - 1) * kPointerSize. | 748 // (argument_count_immediate_ + receiver - 1) * kPointerSize. |
| 757 ASSERT(argument_count_immediate_ + receiver > 0); | 749 ASSERT(argument_count_immediate_ + receiver > 0); |
| 758 return Operand(base_reg_, displacement_to_last_argument + | 750 return Operand(base_reg_, displacement_to_last_argument + |
| 759 (argument_count_immediate_ + receiver - 1 - index) * kPointerSize); | 751 (argument_count_immediate_ + receiver - 1 - index) * kPointerSize); |
| 760 } else { | 752 } else { |
| 761 // argument[0] is at base_reg_ + displacement_to_last_argument + | 753 // argument[0] is at base_reg_ + displacement_to_last_argument + |
| 762 // argument_count_reg_ * times_pointer_size + (receiver - 1) * kPointerSize. | 754 // argument_count_reg_ * times_pointer_size + (receiver - 1) * kPointerSize. |
| 763 return Operand(base_reg_, argument_count_reg_, times_pointer_size, | 755 return Operand(base_reg_, argument_count_reg_, times_pointer_size, |
| 764 displacement_to_last_argument + (receiver - 1 - index) * kPointerSize); | 756 displacement_to_last_argument + (receiver - 1 - index) * kPointerSize); |
| 765 } | 757 } |
| 766 } | 758 } |
| 767 | 759 |
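A worked example of the operand arithmetic in `GetArgumentOperand`, assuming 8-byte stack slots (`kPointerSize == kPCOnStackSize == 8`), `base_reg_ == rsp`, `ARGUMENTS_CONTAIN_RECEIVER` (so `receiver == 1`), no extra displacement, and an immediate argument count of 2; the values are illustrative only:

```cpp
// Layout the formula implies (stack grows down; rsp[0] is the return address):
//   rsp + 24 : argument[0]   <- index 0: 8 + (2 + 1 - 1 - 0) * 8 == 24
//   rsp + 16 : argument[1]   <- index 1: 8 + (2 + 1 - 1 - 1) * 8 == 16
//   rsp +  8 : receiver      <- sits just above the saved return address
//   rsp +  0 : return address (kPCOnStackSize bytes)
```

With a register-held count, the same displacement is produced by the scaled-index form in the `else` branch, with `argument_count_reg_ * times_pointer_size` standing in for the immediate term.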
| 768 | 760 |
| 769 } } // namespace v8::internal | 761 } } // namespace v8::internal |
| 770 | 762 |
| 771 #endif // V8_TARGET_ARCH_X64 | 763 #endif // V8_TARGET_ARCH_X64 |