Chromium Code Reviews

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 7945009: Merge experimental/gc branch to the bleeding_edge. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 9 years, 3 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 312 matching lines...)
323 __ Set(tos_, 0); 323 __ Set(tos_, 0);
324 __ ret(1 * kPointerSize); 324 __ ret(1 * kPointerSize);
325 __ bind(&not_heap_number); 325 __ bind(&not_heap_number);
326 } 326 }
327 327
328 __ bind(&patch); 328 __ bind(&patch);
329 GenerateTypeTransition(masm); 329 GenerateTypeTransition(masm);
330 } 330 }
331 331
332 332
333 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
334 __ PushCallerSaved(save_doubles_);
335 const int argument_count = 1;
336 __ PrepareCallCFunction(argument_count);
337 #ifdef _WIN64
338 __ LoadAddress(rcx, ExternalReference::isolate_address());
339 #else
340 __ LoadAddress(rdi, ExternalReference::isolate_address());
341 #endif
342
343 AllowExternalCallThatCantCauseGC scope(masm);
344 __ CallCFunction(
345 ExternalReference::store_buffer_overflow_function(masm->isolate()),
346 argument_count);
347 __ PopCallerSaved(save_doubles_);
348 __ ret(0);
349 }
350
351
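The new StoreBufferOverflowStub above saves the caller-saved registers (and the FP registers too when save_doubles_ asks for it), passes the isolate as the single C argument (rcx on Win64, rdi under the System V ABI), and calls the runtime's store-buffer-overflow function. As a rough mental model of what that C function is for, here is a self-contained sketch of a fixed-size store buffer that logs written slot addresses and is drained when it fills; all names and sizes are illustrative, not V8's actual implementation.

  #include <cstddef>
  #include <cstdint>
  #include <set>

  // Hypothetical store buffer: generated write-barrier code appends slot
  // addresses until the buffer is full, then an overflow handler (the role
  // played by store_buffer_overflow_function) drains it.
  class ToyStoreBuffer {
   public:
    static const size_t kCapacity = 1024;

    // Fast path used by the generated write barrier.
    void Record(uintptr_t slot_address) {
      entries_[top_++] = slot_address;
      if (top_ == kCapacity) Overflow();  // what the stub's C call stands for
    }

   private:
    // Slow path: compress the log into a deduplicated remembered set.
    void Overflow() {
      for (size_t i = 0; i < top_; ++i) remembered_.insert(entries_[i]);
      top_ = 0;
    }

    uintptr_t entries_[kCapacity];
    size_t top_ = 0;
    std::set<uintptr_t> remembered_;
  };
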
333 void ToBooleanStub::CheckOddball(MacroAssembler* masm, 352 void ToBooleanStub::CheckOddball(MacroAssembler* masm,
334 Type type, 353 Type type,
335 Heap::RootListIndex value, 354 Heap::RootListIndex value,
336 bool result) { 355 bool result) {
337 const Register argument = rax; 356 const Register argument = rax;
338 if (types_.Contains(type)) { 357 if (types_.Contains(type)) {
339 // If we see an expected oddball, return its ToBoolean value tos_. 358 // If we see an expected oddball, return its ToBoolean value tos_.
340 Label different_value; 359 Label different_value;
341 __ CompareRoot(argument, value); 360 __ CompareRoot(argument, value);
342 __ j(not_equal, &different_value, Label::kNear); 361 __ j(not_equal, &different_value, Label::kNear);
(...skipping 2325 matching lines...)
2668 2687
2669 // rbx: last_match_info backing store (FixedArray) 2688 // rbx: last_match_info backing store (FixedArray)
2670 // rdx: number of capture registers 2689 // rdx: number of capture registers
2671 // Store the capture count. 2690 // Store the capture count.
2672 __ Integer32ToSmi(kScratchRegister, rdx); 2691 __ Integer32ToSmi(kScratchRegister, rdx);
2673 __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset), 2692 __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
2674 kScratchRegister); 2693 kScratchRegister);
2675 // Store last subject and last input. 2694 // Store last subject and last input.
2676 __ movq(rax, Operand(rsp, kSubjectOffset)); 2695 __ movq(rax, Operand(rsp, kSubjectOffset));
2677 __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax); 2696 __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
2678 __ movq(rcx, rbx); 2697 __ RecordWriteField(rbx,
2679 __ RecordWrite(rcx, RegExpImpl::kLastSubjectOffset, rax, rdi); 2698 RegExpImpl::kLastSubjectOffset,
2699 rax,
2700 rdi,
2701 kDontSaveFPRegs);
2680 __ movq(rax, Operand(rsp, kSubjectOffset)); 2702 __ movq(rax, Operand(rsp, kSubjectOffset));
2681 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax); 2703 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
2682 __ movq(rcx, rbx); 2704 __ RecordWriteField(rbx,
2683 __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi); 2705 RegExpImpl::kLastInputOffset,
2706 rax,
2707 rdi,
2708 kDontSaveFPRegs);
2684 2709
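The two RecordWriteField calls replace the old movq/RecordWrite pairs and fold the write barrier for the just-stored fields (last subject, last input) into one helper that also takes the fp-save mode. As a toy model of the generational half of that barrier (the incremental-marking half is handled by the RecordWriteStub further down), with illustrative types that are not V8's heap classes:

  #include <unordered_set>

  struct ToyObject {
    bool in_new_space;      // true if allocated in the nursery
    ToyObject* fields[8];   // pointer fields
  };

  // Slots in old-space objects that point into new space; the scavenger
  // scans these instead of the whole old generation.
  std::unordered_set<ToyObject**> remembered_slots;

  void ToyRecordWriteField(ToyObject* object, int field_index, ToyObject* value) {
    object->fields[field_index] = value;   // the store itself
    if (value == nullptr) return;          // nothing to remember for non-pointers
    // Only old-to-new pointers need to be made visible to the scavenger.
    if (!object->in_new_space && value->in_new_space) {
      remembered_slots.insert(&object->fields[field_index]);
    }
  }
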
2685 // Get the static offsets vector filled by the native regexp code. 2710 // Get the static offsets vector filled by the native regexp code.
2686 __ LoadAddress(rcx, 2711 __ LoadAddress(rcx,
2687 ExternalReference::address_of_static_offsets_vector(isolate)); 2712 ExternalReference::address_of_static_offsets_vector(isolate));
2688 2713
2689 // rbx: last_match_info backing store (FixedArray) 2714 // rbx: last_match_info backing store (FixedArray)
2690 // rcx: offsets vector 2715 // rcx: offsets vector
2691 // rdx: number of capture registers 2716 // rdx: number of capture registers
2692 Label next_capture, done; 2717 Label next_capture, done;
2693 // Capture register counter starts from number of capture registers and 2718 // Capture register counter starts from number of capture registers and
(...skipping 629 matching lines...)
3323 return false; 3348 return false;
3324 } 3349 }
3325 3350
3326 3351
3327 bool CEntryStub::CompilingCallsToThisStubIsGCSafe() { 3352 bool CEntryStub::CompilingCallsToThisStubIsGCSafe() {
3328 return result_size_ == 1; 3353 return result_size_ == 1;
3329 } 3354 }
3330 3355
3331 3356
3332 void CodeStub::GenerateStubsAheadOfTime() { 3357 void CodeStub::GenerateStubsAheadOfTime() {
3333 CEntryStub save_doubles(1); 3358 CEntryStub save_doubles(1, kSaveFPRegs);
3334 save_doubles.SaveDoubles();
3335 save_doubles.GetCode(); 3359 save_doubles.GetCode();
3360 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime();
3361 // It is important that the store buffer overflow stubs are generated first.
3362 RecordWriteStub::GenerateFixedRegStubsAheadOfTime();
3336 } 3363 }
3337 3364
3338 3365
3339 void CodeStub::GenerateFPStubs() { 3366 void CodeStub::GenerateFPStubs() {
3340 } 3367 }
3341 3368
3342 3369
3343 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { 3370 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
3344 // Throw exception in eax. 3371 // Throw exception in eax.
3345 __ Throw(rax); 3372 __ Throw(rax);
(...skipping 425 matching lines...)
3771 __ j(above, &slow); 3798 __ j(above, &slow);
3772 3799
3773 // Register mapping: 3800 // Register mapping:
3774 // rax is object map. 3801 // rax is object map.
3775 // rdx is function. 3802 // rdx is function.
3776 // rbx is function prototype. 3803 // rbx is function prototype.
3777 if (!HasCallSiteInlineCheck()) { 3804 if (!HasCallSiteInlineCheck()) {
3778 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); 3805 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
3779 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); 3806 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
3780 } else { 3807 } else {
3808 // Get return address and delta to inlined map check.
3781 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); 3809 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
3782 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); 3810 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
3783 __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax); 3811 __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax);
3784 if (FLAG_debug_code) { 3812 if (FLAG_debug_code) {
3785 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); 3813 __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
3786 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); 3814 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
3787 __ Assert(equal, "InstanceofStub unexpected call site cache (check)."); 3815 __ Assert(equal, "InstanceofStub unexpected call site cache (check).");
3788 } 3816 }
3789 } 3817 }
3790 3818
(...skipping 14 matching lines...)
3805 __ jmp(&loop); 3833 __ jmp(&loop);
3806 3834
3807 __ bind(&is_instance); 3835 __ bind(&is_instance);
3808 if (!HasCallSiteInlineCheck()) { 3836 if (!HasCallSiteInlineCheck()) {
3809 __ xorl(rax, rax); 3837 __ xorl(rax, rax);
3810 // Store bitwise zero in the cache. This is a Smi in GC terms. 3838 // Store bitwise zero in the cache. This is a Smi in GC terms.
3811 STATIC_ASSERT(kSmiTag == 0); 3839 STATIC_ASSERT(kSmiTag == 0);
3812 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); 3840 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
3813 } else { 3841 } else {
3814 // Store offset of true in the root array at the inline check site. 3842 // Store offset of true in the root array at the inline check site.
3815 ASSERT((Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias 3843 int true_offset = 0x100 +
3816 == 0xB0 - 0x100); 3844 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
3817 __ movl(rax, Immediate(0xB0)); // TrueValue is at -10 * kPointerSize. 3845 // Assert it is a 1-byte signed value.
3846 ASSERT(true_offset >= 0 && true_offset < 0x100);
3847 __ movl(rax, Immediate(true_offset));
3818 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); 3848 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
3819 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); 3849 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
3820 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); 3850 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
3821 if (FLAG_debug_code) { 3851 if (FLAG_debug_code) {
3822 __ movl(rax, Immediate(kWordBeforeResultValue)); 3852 __ movl(rax, Immediate(kWordBeforeResultValue));
3823 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); 3853 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
3824 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)."); 3854 __ Assert(equal, "InstanceofStub unexpected call site cache (mov).");
3825 } 3855 }
3826 __ Set(rax, 0); 3856 __ Set(rax, 0);
3827 } 3857 }
3828 __ ret(2 * kPointerSize + extra_stack_space); 3858 __ ret(2 * kPointerSize + extra_stack_space);
3829 3859
3830 __ bind(&is_not_instance); 3860 __ bind(&is_not_instance);
3831 if (!HasCallSiteInlineCheck()) { 3861 if (!HasCallSiteInlineCheck()) {
3832 // We have to store a non-zero value in the cache. 3862 // We have to store a non-zero value in the cache.
3833 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); 3863 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
3834 } else { 3864 } else {
3835 // Store offset of false in the root array at the inline check site. 3865 // Store offset of false in the root array at the inline check site.
3836 ASSERT((Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias 3866 int false_offset = 0x100 +
3837 == 0xB8 - 0x100); 3867 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
3838 __ movl(rax, Immediate(0xB8)); // FalseValue is at -9 * kPointerSize. 3868 // Assert it is a 1-byte signed value.
3869 ASSERT(false_offset >= 0 && false_offset < 0x100);
3870 __ movl(rax, Immediate(false_offset));
3839 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); 3871 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
3840 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); 3872 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
3841 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); 3873 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
3842 if (FLAG_debug_code) { 3874 if (FLAG_debug_code) {
3843 __ movl(rax, Immediate(kWordBeforeResultValue)); 3875 __ movl(rax, Immediate(kWordBeforeResultValue));
3844 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); 3876 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
3845 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); 3877 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
3846 } 3878 }
3847 } 3879 }
3848 __ ret(2 * kPointerSize + extra_stack_space); 3880 __ ret(2 * kPointerSize + extra_stack_space);
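The change from the hard-coded 0xB0/0xB8 immediates is worth spelling out: the byte patched into the call site is 0x100 plus the (negative) root-register-relative offset of the root value, and the new ASSERTs only require that this still fits in the single byte written by movb, instead of pinning the exact root-array layout. Using the offsets implied by the old comments (TrueValue at -10 * kPointerSize, FalseValue at -9 * kPointerSize, kPointerSize == 8 on x64), the arithmetic works out as below; the real indices and kRootRegisterBias come from the heap definitions, which is exactly why the patch computes the value rather than hard-coding it.

  int true_offset  = 0x100 + (-10 * 8);  // 256 - 80 = 176 = 0xB0, fits in a byte
  int false_offset = 0x100 + (-9 * 8);   // 256 - 72 = 184 = 0xB8, fits in a byte
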
(...skipping 1660 matching lines...)
5509 __ Drop(1); 5541 __ Drop(1);
5510 __ ret(2 * kPointerSize); 5542 __ ret(2 * kPointerSize);
5511 5543
5512 __ bind(&not_in_dictionary); 5544 __ bind(&not_in_dictionary);
5513 __ movq(scratch, Immediate(0)); 5545 __ movq(scratch, Immediate(0));
5514 __ Drop(1); 5546 __ Drop(1);
5515 __ ret(2 * kPointerSize); 5547 __ ret(2 * kPointerSize);
5516 } 5548 }
5517 5549
5518 5550
5551 struct AheadOfTimeWriteBarrierStubList {
5552 Register object, value, address;
5553 RememberedSetAction action;
5554 };
5555
5556
5557 struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
5558 // Used in RegExpExecStub.
5559 { rbx, rax, rdi, EMIT_REMEMBERED_SET },
5560 // Used in CompileArrayPushCall.
5561 { rbx, rcx, rdx, EMIT_REMEMBERED_SET },
5562 // Used in CompileStoreGlobal.
5563 { rbx, rcx, rdx, OMIT_REMEMBERED_SET },
5564 // Used in StoreStubCompiler::CompileStoreField and
5565 // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
5566 { rdx, rcx, rbx, EMIT_REMEMBERED_SET },
5567 // GenerateStoreField calls the stub with two different permutations of
5568 // registers. This is the second.
5569 { rbx, rcx, rdx, EMIT_REMEMBERED_SET },
5570 // StoreIC::GenerateNormal via GenerateDictionaryStore.
5571 { rbx, r8, r9, EMIT_REMEMBERED_SET },
5572 // KeyedStoreIC::GenerateGeneric.
5573 { rbx, rdx, rcx, EMIT_REMEMBERED_SET},
5574 // KeyedStoreStubCompiler::GenerateStoreFastElement.
5575 { rdi, rdx, rcx, EMIT_REMEMBERED_SET},
5576 // Null termination.
5577 { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
5578 };
5579
5580
5581 bool RecordWriteStub::CompilingCallsToThisStubIsGCSafe() {
5582 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
5583 !entry->object.is(no_reg);
5584 entry++) {
5585 if (object_.is(entry->object) &&
5586 value_.is(entry->value) &&
5587 address_.is(entry->address) &&
5588 remembered_set_action_ == entry->action &&
5589 save_fp_regs_mode_ == kDontSaveFPRegs) {
5590 return true;
5591 }
5592 }
5593 return false;
5594 }
5595
5596
5597 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
5598 StoreBufferOverflowStub stub1(kDontSaveFPRegs);
5599 stub1.GetCode();
5600 StoreBufferOverflowStub stub2(kSaveFPRegs);
5601 stub2.GetCode();
5602 }
5603
5604
5605 void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
5606 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
5607 !entry->object.is(no_reg);
5608 entry++) {
5609 RecordWriteStub stub(entry->object,
5610 entry->value,
5611 entry->address,
5612 entry->action,
5613 kDontSaveFPRegs);
5614 stub.GetCode();
5615 }
5616 }
5617
5618
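The null-terminated kAheadOfTime table above is deliberately shared by the two functions around it: GenerateFixedRegStubsAheadOfTime pre-bakes a stub for every listed register/action combination, and CompilingCallsToThisStubIsGCSafe answers true only for those same combinations (and only with kDontSaveFPRegs), so the generator and the safety check cannot drift apart. A compact, self-contained model of that shared-table pattern, with stand-in types rather than V8's Register and stub classes:

  struct Reg { int code; };                     // code < 0 plays the role of no_reg
  inline bool Same(Reg a, Reg b) { return a.code == b.code; }

  struct Entry { Reg object, value, address; };

  const Reg kNoReg = { -1 };
  const Entry kTable[] = {
    { {3}, {0}, {7} },                          // hypothetical triple (e.g. rbx, rax, rdi)
    { kNoReg, kNoReg, kNoReg }                  // null termination
  };

  // Consumer 1: pre-generate a stub for every entry.
  template <typename F>
  void GenerateAheadOfTime(F generate_stub) {
    for (const Entry* e = kTable; !Same(e->object, kNoReg); ++e) generate_stub(*e);
  }

  // Consumer 2: a call compiled with this triple is safe only if pre-generated.
  bool IsGCSafe(Reg object, Reg value, Reg address) {
    for (const Entry* e = kTable; !Same(e->object, kNoReg); ++e) {
      if (Same(e->object, object) && Same(e->value, value) &&
          Same(e->address, address)) return true;
    }
    return false;
  }
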
5619 // Takes the input in 3 registers: address_ value_ and object_. A pointer to
5620 // the value has just been written into the object, now this stub makes sure
5621 // we keep the GC informed. The word in the object where the value has been
5622 // written is in the address register.
5623 void RecordWriteStub::Generate(MacroAssembler* masm) {
5624 Label skip_to_incremental_noncompacting;
5625 Label skip_to_incremental_compacting;
5626
5627 // The first two instructions are generated with labels so as to get the
5628 // offset fixed up correctly by the bind(Label*) call. We patch it back and
5629 // forth between a compare instructions (a nop in this position) and the
5630 // real branch when we start and stop incremental heap marking.
5631 // See RecordWriteStub::Patch for details.
5632 __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
5633 __ jmp(&skip_to_incremental_compacting, Label::kFar);
5634
5635 if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
5636 __ RememberedSetHelper(
5637 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
5638 } else {
5639 __ ret(0);
5640 }
5641
5642 __ bind(&skip_to_incremental_noncompacting);
5643 GenerateIncremental(masm, INCREMENTAL);
5644
5645 __ bind(&skip_to_incremental_compacting);
5646 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
5647
5648 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
5649 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
5650 masm->set_byte_at(0, kTwoByteNopInstruction);
5651 masm->set_byte_at(2, kFiveByteNopInstruction);
5652 }
5653
5654
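Generate above builds a three-state patchable stub: the two leading jumps are assembled with real labels so their offsets are correct, and the trailing set_byte_at calls immediately overwrite them with a 2-byte and a 5-byte nop, so a freshly generated stub falls straight through to the remembered-set path. When incremental (or compacting) marking starts, RecordWriteStub::Patch turns the corresponding nop back into its jump. A rough model of that mode switch over the 7-byte header; the jump displacements below are placeholders, only the patching idea matters:

  #include <cstdint>
  #include <cstring>

  enum Mode { STORE_BUFFER_ONLY, INCREMENTAL, INCREMENTAL_COMPACTION };

  // Slot 0 holds a 2-byte instruction, slot 1 a 5-byte instruction.
  struct StubHeader {
    uint8_t bytes[7];

    void SetMode(Mode mode) {
      static const uint8_t kNop2[2] = {0x66, 0x90};                    // 2-byte nop
      static const uint8_t kNop5[5] = {0x0F, 0x1F, 0x44, 0x00, 0x00};  // 5-byte nop
      static const uint8_t kJmp2[2] = {0xEB, 0x05};                    // jmp rel8 (placeholder target)
      static const uint8_t kJmp5[5] = {0xE9, 0x00, 0x00, 0x00, 0x00};  // jmp rel32 (placeholder target)
      std::memcpy(bytes,     mode == INCREMENTAL            ? kJmp2 : kNop2, 2);
      std::memcpy(bytes + 2, mode == INCREMENTAL_COMPACTION ? kJmp5 : kNop5, 5);
    }
  };
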
5655 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
5656 regs_.Save(masm);
5657
5658 if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
5659 Label dont_need_remembered_set;
5660
5661 __ movq(regs_.scratch0(), Operand(regs_.address(), 0));
5662 __ JumpIfNotInNewSpace(regs_.scratch0(),
5663 regs_.scratch0(),
5664 &dont_need_remembered_set);
5665
5666 __ CheckPageFlag(regs_.object(),
5667 regs_.scratch0(),
5668 1 << MemoryChunk::SCAN_ON_SCAVENGE,
5669 not_zero,
5670 &dont_need_remembered_set);
5671
5672 // First notify the incremental marker if necessary, then update the
5673 // remembered set.
5674 CheckNeedsToInformIncrementalMarker(
5675 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
5676 InformIncrementalMarker(masm, mode);
5677 regs_.Restore(masm);
5678 __ RememberedSetHelper(
5679 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
5680
5681 __ bind(&dont_need_remembered_set);
5682 }
5683
5684 CheckNeedsToInformIncrementalMarker(
5685 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
5686 InformIncrementalMarker(masm, mode);
5687 regs_.Restore(masm);
5688 __ ret(0);
5689 }
5690
5691
5692 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
5693 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
5694 #ifdef _WIN64
5695 Register arg3 = r8;
5696 Register arg2 = rdx;
5697 Register arg1 = rcx;
5698 #else
5699 Register arg3 = rdx;
5700 Register arg2 = rsi;
5701 Register arg1 = rdi;
5702 #endif
5703 Register address =
5704 arg1.is(regs_.address()) ? kScratchRegister : regs_.address();
5705 ASSERT(!address.is(regs_.object()));
5706 ASSERT(!address.is(arg1));
5707 __ Move(address, regs_.address());
5708 __ Move(arg1, regs_.object());
5709 if (mode == INCREMENTAL_COMPACTION) {
5710 // TODO(gc) Can we just set address arg2 in the beginning?
5711 __ Move(arg2, address);
5712 } else {
5713 ASSERT(mode == INCREMENTAL);
5714 __ movq(arg2, Operand(address, 0));
5715 }
5716 __ LoadAddress(arg3, ExternalReference::isolate_address());
5717 int argument_count = 3;
5718
5719 AllowExternalCallThatCantCauseGC scope(masm);
5720 __ PrepareCallCFunction(argument_count);
5721 if (mode == INCREMENTAL_COMPACTION) {
5722 __ CallCFunction(
5723 ExternalReference::incremental_evacuation_record_write_function(
5724 masm->isolate()),
5725 argument_count);
5726 } else {
5727 ASSERT(mode == INCREMENTAL);
5728 __ CallCFunction(
5729 ExternalReference::incremental_marking_record_write_function(
5730 masm->isolate()),
5731 argument_count);
5732 }
5733 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
5734 }
5735
5736
5737 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
5738 MacroAssembler* masm,
5739 OnNoNeedToInformIncrementalMarker on_no_need,
5740 Mode mode) {
5741 Label on_black;
5742 Label need_incremental;
5743 Label need_incremental_pop_object;
5744
5745 // Let's look at the color of the object: If it is not black we don't have
5746 // to inform the incremental marker.
5747 __ JumpIfBlack(regs_.object(),
5748 regs_.scratch0(),
5749 regs_.scratch1(),
5750 &on_black,
5751 Label::kNear);
5752
5753 regs_.Restore(masm);
5754 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
5755 __ RememberedSetHelper(
5756 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
5757 } else {
5758 __ ret(0);
5759 }
5760
5761 __ bind(&on_black);
5762
5763 // Get the value from the slot.
5764 __ movq(regs_.scratch0(), Operand(regs_.address(), 0));
5765
5766 if (mode == INCREMENTAL_COMPACTION) {
5767 Label ensure_not_white;
5768
5769 __ CheckPageFlag(regs_.scratch0(), // Contains value.
5770 regs_.scratch1(), // Scratch.
5771 MemoryChunk::kEvacuationCandidateMask,
5772 zero,
5773 &ensure_not_white,
5774 Label::kNear);
5775
5776 __ CheckPageFlag(regs_.object(),
5777 regs_.scratch1(), // Scratch.
5778 MemoryChunk::kSkipEvacuationSlotsRecordingMask,
5779 zero,
5780 &need_incremental);
5781
5782 __ bind(&ensure_not_white);
5783 }
5784
5785 // We need an extra register for this, so we push the object register
5786 // temporarily.
5787 __ push(regs_.object());
5788 __ EnsureNotWhite(regs_.scratch0(), // The value.
5789 regs_.scratch1(), // Scratch.
5790 regs_.object(), // Scratch.
5791 &need_incremental_pop_object,
5792 Label::kNear);
5793 __ pop(regs_.object());
5794
5795 regs_.Restore(masm);
5796 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
5797 __ RememberedSetHelper(
5798 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
5799 } else {
5800 __ ret(0);
5801 }
5802
5803 __ bind(&need_incremental_pop_object);
5804 __ pop(regs_.object());
5805
5806 __ bind(&need_incremental);
5807
5808 // Fall through when we need to inform the incremental marker.
5809 }
5810
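Summarizing CheckNeedsToInformIncrementalMarker: the marker only has to hear about the store when the object is already black (it will not be visited again), and either the stored value is still white, or, while compacting, the value sits on an evacuation-candidate page whose slot must be recorded. A boolean sketch of that decision, with the colors and page flags as illustrative parameters rather than V8's real accessors:

  enum Color { WHITE, GREY, BLACK };

  bool NeedsToInformIncrementalMarker(Color object_color,
                                      Color value_color,
                                      bool value_on_evacuation_candidate,
                                      bool object_skips_slot_recording,
                                      bool compacting) {
    if (object_color != BLACK) return false;  // object will still be (re)visited
    if (compacting && value_on_evacuation_candidate &&
        !object_skips_slot_recording) {
      return true;                            // slot must be recorded for evacuation
    }
    return value_color == WHITE;              // black -> white edge breaks the invariant
  }
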
5811
5519 #undef __ 5812 #undef __
5520 5813
5521 } } // namespace v8::internal 5814 } } // namespace v8::internal
5522 5815
5523 #endif // V8_TARGET_ARCH_X64 5816 #endif // V8_TARGET_ARCH_X64