OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_X87 | 7 #if V8_TARGET_ARCH_X87 |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 3499 matching lines...)
3510 // Registers: | 3510 // Registers: |
3511 // dictionary_: NameDictionary to probe. | 3511 // dictionary_: NameDictionary to probe. |
3512 // result_: used as scratch. | 3512 // result_: used as scratch. |
3513 // index_: will hold an index of entry if lookup is successful. | 3513 // index_: will hold an index of entry if lookup is successful. |
3514 // might alias with result_. | 3514 // might alias with result_. |
3515 // Returns: | 3515 // Returns: |
3516 // result_ is zero if lookup failed, non-zero otherwise. | 3516 // result_ is zero if lookup failed, non-zero otherwise. |
3517 | 3517 |
3518 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; | 3518 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; |
3519 | 3519 |
3520 Register scratch = result_; | 3520 Register scratch = result(); |
3521 | 3521 |
3522 __ mov(scratch, FieldOperand(dictionary_, kCapacityOffset)); | 3522 __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset)); |
3523 __ dec(scratch); | 3523 __ dec(scratch); |
3524 __ SmiUntag(scratch); | 3524 __ SmiUntag(scratch); |
3525 __ push(scratch); | 3525 __ push(scratch); |
3526 | 3526 |
3527 // If names of slots in range from 1 to kProbes - 1 for the hash value are | 3527 // If names of slots in range from 1 to kProbes - 1 for the hash value are |
3528 // not equal to the name and kProbes-th slot is not used (its name is the | 3528 // not equal to the name and kProbes-th slot is not used (its name is the |
3529 // undefined value), it guarantees the hash table doesn't contain the | 3529 // undefined value), it guarantees the hash table doesn't contain the |
3530 // property. It's true even if some slots represent deleted properties | 3530 // property. It's true even if some slots represent deleted properties |
3531 // (their names are the null value). | 3531 // (their names are the null value). |
3532 for (int i = kInlinedProbes; i < kTotalProbes; i++) { | 3532 for (int i = kInlinedProbes; i < kTotalProbes; i++) { |
3533 // Compute the masked index: (hash + i + i * i) & mask. | 3533 // Compute the masked index: (hash + i + i * i) & mask. |
3534 __ mov(scratch, Operand(esp, 2 * kPointerSize)); | 3534 __ mov(scratch, Operand(esp, 2 * kPointerSize)); |
3535 if (i > 0) { | 3535 if (i > 0) { |
3536 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i))); | 3536 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i))); |
3537 } | 3537 } |
3538 __ and_(scratch, Operand(esp, 0)); | 3538 __ and_(scratch, Operand(esp, 0)); |
3539 | 3539 |
3540 // Scale the index by multiplying by the entry size. | 3540 // Scale the index by multiplying by the entry size. |
3541 DCHECK(NameDictionary::kEntrySize == 3); | 3541 DCHECK(NameDictionary::kEntrySize == 3); |
3542 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. | 3542 __ lea(index(), Operand(scratch, scratch, times_2, 0)); // index *= 3. |
3543 | 3543 |
3544 // Having undefined at this place means the name is not contained. | 3544 // Having undefined at this place means the name is not contained. |
3545 DCHECK_EQ(kSmiTagSize, 1); | 3545 DCHECK_EQ(kSmiTagSize, 1); |
3546 __ mov(scratch, Operand(dictionary_, | 3546 __ mov(scratch, Operand(dictionary(), index(), times_pointer_size, |
3547 index_, | |
3548 times_pointer_size, | |
3549 kElementsStartOffset - kHeapObjectTag)); | 3547 kElementsStartOffset - kHeapObjectTag)); |
3550 __ cmp(scratch, isolate()->factory()->undefined_value()); | 3548 __ cmp(scratch, isolate()->factory()->undefined_value()); |
3551 __ j(equal, ¬_in_dictionary); | 3549 __ j(equal, ¬_in_dictionary); |
3552 | 3550 |
3553 // Stop if found the property. | 3551 // Stop if found the property. |
3554 __ cmp(scratch, Operand(esp, 3 * kPointerSize)); | 3552 __ cmp(scratch, Operand(esp, 3 * kPointerSize)); |
3555 __ j(equal, &in_dictionary); | 3553 __ j(equal, &in_dictionary); |
3556 | 3554 |
3557 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { | 3555 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) { |
3558 // If we hit a key that is not a unique name during negative | 3556 // If we hit a key that is not a unique name during negative |
3559 // lookup we have to bailout as this key might be equal to the | 3557 // lookup we have to bailout as this key might be equal to the |
3560 // key we are looking for. | 3558 // key we are looking for. |
3561 | 3559 |
3562 // Check if the entry name is not a unique name. | 3560 // Check if the entry name is not a unique name. |
3563 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 3561 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
3564 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset), | 3562 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset), |
3565 &maybe_in_dictionary); | 3563 &maybe_in_dictionary); |
3566 } | 3564 } |
3567 } | 3565 } |
3568 | 3566 |
3569 __ bind(&maybe_in_dictionary); | 3567 __ bind(&maybe_in_dictionary); |
3570 // If we are doing negative lookup then probing failure should be | 3568 // If we are doing negative lookup then probing failure should be |
3571 // treated as a lookup success. For positive lookup probing failure | 3569 // treated as a lookup success. For positive lookup probing failure |
3572 // should be treated as lookup failure. | 3570 // should be treated as lookup failure. |
3573 if (mode_ == POSITIVE_LOOKUP) { | 3571 if (mode() == POSITIVE_LOOKUP) { |
3574 __ mov(result_, Immediate(0)); | 3572 __ mov(result(), Immediate(0)); |
3575 __ Drop(1); | 3573 __ Drop(1); |
3576 __ ret(2 * kPointerSize); | 3574 __ ret(2 * kPointerSize); |
3577 } | 3575 } |
3578 | 3576 |
3579 __ bind(&in_dictionary); | 3577 __ bind(&in_dictionary); |
3580 __ mov(result_, Immediate(1)); | 3578 __ mov(result(), Immediate(1)); |
3581 __ Drop(1); | 3579 __ Drop(1); |
3582 __ ret(2 * kPointerSize); | 3580 __ ret(2 * kPointerSize); |
3583 | 3581 |
3584 __ bind(¬_in_dictionary); | 3582 __ bind(¬_in_dictionary); |
3585 __ mov(result_, Immediate(0)); | 3583 __ mov(result(), Immediate(0)); |
3586 __ Drop(1); | 3584 __ Drop(1); |
3587 __ ret(2 * kPointerSize); | 3585 __ ret(2 * kPointerSize); |
3588 } | 3586 } |
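
The probe loop above follows the quadratic probing scheme described in its comments: each masked index is computed as (hash + i + i*i) & mask and then scaled by the entry size of 3 to address the entry in the elements array. Below is a minimal standalone C++ sketch of that index arithmetic; it follows the formula in the comment rather than NameDictionary::GetProbeOffset itself, and kEntrySize is taken from the DCHECK, so treat it as an illustration, not V8 code.

#include <cstdint>
#include <cstdio>

// Illustrative model of the index arithmetic performed by the probe loop.
// Assumes the capacity is a power of two and kEntrySize == 3 (per the DCHECK).
constexpr uint32_t kEntrySize = 3;

uint32_t ProbeEntryIndex(uint32_t hash, uint32_t probe, uint32_t capacity) {
  uint32_t mask = capacity - 1;  // the stub derives this from the Smi capacity (dec, SmiUntag)
  uint32_t masked = (hash + probe + probe * probe) & mask;  // (hash + i + i*i) & mask
  return masked * kEntrySize;    // lea(index, [scratch + scratch * 2]): index *= 3
}

int main() {
  // Print the first few probe positions for one hash value in a 128-entry table.
  for (uint32_t i = 0; i < 4; ++i) {
    std::printf("probe %u -> element index %u\n", i, ProbeEntryIndex(0x2b, i, 128));
  }
  return 0;
}
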
3589 | 3587 |
3590 | 3588 |
3591 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( | 3589 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( |
3592 Isolate* isolate) { | 3590 Isolate* isolate) { |
3593 StoreBufferOverflowStub stub(isolate); | 3591 StoreBufferOverflowStub stub(isolate); |
3594 stub.GetCode(); | 3592 stub.GetCode(); |
3595 } | 3593 } |
3596 | 3594 |
3597 | 3595 |
3598 // Takes the input in 3 registers: address_, value_ and object_. A pointer to | 3596 // Takes the input in 3 registers: address_, value_ and object_. A pointer to |
3599 // the value has just been written into the object; now this stub makes sure | 3597 // the value has just been written into the object; now this stub makes sure |
3600 // we keep the GC informed. The word in the object where the value has been | 3598 // we keep the GC informed. The word in the object where the value has been |
3601 // written is in the address register. | 3599 // written is in the address register. |
3602 void RecordWriteStub::Generate(MacroAssembler* masm) { | 3600 void RecordWriteStub::Generate(MacroAssembler* masm) { |
3603 Label skip_to_incremental_noncompacting; | 3601 Label skip_to_incremental_noncompacting; |
3604 Label skip_to_incremental_compacting; | 3602 Label skip_to_incremental_compacting; |
3605 | 3603 |
3606 // The first two instructions are generated with labels so as to get the | 3604 // The first two instructions are generated with labels so as to get the |
3607 // offset fixed up correctly by the bind(Label*) call. We patch it back and | 3605 // offset fixed up correctly by the bind(Label*) call. We patch it back and |
3608 // forth between a compare instruction (a nop in this position) and the | 3606 // forth between a compare instruction (a nop in this position) and the |
3609 // real branch when we start and stop incremental heap marking. | 3607 // real branch when we start and stop incremental heap marking. |
3610 __ jmp(&skip_to_incremental_noncompacting, Label::kNear); | 3608 __ jmp(&skip_to_incremental_noncompacting, Label::kNear); |
3611 __ jmp(&skip_to_incremental_compacting, Label::kFar); | 3609 __ jmp(&skip_to_incremental_compacting, Label::kFar); |
3612 | 3610 |
3613 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { | 3611 if (remembered_set_action() == EMIT_REMEMBERED_SET) { |
3614 __ RememberedSetHelper(object_, | 3612 __ RememberedSetHelper(object(), address(), value(), |
3615 address_, | |
3616 value_, | |
3617 MacroAssembler::kReturnAtEnd); | 3613 MacroAssembler::kReturnAtEnd); |
3618 } else { | 3614 } else { |
3619 __ ret(0); | 3615 __ ret(0); |
3620 } | 3616 } |
3621 | 3617 |
3622 __ bind(&skip_to_incremental_noncompacting); | 3618 __ bind(&skip_to_incremental_noncompacting); |
3623 GenerateIncremental(masm, INCREMENTAL); | 3619 GenerateIncremental(masm, INCREMENTAL); |
3624 | 3620 |
3625 __ bind(&skip_to_incremental_compacting); | 3621 __ bind(&skip_to_incremental_compacting); |
3626 GenerateIncremental(masm, INCREMENTAL_COMPACTION); | 3622 GenerateIncremental(masm, INCREMENTAL_COMPACTION); |
3627 | 3623 |
3628 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY. | 3624 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY. |
3629 // Will be checked in IncrementalMarking::ActivateGeneratedStub. | 3625 // Will be checked in IncrementalMarking::ActivateGeneratedStub. |
3630 masm->set_byte_at(0, kTwoByteNopInstruction); | 3626 masm->set_byte_at(0, kTwoByteNopInstruction); |
3631 masm->set_byte_at(2, kFiveByteNopInstruction); | 3627 masm->set_byte_at(2, kFiveByteNopInstruction); |
3632 } | 3628 } |
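
The mode switching described in the comments above works by rewriting the stub's first bytes in place: while incremental marking is off, both label jumps are replaced by compare-with-immediate instructions whose immediate fields swallow the jump displacements, so they behave as two- and five-byte nops. The sketch below models that patching; the opcode values are an assumption of this sketch (the real constants live in the stub's header), and PatchMode is a hypothetical helper, not V8 API.

#include <cstdint>
#include <cstdio>

// Conceptual model of how the first seven bytes of the stub encode its mode.
// code[0..1] is the short-jump slot, code[2..6] the near-jump slot, mirroring
// the two jumps emitted at the top of RecordWriteStub::Generate.
enum Mode { STORE_BUFFER_ONLY, INCREMENTAL, INCREMENTAL_COMPACTION };

constexpr uint8_t kTwoByteNop  = 0x3C;  // cmpb al, imm8: consumes the rel8 byte
constexpr uint8_t kTwoByteJmp  = 0xEB;  // jmp rel8
constexpr uint8_t kFiveByteNop = 0x3D;  // cmpl eax, imm32: consumes the rel32 bytes
constexpr uint8_t kFiveByteJmp = 0xE9;  // jmp rel32

void PatchMode(uint8_t* code, Mode mode) {
  switch (mode) {
    case STORE_BUFFER_ONLY:        // both branches disabled; fall through to the
      code[0] = kTwoByteNop;       // store-buffer-only path
      code[2] = kFiveByteNop;
      break;
    case INCREMENTAL:              // take the first (non-compacting) branch
      code[0] = kTwoByteJmp;
      code[2] = kFiveByteNop;
      break;
    case INCREMENTAL_COMPACTION:   // skip the first slot, take the second branch
      code[0] = kTwoByteNop;
      code[2] = kFiveByteJmp;
      break;
  }
}

int main() {
  uint8_t code[7] = {0};
  PatchMode(code, INCREMENTAL);
  std::printf("byte 0: 0x%02X, byte 2: 0x%02X\n",
              static_cast<unsigned>(code[0]), static_cast<unsigned>(code[2]));
  return 0;
}

Note that the two set_byte_at calls at the end of Generate correspond to the STORE_BUFFER_ONLY case of this sketch, which is why the stub's initial mode is expected to be STORE_BUFFER_ONLY.
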
3633 | 3629 |
3634 | 3630 |
3635 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { | 3631 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { |
3636 regs_.Save(masm); | 3632 regs_.Save(masm); |
3637 | 3633 |
3638 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { | 3634 if (remembered_set_action() == EMIT_REMEMBERED_SET) { |
3639 Label dont_need_remembered_set; | 3635 Label dont_need_remembered_set; |
3640 | 3636 |
3641 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); | 3637 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); |
3642 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. | 3638 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. |
3643 regs_.scratch0(), | 3639 regs_.scratch0(), |
3644 &dont_need_remembered_set); | 3640 &dont_need_remembered_set); |
3645 | 3641 |
3646 __ CheckPageFlag(regs_.object(), | 3642 __ CheckPageFlag(regs_.object(), |
3647 regs_.scratch0(), | 3643 regs_.scratch0(), |
3648 1 << MemoryChunk::SCAN_ON_SCAVENGE, | 3644 1 << MemoryChunk::SCAN_ON_SCAVENGE, |
3649 not_zero, | 3645 not_zero, |
3650 &dont_need_remembered_set); | 3646 &dont_need_remembered_set); |
3651 | 3647 |
3652 // First notify the incremental marker if necessary, then update the | 3648 // First notify the incremental marker if necessary, then update the |
3653 // remembered set. | 3649 // remembered set. |
3654 CheckNeedsToInformIncrementalMarker( | 3650 CheckNeedsToInformIncrementalMarker( |
3655 masm, | 3651 masm, |
3656 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, | 3652 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, |
3657 mode); | 3653 mode); |
3658 InformIncrementalMarker(masm); | 3654 InformIncrementalMarker(masm); |
3659 regs_.Restore(masm); | 3655 regs_.Restore(masm); |
3660 __ RememberedSetHelper(object_, | 3656 __ RememberedSetHelper(object(), address(), value(), |
3661 address_, | |
3662 value_, | |
3663 MacroAssembler::kReturnAtEnd); | 3657 MacroAssembler::kReturnAtEnd); |
3664 | 3658 |
3665 __ bind(&dont_need_remembered_set); | 3659 __ bind(&dont_need_remembered_set); |
3666 } | 3660 } |
3667 | 3661 |
3668 CheckNeedsToInformIncrementalMarker( | 3662 CheckNeedsToInformIncrementalMarker( |
3669 masm, | 3663 masm, |
3670 kReturnOnNoNeedToInformIncrementalMarker, | 3664 kReturnOnNoNeedToInformIncrementalMarker, |
3671 mode); | 3665 mode); |
3672 InformIncrementalMarker(masm); | 3666 InformIncrementalMarker(masm); |
(...skipping 40 matching lines...)
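
In the GenerateIncremental body above, the remembered set only needs a new entry when the store creates an old-to-new pointer that is not already covered: the stored value must be in new space and the object's page must not already carry the SCAN_ON_SCAVENGE flag. When an entry is needed, the stub first gives the incremental marker a chance to react and only then updates the remembered set. A hypothetical predicate capturing that test is shown here; the parameter names merely stand in for the JumpIfNotInNewSpace and CheckPageFlag results and are not V8 API.

// Hypothetical sketch, not V8 code: a remembered-set entry is required only if
// the stored value lives in new space and the object's page is not already
// flagged to be scanned wholesale on scavenge.
bool NeedsRememberedSetEntry(bool value_in_new_space, bool page_scan_on_scavenge) {
  return value_in_new_space && !page_scan_on_scavenge;
}
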
3713 // Let's look at the color of the object: If it is not black we don't have | 3707 // Let's look at the color of the object: If it is not black we don't have |
3714 // to inform the incremental marker. | 3708 // to inform the incremental marker. |
3715 __ JumpIfBlack(regs_.object(), | 3709 __ JumpIfBlack(regs_.object(), |
3716 regs_.scratch0(), | 3710 regs_.scratch0(), |
3717 regs_.scratch1(), | 3711 regs_.scratch1(), |
3718 &object_is_black, | 3712 &object_is_black, |
3719 Label::kNear); | 3713 Label::kNear); |
3720 | 3714 |
3721 regs_.Restore(masm); | 3715 regs_.Restore(masm); |
3722 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { | 3716 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { |
3723 __ RememberedSetHelper(object_, | 3717 __ RememberedSetHelper(object(), address(), value(), |
3724 address_, | |
3725 value_, | |
3726 MacroAssembler::kReturnAtEnd); | 3718 MacroAssembler::kReturnAtEnd); |
3727 } else { | 3719 } else { |
3728 __ ret(0); | 3720 __ ret(0); |
3729 } | 3721 } |
3730 | 3722 |
3731 __ bind(&object_is_black); | 3723 __ bind(&object_is_black); |
3732 | 3724 |
3733 // Get the value from the slot. | 3725 // Get the value from the slot. |
3734 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); | 3726 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); |
3735 | 3727 |
(...skipping 24 matching lines...)
3760 __ push(regs_.object()); | 3752 __ push(regs_.object()); |
3761 __ EnsureNotWhite(regs_.scratch0(), // The value. | 3753 __ EnsureNotWhite(regs_.scratch0(), // The value. |
3762 regs_.scratch1(), // Scratch. | 3754 regs_.scratch1(), // Scratch. |
3763 regs_.object(), // Scratch. | 3755 regs_.object(), // Scratch. |
3764 &need_incremental_pop_object, | 3756 &need_incremental_pop_object, |
3765 Label::kNear); | 3757 Label::kNear); |
3766 __ pop(regs_.object()); | 3758 __ pop(regs_.object()); |
3767 | 3759 |
3768 regs_.Restore(masm); | 3760 regs_.Restore(masm); |
3769 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { | 3761 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { |
3770 __ RememberedSetHelper(object_, | 3762 __ RememberedSetHelper(object(), address(), value(), |
3771 address_, | |
3772 value_, | |
3773 MacroAssembler::kReturnAtEnd); | 3763 MacroAssembler::kReturnAtEnd); |
3774 } else { | 3764 } else { |
3775 __ ret(0); | 3765 __ ret(0); |
3776 } | 3766 } |
3777 | 3767 |
3778 __ bind(&need_incremental_pop_object); | 3768 __ bind(&need_incremental_pop_object); |
3779 __ pop(regs_.object()); | 3769 __ pop(regs_.object()); |
3780 | 3770 |
3781 __ bind(&need_incremental); | 3771 __ bind(&need_incremental); |
3782 | 3772 |
(...skipping 618 matching lines...)
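
The color checks in CheckNeedsToInformIncrementalMarker above implement the usual tri-color invariant: a store into an object that is not yet black cannot hide anything from the marker, so the stub can return (or just update the remembered set); only when a black object is made to point at a white value does the marker need to revisit that value. A hedged, standalone sketch of that decision follows; object_is_black and value_is_white are plain booleans standing in for the JumpIfBlack and EnsureNotWhite macro-assembler checks, not the real helpers.

// Hypothetical model of the decision made by CheckNeedsToInformIncrementalMarker.
enum class MarkerAction { kNothingToDo, kGreyTheValue };

MarkerAction DecideMarkerAction(bool object_is_black, bool value_is_white) {
  // A non-black (unmarked or grey) object will still be scanned, so the new
  // pointer cannot be missed; nothing needs to be recorded.
  if (!object_is_black) return MarkerAction::kNothingToDo;
  // A black object now points at a white value: the value must be greyed
  // (EnsureNotWhite) or the marker would treat it as unreachable.
  if (value_is_white) return MarkerAction::kGreyTheValue;
  return MarkerAction::kNothingToDo;
}
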
4401 Operand(ebp, 7 * kPointerSize), | 4391 Operand(ebp, 7 * kPointerSize), |
4402 NULL); | 4392 NULL); |
4403 } | 4393 } |
4404 | 4394 |
4405 | 4395 |
4406 #undef __ | 4396 #undef __ |
4407 | 4397 |
4408 } } // namespace v8::internal | 4398 } } // namespace v8::internal |
4409 | 4399 |
4410 #endif // V8_TARGET_ARCH_X87 | 4400 #endif // V8_TARGET_ARCH_X87 |