OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 958 matching lines...)
969 // in that case. | 969 // in that case. |
970 if (op_ == Token::SHR) { | 970 if (op_ == Token::SHR) { |
971 __ bind(&non_smi_shr_result); | 971 __ bind(&non_smi_shr_result); |
972 Label allocation_failed; | 972 Label allocation_failed; |
973 __ movl(rbx, rax); // rbx holds result value (uint32 value as int64). | 973 __ movl(rbx, rax); // rbx holds result value (uint32 value as int64). |
974 // Allocate heap number in new space. | 974 // Allocate heap number in new space. |
975 // Not using AllocateHeapNumber macro in order to reuse | 975 // Not using AllocateHeapNumber macro in order to reuse |
976 // already loaded heap_number_map. | 976 // already loaded heap_number_map. |
977 __ AllocateInNewSpace(HeapNumber::kSize, | 977 __ AllocateInNewSpace(HeapNumber::kSize, |
978 rax, | 978 rax, |
979 rcx, | 979 rdx, |
980 no_reg, | 980 no_reg, |
981 &allocation_failed, | 981 &allocation_failed, |
982 TAG_OBJECT); | 982 TAG_OBJECT); |
983 // Set the map. | 983 // Set the map. |
984 if (FLAG_debug_code) { | 984 if (FLAG_debug_code) { |
985 __ AbortIfNotRootValue(heap_number_map, | 985 __ AbortIfNotRootValue(heap_number_map, |
986 Heap::kHeapNumberMapRootIndex, | 986 Heap::kHeapNumberMapRootIndex, |
987 "HeapNumberMap register clobbered."); | 987 "HeapNumberMap register clobbered."); |
988 } | 988 } |
989 __ movq(FieldOperand(rax, HeapObject::kMapOffset), | 989 __ movq(FieldOperand(rax, HeapObject::kMapOffset), |
(...skipping 1838 matching lines...)
2828 // non-zero value, which indicates not equal, so just return. | 2828 // non-zero value, which indicates not equal, so just return. |
2829 __ ret(0); | 2829 __ ret(0); |
2830 } | 2830 } |
2831 | 2831 |
2832 __ bind(&check_for_strings); | 2832 __ bind(&check_for_strings); |
2833 | 2833 |
2834 __ JumpIfNotBothSequentialAsciiStrings( | 2834 __ JumpIfNotBothSequentialAsciiStrings( |
2835 rdx, rax, rcx, rbx, &check_unequal_objects); | 2835 rdx, rax, rcx, rbx, &check_unequal_objects); |
2836 | 2836 |
2837 // Inline comparison of ascii strings. | 2837 // Inline comparison of ascii strings. |
2838 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, | 2838 if (cc_ == equal) { |
| 2839 StringCompareStub::GenerateFlatAsciiStringEquals(masm, |
2839 rdx, | 2840 rdx, |
2840 rax, | 2841 rax, |
2841 rcx, | 2842 rcx, |
2842 rbx, | 2843 rbx); |
2843 rdi, | 2844 } else { |
2844 r8); | 2845 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, |
| 2846 rdx, |
| 2847 rax, |
| 2848 rcx, |
| 2849 rbx, |
| 2850 rdi, |
| 2851 r8); |
| 2852 } |
2845 | 2853 |
2846 #ifdef DEBUG | 2854 #ifdef DEBUG |
2847 __ Abort("Unexpected fall-through from string comparison"); | 2855 __ Abort("Unexpected fall-through from string comparison"); |
2848 #endif | 2856 #endif |
2849 | 2857 |
2850 __ bind(&check_unequal_objects); | 2858 __ bind(&check_unequal_objects); |
2851 if (cc_ == equal && !strict_) { | 2859 if (cc_ == equal && !strict_) { |
2852 // Not strict equality. Objects are unequal if | 2860 // Not strict equality. Objects are unequal if |
2853 // they are both JSObjects and not undetectable, | 2861 // they are both JSObjects and not undetectable, |
2854 // and their pointers are different. | 2862 // and their pointers are different. |
(...skipping 1627 matching lines...)
4482 __ bind(&return_rax); | 4490 __ bind(&return_rax); |
4483 __ IncrementCounter(counters->sub_string_native(), 1); | 4491 __ IncrementCounter(counters->sub_string_native(), 1); |
4484 __ ret(kArgumentsSize); | 4492 __ ret(kArgumentsSize); |
4485 | 4493 |
4486 // Just jump to runtime to create the sub string. | 4494 // Just jump to runtime to create the sub string. |
4487 __ bind(&runtime); | 4495 __ bind(&runtime); |
4488 __ TailCallRuntime(Runtime::kSubString, 3, 1); | 4496 __ TailCallRuntime(Runtime::kSubString, 3, 1); |
4489 } | 4497 } |
4490 | 4498 |
4491 | 4499 |
| 4500 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm, |
| 4501 Register left, |
| 4502 Register right, |
| 4503 Register scratch1, |
| 4504 Register scratch2) { |
| 4505 Register length = scratch1; |
| 4506 |
| 4507 // Compare lengths. |
| 4508 NearLabel check_zero_length; |
| 4509 __ movq(length, FieldOperand(left, String::kLengthOffset)); |
| 4510 __ SmiCompare(length, FieldOperand(right, String::kLengthOffset)); |
| 4511 __ j(equal, &check_zero_length); |
| 4512 __ Move(rax, Smi::FromInt(NOT_EQUAL)); |
| 4513 __ ret(0); |
| 4514 |
| 4515 // Check if the length is zero. |
| 4516 NearLabel compare_chars; |
| 4517 __ bind(&check_zero_length); |
| 4518 STATIC_ASSERT(kSmiTag == 0); |
| 4519 __ SmiTest(length); |
| 4520 __ j(not_zero, &compare_chars); |
| 4521 __ Move(rax, Smi::FromInt(EQUAL)); |
| 4522 __ ret(0); |
| 4523 |
| 4524 // Compare characters. |
| 4525 __ bind(&compare_chars); |
| 4526 NearLabel strings_not_equal; |
| 4527 GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2, |
| 4528 &strings_not_equal); |
| 4529 |
| 4530 // Characters are equal. |
| 4531 __ Move(rax, Smi::FromInt(EQUAL)); |
| 4532 __ ret(0); |
| 4533 |
| 4534 // Characters are not equal. |
| 4535 __ bind(&strings_not_equal); |
| 4536 __ Move(rax, Smi::FromInt(NOT_EQUAL)); |
| 4537 __ ret(0); |
| 4538 } |
| 4539 |
| 4540 |
4492 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, | 4541 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, |
4493 Register left, | 4542 Register left, |
4494 Register right, | 4543 Register right, |
4495 Register scratch1, | 4544 Register scratch1, |
4496 Register scratch2, | 4545 Register scratch2, |
4497 Register scratch3, | 4546 Register scratch3, |
4498 Register scratch4) { | 4547 Register scratch4) { |
4499 // Ensure that you can always subtract a string length from a non-negative | 4548 // Ensure that you can always subtract a string length from a non-negative |
4500 // number (e.g. another length). | 4549 // number (e.g. another length). |
4501 STATIC_ASSERT(String::kMaxLength < 0x7fffffff); | 4550 STATIC_ASSERT(String::kMaxLength < 0x7fffffff); |
(...skipping 14 matching lines...)
4516 __ SmiSub(scratch1, scratch1, length_difference); | 4565 __ SmiSub(scratch1, scratch1, length_difference); |
4517 __ bind(&left_shorter); | 4566 __ bind(&left_shorter); |
4518 // Register scratch1 now holds Min(left.length, right.length). | 4567 // Register scratch1 now holds Min(left.length, right.length). |
4519 const Register min_length = scratch1; | 4568 const Register min_length = scratch1; |
4520 | 4569 |
4521 NearLabel compare_lengths; | 4570 NearLabel compare_lengths; |
4522 // If min-length is zero, go directly to comparing lengths. | 4571 // If min-length is zero, go directly to comparing lengths. |
4523 __ SmiTest(min_length); | 4572 __ SmiTest(min_length); |
4524 __ j(zero, &compare_lengths); | 4573 __ j(zero, &compare_lengths); |
4525 | 4574 |
4526 __ SmiToInteger32(min_length, min_length); | 4575 // Compare loop. |
| 4576 NearLabel result_not_equal; |
| 4577 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2, |
| 4578 &result_not_equal); |
4527 | 4579 |
4528 // Registers scratch2 and scratch3 are free. | |
4529 NearLabel result_not_equal; | |
4530 Label loop; | |
4531 { | |
4532 // Check characters 0 .. min_length - 1 in a loop. | |
4533 // Use scratch3 as loop index, min_length as limit and scratch2 | |
4534 // for computation. | |
4535 const Register index = scratch3; | |
4536 __ Set(index, 0); // Index into strings. | |
4537 __ bind(&loop); | |
4538 // Compare characters. | |
4539 // TODO(lrn): Could we load more than one character at a time? | |
4540 __ movb(scratch2, FieldOperand(left, | |
4541 index, | |
4542 times_1, | |
4543 SeqAsciiString::kHeaderSize)); | |
4544 // Increment index and use -1 modifier on next load to give | |
4545 // the previous load extra time to complete. | |
4546 __ addl(index, Immediate(1)); | |
4547 __ cmpb(scratch2, FieldOperand(right, | |
4548 index, | |
4549 times_1, | |
4550 SeqAsciiString::kHeaderSize - 1)); | |
4551 __ j(not_equal, &result_not_equal); | |
4552 __ cmpl(index, min_length); | |
4553 __ j(not_equal, &loop); | |
4554 } | |
4555 // Completed loop without finding different characters. | 4580 // Completed loop without finding different characters. |
4556 // Compare lengths (precomputed). | 4581 // Compare lengths (precomputed). |
4557 __ bind(&compare_lengths); | 4582 __ bind(&compare_lengths); |
4558 __ SmiTest(length_difference); | 4583 __ SmiTest(length_difference); |
4559 __ j(not_zero, &result_not_equal); | 4584 __ j(not_zero, &result_not_equal); |
4560 | 4585 |
4561 // Result is EQUAL. | 4586 // Result is EQUAL. |
4562 __ Move(rax, Smi::FromInt(EQUAL)); | 4587 __ Move(rax, Smi::FromInt(EQUAL)); |
4563 __ ret(0); | 4588 __ ret(0); |
4564 | 4589 |
4565 NearLabel result_greater; | 4590 NearLabel result_greater; |
4566 __ bind(&result_not_equal); | 4591 __ bind(&result_not_equal); |
4567 // Unequal comparison of left to right, either character or length. | 4592 // Unequal comparison of left to right, either character or length. |
4568 __ j(greater, &result_greater); | 4593 __ j(greater, &result_greater); |
4569 | 4594 |
4570 // Result is LESS. | 4595 // Result is LESS. |
4571 __ Move(rax, Smi::FromInt(LESS)); | 4596 __ Move(rax, Smi::FromInt(LESS)); |
4572 __ ret(0); | 4597 __ ret(0); |
4573 | 4598 |
4574 // Result is GREATER. | 4599 // Result is GREATER. |
4575 __ bind(&result_greater); | 4600 __ bind(&result_greater); |
4576 __ Move(rax, Smi::FromInt(GREATER)); | 4601 __ Move(rax, Smi::FromInt(GREATER)); |
4577 __ ret(0); | 4602 __ ret(0); |
4578 } | 4603 } |
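For reference, the ordering this stub produces is the usual flat-string comparison: compare the first min(length) characters, and if they all agree, order by length. A minimal standalone C++ sketch (function name invented for illustration; the stub itself emits this logic as x64 assembly above):

  #include <cstddef>

  // Returns -1 (LESS), 0 (EQUAL) or 1 (GREATER), mirroring the smi results
  // the stub stores in rax.
  int CompareFlatAsciiStrings(const char* left, size_t left_length,
                              const char* right, size_t right_length) {
    size_t min_length = left_length < right_length ? left_length : right_length;
    for (size_t i = 0; i < min_length; i++) {
      if (left[i] != right[i]) {
        // ASCII data, so signed vs. unsigned comparison makes no difference.
        return left[i] < right[i] ? -1 : 1;
      }
    }
    if (left_length == right_length) return 0;   // EQUAL
    return left_length < right_length ? -1 : 1;  // Shorter string is LESS.
  }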
4579 | 4604 |
4580 | 4605 |
| 4606 void StringCompareStub::GenerateAsciiCharsCompareLoop( |
| 4607 MacroAssembler* masm, |
| 4608 Register left, |
| 4609 Register right, |
| 4610 Register length, |
| 4611 Register scratch, |
| 4612 NearLabel* chars_not_equal) { |
| 4613 // Change index to run from -length to -1 by adding length to string |
| 4614 // start. This means that loop ends when index reaches zero, which |
| 4615 // doesn't need an additional compare. |
| 4616 __ SmiToInteger32(length, length); |
| 4617 __ lea(left, |
| 4618 FieldOperand(left, length, times_1, SeqAsciiString::kHeaderSize)); |
| 4619 __ lea(right, |
| 4620 FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize)); |
| 4621 __ neg(length); |
| 4622 Register index = length; // index = -length; |
| 4623 |
| 4624 // Compare loop. |
| 4625 NearLabel loop; |
| 4626 __ bind(&loop); |
| 4627 __ movb(scratch, Operand(left, index, times_1, 0)); |
| 4628 __ cmpb(scratch, Operand(right, index, times_1, 0)); |
| 4629 __ j(not_equal, chars_not_equal); |
| 4630 __ addq(index, Immediate(1)); |
| 4631 __ j(not_zero, &loop); |
| 4632 } |
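The comment at the top of this helper describes the negative-index trick: both string bases are biased by the length, so a single counter runs from -length up to zero and the loop can branch on the flags left by the increment instead of performing a separate bounds compare. A rough standalone C++ equivalent (illustrative only; assumes length > 0, as the callers guarantee):

  // Sketch of the negative-index compare loop used above.
  bool AsciiCharsEqual(const char* left, const char* right, long length) {
    left += length;          // Bias the bases by the length...
    right += length;
    long index = -length;    // ...so the index runs from -length up to 0.
    do {
      if (left[index] != right[index]) return false;
    } while (++index != 0);  // Loop ends when the index reaches zero.
    return true;
  }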
| 4633 |
| 4634 |
4581 void StringCompareStub::Generate(MacroAssembler* masm) { | 4635 void StringCompareStub::Generate(MacroAssembler* masm) { |
4582 Label runtime; | 4636 Label runtime; |
4583 | 4637 |
4584 // Stack frame on entry. | 4638 // Stack frame on entry. |
4585 // rsp[0]: return address | 4639 // rsp[0]: return address |
4586 // rsp[8]: right string | 4640 // rsp[8]: right string |
4587 // rsp[16]: left string | 4641 // rsp[16]: left string |
4588 | 4642 |
4589 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left | 4643 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left |
4590 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right | 4644 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right |
(...skipping 87 matching lines...)
4678 | 4732 |
4679 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS); | 4733 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS); |
4680 __ bind(&generic_stub); | 4734 __ bind(&generic_stub); |
4681 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); | 4735 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); |
4682 | 4736 |
4683 __ bind(&miss); | 4737 __ bind(&miss); |
4684 GenerateMiss(masm); | 4738 GenerateMiss(masm); |
4685 } | 4739 } |
4686 | 4740 |
4687 | 4741 |
| 4742 void ICCompareStub::GenerateStrings(MacroAssembler* masm) { |
| 4743 ASSERT(state_ == CompareIC::STRINGS); |
| 4744 ASSERT(GetCondition() == equal); |
| 4745 Label miss; |
| 4746 |
| 4747 // Registers containing left and right operands respectively. |
| 4748 Register left = rdx; |
| 4749 Register right = rax; |
| 4750 Register tmp1 = rcx; |
| 4751 Register tmp2 = rbx; |
| 4752 Register tmp3 = rdi; |
| 4753 |
| 4754 // Check that both operands are heap objects. |
| 4755 Condition cond = masm->CheckEitherSmi(left, right, tmp1); |
| 4756 __ j(cond, &miss); |
| 4757 |
| 4758 // Check that both operands are strings. This leaves the instance |
| 4759 // types loaded in tmp1 and tmp2. |
| 4760 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
| 4761 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
| 4762 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
| 4763 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
| 4764 __ movq(tmp3, tmp1); |
| 4765 STATIC_ASSERT(kNotStringTag != 0); |
| 4766 __ or_(tmp3, tmp2); |
| 4767 __ testl(tmp3, Immediate(kIsNotStringMask)); |
| 4768 __ j(not_zero, &miss); |
| 4769 |
| 4770 // Fast check for identical strings. |
| 4771 NearLabel not_same; |
| 4772 __ cmpq(left, right); |
| 4773 __ j(not_equal, ¬_same); |
| 4774 STATIC_ASSERT(EQUAL == 0); |
| 4775 STATIC_ASSERT(kSmiTag == 0); |
| 4776 __ Move(rax, Smi::FromInt(EQUAL)); |
| 4777 __ ret(0); |
| 4778 |
| 4779 // Handle not identical strings. |
| 4780 __ bind(¬_same); |
| 4781 |
| 4782 // Check that both strings are symbols. If they are, we're done |
| 4783 // because we already know they are not identical. |
| 4784 NearLabel do_compare; |
| 4785 STATIC_ASSERT(kSymbolTag != 0); |
| 4786 __ and_(tmp1, tmp2); |
| 4787 __ testl(tmp1, Immediate(kIsSymbolMask)); |
| 4788 __ j(zero, &do_compare); |
| 4789 // Make sure rax is non-zero. At this point input operands are |
| 4790 // guaranteed to be non-zero. |
| 4791 ASSERT(right.is(rax)); |
| 4792 __ ret(0); |
| 4793 |
| 4794 // Check that both strings are sequential ASCII. |
| 4795 Label runtime; |
| 4796 __ bind(&do_compare); |
| 4797 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime); |
| 4798 |
| 4799 // Compare flat ASCII strings. Returns when done. |
| 4800 StringCompareStub::GenerateFlatAsciiStringEquals( |
| 4801 masm, left, right, tmp1, tmp2); |
| 4802 |
| 4803 // Handle more complex cases in runtime. |
| 4804 __ bind(&runtime); |
| 4805 __ pop(tmp1); // Return address. |
| 4806 __ push(left); |
| 4807 __ push(right); |
| 4808 __ push(tmp1); |
| 4809 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); |
| 4810 |
| 4811 __ bind(&miss); |
| 4812 GenerateMiss(masm); |
| 4813 } |
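The symbol fast path above relies on symbols being canonicalized: two distinct symbol objects can never hold equal string contents, so once the identical-pointer case has returned EQUAL, finding that both operands are symbols is enough to answer "not equal" without touching the characters. Roughly, as a standalone sketch (names invented for illustration):

  enum Result { kEqual, kNotEqual, kNeedFullCompare };

  Result SymbolFastPath(const void* left, bool left_is_symbol,
                        const void* right, bool right_is_symbol) {
    if (left == right) return kEqual;  // Identical objects compare equal.
    if (left_is_symbol && right_is_symbol) {
      // Symbols are unique per content, so distinct symbols cannot be equal.
      return kNotEqual;
    }
    return kNeedFullCompare;  // Fall back to the character-wise comparison.
  }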
| 4814 |
| 4815 |
4688 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { | 4816 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { |
4689 ASSERT(state_ == CompareIC::OBJECTS); | 4817 ASSERT(state_ == CompareIC::OBJECTS); |
4690 NearLabel miss; | 4818 NearLabel miss; |
4691 Condition either_smi = masm->CheckEitherSmi(rdx, rax); | 4819 Condition either_smi = masm->CheckEitherSmi(rdx, rax); |
4692 __ j(either_smi, &miss); | 4820 __ j(either_smi, &miss); |
4693 | 4821 |
4694 __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx); | 4822 __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx); |
4695 __ j(not_equal, &miss, not_taken); | 4823 __ j(not_equal, &miss, not_taken); |
4696 __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx); | 4824 __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx); |
4697 __ j(not_equal, &miss, not_taken); | 4825 __ j(not_equal, &miss, not_taken); |
(...skipping 31 matching lines...)
4729 __ pop(rcx); | 4857 __ pop(rcx); |
4730 __ pop(rax); | 4858 __ pop(rax); |
4731 __ pop(rdx); | 4859 __ pop(rdx); |
4732 __ push(rcx); | 4860 __ push(rcx); |
4733 | 4861 |
4734 // Do a tail call to the rewritten stub. | 4862 // Do a tail call to the rewritten stub. |
4735 __ jmp(rdi); | 4863 __ jmp(rdi); |
4736 } | 4864 } |
4737 | 4865 |
4738 | 4866 |
| 4867 void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, |
| 4868 Label* miss, |
| 4869 Label* done, |
| 4870 Register properties, |
| 4871 String* name, |
| 4872 Register r0) { |
| 4873 // If names of slots in range from 1 to kProbes - 1 for the hash value are |
| 4874 // not equal to the name and kProbes-th slot is not used (its name is the |
| 4875 // undefined value), it guarantees the hash table doesn't contain the |
| 4876 // property. It's true even if some slots represent deleted properties |
| 4877 // (their names are the null value). |
| 4878 for (int i = 0; i < kInlinedProbes; i++) { |
| 4879 // r0 points to properties hash. |
| 4880 // Compute the masked index: (hash + i + i * i) & mask. |
| 4881 Register index = r0; |
| 4882 // Capacity is smi 2^n. |
| 4883 __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset)); |
| 4884 __ decl(index); |
| 4885 __ and_(index, |
| 4886 Immediate(name->Hash() + StringDictionary::GetProbeOffset(i))); |
| 4887 |
| 4888 // Scale the index by multiplying by the entry size. |
| 4889 ASSERT(StringDictionary::kEntrySize == 3); |
| 4890 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. |
| 4891 |
| 4892 Register entity_name = r0; |
| 4893 // Having undefined at this place means the name is not contained. |
| 4894 ASSERT_EQ(kSmiTagSize, 1); |
| 4895 __ movq(entity_name, Operand(properties, |
| 4896 index, |
| 4897 times_pointer_size, |
| 4898 kElementsStartOffset - kHeapObjectTag)); |
| 4899 __ Cmp(entity_name, masm->isolate()->factory()->undefined_value()); |
| 4900 __ j(equal, done); |
| 4901 |
| 4902 // Stop if found the property. |
| 4903 __ Cmp(entity_name, Handle<String>(name)); |
| 4904 __ j(equal, miss); |
| 4905 |
| 4906 // Check if the entry name is not a symbol. |
| 4907 __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); |
| 4908 __ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset), |
| 4909 Immediate(kIsSymbolMask)); |
| 4910 __ j(zero, miss); |
| 4911 } |
| 4912 |
| 4913 StringDictionaryLookupStub stub(properties, |
| 4914 r0, |
| 4915 r0, |
| 4916 StringDictionaryLookupStub::NEGATIVE_LOOKUP); |
| 4917 __ Push(Handle<Object>(name)); |
| 4918 __ push(Immediate(name->Hash())); |
| 4919 __ CallStub(&stub); |
| 4920 __ testq(r0, r0); |
| 4921 __ j(not_zero, miss); |
| 4922 __ jmp(done); |
| 4923 } |
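Both lookup helpers use the probe sequence described in their comments: on the i-th probe the slot is (hash + i + i*i) masked by capacity - 1, and the slot index is then scaled by StringDictionary::kEntrySize (3) to address the entry's words. A small sketch of just that index computation (function name invented; assumes a power-of-two capacity, as the code's mask computation implies):

  #include <stdint.h>

  uint32_t EntryWordIndex(uint32_t hash, uint32_t probe, uint32_t capacity) {
    uint32_t mask = capacity - 1;                         // Capacity is 2^n.
    uint32_t slot = (hash + probe + probe * probe) & mask;
    return slot * 3;  // StringDictionary::kEntrySize == 3 words per entry.
  }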
| 4924 |
| 4925 |
| 4926 // Probe the string dictionary in the |elements| register. Jump to the |
| 4927 // |done| label if a property with the given name is found leaving the |
| 4928 // index into the dictionary in |r1|. Jump to the |miss| label |
| 4929 // otherwise. |
| 4930 void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, |
| 4931 Label* miss, |
| 4932 Label* done, |
| 4933 Register elements, |
| 4934 Register name, |
| 4935 Register r0, |
| 4936 Register r1) { |
| 4937 // Assert that name contains a string. |
| 4938 if (FLAG_debug_code) __ AbortIfNotString(name); |
| 4939 |
| 4940 __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset)); |
| 4941 __ decl(r0); |
| 4942 |
| 4943 for (int i = 0; i < kInlinedProbes; i++) { |
| 4944 // Compute the masked index: (hash + i + i * i) & mask. |
| 4945 __ movl(r1, FieldOperand(name, String::kHashFieldOffset)); |
| 4946 __ shrl(r1, Immediate(String::kHashShift)); |
| 4947 if (i > 0) { |
| 4948 __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i))); |
| 4949 } |
| 4950 __ and_(r1, r0); |
| 4951 |
| 4952 // Scale the index by multiplying by the entry size. |
| 4953 ASSERT(StringDictionary::kEntrySize == 3); |
| 4954 __ lea(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3 |
| 4955 |
| 4956 // Check if the key is identical to the name. |
| 4957 __ cmpq(name, Operand(elements, r1, times_pointer_size, |
| 4958 kElementsStartOffset - kHeapObjectTag)); |
| 4959 __ j(equal, done); |
| 4960 } |
| 4961 |
| 4962 StringDictionaryLookupStub stub(elements, |
| 4963 r0, |
| 4964 r1, |
| 4965 POSITIVE_LOOKUP); |
| 4966 __ push(name); |
| 4967 __ movl(r0, FieldOperand(name, String::kHashFieldOffset)); |
| 4968 __ shrl(r0, Immediate(String::kHashShift)); |
| 4969 __ push(r0); |
| 4970 __ CallStub(&stub); |
| 4971 |
| 4972 __ testq(r0, r0); |
| 4973 __ j(zero, miss); |
| 4974 __ jmp(done); |
| 4975 } |
| 4976 |
| 4977 |
| 4978 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { |
| 4979 // Stack frame on entry: |
| 4980 // esp[0 * kPointerSize]: return address. |
| 4981 // esp[1 * kPointerSize]: key's hash. |
| 4982 // esp[2 * kPointerSize]: key. |
| 4983 // Registers: |
| 4984 // dictionary_: StringDictionary to probe. |
| 4985 // result_: used as scratch. |
| 4986 // index_: will hold an index of entry if lookup is successful. |
| 4987 // might alias with result_. |
| 4988 // Returns: |
| 4989 // result_ is zero if lookup failed, non zero otherwise. |
| 4990 |
| 4991 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; |
| 4992 |
| 4993 Register scratch = result_; |
| 4994 |
| 4995 __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset)); |
| 4996 __ decl(scratch); |
| 4997 __ push(scratch); |
| 4998 |
| 4999 // If names of slots in range from 1 to kProbes - 1 for the hash value are |
| 5000 // not equal to the name and kProbes-th slot is not used (its name is the |
| 5001 // undefined value), it guarantees the hash table doesn't contain the |
| 5002 // property. It's true even if some slots represent deleted properties |
| 5003 // (their names are the null value). |
| 5004 for (int i = kInlinedProbes; i < kTotalProbes; i++) { |
| 5005 // Compute the masked index: (hash + i + i * i) & mask. |
| 5006 __ movq(scratch, Operand(rsp, 2 * kPointerSize)); |
| 5007 if (i > 0) { |
| 5008 __ addl(scratch, Immediate(StringDictionary::GetProbeOffset(i))); |
| 5009 } |
| 5010 __ and_(scratch, Operand(rsp, 0)); |
| 5011 |
| 5012 // Scale the index by multiplying by the entry size. |
| 5013 ASSERT(StringDictionary::kEntrySize == 3); |
| 5014 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. |
| 5015 |
| 5016 // Having undefined at this place means the name is not contained. |
| 5017 __ movq(scratch, Operand(dictionary_, |
| 5018 index_, |
| 5019 times_pointer_size, |
| 5020 kElementsStartOffset - kHeapObjectTag)); |
| 5021 |
| 5022 __ Cmp(scratch, masm->isolate()->factory()->undefined_value()); |
| 5023 __ j(equal, ¬_in_dictionary); |
| 5024 |
| 5025 // Stop if found the property. |
| 5026 __ cmpq(scratch, Operand(rsp, 3 * kPointerSize)); |
| 5027 __ j(equal, &in_dictionary); |
| 5028 |
| 5029 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { |
| 5030 // If we hit a non symbol key during negative lookup |
| 5031 // we have to bailout as this key might be equal to the |
| 5032 // key we are looking for. |
| 5033 |
| 5034 // Check if the entry name is not a symbol. |
| 5035 __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
| 5036 __ testb(FieldOperand(scratch, Map::kInstanceTypeOffset), |
| 5037 Immediate(kIsSymbolMask)); |
| 5038 __ j(zero, &maybe_in_dictionary); |
| 5039 } |
| 5040 } |
| 5041 |
| 5042 __ bind(&maybe_in_dictionary); |
| 5043 // If we are doing negative lookup then probing failure should be |
| 5044 // treated as a lookup success. For positive lookup probing failure |
| 5045 // should be treated as lookup failure. |
| 5046 if (mode_ == POSITIVE_LOOKUP) { |
| 5047 __ movq(scratch, Immediate(0)); |
| 5048 __ Drop(1); |
| 5049 __ ret(2 * kPointerSize); |
| 5050 } |
| 5051 |
| 5052 __ bind(&in_dictionary); |
| 5053 __ movq(scratch, Immediate(1)); |
| 5054 __ Drop(1); |
| 5055 __ ret(2 * kPointerSize); |
| 5056 |
| 5057 __ bind(¬_in_dictionary); |
| 5058 __ movq(scratch, Immediate(0)); |
| 5059 __ Drop(1); |
| 5060 __ ret(2 * kPointerSize); |
| 5061 } |
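As the comments in the maybe_in_dictionary block explain, exhausting the probes on a non-symbol key is only conclusive for a positive lookup; a negative lookup cannot prove the property absent in that case, so it reports "found" and leaves the slow path to decide. The result mapping, as a plain sketch (names invented):

  // Non-zero result_ means "found / possibly present", zero means absent.
  int LookupStubResult(bool found_key, bool stopped_on_non_symbol,
                       bool negative_lookup) {
    if (found_key) return 1;              // in_dictionary
    if (stopped_on_non_symbol) {
      return negative_lookup ? 1 : 0;     // maybe_in_dictionary
    }
    return 0;                             // not_in_dictionary
  }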
| 5062 |
| 5063 |
4739 #undef __ | 5064 #undef __ |
4740 | 5065 |
4741 } } // namespace v8::internal | 5066 } } // namespace v8::internal |
4742 | 5067 |
4743 #endif // V8_TARGET_ARCH_X64 | 5068 #endif // V8_TARGET_ARCH_X64 |