OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4751 matching lines...)
4762 | 4762 |
4763 // Check the cache from finger to start of the cache. | 4763 // Check the cache from finger to start of the cache. |
4764 __ bind(&first_loop); | 4764 __ bind(&first_loop); |
4765 __ subl(dst_, kEntrySizeImm); | 4765 __ subl(dst_, kEntrySizeImm); |
4766 __ cmpl(dst_, kEntriesIndexImm); | 4766 __ cmpl(dst_, kEntriesIndexImm); |
4767 __ j(less, &search_further); | 4767 __ j(less, &search_further); |
4768 | 4768 |
4769 __ cmpq(ArrayElement(cache_, dst_), key_); | 4769 __ cmpq(ArrayElement(cache_, dst_), key_); |
4770 __ j(not_equal, &first_loop); | 4770 __ j(not_equal, &first_loop); |
4771 | 4771 |
4772 __ Integer32ToSmi(scratch_, dst_); | 4772 __ Integer32ToSmiField( |
4773 __ movq(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), scratch_); | 4773 FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_); |
4774 __ movq(dst_, ArrayElement(cache_, dst_, 1)); | 4774 __ movq(dst_, ArrayElement(cache_, dst_, 1)); |
4775 __ jmp(exit_label()); | 4775 __ jmp(exit_label()); |
4776 | 4776 |
4777 __ bind(&search_further); | 4777 __ bind(&search_further); |
4778 | 4778 |
4779 // Check the cache from end of cache up to finger. | 4779 // Check the cache from end of cache up to finger. |
4780 __ SmiToInteger32(dst_, | 4780 __ SmiToInteger32(dst_, |
4781 FieldOperand(cache_, | 4781 FieldOperand(cache_, |
4782 JSFunctionResultCache::kCacheSizeOffset)); | 4782 JSFunctionResultCache::kCacheSizeOffset)); |
4783 __ SmiToInteger32(scratch_, | 4783 __ SmiToInteger32(scratch_, |
4784 FieldOperand(cache_, JSFunctionResultCache::kFingerOffset)); | 4784 FieldOperand(cache_, JSFunctionResultCache::kFingerOffset)); |
4785 | 4785 |
4786 __ bind(&second_loop); | 4786 __ bind(&second_loop); |
4787 __ subl(dst_, kEntrySizeImm); | 4787 __ subl(dst_, kEntrySizeImm); |
4788 __ cmpl(dst_, scratch_); | 4788 __ cmpl(dst_, scratch_); |
4789 __ j(less_equal, &cache_miss); | 4789 __ j(less_equal, &cache_miss); |
4790 | 4790 |
4791 __ cmpq(ArrayElement(cache_, dst_), key_); | 4791 __ cmpq(ArrayElement(cache_, dst_), key_); |
4792 __ j(not_equal, &second_loop); | 4792 __ j(not_equal, &second_loop); |
4793 | 4793 |
4794 __ Integer32ToSmi(scratch_, dst_); | 4794 __ Integer32ToSmiField( |
4795 __ movq(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), scratch_); | 4795 FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_); |
4796 __ movq(dst_, ArrayElement(cache_, dst_, 1)); | 4796 __ movq(dst_, ArrayElement(cache_, dst_, 1)); |
4797 __ jmp(exit_label()); | 4797 __ jmp(exit_label()); |
4798 | 4798 |
4799 __ bind(&cache_miss); | 4799 __ bind(&cache_miss); |
4800 __ push(cache_); // store a reference to cache | 4800 __ push(cache_); // store a reference to cache |
4801 __ push(key_); // store a key | 4801 __ push(key_); // store a key |
4802 __ push(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 4802 __ push(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
4803 __ push(key_); | 4803 __ push(key_); |
4804 // On x64 the function must be in rdi. | 4804 // On x64 the function must be in rdi. |
4805 __ movq(rdi, FieldOperand(cache_, JSFunctionResultCache::kFactoryOffset)); | 4805 __ movq(rdi, FieldOperand(cache_, JSFunctionResultCache::kFactoryOffset)); |
4806 ParameterCount expected(1); | 4806 ParameterCount expected(1); |
4807 __ InvokeFunction(rdi, expected, CALL_FUNCTION); | 4807 __ InvokeFunction(rdi, expected, CALL_FUNCTION); |
4808 | 4808 |
4809 // Find a place to put the new cached value. | 4809 // Find a place to put the new cached value. |
4810 Label add_new_entry, update_cache; | 4810 Label add_new_entry, update_cache; |
4811 __ movq(rcx, Operand(rsp, kPointerSize)); // restore the cache | 4811 __ movq(rcx, Operand(rsp, kPointerSize)); // restore the cache |
4812 // Possible optimization: the cache size is constant for a given cache, | 4812 // Possible optimization: the cache size is constant for a given cache, |
4813 // so technically we could use a constant here. However, on a cache | 4813 // so technically we could use a constant here. However, on a cache |
4814 // miss this optimization would hardly matter. | 4814 // miss this optimization would hardly matter. |
4815 | 4815 |
4816 // Check if we could add new entry to cache. | 4816 // Check if we could add new entry to cache. |
4817 __ movq(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); | 4817 __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); |
4818 __ movq(r9, FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset)); | 4818 __ SmiToInteger32(r9, |
4819 __ SmiCompare(rbx, r9); | 4819 FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset)); |
| 4820 __ cmpl(rbx, r9); |
4820 __ j(greater, &add_new_entry); | 4821 __ j(greater, &add_new_entry); |
4821 | 4822 |
4822 // Check if we could evict entry after finger. | 4823 // Check if we could evict entry after finger. |
4823 __ movq(rdx, FieldOperand(rcx, JSFunctionResultCache::kFingerOffset)); | 4824 __ SmiToInteger32(rdx, |
4824 __ SmiToInteger32(rdx, rdx); | 4825 FieldOperand(rcx, JSFunctionResultCache::kFingerOffset)); |
4825 __ SmiToInteger32(rbx, rbx); | 4826 __ addl(rdx, kEntrySizeImm); |
4826 __ addq(rdx, kEntrySizeImm); | |
4827 Label forward; | 4827 Label forward; |
4828 __ cmpq(rbx, rdx); | 4828 __ cmpl(rbx, rdx); |
4829 __ j(greater, &forward); | 4829 __ j(greater, &forward); |
4830 // Need to wrap over the cache. | 4830 // Need to wrap over the cache. |
4831 __ movl(rdx, kEntriesIndexImm); | 4831 __ movl(rdx, kEntriesIndexImm); |
4832 __ bind(&forward); | 4832 __ bind(&forward); |
4833 __ Integer32ToSmi(r9, rdx); | 4833 __ movl(r9, rdx); |
4834 __ jmp(&update_cache); | 4834 __ jmp(&update_cache); |
4835 | 4835 |
4836 __ bind(&add_new_entry); | 4836 __ bind(&add_new_entry); |
4837 // r9 holds cache size as smi. | 4837 // r9 holds cache size as int32. |
4838 __ SmiToInteger32(rdx, r9); | 4838 __ leal(rbx, Operand(r9, JSFunctionResultCache::kEntrySize)); |
4839 __ SmiAddConstant(rbx, r9, Smi::FromInt(JSFunctionResultCache::kEntrySize)); | 4839 __ Integer32ToSmiField( |
4840 __ movq(FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset), rbx); | 4840 FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset), rbx); |
4841 | 4841 |
4842 // Update the cache itself. | 4842 // Update the cache itself. |
4843 // rdx holds the index as int. | 4843 // r9 holds the index as int32. |
4844 // r9 holds the index as smi. | |
4845 __ bind(&update_cache); | 4844 __ bind(&update_cache); |
4846 __ pop(rbx); // restore the key | 4845 __ pop(rbx); // restore the key |
4847 __ movq(FieldOperand(rcx, JSFunctionResultCache::kFingerOffset), r9); | 4846 __ Integer32ToSmiField( |
| 4847 FieldOperand(rcx, JSFunctionResultCache::kFingerOffset), r9); |
4848 // Store key. | 4848 // Store key. |
4849 __ movq(ArrayElement(rcx, rdx), rbx); | 4849 __ movq(ArrayElement(rcx, r9), rbx); |
4850 __ RecordWrite(rcx, 0, rbx, r9); | 4850 __ RecordWrite(rcx, 0, rbx, r9); |
4851 | 4851 |
4852 // Store value. | 4852 // Store value. |
4853 __ pop(rcx); // restore the cache. | 4853 __ pop(rcx); // restore the cache. |
4854 __ movq(rdx, FieldOperand(rcx, JSFunctionResultCache::kFingerOffset)); | 4854 __ SmiToInteger32(rdx, |
4855 __ SmiAddConstant(rdx, rdx, Smi::FromInt(1)); | 4855 FieldOperand(rcx, JSFunctionResultCache::kFingerOffset)); |
4856 __ movq(r9, rdx); | 4856 __ incl(rdx); |
4857 __ SmiToInteger32(rdx, rdx); | 4857 // Back up rax, because the RecordWrite macro clobbers its arguments. |
4858 __ movq(rbx, rax); | 4858 __ movq(rbx, rax); |
4859 __ movq(ArrayElement(rcx, rdx), rbx); | 4859 __ movq(ArrayElement(rcx, rdx), rax); |
4860 __ RecordWrite(rcx, 0, rbx, r9); | 4860 __ RecordWrite(rcx, 0, rbx, rdx); |
4861 | 4861 |
4862 if (!dst_.is(rax)) { | 4862 if (!dst_.is(rax)) { |
4863 __ movq(dst_, rax); | 4863 __ movq(dst_, rax); |
4864 } | 4864 } |
4865 } | 4865 } |
4866 | 4866 |
4867 | 4867 |
4868 void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) { | 4868 void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) { |
4869 ASSERT_EQ(2, args->length()); | 4869 ASSERT_EQ(2, args->length()); |
4870 | 4870 |
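
For context on the pattern this change applies throughout the file — untag once, do the arithmetic with 32-bit instructions, and re-tag only when writing a field back — here is a minimal sketch of the x64 smi representation that Integer32ToSmi/SmiToInteger32 rely on. The layout (32-bit payload in the upper half of a 64-bit word, lower half zero) is stated here as an assumption for illustration, and the helpers below are not V8's actual declarations.

    // Illustrative sketch only, not V8's API.
    #include <stdint.h>
    typedef int64_t Smi64;
    static const int kSmiShift = 32;
    static inline Smi64 Integer32ToSmi(int32_t value) {
      return static_cast<Smi64>(value) << kSmiShift;  // tag: low 32 bits become zero
    }
    static inline int32_t SmiToInteger32(Smi64 smi) {
      return static_cast<int32_t>(smi >> kSmiShift);  // untag: arithmetic shift back down
    }

Once values are untagged this way, the 32-bit forms (cmpl, addl, incl, leal) used in the rewritten cache code above are sufficient, which is what lets the patch drop the SmiCompare/SmiAddConstant calls.
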
(...skipping 3673 matching lines...)
8544 if (FLAG_debug_code) { | 8544 if (FLAG_debug_code) { |
8545 Condition is_smi = masm->CheckSmi(rcx); | 8545 Condition is_smi = masm->CheckSmi(rcx); |
8546 __ Check(NegateCondition(is_smi), | 8546 __ Check(NegateCondition(is_smi), |
8547 "Unexpected type for RegExp data, FixedArray expected"); | 8547 "Unexpected type for RegExp data, FixedArray expected"); |
8548 __ CmpObjectType(rcx, FIXED_ARRAY_TYPE, kScratchRegister); | 8548 __ CmpObjectType(rcx, FIXED_ARRAY_TYPE, kScratchRegister); |
8549 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected"); | 8549 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected"); |
8550 } | 8550 } |
8551 | 8551 |
8552 // rcx: RegExp data (FixedArray) | 8552 // rcx: RegExp data (FixedArray) |
8553 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. | 8553 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. |
8554 __ movq(rbx, FieldOperand(rcx, JSRegExp::kDataTagOffset)); | 8554 __ SmiToInteger32(rbx, FieldOperand(rcx, JSRegExp::kDataTagOffset)); |
8555 __ SmiCompare(rbx, Smi::FromInt(JSRegExp::IRREGEXP)); | 8555 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP)); |
8556 __ j(not_equal, &runtime); | 8556 __ j(not_equal, &runtime); |
8557 | 8557 |
8558 // rcx: RegExp data (FixedArray) | 8558 // rcx: RegExp data (FixedArray) |
8559 // Check that the number of captures fits in the static offsets vector buffer. | 8559 // Check that the number of captures fits in the static offsets vector buffer. |
8560 __ movq(rdx, FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset)); | 8560 __ SmiToInteger32(rdx, |
| 8561 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset)); |
8561 // Calculate number of capture registers (number_of_captures + 1) * 2. | 8562 // Calculate number of capture registers (number_of_captures + 1) * 2. |
8562 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rdx, 1); | 8563 __ leal(rdx, Operand(rdx, rdx, times_1, 2)); |
8563 __ addq(rdx, Immediate(2)); // rdx was number_of_captures * 2. | |
8564 // Check that the static offsets vector buffer is large enough. | 8564 // Check that the static offsets vector buffer is large enough. |
8565 __ cmpq(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize)); | 8565 __ cmpl(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize)); |
8566 __ j(above, &runtime); | 8566 __ j(above, &runtime); |
8567 | 8567 |
8568 // rcx: RegExp data (FixedArray) | 8568 // rcx: RegExp data (FixedArray) |
8569 // rdx: Number of capture registers | 8569 // rdx: Number of capture registers |
8570 // Check that the second argument is a string. | 8570 // Check that the second argument is a string. |
8571 __ movq(rax, Operand(rsp, kSubjectOffset)); | 8571 __ movq(rax, Operand(rsp, kSubjectOffset)); |
8572 __ JumpIfSmi(rax, &runtime); | 8572 __ JumpIfSmi(rax, &runtime); |
8573 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx); | 8573 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx); |
8574 __ j(NegateCondition(is_string), &runtime); | 8574 __ j(NegateCondition(is_string), &runtime); |
8575 // Get the length of the string to rbx. | |
8576 __ movq(rbx, FieldOperand(rax, String::kLengthOffset)); | |
8577 | 8575 |
8578 // rbx: Length of subject string as smi | 8576 // rax: Subject string. |
8579 // rcx: RegExp data (FixedArray) | 8577 // rcx: RegExp data (FixedArray). |
8580 // rdx: Number of capture registers | 8578 // rdx: Number of capture registers. |
8581 // Check that the third argument is a positive smi less than the string | 8579 // Check that the third argument is a positive smi less than the string |
8582 // length. A negative value will be greater (unsigned comparison). | 8580 // length. A negative value will be greater (unsigned comparison). |
8583 __ movq(rax, Operand(rsp, kPreviousIndexOffset)); | 8581 __ movq(rbx, Operand(rsp, kPreviousIndexOffset)); |
8584 __ JumpIfNotSmi(rax, &runtime); | 8582 __ JumpIfNotSmi(rbx, &runtime); |
8585 __ SmiCompare(rax, rbx); | 8583 __ SmiCompare(rbx, FieldOperand(rax, String::kLengthOffset)); |
8586 __ j(above_equal, &runtime); | 8584 __ j(above_equal, &runtime); |
8587 | 8585 |
8588 // rcx: RegExp data (FixedArray) | 8586 // rcx: RegExp data (FixedArray) |
8589 // rdx: Number of capture registers | 8587 // rdx: Number of capture registers |
8590 // Check that the fourth object is a JSArray object. | 8588 // Check that the fourth object is a JSArray object. |
8591 __ movq(rax, Operand(rsp, kLastMatchInfoOffset)); | 8589 __ movq(rax, Operand(rsp, kLastMatchInfoOffset)); |
8592 __ JumpIfSmi(rax, &runtime); | 8590 __ JumpIfSmi(rax, &runtime); |
8593 __ CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister); | 8591 __ CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister); |
8594 __ j(not_equal, &runtime); | 8592 __ j(not_equal, &runtime); |
8595 // Check that the JSArray is in fast case. | 8593 // Check that the JSArray is in fast case. |
8596 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset)); | 8594 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset)); |
8597 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset)); | 8595 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset)); |
8598 __ Cmp(rax, Factory::fixed_array_map()); | 8596 __ Cmp(rax, Factory::fixed_array_map()); |
8599 __ j(not_equal, &runtime); | 8597 __ j(not_equal, &runtime); |
8600 // Check that the last match info has space for the capture registers and the | 8598 // Check that the last match info has space for the capture registers and the |
8601 // additional information. Ensure no overflow in add. | 8599 // additional information. Ensure no overflow in add. |
8602 ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); | 8600 ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); |
8603 __ movq(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); | 8601 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); |
8604 __ SmiToInteger32(rax, rax); | |
8605 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead)); | 8602 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead)); |
8606 __ cmpl(rdx, rax); | 8603 __ cmpl(rdx, rax); |
8607 __ j(greater, &runtime); | 8604 __ j(greater, &runtime); |
8608 | 8605 |
8609 // rcx: RegExp data (FixedArray) | 8606 // rcx: RegExp data (FixedArray) |
8610 // Check the representation and encoding of the subject string. | 8607 // Check the representation and encoding of the subject string. |
8611 Label seq_ascii_string, seq_two_byte_string, check_code; | 8608 Label seq_ascii_string, seq_two_byte_string, check_code; |
8612 __ movq(rax, Operand(rsp, kSubjectOffset)); | 8609 __ movq(rax, Operand(rsp, kSubjectOffset)); |
8613 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); | 8610 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); |
8614 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); | 8611 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); |
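
An aside on the capture-count computation rewritten above: the single leal folds the old PositiveSmiTimesPowerOfTwoToInteger64 plus addq pair into one instruction on the untagged count. A small sketch of the arithmetic, using only the quantity named in the surrounding comments:

    // Illustrative only: what leal(rdx, Operand(rdx, rdx, times_1, 2)) computes,
    // i.e. rdx = rdx + rdx*1 + 2, with rdx holding the untagged capture count.
    static inline int CaptureRegisters(int number_of_captures) {
      return (number_of_captures + 1) * 2;  // == 2 * number_of_captures + 2
    }

The same pattern appears again in the success path further down, where the count is reloaded from the RegExp data.
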
(...skipping 52 matching lines...)
8667 // encoding. If it has, the field contains a code object; otherwise it contains | 8664 // encoding. If it has, the field contains a code object; otherwise it contains |
8668 // the hole. | 8665 // the hole. |
8669 __ CmpObjectType(r12, CODE_TYPE, kScratchRegister); | 8666 __ CmpObjectType(r12, CODE_TYPE, kScratchRegister); |
8670 __ j(not_equal, &runtime); | 8667 __ j(not_equal, &runtime); |
8671 | 8668 |
8672 // rax: subject string | 8669 // rax: subject string |
8673 // rdi: encoding of subject string (1 if ascii, 0 if two_byte); | 8670 // rdi: encoding of subject string (1 if ascii, 0 if two_byte); |
8674 // r12: code | 8671 // r12: code |
8675 // Load used arguments before starting to push arguments for call to native | 8672 // Load used arguments before starting to push arguments for call to native |
8676 // RegExp code to avoid handling changing stack height. | 8673 // RegExp code to avoid handling changing stack height. |
8677 __ movq(rbx, Operand(rsp, kPreviousIndexOffset)); | 8674 __ SmiToInteger64(rbx, Operand(rsp, kPreviousIndexOffset)); |
8678 __ SmiToInteger64(rbx, rbx); // Previous index from smi. | |
8679 | 8675 |
8680 // rax: subject string | 8676 // rax: subject string |
8681 // rbx: previous index | 8677 // rbx: previous index |
8682 // rdi: encoding of subject string (1 if ascii 0 if two_byte); | 8678 // rdi: encoding of subject string (1 if ascii 0 if two_byte); |
8683 // r12: code | 8679 // r12: code |
8684 // All checks done. Now push arguments for native regexp code. | 8680 // All checks done. Now push arguments for native regexp code. |
8685 __ IncrementCounter(&Counters::regexp_entry_native, 1); | 8681 __ IncrementCounter(&Counters::regexp_entry_native, 1); |
8686 | 8682 |
8687 // rsi is caller save on Windows and used to pass a parameter on Linux. | 8683 // rsi is caller save on Windows and used to pass a parameter on Linux. |
8688 __ push(rsi); | 8684 __ push(rsi); |
(...skipping 91 matching lines...)
8780 __ j(equal, &runtime); | 8776 __ j(equal, &runtime); |
8781 __ bind(&failure); | 8777 __ bind(&failure); |
8782 // For failure and exception return null. | 8778 // For failure and exception return null. |
8783 __ Move(rax, Factory::null_value()); | 8779 __ Move(rax, Factory::null_value()); |
8784 __ ret(4 * kPointerSize); | 8780 __ ret(4 * kPointerSize); |
8785 | 8781 |
8786 // Load RegExp data. | 8782 // Load RegExp data. |
8787 __ bind(&success); | 8783 __ bind(&success); |
8788 __ movq(rax, Operand(rsp, kJSRegExpOffset)); | 8784 __ movq(rax, Operand(rsp, kJSRegExpOffset)); |
8789 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset)); | 8785 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset)); |
8790 __ movq(rdx, FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset)); | 8786 __ SmiToInteger32(rax, |
| 8787 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset)); |
8791 // Calculate number of capture registers (number_of_captures + 1) * 2. | 8788 // Calculate number of capture registers (number_of_captures + 1) * 2. |
8792 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rdx, 1); | 8789 __ leal(rdx, Operand(rax, rax, times_1, 2)); |
8793 __ addq(rdx, Immediate(2)); // rdx was number_of_captures * 2. | |
8794 | 8790 |
8795 // rdx: Number of capture registers | 8791 // rdx: Number of capture registers |
8796 // Load last_match_info which is still known to be a fast case JSArray. | 8792 // Load last_match_info which is still known to be a fast case JSArray. |
8797 __ movq(rax, Operand(rsp, kLastMatchInfoOffset)); | 8793 __ movq(rax, Operand(rsp, kLastMatchInfoOffset)); |
8798 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset)); | 8794 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset)); |
8799 | 8795 |
8800 // rbx: last_match_info backing store (FixedArray) | 8796 // rbx: last_match_info backing store (FixedArray) |
8801 // rdx: number of capture registers | 8797 // rdx: number of capture registers |
8802 // Store the capture count. | 8798 // Store the capture count. |
8803 __ Integer32ToSmi(kScratchRegister, rdx); | 8799 __ Integer32ToSmi(kScratchRegister, rdx); |
(...skipping 22 matching lines...)
8826 __ subq(rdx, Immediate(1)); | 8822 __ subq(rdx, Immediate(1)); |
8827 __ j(negative, &done); | 8823 __ j(negative, &done); |
8828 // Read the value from the static offsets vector buffer and make it a smi. | 8824 // Read the value from the static offsets vector buffer and make it a smi. |
8829 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0)); | 8825 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0)); |
8830 __ Integer32ToSmi(rdi, rdi, &runtime); | 8826 __ Integer32ToSmi(rdi, rdi, &runtime); |
8831 // Store the smi value in the last match info. | 8827 // Store the smi value in the last match info. |
8832 __ movq(FieldOperand(rbx, | 8828 __ movq(FieldOperand(rbx, |
8833 rdx, | 8829 rdx, |
8834 times_pointer_size, | 8830 times_pointer_size, |
8835 RegExpImpl::kFirstCaptureOffset), | 8831 RegExpImpl::kFirstCaptureOffset), |
8836 rdi); | 8832 rdi); |
8837 __ jmp(&next_capture); | 8833 __ jmp(&next_capture); |
8838 __ bind(&done); | 8834 __ bind(&done); |
8839 | 8835 |
8840 // Return last match info. | 8836 // Return last match info. |
8841 __ movq(rax, Operand(rsp, kLastMatchInfoOffset)); | 8837 __ movq(rax, Operand(rsp, kLastMatchInfoOffset)); |
8842 __ ret(4 * kPointerSize); | 8838 __ ret(4 * kPointerSize); |
8843 | 8839 |
8844 // Do the runtime call to execute the regexp. | 8840 // Do the runtime call to execute the regexp. |
8845 __ bind(&runtime); | 8841 __ bind(&runtime); |
8846 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 8842 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); |
(...skipping 23 matching lines...)
8870 // Use of registers. Register result is used as a temporary. | 8866 // Use of registers. Register result is used as a temporary. |
8871 Register number_string_cache = result; | 8867 Register number_string_cache = result; |
8872 Register mask = scratch1; | 8868 Register mask = scratch1; |
8873 Register scratch = scratch2; | 8869 Register scratch = scratch2; |
8874 | 8870 |
8875 // Load the number string cache. | 8871 // Load the number string cache. |
8876 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex); | 8872 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex); |
8877 | 8873 |
8878 // Make the hash mask from the length of the number string cache. It | 8874 // Make the hash mask from the length of the number string cache. It |
8879 // contains two elements (number and string) for each cache entry. | 8875 // contains two elements (number and string) for each cache entry. |
8880 __ movq(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); | 8876 __ SmiToInteger32( |
8881 // Divide smi tagged length by two. | 8877 mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); |
8882 __ PositiveSmiDivPowerOfTwoToInteger32(mask, mask, 1); | 8878 __ shrl(mask, Immediate(1)); |
8883 __ subq(mask, Immediate(1)); // Make mask. | 8879 __ subq(mask, Immediate(1)); // Make mask. |
8884 | 8880 |
8885 // Calculate the entry in the number string cache. The hash value in the | 8881 // Calculate the entry in the number string cache. The hash value in the |
8886 // number string cache for smis is just the smi value, and the hash for | 8882 // number string cache for smis is just the smi value, and the hash for |
8887 // doubles is the xor of the upper and lower words. See | 8883 // doubles is the xor of the upper and lower words. See |
8888 // Heap::GetNumberStringCache. | 8884 // Heap::GetNumberStringCache. |
8889 Label is_smi; | 8885 Label is_smi; |
8890 Label load_result_from_cache; | 8886 Label load_result_from_cache; |
8891 if (!object_is_smi) { | 8887 if (!object_is_smi) { |
8892 __ JumpIfSmi(object, &is_smi); | 8888 __ JumpIfSmi(object, &is_smi); |
(...skipping 16 matching lines...)
8909 CpuFeatures::Scope fscope(SSE2); | 8905 CpuFeatures::Scope fscope(SSE2); |
8910 __ movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset)); | 8906 __ movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset)); |
8911 __ movsd(xmm1, FieldOperand(probe, HeapNumber::kValueOffset)); | 8907 __ movsd(xmm1, FieldOperand(probe, HeapNumber::kValueOffset)); |
8912 __ ucomisd(xmm0, xmm1); | 8908 __ ucomisd(xmm0, xmm1); |
8913 __ j(parity_even, not_found); // Bail out if NaN is involved. | 8909 __ j(parity_even, not_found); // Bail out if NaN is involved. |
8914 __ j(not_equal, not_found); // The cache did not contain this value. | 8910 __ j(not_equal, not_found); // The cache did not contain this value. |
8915 __ jmp(&load_result_from_cache); | 8911 __ jmp(&load_result_from_cache); |
8916 } | 8912 } |
8917 | 8913 |
8918 __ bind(&is_smi); | 8914 __ bind(&is_smi); |
8919 __ movq(scratch, object); | 8915 __ SmiToInteger32(scratch, object); |
8920 __ SmiToInteger32(scratch, scratch); | |
8921 GenerateConvertHashCodeToIndex(masm, scratch, mask); | 8916 GenerateConvertHashCodeToIndex(masm, scratch, mask); |
8922 | 8917 |
8923 Register index = scratch; | 8918 Register index = scratch; |
8924 // Check if the entry is the smi we are looking for. | 8919 // Check if the entry is the smi we are looking for. |
8925 __ cmpq(object, | 8920 __ cmpq(object, |
8926 FieldOperand(number_string_cache, | 8921 FieldOperand(number_string_cache, |
8927 index, | 8922 index, |
8928 times_1, | 8923 times_1, |
8929 FixedArray::kHeaderSize)); | 8924 FixedArray::kHeaderSize)); |
8930 __ j(not_equal, not_found); | 8925 __ j(not_equal, not_found); |
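
A note on the number-string-cache mask above: the new code works on the untagged FixedArray length directly. A rough sketch of the indexing scheme, assuming (as the comments state) that each cache entry occupies two consecutive array slots, number then string, and that the entry count is a power of two:

    // Illustrative only; mirrors the untagged-length arithmetic in the hunk above.
    static inline int NumberStringCacheKeySlot(int cache_length, int hash) {
      int entries = cache_length >> 1;  // shrl(mask, Immediate(1))
      int mask = entries - 1;           // subq(mask, Immediate(1)): power-of-two mask
      int entry = hash & mask;          // hash: smi value, or high ^ low word of a double
      return entry * 2;                 // key slot; the cached string sits in the next slot
    }

GenerateConvertHashCodeToIndex presumably turns this slot number into the byte offset used with times_1 in the FieldOperand access above.
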
(...skipping 406 matching lines...)
9337 static const int kDisplacement = 2 * kPointerSize; | 9332 static const int kDisplacement = 2 * kPointerSize; |
9338 | 9333 |
9339 // Check if the calling frame is an arguments adaptor frame. | 9334 // Check if the calling frame is an arguments adaptor frame. |
9340 Label adaptor_frame, try_allocate, runtime; | 9335 Label adaptor_frame, try_allocate, runtime; |
9341 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 9336 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
9342 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset), | 9337 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset), |
9343 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 9338 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
9344 __ j(equal, &adaptor_frame); | 9339 __ j(equal, &adaptor_frame); |
9345 | 9340 |
9346 // Get the length from the frame. | 9341 // Get the length from the frame. |
9347 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 9342 __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize)); |
9348 __ jmp(&try_allocate); | 9343 __ jmp(&try_allocate); |
9349 | 9344 |
9350 // Patch the arguments.length and the parameters pointer. | 9345 // Patch the arguments.length and the parameters pointer. |
9351 __ bind(&adaptor_frame); | 9346 __ bind(&adaptor_frame); |
9352 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 9347 __ SmiToInteger32(rcx, |
9353 __ movq(Operand(rsp, 1 * kPointerSize), rcx); | 9348 Operand(rdx, |
| 9349 ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 9350 // Space on stack must already hold a smi. |
| 9351 __ Integer32ToSmiField(Operand(rsp, 1 * kPointerSize), rcx); |
9354 // Do not clobber the length index for the indexing operation since | 9352 // Do not clobber the length index for the indexing operation since |
9355 // it is used to compute the size for allocation later. | 9353 // it is used to compute the size for allocation later. |
9356 SmiIndex index = masm->SmiToIndex(rbx, rcx, kPointerSizeLog2); | 9354 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, kDisplacement)); |
9357 __ lea(rdx, Operand(rdx, index.reg, index.scale, kDisplacement)); | |
9358 __ movq(Operand(rsp, 2 * kPointerSize), rdx); | 9355 __ movq(Operand(rsp, 2 * kPointerSize), rdx); |
9359 | 9356 |
9360 // Try the new space allocation. Start out with computing the size of | 9357 // Try the new space allocation. Start out with computing the size of |
9361 // the arguments object and the elements array. | 9358 // the arguments object and the elements array. |
9362 Label add_arguments_object; | 9359 Label add_arguments_object; |
9363 __ bind(&try_allocate); | 9360 __ bind(&try_allocate); |
9364 __ testq(rcx, rcx); | 9361 __ testl(rcx, rcx); |
9365 __ j(zero, &add_arguments_object); | 9362 __ j(zero, &add_arguments_object); |
9366 index = masm->SmiToIndex(rcx, rcx, kPointerSizeLog2); | 9363 __ leal(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize)); |
9367 __ lea(rcx, Operand(index.reg, index.scale, FixedArray::kHeaderSize)); | |
9368 __ bind(&add_arguments_object); | 9364 __ bind(&add_arguments_object); |
9369 __ addq(rcx, Immediate(Heap::kArgumentsObjectSize)); | 9365 __ addl(rcx, Immediate(Heap::kArgumentsObjectSize)); |
9370 | 9366 |
9371 // Do the allocation of both objects in one go. | 9367 // Do the allocation of both objects in one go. |
9372 __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT); | 9368 __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT); |
9373 | 9369 |
9374 // Get the arguments boilerplate from the current (global) context. | 9370 // Get the arguments boilerplate from the current (global) context. |
9375 int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX); | 9371 int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX); |
9376 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 9372 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
9377 __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset)); | 9373 __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset)); |
9378 __ movq(rdi, Operand(rdi, offset)); | 9374 __ movq(rdi, Operand(rdi, offset)); |
9379 | 9375 |
9380 // Copy the JS object part. | 9376 // Copy the JS object part. |
9381 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { | 9377 STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize); |
9382 __ movq(kScratchRegister, FieldOperand(rdi, i)); | 9378 __ movq(kScratchRegister, FieldOperand(rdi, 0 * kPointerSize)); |
9383 __ movq(FieldOperand(rax, i), kScratchRegister); | 9379 __ movq(rdx, FieldOperand(rdi, 1 * kPointerSize)); |
9384 } | 9380 __ movq(rbx, FieldOperand(rdi, 2 * kPointerSize)); |
| 9381 __ movq(FieldOperand(rax, 0 * kPointerSize), kScratchRegister); |
| 9382 __ movq(FieldOperand(rax, 1 * kPointerSize), rdx); |
| 9383 __ movq(FieldOperand(rax, 2 * kPointerSize), rbx); |
9385 | 9384 |
9386 // Set up the callee in-object property. | 9385 // Set up the callee in-object property. |
9387 ASSERT(Heap::arguments_callee_index == 0); | 9386 ASSERT(Heap::arguments_callee_index == 0); |
9388 __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize)); | 9387 __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize)); |
9389 __ movq(FieldOperand(rax, JSObject::kHeaderSize), kScratchRegister); | 9388 __ movq(FieldOperand(rax, JSObject::kHeaderSize), kScratchRegister); |
9390 | 9389 |
9391 // Get the length (smi tagged) and set that as an in-object property too. | 9390 // Get the length (smi tagged) and set that as an in-object property too. |
9392 ASSERT(Heap::arguments_length_index == 1); | 9391 ASSERT(Heap::arguments_length_index == 1); |
9393 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 9392 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
9394 __ movq(FieldOperand(rax, JSObject::kHeaderSize + kPointerSize), rcx); | 9393 __ movq(FieldOperand(rax, JSObject::kHeaderSize + kPointerSize), rcx); |
9395 | 9394 |
9396 // If there are no actual arguments, we're done. | 9395 // If there are no actual arguments, we're done. |
9397 Label done; | 9396 Label done; |
9398 __ testq(rcx, rcx); | 9397 __ SmiTest(rcx); |
9399 __ j(zero, &done); | 9398 __ j(zero, &done); |
9400 | 9399 |
9401 // Get the parameters pointer from the stack and untag the length. | 9400 // Get the parameters pointer from the stack and untag the length. |
9402 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); | 9401 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); |
9403 | 9402 |
9404 // Set up the elements pointer in the allocated arguments object and | 9403 // Set up the elements pointer in the allocated arguments object and |
9405 // initialize the header in the elements fixed array. | 9404 // initialize the header in the elements fixed array. |
9406 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize)); | 9405 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize)); |
9407 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); | 9406 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); |
9408 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); | 9407 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); |
9409 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); | 9408 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); |
9410 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); | 9409 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); |
9411 __ SmiToInteger32(rcx, rcx); // Untag length for the loop below. | 9410 __ SmiToInteger32(rcx, rcx); // Untag length for the loop below. |
9412 | 9411 |
9413 // Copy the fixed array slots. | 9412 // Copy the fixed array slots. |
9414 Label loop; | 9413 Label loop; |
9415 __ bind(&loop); | 9414 __ bind(&loop); |
9416 __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize)); // Skip receiver. | 9415 __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize)); // Skip receiver. |
9417 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister); | 9416 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister); |
9418 __ addq(rdi, Immediate(kPointerSize)); | 9417 __ addq(rdi, Immediate(kPointerSize)); |
9419 __ subq(rdx, Immediate(kPointerSize)); | 9418 __ subq(rdx, Immediate(kPointerSize)); |
9420 __ decq(rcx); | 9419 __ decl(rcx); |
9421 __ j(not_zero, &loop); | 9420 __ j(not_zero, &loop); |
9422 | 9421 |
9423 // Return and remove the on-stack parameters. | 9422 // Return and remove the on-stack parameters. |
9424 __ bind(&done); | 9423 __ bind(&done); |
9425 __ ret(3 * kPointerSize); | 9424 __ ret(3 * kPointerSize); |
9426 | 9425 |
9427 // Do the runtime call to allocate the arguments object. | 9426 // Do the runtime call to allocate the arguments object. |
9428 __ bind(&runtime); | 9427 __ bind(&runtime); |
9429 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); | 9428 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); |
9430 } | 9429 } |
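
One last aside on the arguments-object hunk: the JS object header copy that used to be a loop over kHeaderSize bytes is now unrolled into three explicit moves, with the STATIC_ASSERT ensuring that a future change to JSObject::kHeaderSize breaks the build instead of silently copying the wrong number of words. A sketch of the equivalence (the field names in the comments are an assumption, for illustration only):

    // Illustrative only: three pointer-sized words, as the STATIC_ASSERT pins down.
    #include <stdint.h>
    static inline void CopyJSObjectHeader(intptr_t* dst, const intptr_t* src) {
      dst[0] = src[0];  // presumably the map
      dst[1] = src[1];  // presumably the properties pointer
      dst[2] = src[2];  // presumably the elements pointer
    }

Unrolling also removes the loop counter and lets the three loads and three stores be issued back to back instead of routing every word through kScratchRegister.
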
(...skipping 1394 matching lines...)
10825 __ call(&get_result); | 10824 __ call(&get_result); |
10826 __ LeaveInternalFrame(); | 10825 __ LeaveInternalFrame(); |
10827 | 10826 |
10828 // Left and right arguments are already on stack. | 10827 // Left and right arguments are already on stack. |
10829 __ pop(rcx); | 10828 __ pop(rcx); |
10830 // Push the operation result. The tail call to BinaryOp_Patch will | 10829 // Push the operation result. The tail call to BinaryOp_Patch will |
10831 // return it to the original caller. | 10830 // return it to the original caller. |
10832 __ push(rax); | 10831 __ push(rax); |
10833 | 10832 |
10834 // Push this stub's key. | 10833 // Push this stub's key. |
10835 __ movq(rax, Immediate(MinorKey())); | 10834 __ Push(Smi::FromInt(MinorKey())); |
10836 __ Integer32ToSmi(rax, rax); | |
10837 __ push(rax); | |
10838 | 10835 |
10839 // Although the operation and the type info are encoded into the key, | 10836 // Although the operation and the type info are encoded into the key, |
10840 // the encoding is opaque, so push them too. | 10837 // the encoding is opaque, so push them too. |
10841 __ movq(rax, Immediate(op_)); | 10838 __ Push(Smi::FromInt(op_)); |
10842 __ Integer32ToSmi(rax, rax); | |
10843 __ push(rax); | |
10844 | 10839 |
10845 __ movq(rax, Immediate(runtime_operands_type_)); | 10840 __ Push(Smi::FromInt(runtime_operands_type_)); |
10846 __ Integer32ToSmi(rax, rax); | |
10847 __ push(rax); | |
10848 | 10841 |
10849 __ push(rcx); | 10842 __ push(rcx); |
10850 | 10843 |
10851 // Perform patching to an appropriate fast case and return the result. | 10844 // Perform patching to an appropriate fast case and return the result. |
10852 __ TailCallExternalReference( | 10845 __ TailCallExternalReference( |
10853 ExternalReference(IC_Utility(IC::kBinaryOp_Patch)), | 10846 ExternalReference(IC_Utility(IC::kBinaryOp_Patch)), |
10854 6, | 10847 6, |
10855 1); | 10848 1); |
10856 | 10849 |
10857 // The entry point for the result calculation is assumed to be immediately | 10850 // The entry point for the result calculation is assumed to be immediately |
(...skipping 1092 matching lines...)
11950 } | 11943 } |
11951 | 11944 |
11952 #endif | 11945 #endif |
11953 | 11946 |
11954 | 11947 |
11955 #undef __ | 11948 #undef __ |
11956 | 11949 |
11957 } } // namespace v8::internal | 11950 } } // namespace v8::internal |
11958 | 11951 |
11959 #endif // V8_TARGET_ARCH_X64 | 11952 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |