OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_IA32 | 7 #if V8_TARGET_ARCH_IA32 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 336 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
347 | 347 |
348 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { | 348 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { |
349 // Update the static counter each time a new code stub is generated. | 349 // Update the static counter each time a new code stub is generated. |
350 isolate()->counters()->code_stubs()->Increment(); | 350 isolate()->counters()->code_stubs()->Increment(); |
351 | 351 |
352 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(); | 352 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(); |
353 int param_count = descriptor->GetEnvironmentParameterCount(); | 353 int param_count = descriptor->GetEnvironmentParameterCount(); |
354 { | 354 { |
355 // Call the runtime system in a fresh internal frame. | 355 // Call the runtime system in a fresh internal frame. |
356 FrameScope scope(masm, StackFrame::INTERNAL); | 356 FrameScope scope(masm, StackFrame::INTERNAL); |
357 ASSERT(param_count == 0 || | 357 DCHECK(param_count == 0 || |
358 eax.is(descriptor->GetEnvironmentParameterRegister( | 358 eax.is(descriptor->GetEnvironmentParameterRegister( |
359 param_count - 1))); | 359 param_count - 1))); |
360 // Push arguments | 360 // Push arguments |
361 for (int i = 0; i < param_count; ++i) { | 361 for (int i = 0; i < param_count; ++i) { |
362 __ push(descriptor->GetEnvironmentParameterRegister(i)); | 362 __ push(descriptor->GetEnvironmentParameterRegister(i)); |
363 } | 363 } |
364 ExternalReference miss = descriptor->miss_handler(); | 364 ExternalReference miss = descriptor->miss_handler(); |
365 __ CallExternalReference(miss, param_count); | 365 __ CallExternalReference(miss, param_count); |
366 } | 366 } |
367 | 367 |
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
426 // them into xmm0 and xmm1 if they are. Jump to label not_numbers if | 426 // them into xmm0 and xmm1 if they are. Jump to label not_numbers if |
427 // either operand is not a number. Operands are in edx and eax. | 427 // either operand is not a number. Operands are in edx and eax. |
428 // Leaves operands unchanged. | 428 // Leaves operands unchanged. |
429 static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers); | 429 static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers); |
430 }; | 430 }; |
431 | 431 |
432 | 432 |
433 void DoubleToIStub::Generate(MacroAssembler* masm) { | 433 void DoubleToIStub::Generate(MacroAssembler* masm) { |
434 Register input_reg = this->source(); | 434 Register input_reg = this->source(); |
435 Register final_result_reg = this->destination(); | 435 Register final_result_reg = this->destination(); |
436 ASSERT(is_truncating()); | 436 DCHECK(is_truncating()); |
437 | 437 |
438 Label check_negative, process_64_bits, done, done_no_stash; | 438 Label check_negative, process_64_bits, done, done_no_stash; |
439 | 439 |
440 int double_offset = offset(); | 440 int double_offset = offset(); |
441 | 441 |
442 // Account for return address and saved regs if input is esp. | 442 // Account for return address and saved regs if input is esp. |
443 if (input_reg.is(esp)) double_offset += 3 * kPointerSize; | 443 if (input_reg.is(esp)) double_offset += 3 * kPointerSize; |
444 | 444 |
445 MemOperand mantissa_operand(MemOperand(input_reg, double_offset)); | 445 MemOperand mantissa_operand(MemOperand(input_reg, double_offset)); |
446 MemOperand exponent_operand(MemOperand(input_reg, | 446 MemOperand exponent_operand(MemOperand(input_reg, |
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
538 } | 538 } |
539 __ cmov(greater, result_reg, scratch1); | 539 __ cmov(greater, result_reg, scratch1); |
540 | 540 |
541 // Restore registers | 541 // Restore registers |
542 __ bind(&done); | 542 __ bind(&done); |
543 if (stash_exponent_copy) { | 543 if (stash_exponent_copy) { |
544 __ add(esp, Immediate(kDoubleSize / 2)); | 544 __ add(esp, Immediate(kDoubleSize / 2)); |
545 } | 545 } |
546 __ bind(&done_no_stash); | 546 __ bind(&done_no_stash); |
547 if (!final_result_reg.is(result_reg)) { | 547 if (!final_result_reg.is(result_reg)) { |
548 ASSERT(final_result_reg.is(ecx)); | 548 DCHECK(final_result_reg.is(ecx)); |
549 __ mov(final_result_reg, result_reg); | 549 __ mov(final_result_reg, result_reg); |
550 } | 550 } |
551 __ pop(save_reg); | 551 __ pop(save_reg); |
552 __ pop(scratch1); | 552 __ pop(scratch1); |
553 __ ret(0); | 553 __ ret(0); |
554 } | 554 } |
555 | 555 |
556 | 556 |
557 void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm, | 557 void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm, |
558 Register number) { | 558 Register number) { |
(...skipping 1197 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1756 // (11) Sliced string. Replace subject with parent. Go to (5a). | 1756 // (11) Sliced string. Replace subject with parent. Go to (5a). |
1757 // Load offset into edi and replace subject string with parent. | 1757 // Load offset into edi and replace subject string with parent. |
1758 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset)); | 1758 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset)); |
1759 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset)); | 1759 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset)); |
1760 __ jmp(&check_underlying); // Go to (5a). | 1760 __ jmp(&check_underlying); // Go to (5a). |
1761 #endif // V8_INTERPRETED_REGEXP | 1761 #endif // V8_INTERPRETED_REGEXP |
1762 } | 1762 } |
1763 | 1763 |
1764 | 1764 |
1765 static int NegativeComparisonResult(Condition cc) { | 1765 static int NegativeComparisonResult(Condition cc) { |
1766 ASSERT(cc != equal); | 1766 DCHECK(cc != equal); |
1767 ASSERT((cc == less) || (cc == less_equal) | 1767 DCHECK((cc == less) || (cc == less_equal) |
1768 || (cc == greater) || (cc == greater_equal)); | 1768 || (cc == greater) || (cc == greater_equal)); |
1769 return (cc == greater || cc == greater_equal) ? LESS : GREATER; | 1769 return (cc == greater || cc == greater_equal) ? LESS : GREATER; |
1770 } | 1770 } |
1771 | 1771 |
1772 | 1772 |
1773 static void CheckInputType(MacroAssembler* masm, | 1773 static void CheckInputType(MacroAssembler* masm, |
1774 Register input, | 1774 Register input, |
1775 CompareIC::State expected, | 1775 CompareIC::State expected, |
1776 Label* fail) { | 1776 Label* fail) { |
1777 Label ok; | 1777 Label ok; |
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1867 if (cc == equal && strict()) { | 1867 if (cc == equal && strict()) { |
1868 Label slow; // Fallthrough label. | 1868 Label slow; // Fallthrough label. |
1869 Label not_smis; | 1869 Label not_smis; |
1870 // If we're doing a strict equality comparison, we don't have to do | 1870 // If we're doing a strict equality comparison, we don't have to do |
1871 // type conversion, so we generate code to do fast comparison for objects | 1871 // type conversion, so we generate code to do fast comparison for objects |
1872 // and oddballs. Non-smi numbers and strings still go through the usual | 1872 // and oddballs. Non-smi numbers and strings still go through the usual |
1873 // slow-case code. | 1873 // slow-case code. |
1874 // If either is a Smi (we know that not both are), then they can only | 1874 // If either is a Smi (we know that not both are), then they can only |
1875 // be equal if the other is a HeapNumber. If so, use the slow case. | 1875 // be equal if the other is a HeapNumber. If so, use the slow case. |
1876 STATIC_ASSERT(kSmiTag == 0); | 1876 STATIC_ASSERT(kSmiTag == 0); |
1877 ASSERT_EQ(0, Smi::FromInt(0)); | 1877 DCHECK_EQ(0, Smi::FromInt(0)); |
1878 __ mov(ecx, Immediate(kSmiTagMask)); | 1878 __ mov(ecx, Immediate(kSmiTagMask)); |
1879 __ and_(ecx, eax); | 1879 __ and_(ecx, eax); |
1880 __ test(ecx, edx); | 1880 __ test(ecx, edx); |
1881 __ j(not_zero, ¬_smis, Label::kNear); | 1881 __ j(not_zero, ¬_smis, Label::kNear); |
1882 // One operand is a smi. | 1882 // One operand is a smi. |
1883 | 1883 |
1884 // Check whether the non-smi is a heap number. | 1884 // Check whether the non-smi is a heap number. |
1885 STATIC_ASSERT(kSmiTagMask == 1); | 1885 STATIC_ASSERT(kSmiTagMask == 1); |
1886 // ecx still holds eax & kSmiTag, which is either zero or one. | 1886 // ecx still holds eax & kSmiTag, which is either zero or one. |
1887 __ sub(ecx, Immediate(0x01)); | 1887 __ sub(ecx, Immediate(0x01)); |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1947 __ mov(eax, 0); // equal | 1947 __ mov(eax, 0); // equal |
1948 __ mov(ecx, Immediate(Smi::FromInt(1))); | 1948 __ mov(ecx, Immediate(Smi::FromInt(1))); |
1949 __ cmov(above, eax, ecx); | 1949 __ cmov(above, eax, ecx); |
1950 __ mov(ecx, Immediate(Smi::FromInt(-1))); | 1950 __ mov(ecx, Immediate(Smi::FromInt(-1))); |
1951 __ cmov(below, eax, ecx); | 1951 __ cmov(below, eax, ecx); |
1952 __ ret(0); | 1952 __ ret(0); |
1953 | 1953 |
1954 // If one of the numbers was NaN, then the result is always false. | 1954 // If one of the numbers was NaN, then the result is always false. |
1955 // The cc is never not-equal. | 1955 // The cc is never not-equal. |
1956 __ bind(&unordered); | 1956 __ bind(&unordered); |
1957 ASSERT(cc != not_equal); | 1957 DCHECK(cc != not_equal); |
1958 if (cc == less || cc == less_equal) { | 1958 if (cc == less || cc == less_equal) { |
1959 __ mov(eax, Immediate(Smi::FromInt(1))); | 1959 __ mov(eax, Immediate(Smi::FromInt(1))); |
1960 } else { | 1960 } else { |
1961 __ mov(eax, Immediate(Smi::FromInt(-1))); | 1961 __ mov(eax, Immediate(Smi::FromInt(-1))); |
1962 } | 1962 } |
1963 __ ret(0); | 1963 __ ret(0); |
1964 | 1964 |
1965 // The number comparison code did not provide a valid result. | 1965 // The number comparison code did not provide a valid result. |
1966 __ bind(&non_number_comparison); | 1966 __ bind(&non_number_comparison); |
1967 | 1967 |
(...skipping 784 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2752 // If call site patching is requested the stack will have the delta from the | 2752 // If call site patching is requested the stack will have the delta from the |
2753 // return address to the cmp instruction just below the return address. This | 2753 // return address to the cmp instruction just below the return address. This |
2754 // also means that call site patching can only take place with arguments in | 2754 // also means that call site patching can only take place with arguments in |
2755 // registers. TOS looks like this when call site patching is requested | 2755 // registers. TOS looks like this when call site patching is requested |
2756 // | 2756 // |
2757 // esp[0] : return address | 2757 // esp[0] : return address |
2758 // esp[4] : delta from return address to cmp instruction | 2758 // esp[4] : delta from return address to cmp instruction |
2759 // | 2759 // |
2760 void InstanceofStub::Generate(MacroAssembler* masm) { | 2760 void InstanceofStub::Generate(MacroAssembler* masm) { |
2761 // Call site inlining and patching implies arguments in registers. | 2761 // Call site inlining and patching implies arguments in registers. |
2762 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck()); | 2762 DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck()); |
2763 | 2763 |
2764 // Fixed register usage throughout the stub. | 2764 // Fixed register usage throughout the stub. |
2765 Register object = eax; // Object (lhs). | 2765 Register object = eax; // Object (lhs). |
2766 Register map = ebx; // Map of the object. | 2766 Register map = ebx; // Map of the object. |
2767 Register function = edx; // Function (rhs). | 2767 Register function = edx; // Function (rhs). |
2768 Register prototype = edi; // Prototype of the function. | 2768 Register prototype = edi; // Prototype of the function. |
2769 Register scratch = ecx; | 2769 Register scratch = ecx; |
2770 | 2770 |
2771 // Constants describing the call site code to patch. | 2771 // Constants describing the call site code to patch. |
2772 static const int kDeltaToCmpImmediate = 2; | 2772 static const int kDeltaToCmpImmediate = 2; |
2773 static const int kDeltaToMov = 8; | 2773 static const int kDeltaToMov = 8; |
2774 static const int kDeltaToMovImmediate = 9; | 2774 static const int kDeltaToMovImmediate = 9; |
2775 static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b); | 2775 static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b); |
2776 static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d); | 2776 static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d); |
2777 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8); | 2777 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8); |
2778 | 2778 |
2779 ASSERT_EQ(object.code(), InstanceofStub::left().code()); | 2779 DCHECK_EQ(object.code(), InstanceofStub::left().code()); |
2780 ASSERT_EQ(function.code(), InstanceofStub::right().code()); | 2780 DCHECK_EQ(function.code(), InstanceofStub::right().code()); |
2781 | 2781 |
2782 // Get the object and function - they are always both needed. | 2782 // Get the object and function - they are always both needed. |
2783 Label slow, not_js_object; | 2783 Label slow, not_js_object; |
2784 if (!HasArgsInRegisters()) { | 2784 if (!HasArgsInRegisters()) { |
2785 __ mov(object, Operand(esp, 2 * kPointerSize)); | 2785 __ mov(object, Operand(esp, 2 * kPointerSize)); |
2786 __ mov(function, Operand(esp, 1 * kPointerSize)); | 2786 __ mov(function, Operand(esp, 1 * kPointerSize)); |
2787 } | 2787 } |
2788 | 2788 |
2789 // Check that the left hand is a JS object. | 2789 // Check that the left hand is a JS object. |
2790 __ JumpIfSmi(object, ¬_js_object); | 2790 __ JumpIfSmi(object, ¬_js_object); |
(...skipping 21 matching lines...) Expand all Loading... |
2812 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); | 2812 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); |
2813 | 2813 |
2814 // Update the global instanceof or call site inlined cache with the current | 2814 // Update the global instanceof or call site inlined cache with the current |
2815 // map and function. The cached answer will be set when it is known below. | 2815 // map and function. The cached answer will be set when it is known below. |
2816 if (!HasCallSiteInlineCheck()) { | 2816 if (!HasCallSiteInlineCheck()) { |
2817 __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex); | 2817 __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex); |
2818 __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex); | 2818 __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex); |
2819 } else { | 2819 } else { |
2820 // The constants for the code patching are based on no push instructions | 2820 // The constants for the code patching are based on no push instructions |
2821 // at the call site. | 2821 // at the call site. |
2822 ASSERT(HasArgsInRegisters()); | 2822 DCHECK(HasArgsInRegisters()); |
2823 // Get return address and delta to inlined map check. | 2823 // Get return address and delta to inlined map check. |
2824 __ mov(scratch, Operand(esp, 0 * kPointerSize)); | 2824 __ mov(scratch, Operand(esp, 0 * kPointerSize)); |
2825 __ sub(scratch, Operand(esp, 1 * kPointerSize)); | 2825 __ sub(scratch, Operand(esp, 1 * kPointerSize)); |
2826 if (FLAG_debug_code) { | 2826 if (FLAG_debug_code) { |
2827 __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1); | 2827 __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1); |
2828 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1); | 2828 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1); |
2829 __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2); | 2829 __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2); |
2830 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2); | 2830 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2); |
2831 } | 2831 } |
2832 __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate)); | 2832 __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate)); |
(...skipping 184 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3017 __ CheckMap(index_, | 3017 __ CheckMap(index_, |
3018 masm->isolate()->factory()->heap_number_map(), | 3018 masm->isolate()->factory()->heap_number_map(), |
3019 index_not_number_, | 3019 index_not_number_, |
3020 DONT_DO_SMI_CHECK); | 3020 DONT_DO_SMI_CHECK); |
3021 call_helper.BeforeCall(masm); | 3021 call_helper.BeforeCall(masm); |
3022 __ push(object_); | 3022 __ push(object_); |
3023 __ push(index_); // Consumed by runtime conversion function. | 3023 __ push(index_); // Consumed by runtime conversion function. |
3024 if (index_flags_ == STRING_INDEX_IS_NUMBER) { | 3024 if (index_flags_ == STRING_INDEX_IS_NUMBER) { |
3025 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); | 3025 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); |
3026 } else { | 3026 } else { |
3027 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); | 3027 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); |
3028 // NumberToSmi discards numbers that are not exact integers. | 3028 // NumberToSmi discards numbers that are not exact integers. |
3029 __ CallRuntime(Runtime::kNumberToSmi, 1); | 3029 __ CallRuntime(Runtime::kNumberToSmi, 1); |
3030 } | 3030 } |
3031 if (!index_.is(eax)) { | 3031 if (!index_.is(eax)) { |
3032 // Save the conversion result before the pop instructions below | 3032 // Save the conversion result before the pop instructions below |
3033 // have a chance to overwrite it. | 3033 // have a chance to overwrite it. |
3034 __ mov(index_, eax); | 3034 __ mov(index_, eax); |
3035 } | 3035 } |
3036 __ pop(object_); | 3036 __ pop(object_); |
3037 // Reload the instance type. | 3037 // Reload the instance type. |
(...skipping 25 matching lines...) Expand all Loading... |
3063 } | 3063 } |
3064 | 3064 |
3065 | 3065 |
3066 // ------------------------------------------------------------------------- | 3066 // ------------------------------------------------------------------------- |
3067 // StringCharFromCodeGenerator | 3067 // StringCharFromCodeGenerator |
3068 | 3068 |
3069 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { | 3069 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { |
3070 // Fast case of Heap::LookupSingleCharacterStringFromCode. | 3070 // Fast case of Heap::LookupSingleCharacterStringFromCode. |
3071 STATIC_ASSERT(kSmiTag == 0); | 3071 STATIC_ASSERT(kSmiTag == 0); |
3072 STATIC_ASSERT(kSmiShiftSize == 0); | 3072 STATIC_ASSERT(kSmiShiftSize == 0); |
3073 ASSERT(IsPowerOf2(String::kMaxOneByteCharCode + 1)); | 3073 DCHECK(IsPowerOf2(String::kMaxOneByteCharCode + 1)); |
3074 __ test(code_, | 3074 __ test(code_, |
3075 Immediate(kSmiTagMask | | 3075 Immediate(kSmiTagMask | |
3076 ((~String::kMaxOneByteCharCode) << kSmiTagSize))); | 3076 ((~String::kMaxOneByteCharCode) << kSmiTagSize))); |
3077 __ j(not_zero, &slow_case_); | 3077 __ j(not_zero, &slow_case_); |
3078 | 3078 |
3079 Factory* factory = masm->isolate()->factory(); | 3079 Factory* factory = masm->isolate()->factory(); |
3080 __ Move(result_, Immediate(factory->single_character_string_cache())); | 3080 __ Move(result_, Immediate(factory->single_character_string_cache())); |
3081 STATIC_ASSERT(kSmiTag == 0); | 3081 STATIC_ASSERT(kSmiTag == 0); |
3082 STATIC_ASSERT(kSmiTagSize == 1); | 3082 STATIC_ASSERT(kSmiTagSize == 1); |
3083 STATIC_ASSERT(kSmiShiftSize == 0); | 3083 STATIC_ASSERT(kSmiShiftSize == 0); |
(...skipping 25 matching lines...) Expand all Loading... |
3109 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); | 3109 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); |
3110 } | 3110 } |
3111 | 3111 |
3112 | 3112 |
3113 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, | 3113 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, |
3114 Register dest, | 3114 Register dest, |
3115 Register src, | 3115 Register src, |
3116 Register count, | 3116 Register count, |
3117 Register scratch, | 3117 Register scratch, |
3118 String::Encoding encoding) { | 3118 String::Encoding encoding) { |
3119 ASSERT(!scratch.is(dest)); | 3119 DCHECK(!scratch.is(dest)); |
3120 ASSERT(!scratch.is(src)); | 3120 DCHECK(!scratch.is(src)); |
3121 ASSERT(!scratch.is(count)); | 3121 DCHECK(!scratch.is(count)); |
3122 | 3122 |
3123 // Nothing to do for zero characters. | 3123 // Nothing to do for zero characters. |
3124 Label done; | 3124 Label done; |
3125 __ test(count, count); | 3125 __ test(count, count); |
3126 __ j(zero, &done); | 3126 __ j(zero, &done); |
3127 | 3127 |
3128 // Make count the number of bytes to copy. | 3128 // Make count the number of bytes to copy. |
3129 if (encoding == String::TWO_BYTE_ENCODING) { | 3129 if (encoding == String::TWO_BYTE_ENCODING) { |
3130 __ shl(count, 1); | 3130 __ shl(count, 1); |
3131 } | 3131 } |
(...skipping 493 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3625 } | 3625 } |
3626 | 3626 |
3627 // Tail call into the stub that handles binary operations with allocation | 3627 // Tail call into the stub that handles binary operations with allocation |
3628 // sites. | 3628 // sites. |
3629 BinaryOpWithAllocationSiteStub stub(isolate(), state_); | 3629 BinaryOpWithAllocationSiteStub stub(isolate(), state_); |
3630 __ TailCallStub(&stub); | 3630 __ TailCallStub(&stub); |
3631 } | 3631 } |
3632 | 3632 |
3633 | 3633 |
3634 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 3634 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
3635 ASSERT(state_ == CompareIC::SMI); | 3635 DCHECK(state_ == CompareIC::SMI); |
3636 Label miss; | 3636 Label miss; |
3637 __ mov(ecx, edx); | 3637 __ mov(ecx, edx); |
3638 __ or_(ecx, eax); | 3638 __ or_(ecx, eax); |
3639 __ JumpIfNotSmi(ecx, &miss, Label::kNear); | 3639 __ JumpIfNotSmi(ecx, &miss, Label::kNear); |
3640 | 3640 |
3641 if (GetCondition() == equal) { | 3641 if (GetCondition() == equal) { |
3642 // For equality we do not care about the sign of the result. | 3642 // For equality we do not care about the sign of the result. |
3643 __ sub(eax, edx); | 3643 __ sub(eax, edx); |
3644 } else { | 3644 } else { |
3645 Label done; | 3645 Label done; |
3646 __ sub(edx, eax); | 3646 __ sub(edx, eax); |
3647 __ j(no_overflow, &done, Label::kNear); | 3647 __ j(no_overflow, &done, Label::kNear); |
3648 // Correct sign of result in case of overflow. | 3648 // Correct sign of result in case of overflow. |
3649 __ not_(edx); | 3649 __ not_(edx); |
3650 __ bind(&done); | 3650 __ bind(&done); |
3651 __ mov(eax, edx); | 3651 __ mov(eax, edx); |
3652 } | 3652 } |
3653 __ ret(0); | 3653 __ ret(0); |
3654 | 3654 |
3655 __ bind(&miss); | 3655 __ bind(&miss); |
3656 GenerateMiss(masm); | 3656 GenerateMiss(masm); |
3657 } | 3657 } |
3658 | 3658 |
3659 | 3659 |
3660 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { | 3660 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { |
3661 ASSERT(state_ == CompareIC::NUMBER); | 3661 DCHECK(state_ == CompareIC::NUMBER); |
3662 | 3662 |
3663 Label generic_stub; | 3663 Label generic_stub; |
3664 Label unordered, maybe_undefined1, maybe_undefined2; | 3664 Label unordered, maybe_undefined1, maybe_undefined2; |
3665 Label miss; | 3665 Label miss; |
3666 | 3666 |
3667 if (left_ == CompareIC::SMI) { | 3667 if (left_ == CompareIC::SMI) { |
3668 __ JumpIfNotSmi(edx, &miss); | 3668 __ JumpIfNotSmi(edx, &miss); |
3669 } | 3669 } |
3670 if (right_ == CompareIC::SMI) { | 3670 if (right_ == CompareIC::SMI) { |
3671 __ JumpIfNotSmi(eax, &miss); | 3671 __ JumpIfNotSmi(eax, &miss); |
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3733 __ cmp(edx, Immediate(isolate()->factory()->undefined_value())); | 3733 __ cmp(edx, Immediate(isolate()->factory()->undefined_value())); |
3734 __ j(equal, &unordered); | 3734 __ j(equal, &unordered); |
3735 } | 3735 } |
3736 | 3736 |
3737 __ bind(&miss); | 3737 __ bind(&miss); |
3738 GenerateMiss(masm); | 3738 GenerateMiss(masm); |
3739 } | 3739 } |
3740 | 3740 |
3741 | 3741 |
3742 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) { | 3742 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) { |
3743 ASSERT(state_ == CompareIC::INTERNALIZED_STRING); | 3743 DCHECK(state_ == CompareIC::INTERNALIZED_STRING); |
3744 ASSERT(GetCondition() == equal); | 3744 DCHECK(GetCondition() == equal); |
3745 | 3745 |
3746 // Registers containing left and right operands respectively. | 3746 // Registers containing left and right operands respectively. |
3747 Register left = edx; | 3747 Register left = edx; |
3748 Register right = eax; | 3748 Register right = eax; |
3749 Register tmp1 = ecx; | 3749 Register tmp1 = ecx; |
3750 Register tmp2 = ebx; | 3750 Register tmp2 = ebx; |
3751 | 3751 |
3752 // Check that both operands are heap objects. | 3752 // Check that both operands are heap objects. |
3753 Label miss; | 3753 Label miss; |
3754 __ mov(tmp1, left); | 3754 __ mov(tmp1, left); |
3755 STATIC_ASSERT(kSmiTag == 0); | 3755 STATIC_ASSERT(kSmiTag == 0); |
3756 __ and_(tmp1, right); | 3756 __ and_(tmp1, right); |
3757 __ JumpIfSmi(tmp1, &miss, Label::kNear); | 3757 __ JumpIfSmi(tmp1, &miss, Label::kNear); |
3758 | 3758 |
3759 // Check that both operands are internalized strings. | 3759 // Check that both operands are internalized strings. |
3760 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 3760 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
3761 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 3761 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
3762 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 3762 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
3763 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 3763 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
3764 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); | 3764 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); |
3765 __ or_(tmp1, tmp2); | 3765 __ or_(tmp1, tmp2); |
3766 __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); | 3766 __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); |
3767 __ j(not_zero, &miss, Label::kNear); | 3767 __ j(not_zero, &miss, Label::kNear); |
3768 | 3768 |
3769 // Internalized strings are compared by identity. | 3769 // Internalized strings are compared by identity. |
3770 Label done; | 3770 Label done; |
3771 __ cmp(left, right); | 3771 __ cmp(left, right); |
3772 // Make sure eax is non-zero. At this point input operands are | 3772 // Make sure eax is non-zero. At this point input operands are |
3773 // guaranteed to be non-zero. | 3773 // guaranteed to be non-zero. |
3774 ASSERT(right.is(eax)); | 3774 DCHECK(right.is(eax)); |
3775 __ j(not_equal, &done, Label::kNear); | 3775 __ j(not_equal, &done, Label::kNear); |
3776 STATIC_ASSERT(EQUAL == 0); | 3776 STATIC_ASSERT(EQUAL == 0); |
3777 STATIC_ASSERT(kSmiTag == 0); | 3777 STATIC_ASSERT(kSmiTag == 0); |
3778 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); | 3778 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); |
3779 __ bind(&done); | 3779 __ bind(&done); |
3780 __ ret(0); | 3780 __ ret(0); |
3781 | 3781 |
3782 __ bind(&miss); | 3782 __ bind(&miss); |
3783 GenerateMiss(masm); | 3783 GenerateMiss(masm); |
3784 } | 3784 } |
3785 | 3785 |
3786 | 3786 |
3787 void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) { | 3787 void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) { |
3788 ASSERT(state_ == CompareIC::UNIQUE_NAME); | 3788 DCHECK(state_ == CompareIC::UNIQUE_NAME); |
3789 ASSERT(GetCondition() == equal); | 3789 DCHECK(GetCondition() == equal); |
3790 | 3790 |
3791 // Registers containing left and right operands respectively. | 3791 // Registers containing left and right operands respectively. |
3792 Register left = edx; | 3792 Register left = edx; |
3793 Register right = eax; | 3793 Register right = eax; |
3794 Register tmp1 = ecx; | 3794 Register tmp1 = ecx; |
3795 Register tmp2 = ebx; | 3795 Register tmp2 = ebx; |
3796 | 3796 |
3797 // Check that both operands are heap objects. | 3797 // Check that both operands are heap objects. |
3798 Label miss; | 3798 Label miss; |
3799 __ mov(tmp1, left); | 3799 __ mov(tmp1, left); |
3800 STATIC_ASSERT(kSmiTag == 0); | 3800 STATIC_ASSERT(kSmiTag == 0); |
3801 __ and_(tmp1, right); | 3801 __ and_(tmp1, right); |
3802 __ JumpIfSmi(tmp1, &miss, Label::kNear); | 3802 __ JumpIfSmi(tmp1, &miss, Label::kNear); |
3803 | 3803 |
3804 // Check that both operands are unique names. This leaves the instance | 3804 // Check that both operands are unique names. This leaves the instance |
3805 // types loaded in tmp1 and tmp2. | 3805 // types loaded in tmp1 and tmp2. |
3806 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 3806 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
3807 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 3807 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
3808 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 3808 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
3809 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 3809 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
3810 | 3810 |
3811 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear); | 3811 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear); |
3812 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear); | 3812 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear); |
3813 | 3813 |
3814 // Unique names are compared by identity. | 3814 // Unique names are compared by identity. |
3815 Label done; | 3815 Label done; |
3816 __ cmp(left, right); | 3816 __ cmp(left, right); |
3817 // Make sure eax is non-zero. At this point input operands are | 3817 // Make sure eax is non-zero. At this point input operands are |
3818 // guaranteed to be non-zero. | 3818 // guaranteed to be non-zero. |
3819 ASSERT(right.is(eax)); | 3819 DCHECK(right.is(eax)); |
3820 __ j(not_equal, &done, Label::kNear); | 3820 __ j(not_equal, &done, Label::kNear); |
3821 STATIC_ASSERT(EQUAL == 0); | 3821 STATIC_ASSERT(EQUAL == 0); |
3822 STATIC_ASSERT(kSmiTag == 0); | 3822 STATIC_ASSERT(kSmiTag == 0); |
3823 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); | 3823 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); |
3824 __ bind(&done); | 3824 __ bind(&done); |
3825 __ ret(0); | 3825 __ ret(0); |
3826 | 3826 |
3827 __ bind(&miss); | 3827 __ bind(&miss); |
3828 GenerateMiss(masm); | 3828 GenerateMiss(masm); |
3829 } | 3829 } |
3830 | 3830 |
3831 | 3831 |
3832 void ICCompareStub::GenerateStrings(MacroAssembler* masm) { | 3832 void ICCompareStub::GenerateStrings(MacroAssembler* masm) { |
3833 ASSERT(state_ == CompareIC::STRING); | 3833 DCHECK(state_ == CompareIC::STRING); |
3834 Label miss; | 3834 Label miss; |
3835 | 3835 |
3836 bool equality = Token::IsEqualityOp(op_); | 3836 bool equality = Token::IsEqualityOp(op_); |
3837 | 3837 |
3838 // Registers containing left and right operands respectively. | 3838 // Registers containing left and right operands respectively. |
3839 Register left = edx; | 3839 Register left = edx; |
3840 Register right = eax; | 3840 Register right = eax; |
3841 Register tmp1 = ecx; | 3841 Register tmp1 = ecx; |
3842 Register tmp2 = ebx; | 3842 Register tmp2 = ebx; |
3843 Register tmp3 = edi; | 3843 Register tmp3 = edi; |
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3877 // non-equality compare, we still need to determine the order. We | 3877 // non-equality compare, we still need to determine the order. We |
3878 // also know they are both strings. | 3878 // also know they are both strings. |
3879 if (equality) { | 3879 if (equality) { |
3880 Label do_compare; | 3880 Label do_compare; |
3881 STATIC_ASSERT(kInternalizedTag == 0); | 3881 STATIC_ASSERT(kInternalizedTag == 0); |
3882 __ or_(tmp1, tmp2); | 3882 __ or_(tmp1, tmp2); |
3883 __ test(tmp1, Immediate(kIsNotInternalizedMask)); | 3883 __ test(tmp1, Immediate(kIsNotInternalizedMask)); |
3884 __ j(not_zero, &do_compare, Label::kNear); | 3884 __ j(not_zero, &do_compare, Label::kNear); |
3885 // Make sure eax is non-zero. At this point input operands are | 3885 // Make sure eax is non-zero. At this point input operands are |
3886 // guaranteed to be non-zero. | 3886 // guaranteed to be non-zero. |
3887 ASSERT(right.is(eax)); | 3887 DCHECK(right.is(eax)); |
3888 __ ret(0); | 3888 __ ret(0); |
3889 __ bind(&do_compare); | 3889 __ bind(&do_compare); |
3890 } | 3890 } |
3891 | 3891 |
3892 // Check that both strings are sequential ASCII. | 3892 // Check that both strings are sequential ASCII. |
3893 Label runtime; | 3893 Label runtime; |
3894 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime); | 3894 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime); |
3895 | 3895 |
3896 // Compare flat ASCII strings. Returns when done. | 3896 // Compare flat ASCII strings. Returns when done. |
3897 if (equality) { | 3897 if (equality) { |
(...skipping 15 matching lines...) Expand all Loading... |
3913 } else { | 3913 } else { |
3914 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 3914 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
3915 } | 3915 } |
3916 | 3916 |
3917 __ bind(&miss); | 3917 __ bind(&miss); |
3918 GenerateMiss(masm); | 3918 GenerateMiss(masm); |
3919 } | 3919 } |
3920 | 3920 |
3921 | 3921 |
3922 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { | 3922 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { |
// Compare IC stub specialized for the OBJECT state: both operands must be
// JS objects, and equality is pointer identity. Any operand that is a smi
// or not of JS_OBJECT_TYPE falls through to the generic miss handler.
// Inputs follow the compare IC convention visible in the sibling stubs
// above: left operand in edx, right operand in eax; result returned in eax.
3923 ASSERT(state_ == CompareIC::OBJECT); | 3923 DCHECK(state_ == CompareIC::OBJECT); |
3924 Label miss; | 3924 Label miss; |
// Reject smis in one test: with kSmiTag == 0 the tag bit of (edx & eax) is
// set only when BOTH operands are heap objects, so JumpIfSmi on the AND
// jumps to miss if either operand is a smi.
3925 __ mov(ecx, edx); | 3925 __ mov(ecx, edx); |
3926 __ and_(ecx, eax); | 3926 __ and_(ecx, eax); |
3927 __ JumpIfSmi(ecx, &miss, Label::kNear); | 3927 __ JumpIfSmi(ecx, &miss, Label::kNear); |
3928 | 3928 |
// Both operands must be exactly JS_OBJECT_TYPE; ecx is clobbered as the
// map/instance-type scratch register by CmpObjectType.
3929 __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx); | 3929 __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx); |
3930 __ j(not_equal, &miss, Label::kNear); | 3930 __ j(not_equal, &miss, Label::kNear); |
3931 __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx); | 3931 __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx); |
3932 __ j(not_equal, &miss, Label::kNear); | 3932 __ j(not_equal, &miss, Label::kNear); |
3933 | 3933 |
// This stub is only generated for equality ops, so identity is the whole
// comparison: eax - edx is 0 exactly when the pointers are identical, and
// 0 is the smi encoding of EQUAL (STATIC_ASSERTs earlier in this file:
// EQUAL == 0, kSmiTag == 0), so the subtraction doubles as the result.
3934 ASSERT(GetCondition() == equal); | 3934 DCHECK(GetCondition() == equal); |
3935 __ sub(eax, edx); | 3935 __ sub(eax, edx); |
3936 __ ret(0); | 3936 __ ret(0); |
3937 | 3937 |
// Anything that failed the type checks re-enters the IC miss path to
// patch in a more general stub.
3938 __ bind(&miss); | 3938 __ bind(&miss); |
3939 GenerateMiss(masm); | 3939 GenerateMiss(masm); |
3940 } | 3940 } |
3941 | 3941 |
3942 | 3942 |
3943 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) { | 3943 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) { |
3944 Label miss; | 3944 Label miss; |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3988 // the property. This function may return false negatives, so miss_label | 3988 // the property. This function may return false negatives, so miss_label |
3989 // must always call a backup property check that is complete. | 3989 // must always call a backup property check that is complete. |
3990 // This function is safe to call if the receiver has fast properties. | 3990 // This function is safe to call if the receiver has fast properties. |
3991 // Name must be a unique name and receiver must be a heap object. | 3991 // Name must be a unique name and receiver must be a heap object. |
3992 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, | 3992 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, |
3993 Label* miss, | 3993 Label* miss, |
3994 Label* done, | 3994 Label* done, |
3995 Register properties, | 3995 Register properties, |
3996 Handle<Name> name, | 3996 Handle<Name> name, |
3997 Register r0) { | 3997 Register r0) { |
3998 ASSERT(name->IsUniqueName()); | 3998 DCHECK(name->IsUniqueName()); |
3999 | 3999 |
4000 // If names of slots in range from 1 to kProbes - 1 for the hash value are | 4000 // If names of slots in range from 1 to kProbes - 1 for the hash value are |
4001 // not equal to the name and kProbes-th slot is not used (its name is the | 4001 // not equal to the name and kProbes-th slot is not used (its name is the |
4002 // undefined value), it guarantees the hash table doesn't contain the | 4002 // undefined value), it guarantees the hash table doesn't contain the |
4003 // property. It's true even if some slots represent deleted properties | 4003 // property. It's true even if some slots represent deleted properties |
4004 // (their names are the hole value). | 4004 // (their names are the hole value). |
4005 for (int i = 0; i < kInlinedProbes; i++) { | 4005 for (int i = 0; i < kInlinedProbes; i++) { |
4006 // Compute the masked index: (hash + i + i * i) & mask. | 4006 // Compute the masked index: (hash + i + i * i) & mask. |
4007 Register index = r0; | 4007 Register index = r0; |
4008 // Capacity is smi 2^n. | 4008 // Capacity is smi 2^n. |
4009 __ mov(index, FieldOperand(properties, kCapacityOffset)); | 4009 __ mov(index, FieldOperand(properties, kCapacityOffset)); |
4010 __ dec(index); | 4010 __ dec(index); |
4011 __ and_(index, | 4011 __ and_(index, |
4012 Immediate(Smi::FromInt(name->Hash() + | 4012 Immediate(Smi::FromInt(name->Hash() + |
4013 NameDictionary::GetProbeOffset(i)))); | 4013 NameDictionary::GetProbeOffset(i)))); |
4014 | 4014 |
4015 // Scale the index by multiplying by the entry size. | 4015 // Scale the index by multiplying by the entry size. |
4016 ASSERT(NameDictionary::kEntrySize == 3); | 4016 DCHECK(NameDictionary::kEntrySize == 3); |
4017 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. | 4017 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. |
4018 Register entity_name = r0; | 4018 Register entity_name = r0; |
4019 // Having undefined at this place means the name is not contained. | 4019 // Having undefined at this place means the name is not contained. |
4020 ASSERT_EQ(kSmiTagSize, 1); | 4020 DCHECK_EQ(kSmiTagSize, 1); |
4021 __ mov(entity_name, Operand(properties, index, times_half_pointer_size, | 4021 __ mov(entity_name, Operand(properties, index, times_half_pointer_size, |
4022 kElementsStartOffset - kHeapObjectTag)); | 4022 kElementsStartOffset - kHeapObjectTag)); |
4023 __ cmp(entity_name, masm->isolate()->factory()->undefined_value()); | 4023 __ cmp(entity_name, masm->isolate()->factory()->undefined_value()); |
4024 __ j(equal, done); | 4024 __ j(equal, done); |
4025 | 4025 |
4026 // Stop if found the property. | 4026 // Stop if found the property. |
4027 __ cmp(entity_name, Handle<Name>(name)); | 4027 __ cmp(entity_name, Handle<Name>(name)); |
4028 __ j(equal, miss); | 4028 __ j(equal, miss); |
4029 | 4029 |
4030 Label good; | 4030 Label good; |
(...skipping 23 matching lines...) Expand all Loading... |
4054 // |done| label if a property with the given name is found leaving the | 4054 // |done| label if a property with the given name is found leaving the |
4055 // index into the dictionary in |r0|. Jump to the |miss| label | 4055 // index into the dictionary in |r0|. Jump to the |miss| label |
4056 // otherwise. | 4056 // otherwise. |
4057 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, | 4057 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, |
4058 Label* miss, | 4058 Label* miss, |
4059 Label* done, | 4059 Label* done, |
4060 Register elements, | 4060 Register elements, |
4061 Register name, | 4061 Register name, |
4062 Register r0, | 4062 Register r0, |
4063 Register r1) { | 4063 Register r1) { |
4064 ASSERT(!elements.is(r0)); | 4064 DCHECK(!elements.is(r0)); |
4065 ASSERT(!elements.is(r1)); | 4065 DCHECK(!elements.is(r1)); |
4066 ASSERT(!name.is(r0)); | 4066 DCHECK(!name.is(r0)); |
4067 ASSERT(!name.is(r1)); | 4067 DCHECK(!name.is(r1)); |
4068 | 4068 |
4069 __ AssertName(name); | 4069 __ AssertName(name); |
4070 | 4070 |
4071 __ mov(r1, FieldOperand(elements, kCapacityOffset)); | 4071 __ mov(r1, FieldOperand(elements, kCapacityOffset)); |
4072 __ shr(r1, kSmiTagSize); // convert smi to int | 4072 __ shr(r1, kSmiTagSize); // convert smi to int |
4073 __ dec(r1); | 4073 __ dec(r1); |
4074 | 4074 |
4075 // Generate an unrolled loop that performs a few probes before | 4075 // Generate an unrolled loop that performs a few probes before |
4076 // giving up. Measurements done on Gmail indicate that 2 probes | 4076 // giving up. Measurements done on Gmail indicate that 2 probes |
4077 // cover ~93% of loads from dictionaries. | 4077 // cover ~93% of loads from dictionaries. |
4078 for (int i = 0; i < kInlinedProbes; i++) { | 4078 for (int i = 0; i < kInlinedProbes; i++) { |
4079 // Compute the masked index: (hash + i + i * i) & mask. | 4079 // Compute the masked index: (hash + i + i * i) & mask. |
4080 __ mov(r0, FieldOperand(name, Name::kHashFieldOffset)); | 4080 __ mov(r0, FieldOperand(name, Name::kHashFieldOffset)); |
4081 __ shr(r0, Name::kHashShift); | 4081 __ shr(r0, Name::kHashShift); |
4082 if (i > 0) { | 4082 if (i > 0) { |
4083 __ add(r0, Immediate(NameDictionary::GetProbeOffset(i))); | 4083 __ add(r0, Immediate(NameDictionary::GetProbeOffset(i))); |
4084 } | 4084 } |
4085 __ and_(r0, r1); | 4085 __ and_(r0, r1); |
4086 | 4086 |
4087 // Scale the index by multiplying by the entry size. | 4087 // Scale the index by multiplying by the entry size. |
4088 ASSERT(NameDictionary::kEntrySize == 3); | 4088 DCHECK(NameDictionary::kEntrySize == 3); |
4089 __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3 | 4089 __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3 |
4090 | 4090 |
4091 // Check if the key is identical to the name. | 4091 // Check if the key is identical to the name. |
4092 __ cmp(name, Operand(elements, | 4092 __ cmp(name, Operand(elements, |
4093 r0, | 4093 r0, |
4094 times_4, | 4094 times_4, |
4095 kElementsStartOffset - kHeapObjectTag)); | 4095 kElementsStartOffset - kHeapObjectTag)); |
4096 __ j(equal, done); | 4096 __ j(equal, done); |
4097 } | 4097 } |
4098 | 4098 |
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4141 // (their names are the null value). | 4141 // (their names are the null value). |
4142 for (int i = kInlinedProbes; i < kTotalProbes; i++) { | 4142 for (int i = kInlinedProbes; i < kTotalProbes; i++) { |
4143 // Compute the masked index: (hash + i + i * i) & mask. | 4143 // Compute the masked index: (hash + i + i * i) & mask. |
4144 __ mov(scratch, Operand(esp, 2 * kPointerSize)); | 4144 __ mov(scratch, Operand(esp, 2 * kPointerSize)); |
4145 if (i > 0) { | 4145 if (i > 0) { |
4146 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i))); | 4146 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i))); |
4147 } | 4147 } |
4148 __ and_(scratch, Operand(esp, 0)); | 4148 __ and_(scratch, Operand(esp, 0)); |
4149 | 4149 |
4150 // Scale the index by multiplying by the entry size. | 4150 // Scale the index by multiplying by the entry size. |
4151 ASSERT(NameDictionary::kEntrySize == 3); | 4151 DCHECK(NameDictionary::kEntrySize == 3); |
4152 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. | 4152 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. |
4153 | 4153 |
4154 // Having undefined at this place means the name is not contained. | 4154 // Having undefined at this place means the name is not contained. |
4155 ASSERT_EQ(kSmiTagSize, 1); | 4155 DCHECK_EQ(kSmiTagSize, 1); |
4156 __ mov(scratch, Operand(dictionary_, | 4156 __ mov(scratch, Operand(dictionary_, |
4157 index_, | 4157 index_, |
4158 times_pointer_size, | 4158 times_pointer_size, |
4159 kElementsStartOffset - kHeapObjectTag)); | 4159 kElementsStartOffset - kHeapObjectTag)); |
4160 __ cmp(scratch, isolate()->factory()->undefined_value()); | 4160 __ cmp(scratch, isolate()->factory()->undefined_value()); |
4161 __ j(equal, ¬_in_dictionary); | 4161 __ j(equal, ¬_in_dictionary); |
4162 | 4162 |
4163 // Stop if found the property. | 4163 // Stop if found the property. |
4164 __ cmp(scratch, Operand(esp, 3 * kPointerSize)); | 4164 __ cmp(scratch, Operand(esp, 3 * kPointerSize)); |
4165 __ j(equal, &in_dictionary); | 4165 __ j(equal, &in_dictionary); |
(...skipping 354 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4520 __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); | 4520 __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); |
4521 __ push(eax); | 4521 __ push(eax); |
4522 | 4522 |
4523 // Retrieve our return address and use it to calculate the calling | 4523 // Retrieve our return address and use it to calculate the calling |
4524 // function's address. | 4524 // function's address. |
4525 __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); | 4525 __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); |
4526 __ sub(eax, Immediate(Assembler::kCallInstructionLength)); | 4526 __ sub(eax, Immediate(Assembler::kCallInstructionLength)); |
4527 __ push(eax); | 4527 __ push(eax); |
4528 | 4528 |
4529 // Call the entry hook. | 4529 // Call the entry hook. |
4530 ASSERT(isolate()->function_entry_hook() != NULL); | 4530 DCHECK(isolate()->function_entry_hook() != NULL); |
4531 __ call(FUNCTION_ADDR(isolate()->function_entry_hook()), | 4531 __ call(FUNCTION_ADDR(isolate()->function_entry_hook()), |
4532 RelocInfo::RUNTIME_ENTRY); | 4532 RelocInfo::RUNTIME_ENTRY); |
4533 __ add(esp, Immediate(2 * kPointerSize)); | 4533 __ add(esp, Immediate(2 * kPointerSize)); |
4534 | 4534 |
4535 // Restore ecx. | 4535 // Restore ecx. |
4536 __ pop(edx); | 4536 __ pop(edx); |
4537 __ pop(ecx); | 4537 __ pop(ecx); |
4538 __ pop(eax); | 4538 __ pop(eax); |
4539 | 4539 |
4540 __ ret(0); | 4540 __ ret(0); |
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4573 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, | 4573 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, |
4574 AllocationSiteOverrideMode mode) { | 4574 AllocationSiteOverrideMode mode) { |
4575 // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES) | 4575 // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES) |
4576 // edx - kind (if mode != DISABLE_ALLOCATION_SITES) | 4576 // edx - kind (if mode != DISABLE_ALLOCATION_SITES) |
4577 // eax - number of arguments | 4577 // eax - number of arguments |
4578 // edi - constructor? | 4578 // edi - constructor? |
4579 // esp[0] - return address | 4579 // esp[0] - return address |
4580 // esp[4] - last argument | 4580 // esp[4] - last argument |
4581 Label normal_sequence; | 4581 Label normal_sequence; |
4582 if (mode == DONT_OVERRIDE) { | 4582 if (mode == DONT_OVERRIDE) { |
4583 ASSERT(FAST_SMI_ELEMENTS == 0); | 4583 DCHECK(FAST_SMI_ELEMENTS == 0); |
4584 ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | 4584 DCHECK(FAST_HOLEY_SMI_ELEMENTS == 1); |
4585 ASSERT(FAST_ELEMENTS == 2); | 4585 DCHECK(FAST_ELEMENTS == 2); |
4586 ASSERT(FAST_HOLEY_ELEMENTS == 3); | 4586 DCHECK(FAST_HOLEY_ELEMENTS == 3); |
4587 ASSERT(FAST_DOUBLE_ELEMENTS == 4); | 4587 DCHECK(FAST_DOUBLE_ELEMENTS == 4); |
4588 ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); | 4588 DCHECK(FAST_HOLEY_DOUBLE_ELEMENTS == 5); |
4589 | 4589 |
4590 // is the low bit set? If so, we are holey and that is good. | 4590 // is the low bit set? If so, we are holey and that is good. |
4591 __ test_b(edx, 1); | 4591 __ test_b(edx, 1); |
4592 __ j(not_zero, &normal_sequence); | 4592 __ j(not_zero, &normal_sequence); |
4593 } | 4593 } |
4594 | 4594 |
4595 // look at the first argument | 4595 // look at the first argument |
4596 __ mov(ecx, Operand(esp, kPointerSize)); | 4596 __ mov(ecx, Operand(esp, kPointerSize)); |
4597 __ test(ecx, ecx); | 4597 __ test(ecx, ecx); |
4598 __ j(zero, &normal_sequence); | 4598 __ j(zero, &normal_sequence); |
(...skipping 406 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5005 Operand(ebp, 7 * kPointerSize), | 5005 Operand(ebp, 7 * kPointerSize), |
5006 NULL); | 5006 NULL); |
5007 } | 5007 } |
5008 | 5008 |
5009 | 5009 |
5010 #undef __ | 5010 #undef __ |
5011 | 5011 |
5012 } } // namespace v8::internal | 5012 } } // namespace v8::internal |
5013 | 5013 |
5014 #endif // V8_TARGET_ARCH_IA32 | 5014 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |