OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_X87 | 7 #if V8_TARGET_ARCH_X87 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 336 matching lines...)
347 | 347 |
348 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { | 348 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { |
349 // Update the static counter each time a new code stub is generated. | 349 // Update the static counter each time a new code stub is generated. |
350 isolate()->counters()->code_stubs()->Increment(); | 350 isolate()->counters()->code_stubs()->Increment(); |
351 | 351 |
352 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(); | 352 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(); |
353 int param_count = descriptor->GetEnvironmentParameterCount(); | 353 int param_count = descriptor->GetEnvironmentParameterCount(); |
354 { | 354 { |
355 // Call the runtime system in a fresh internal frame. | 355 // Call the runtime system in a fresh internal frame. |
356 FrameScope scope(masm, StackFrame::INTERNAL); | 356 FrameScope scope(masm, StackFrame::INTERNAL); |
357 ASSERT(param_count == 0 || | 357 DCHECK(param_count == 0 || |
358 eax.is(descriptor->GetEnvironmentParameterRegister( | 358 eax.is(descriptor->GetEnvironmentParameterRegister( |
359 param_count - 1))); | 359 param_count - 1))); |
360 // Push arguments | 360 // Push arguments |
361 for (int i = 0; i < param_count; ++i) { | 361 for (int i = 0; i < param_count; ++i) { |
362 __ push(descriptor->GetEnvironmentParameterRegister(i)); | 362 __ push(descriptor->GetEnvironmentParameterRegister(i)); |
363 } | 363 } |
364 ExternalReference miss = descriptor->miss_handler(); | 364 ExternalReference miss = descriptor->miss_handler(); |
365 __ CallExternalReference(miss, param_count); | 365 __ CallExternalReference(miss, param_count); |
366 } | 366 } |
367 | 367 |
(...skipping 38 matching lines...)
406 // operands, jumps to the non_float label otherwise. | 406 // operands, jumps to the non_float label otherwise. |
407 static void CheckFloatOperands(MacroAssembler* masm, | 407 static void CheckFloatOperands(MacroAssembler* masm, |
408 Label* non_float, | 408 Label* non_float, |
409 Register scratch); | 409 Register scratch); |
410 }; | 410 }; |
411 | 411 |
412 | 412 |
413 void DoubleToIStub::Generate(MacroAssembler* masm) { | 413 void DoubleToIStub::Generate(MacroAssembler* masm) { |
414 Register input_reg = this->source(); | 414 Register input_reg = this->source(); |
415 Register final_result_reg = this->destination(); | 415 Register final_result_reg = this->destination(); |
416 ASSERT(is_truncating()); | 416 DCHECK(is_truncating()); |
417 | 417 |
418 Label check_negative, process_64_bits, done, done_no_stash; | 418 Label check_negative, process_64_bits, done, done_no_stash; |
419 | 419 |
420 int double_offset = offset(); | 420 int double_offset = offset(); |
421 | 421 |
422 // Account for return address and saved regs if input is esp. | 422 // Account for return address and saved regs if input is esp. |
423 if (input_reg.is(esp)) double_offset += 3 * kPointerSize; | 423 if (input_reg.is(esp)) double_offset += 3 * kPointerSize; |
424 | 424 |
425 MemOperand mantissa_operand(MemOperand(input_reg, double_offset)); | 425 MemOperand mantissa_operand(MemOperand(input_reg, double_offset)); |
426 MemOperand exponent_operand(MemOperand(input_reg, | 426 MemOperand exponent_operand(MemOperand(input_reg, |
(...skipping 76 matching lines...)
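A minimal, standalone sketch (not V8 code) of the memory layout DoubleToIStub relies on when it builds mantissa_operand and exponent_operand: on little-endian ia32 a double is two 32-bit words, the low word (low mantissa bits) at offset 0 and the high word (sign, biased exponent, top mantissa bits) at offset kDoubleSize / 2 == 4.

```cpp
#include <cstdint>
#include <cstring>

struct DoubleWords {
  uint32_t mantissa_low;   // what the stub reads through mantissa_operand
  uint32_t exponent_high;  // what the stub reads through exponent_operand
};

DoubleWords SplitDouble(double value) {
  DoubleWords words;
  std::memcpy(&words, &value, sizeof(words));  // little-endian: low word first
  return words;
}
```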
503 __ bind(&skip_mov); | 503 __ bind(&skip_mov); |
504 } | 504 } |
505 | 505 |
506 // Restore registers | 506 // Restore registers |
507 __ bind(&done); | 507 __ bind(&done); |
508 if (stash_exponent_copy) { | 508 if (stash_exponent_copy) { |
509 __ add(esp, Immediate(kDoubleSize / 2)); | 509 __ add(esp, Immediate(kDoubleSize / 2)); |
510 } | 510 } |
511 __ bind(&done_no_stash); | 511 __ bind(&done_no_stash); |
512 if (!final_result_reg.is(result_reg)) { | 512 if (!final_result_reg.is(result_reg)) { |
513 ASSERT(final_result_reg.is(ecx)); | 513 DCHECK(final_result_reg.is(ecx)); |
514 __ mov(final_result_reg, result_reg); | 514 __ mov(final_result_reg, result_reg); |
515 } | 515 } |
516 __ pop(save_reg); | 516 __ pop(save_reg); |
517 __ pop(scratch1); | 517 __ pop(scratch1); |
518 __ ret(0); | 518 __ ret(0); |
519 } | 519 } |
520 | 520 |
521 | 521 |
522 void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm, | 522 void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm, |
523 Register number) { | 523 Register number) { |
(...skipping 903 matching lines...)
1427 // (11) Sliced string. Replace subject with parent. Go to (5a). | 1427 // (11) Sliced string. Replace subject with parent. Go to (5a). |
1428 // Load offset into edi and replace subject string with parent. | 1428 // Load offset into edi and replace subject string with parent. |
1429 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset)); | 1429 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset)); |
1430 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset)); | 1430 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset)); |
1431 __ jmp(&check_underlying); // Go to (5a). | 1431 __ jmp(&check_underlying); // Go to (5a). |
1432 #endif // V8_INTERPRETED_REGEXP | 1432 #endif // V8_INTERPRETED_REGEXP |
1433 } | 1433 } |
1434 | 1434 |
1435 | 1435 |
1436 static int NegativeComparisonResult(Condition cc) { | 1436 static int NegativeComparisonResult(Condition cc) { |
1437 ASSERT(cc != equal); | 1437 DCHECK(cc != equal); |
1438 ASSERT((cc == less) || (cc == less_equal) | 1438 DCHECK((cc == less) || (cc == less_equal) |
1439 || (cc == greater) || (cc == greater_equal)); | 1439 || (cc == greater) || (cc == greater_equal)); |
1440 return (cc == greater || cc == greater_equal) ? LESS : GREATER; | 1440 return (cc == greater || cc == greater_equal) ? LESS : GREATER; |
1441 } | 1441 } |
1442 | 1442 |
1443 | 1443 |
1444 static void CheckInputType(MacroAssembler* masm, | 1444 static void CheckInputType(MacroAssembler* masm, |
1445 Register input, | 1445 Register input, |
1446 CompareIC::State expected, | 1446 CompareIC::State expected, |
1447 Label* fail) { | 1447 Label* fail) { |
1448 Label ok; | 1448 Label ok; |
(...skipping 89 matching lines...)
1538 if (cc == equal && strict()) { | 1538 if (cc == equal && strict()) { |
1539 Label slow; // Fallthrough label. | 1539 Label slow; // Fallthrough label. |
1540 Label not_smis; | 1540 Label not_smis; |
1541 // If we're doing a strict equality comparison, we don't have to do | 1541 // If we're doing a strict equality comparison, we don't have to do |
1542 // type conversion, so we generate code to do fast comparison for objects | 1542 // type conversion, so we generate code to do fast comparison for objects |
1543 // and oddballs. Non-smi numbers and strings still go through the usual | 1543 // and oddballs. Non-smi numbers and strings still go through the usual |
1544 // slow-case code. | 1544 // slow-case code. |
1545 // If either is a Smi (we know that not both are), then they can only | 1545 // If either is a Smi (we know that not both are), then they can only |
1546 // be equal if the other is a HeapNumber. If so, use the slow case. | 1546 // be equal if the other is a HeapNumber. If so, use the slow case. |
1547 STATIC_ASSERT(kSmiTag == 0); | 1547 STATIC_ASSERT(kSmiTag == 0); |
1548 ASSERT_EQ(0, Smi::FromInt(0)); | 1548 DCHECK_EQ(0, Smi::FromInt(0)); |
1549 __ mov(ecx, Immediate(kSmiTagMask)); | 1549 __ mov(ecx, Immediate(kSmiTagMask)); |
1550 __ and_(ecx, eax); | 1550 __ and_(ecx, eax); |
1551 __ test(ecx, edx); | 1551 __ test(ecx, edx); |
1552 __ j(not_zero, &not_smis, Label::kNear); | 1552 __ j(not_zero, &not_smis, Label::kNear); |
1553 // One operand is a smi. | 1553 // One operand is a smi. |
1554 | 1554 |
1555 // Check whether the non-smi is a heap number. | 1555 // Check whether the non-smi is a heap number. |
1556 STATIC_ASSERT(kSmiTagMask == 1); | 1556 STATIC_ASSERT(kSmiTagMask == 1); |
1557 // ecx still holds eax & kSmiTag, which is either zero or one. | 1557 // ecx still holds eax & kSmiTag, which is either zero or one. |
1558 __ sub(ecx, Immediate(0x01)); | 1558 __ sub(ecx, Immediate(0x01)); |
(...skipping 71 matching lines...)
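A minimal sketch (not V8 code) of the tag test in the strict-equality fast path above. With kSmiTag == 0 and kSmiTagMask == 1, a Smi carries a clear low bit and a heap object a set one, so (eax & kSmiTagMask) & edx is non-zero exactly when both operands are heap objects; a zero result means at least one operand is a Smi.

```cpp
#include <cstdint>

constexpr uint32_t kSmiTagMask = 1;

bool AtLeastOneSmi(uint32_t a, uint32_t b) {
  // Mirrors: mov(ecx, kSmiTagMask); and_(ecx, eax); test(ecx, edx).
  return ((a & kSmiTagMask) & b) == 0;
}
```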
1630 __ mov(eax, Immediate(Smi::FromInt(-1))); | 1630 __ mov(eax, Immediate(Smi::FromInt(-1))); |
1631 __ ret(0); | 1631 __ ret(0); |
1632 | 1632 |
1633 __ bind(&above_label); | 1633 __ bind(&above_label); |
1634 __ mov(eax, Immediate(Smi::FromInt(1))); | 1634 __ mov(eax, Immediate(Smi::FromInt(1))); |
1635 __ ret(0); | 1635 __ ret(0); |
1636 | 1636 |
1637 // If one of the numbers was NaN, then the result is always false. | 1637 // If one of the numbers was NaN, then the result is always false. |
1638 // The cc is never not-equal. | 1638 // The cc is never not-equal. |
1639 __ bind(&unordered); | 1639 __ bind(&unordered); |
1640 ASSERT(cc != not_equal); | 1640 DCHECK(cc != not_equal); |
1641 if (cc == less || cc == less_equal) { | 1641 if (cc == less || cc == less_equal) { |
1642 __ mov(eax, Immediate(Smi::FromInt(1))); | 1642 __ mov(eax, Immediate(Smi::FromInt(1))); |
1643 } else { | 1643 } else { |
1644 __ mov(eax, Immediate(Smi::FromInt(-1))); | 1644 __ mov(eax, Immediate(Smi::FromInt(-1))); |
1645 } | 1645 } |
1646 __ ret(0); | 1646 __ ret(0); |
1647 | 1647 |
1648 // The number comparison code did not provide a valid result. | 1648 // The number comparison code did not provide a valid result. |
1649 __ bind(&non_number_comparison); | 1649 __ bind(&non_number_comparison); |
1650 | 1650 |
(...skipping 776 matching lines...)
2427 // If call site patching is requested the stack will have the delta from the | 2427 // If call site patching is requested the stack will have the delta from the |
2428 // return address to the cmp instruction just below the return address. This | 2428 // return address to the cmp instruction just below the return address. This |
2429 // also means that call site patching can only take place with arguments in | 2429 // also means that call site patching can only take place with arguments in |
2430 // registers. TOS looks like this when call site patching is requested | 2430 // registers. TOS looks like this when call site patching is requested |
2431 // | 2431 // |
2432 // esp[0] : return address | 2432 // esp[0] : return address |
2433 // esp[4] : delta from return address to cmp instruction | 2433 // esp[4] : delta from return address to cmp instruction |
2434 // | 2434 // |
2435 void InstanceofStub::Generate(MacroAssembler* masm) { | 2435 void InstanceofStub::Generate(MacroAssembler* masm) { |
2436 // Call site inlining and patching implies arguments in registers. | 2436 // Call site inlining and patching implies arguments in registers. |
2437 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck()); | 2437 DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck()); |
2438 | 2438 |
2439 // Fixed register usage throughout the stub. | 2439 // Fixed register usage throughout the stub. |
2440 Register object = eax; // Object (lhs). | 2440 Register object = eax; // Object (lhs). |
2441 Register map = ebx; // Map of the object. | 2441 Register map = ebx; // Map of the object. |
2442 Register function = edx; // Function (rhs). | 2442 Register function = edx; // Function (rhs). |
2443 Register prototype = edi; // Prototype of the function. | 2443 Register prototype = edi; // Prototype of the function. |
2444 Register scratch = ecx; | 2444 Register scratch = ecx; |
2445 | 2445 |
2446 // Constants describing the call site code to patch. | 2446 // Constants describing the call site code to patch. |
2447 static const int kDeltaToCmpImmediate = 2; | 2447 static const int kDeltaToCmpImmediate = 2; |
2448 static const int kDeltaToMov = 8; | 2448 static const int kDeltaToMov = 8; |
2449 static const int kDeltaToMovImmediate = 9; | 2449 static const int kDeltaToMovImmediate = 9; |
2450 static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b); | 2450 static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b); |
2451 static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d); | 2451 static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d); |
2452 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8); | 2452 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8); |
2453 | 2453 |
2454 ASSERT_EQ(object.code(), InstanceofStub::left().code()); | 2454 DCHECK_EQ(object.code(), InstanceofStub::left().code()); |
2455 ASSERT_EQ(function.code(), InstanceofStub::right().code()); | 2455 DCHECK_EQ(function.code(), InstanceofStub::right().code()); |
2456 | 2456 |
2457 // Get the object and function - they are always both needed. | 2457 // Get the object and function - they are always both needed. |
2458 Label slow, not_js_object; | 2458 Label slow, not_js_object; |
2459 if (!HasArgsInRegisters()) { | 2459 if (!HasArgsInRegisters()) { |
2460 __ mov(object, Operand(esp, 2 * kPointerSize)); | 2460 __ mov(object, Operand(esp, 2 * kPointerSize)); |
2461 __ mov(function, Operand(esp, 1 * kPointerSize)); | 2461 __ mov(function, Operand(esp, 1 * kPointerSize)); |
2462 } | 2462 } |
2463 | 2463 |
2464 // Check that the left hand is a JS object. | 2464 // Check that the left hand is a JS object. |
2465 __ JumpIfSmi(object, ¬_js_object); | 2465 __ JumpIfSmi(object, ¬_js_object); |
(...skipping 21 matching lines...)
2487 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); | 2487 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); |
2488 | 2488 |
2489 // Update the global instanceof or call site inlined cache with the current | 2489 // Update the global instanceof or call site inlined cache with the current |
2490 // map and function. The cached answer will be set when it is known below. | 2490 // map and function. The cached answer will be set when it is known below. |
2491 if (!HasCallSiteInlineCheck()) { | 2491 if (!HasCallSiteInlineCheck()) { |
2492 __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex); | 2492 __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex); |
2493 __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex); | 2493 __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex); |
2494 } else { | 2494 } else { |
2495 // The constants for the code patching are based on no push instructions | 2495 // The constants for the code patching are based on no push instructions |
2496 // at the call site. | 2496 // at the call site. |
2497 ASSERT(HasArgsInRegisters()); | 2497 DCHECK(HasArgsInRegisters()); |
2498 // Get return address and delta to inlined map check. | 2498 // Get return address and delta to inlined map check. |
2499 __ mov(scratch, Operand(esp, 0 * kPointerSize)); | 2499 __ mov(scratch, Operand(esp, 0 * kPointerSize)); |
2500 __ sub(scratch, Operand(esp, 1 * kPointerSize)); | 2500 __ sub(scratch, Operand(esp, 1 * kPointerSize)); |
2501 if (FLAG_debug_code) { | 2501 if (FLAG_debug_code) { |
2502 __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1); | 2502 __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1); |
2503 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1); | 2503 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1); |
2504 __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2); | 2504 __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2); |
2505 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2); | 2505 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2); |
2506 } | 2506 } |
2507 __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate)); | 2507 __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate)); |
(...skipping 184 matching lines...)
2692 __ CheckMap(index_, | 2692 __ CheckMap(index_, |
2693 masm->isolate()->factory()->heap_number_map(), | 2693 masm->isolate()->factory()->heap_number_map(), |
2694 index_not_number_, | 2694 index_not_number_, |
2695 DONT_DO_SMI_CHECK); | 2695 DONT_DO_SMI_CHECK); |
2696 call_helper.BeforeCall(masm); | 2696 call_helper.BeforeCall(masm); |
2697 __ push(object_); | 2697 __ push(object_); |
2698 __ push(index_); // Consumed by runtime conversion function. | 2698 __ push(index_); // Consumed by runtime conversion function. |
2699 if (index_flags_ == STRING_INDEX_IS_NUMBER) { | 2699 if (index_flags_ == STRING_INDEX_IS_NUMBER) { |
2700 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); | 2700 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); |
2701 } else { | 2701 } else { |
2702 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); | 2702 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); |
2703 // NumberToSmi discards numbers that are not exact integers. | 2703 // NumberToSmi discards numbers that are not exact integers. |
2704 __ CallRuntime(Runtime::kNumberToSmi, 1); | 2704 __ CallRuntime(Runtime::kNumberToSmi, 1); |
2705 } | 2705 } |
2706 if (!index_.is(eax)) { | 2706 if (!index_.is(eax)) { |
2707 // Save the conversion result before the pop instructions below | 2707 // Save the conversion result before the pop instructions below |
2708 // have a chance to overwrite it. | 2708 // have a chance to overwrite it. |
2709 __ mov(index_, eax); | 2709 __ mov(index_, eax); |
2710 } | 2710 } |
2711 __ pop(object_); | 2711 __ pop(object_); |
2712 // Reload the instance type. | 2712 // Reload the instance type. |
(...skipping 25 matching lines...)
2738 } | 2738 } |
2739 | 2739 |
2740 | 2740 |
2741 // ------------------------------------------------------------------------- | 2741 // ------------------------------------------------------------------------- |
2742 // StringCharFromCodeGenerator | 2742 // StringCharFromCodeGenerator |
2743 | 2743 |
2744 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { | 2744 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { |
2745 // Fast case of Heap::LookupSingleCharacterStringFromCode. | 2745 // Fast case of Heap::LookupSingleCharacterStringFromCode. |
2746 STATIC_ASSERT(kSmiTag == 0); | 2746 STATIC_ASSERT(kSmiTag == 0); |
2747 STATIC_ASSERT(kSmiShiftSize == 0); | 2747 STATIC_ASSERT(kSmiShiftSize == 0); |
2748 ASSERT(IsPowerOf2(String::kMaxOneByteCharCode + 1)); | 2748 DCHECK(IsPowerOf2(String::kMaxOneByteCharCode + 1)); |
2749 __ test(code_, | 2749 __ test(code_, |
2750 Immediate(kSmiTagMask | | 2750 Immediate(kSmiTagMask | |
2751 ((~String::kMaxOneByteCharCode) << kSmiTagSize))); | 2751 ((~String::kMaxOneByteCharCode) << kSmiTagSize))); |
2752 __ j(not_zero, &slow_case_); | 2752 __ j(not_zero, &slow_case_); |
2753 | 2753 |
2754 Factory* factory = masm->isolate()->factory(); | 2754 Factory* factory = masm->isolate()->factory(); |
2755 __ Move(result_, Immediate(factory->single_character_string_cache())); | 2755 __ Move(result_, Immediate(factory->single_character_string_cache())); |
2756 STATIC_ASSERT(kSmiTag == 0); | 2756 STATIC_ASSERT(kSmiTag == 0); |
2757 STATIC_ASSERT(kSmiTagSize == 1); | 2757 STATIC_ASSERT(kSmiTagSize == 1); |
2758 STATIC_ASSERT(kSmiShiftSize == 0); | 2758 STATIC_ASSERT(kSmiShiftSize == 0); |
(...skipping 25 matching lines...)
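A minimal sketch (not V8 code) of the single test in StringCharFromCodeGenerator::GenerateFast above. The assumed constants mirror the ia32 Smi encoding: kSmiTagSize == 1, kSmiTagMask == 1, and String::kMaxOneByteCharCode == 0xff (an assumption; the stub only references the symbol). One AND against a combined mask checks both "is a Smi" and "untagged value fits in one byte".

```cpp
#include <cstdint>

constexpr uint32_t kSmiTagSize = 1;
constexpr uint32_t kSmiTagMask = 1;
constexpr uint32_t kMaxOneByteCharCode = 0xff;  // assumption: Latin-1 range

bool TakesFastPath(uint32_t tagged_code) {
  uint32_t mask = kSmiTagMask | (~kMaxOneByteCharCode << kSmiTagSize);
  return (tagged_code & mask) == 0;  // Smi tag clear and code <= 0xff
}
```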
2784 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); | 2784 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); |
2785 } | 2785 } |
2786 | 2786 |
2787 | 2787 |
2788 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, | 2788 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, |
2789 Register dest, | 2789 Register dest, |
2790 Register src, | 2790 Register src, |
2791 Register count, | 2791 Register count, |
2792 Register scratch, | 2792 Register scratch, |
2793 String::Encoding encoding) { | 2793 String::Encoding encoding) { |
2794 ASSERT(!scratch.is(dest)); | 2794 DCHECK(!scratch.is(dest)); |
2795 ASSERT(!scratch.is(src)); | 2795 DCHECK(!scratch.is(src)); |
2796 ASSERT(!scratch.is(count)); | 2796 DCHECK(!scratch.is(count)); |
2797 | 2797 |
2798 // Nothing to do for zero characters. | 2798 // Nothing to do for zero characters. |
2799 Label done; | 2799 Label done; |
2800 __ test(count, count); | 2800 __ test(count, count); |
2801 __ j(zero, &done); | 2801 __ j(zero, &done); |
2802 | 2802 |
2803 // Make count the number of bytes to copy. | 2803 // Make count the number of bytes to copy. |
2804 if (encoding == String::TWO_BYTE_ENCODING) { | 2804 if (encoding == String::TWO_BYTE_ENCODING) { |
2805 __ shl(count, 1); | 2805 __ shl(count, 1); |
2806 } | 2806 } |
(...skipping 493 matching lines...)
3300 } | 3300 } |
3301 | 3301 |
3302 // Tail call into the stub that handles binary operations with allocation | 3302 // Tail call into the stub that handles binary operations with allocation |
3303 // sites. | 3303 // sites. |
3304 BinaryOpWithAllocationSiteStub stub(isolate(), state_); | 3304 BinaryOpWithAllocationSiteStub stub(isolate(), state_); |
3305 __ TailCallStub(&stub); | 3305 __ TailCallStub(&stub); |
3306 } | 3306 } |
3307 | 3307 |
3308 | 3308 |
3309 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 3309 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
3310 ASSERT(state_ == CompareIC::SMI); | 3310 DCHECK(state_ == CompareIC::SMI); |
3311 Label miss; | 3311 Label miss; |
3312 __ mov(ecx, edx); | 3312 __ mov(ecx, edx); |
3313 __ or_(ecx, eax); | 3313 __ or_(ecx, eax); |
3314 __ JumpIfNotSmi(ecx, &miss, Label::kNear); | 3314 __ JumpIfNotSmi(ecx, &miss, Label::kNear); |
3315 | 3315 |
3316 if (GetCondition() == equal) { | 3316 if (GetCondition() == equal) { |
3317 // For equality we do not care about the sign of the result. | 3317 // For equality we do not care about the sign of the result. |
3318 __ sub(eax, edx); | 3318 __ sub(eax, edx); |
3319 } else { | 3319 } else { |
3320 Label done; | 3320 Label done; |
3321 __ sub(edx, eax); | 3321 __ sub(edx, eax); |
3322 __ j(no_overflow, &done, Label::kNear); | 3322 __ j(no_overflow, &done, Label::kNear); |
3323 // Correct sign of result in case of overflow. | 3323 // Correct sign of result in case of overflow. |
3324 __ not_(edx); | 3324 __ not_(edx); |
3325 __ bind(&done); | 3325 __ bind(&done); |
3326 __ mov(eax, edx); | 3326 __ mov(eax, edx); |
3327 } | 3327 } |
3328 __ ret(0); | 3328 __ ret(0); |
3329 | 3329 |
3330 __ bind(&miss); | 3330 __ bind(&miss); |
3331 GenerateMiss(masm); | 3331 GenerateMiss(masm); |
3332 } | 3332 } |
3333 | 3333 |
3334 | 3334 |
3335 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { | 3335 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) { |
3336 ASSERT(state_ == CompareIC::NUMBER); | 3336 DCHECK(state_ == CompareIC::NUMBER); |
3337 | 3337 |
3338 Label generic_stub; | 3338 Label generic_stub; |
3339 Label unordered, maybe_undefined1, maybe_undefined2; | 3339 Label unordered, maybe_undefined1, maybe_undefined2; |
3340 Label miss; | 3340 Label miss; |
3341 | 3341 |
3342 if (left_ == CompareIC::SMI) { | 3342 if (left_ == CompareIC::SMI) { |
3343 __ JumpIfNotSmi(edx, &miss); | 3343 __ JumpIfNotSmi(edx, &miss); |
3344 } | 3344 } |
3345 if (right_ == CompareIC::SMI) { | 3345 if (right_ == CompareIC::SMI) { |
3346 __ JumpIfNotSmi(eax, &miss); | 3346 __ JumpIfNotSmi(eax, &miss); |
(...skipping 33 matching lines...)
3380 __ cmp(edx, Immediate(isolate()->factory()->undefined_value())); | 3380 __ cmp(edx, Immediate(isolate()->factory()->undefined_value())); |
3381 __ j(equal, &unordered); | 3381 __ j(equal, &unordered); |
3382 } | 3382 } |
3383 | 3383 |
3384 __ bind(&miss); | 3384 __ bind(&miss); |
3385 GenerateMiss(masm); | 3385 GenerateMiss(masm); |
3386 } | 3386 } |
3387 | 3387 |
3388 | 3388 |
3389 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) { | 3389 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) { |
3390 ASSERT(state_ == CompareIC::INTERNALIZED_STRING); | 3390 DCHECK(state_ == CompareIC::INTERNALIZED_STRING); |
3391 ASSERT(GetCondition() == equal); | 3391 DCHECK(GetCondition() == equal); |
3392 | 3392 |
3393 // Registers containing left and right operands respectively. | 3393 // Registers containing left and right operands respectively. |
3394 Register left = edx; | 3394 Register left = edx; |
3395 Register right = eax; | 3395 Register right = eax; |
3396 Register tmp1 = ecx; | 3396 Register tmp1 = ecx; |
3397 Register tmp2 = ebx; | 3397 Register tmp2 = ebx; |
3398 | 3398 |
3399 // Check that both operands are heap objects. | 3399 // Check that both operands are heap objects. |
3400 Label miss; | 3400 Label miss; |
3401 __ mov(tmp1, left); | 3401 __ mov(tmp1, left); |
3402 STATIC_ASSERT(kSmiTag == 0); | 3402 STATIC_ASSERT(kSmiTag == 0); |
3403 __ and_(tmp1, right); | 3403 __ and_(tmp1, right); |
3404 __ JumpIfSmi(tmp1, &miss, Label::kNear); | 3404 __ JumpIfSmi(tmp1, &miss, Label::kNear); |
3405 | 3405 |
3406 // Check that both operands are internalized strings. | 3406 // Check that both operands are internalized strings. |
3407 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 3407 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
3408 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 3408 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
3409 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 3409 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
3410 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 3410 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
3411 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); | 3411 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); |
3412 __ or_(tmp1, tmp2); | 3412 __ or_(tmp1, tmp2); |
3413 __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); | 3413 __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); |
3414 __ j(not_zero, &miss, Label::kNear); | 3414 __ j(not_zero, &miss, Label::kNear); |
3415 | 3415 |
3416 // Internalized strings are compared by identity. | 3416 // Internalized strings are compared by identity. |
3417 Label done; | 3417 Label done; |
3418 __ cmp(left, right); | 3418 __ cmp(left, right); |
3419 // Make sure eax is non-zero. At this point input operands are | 3419 // Make sure eax is non-zero. At this point input operands are |
3420 // guaranteed to be non-zero. | 3420 // guaranteed to be non-zero. |
3421 ASSERT(right.is(eax)); | 3421 DCHECK(right.is(eax)); |
3422 __ j(not_equal, &done, Label::kNear); | 3422 __ j(not_equal, &done, Label::kNear); |
3423 STATIC_ASSERT(EQUAL == 0); | 3423 STATIC_ASSERT(EQUAL == 0); |
3424 STATIC_ASSERT(kSmiTag == 0); | 3424 STATIC_ASSERT(kSmiTag == 0); |
3425 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); | 3425 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); |
3426 __ bind(&done); | 3426 __ bind(&done); |
3427 __ ret(0); | 3427 __ ret(0); |
3428 | 3428 |
3429 __ bind(&miss); | 3429 __ bind(&miss); |
3430 GenerateMiss(masm); | 3430 GenerateMiss(masm); |
3431 } | 3431 } |
3432 | 3432 |
3433 | 3433 |
3434 void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) { | 3434 void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) { |
3435 ASSERT(state_ == CompareIC::UNIQUE_NAME); | 3435 DCHECK(state_ == CompareIC::UNIQUE_NAME); |
3436 ASSERT(GetCondition() == equal); | 3436 DCHECK(GetCondition() == equal); |
3437 | 3437 |
3438 // Registers containing left and right operands respectively. | 3438 // Registers containing left and right operands respectively. |
3439 Register left = edx; | 3439 Register left = edx; |
3440 Register right = eax; | 3440 Register right = eax; |
3441 Register tmp1 = ecx; | 3441 Register tmp1 = ecx; |
3442 Register tmp2 = ebx; | 3442 Register tmp2 = ebx; |
3443 | 3443 |
3444 // Check that both operands are heap objects. | 3444 // Check that both operands are heap objects. |
3445 Label miss; | 3445 Label miss; |
3446 __ mov(tmp1, left); | 3446 __ mov(tmp1, left); |
3447 STATIC_ASSERT(kSmiTag == 0); | 3447 STATIC_ASSERT(kSmiTag == 0); |
3448 __ and_(tmp1, right); | 3448 __ and_(tmp1, right); |
3449 __ JumpIfSmi(tmp1, &miss, Label::kNear); | 3449 __ JumpIfSmi(tmp1, &miss, Label::kNear); |
3450 | 3450 |
3451 // Check that both operands are unique names. This leaves the instance | 3451 // Check that both operands are unique names. This leaves the instance |
3452 // types loaded in tmp1 and tmp2. | 3452 // types loaded in tmp1 and tmp2. |
3453 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 3453 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
3454 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 3454 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
3455 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 3455 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
3456 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 3456 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
3457 | 3457 |
3458 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear); | 3458 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear); |
3459 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear); | 3459 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear); |
3460 | 3460 |
3461 // Unique names are compared by identity. | 3461 // Unique names are compared by identity. |
3462 Label done; | 3462 Label done; |
3463 __ cmp(left, right); | 3463 __ cmp(left, right); |
3464 // Make sure eax is non-zero. At this point input operands are | 3464 // Make sure eax is non-zero. At this point input operands are |
3465 // guaranteed to be non-zero. | 3465 // guaranteed to be non-zero. |
3466 ASSERT(right.is(eax)); | 3466 DCHECK(right.is(eax)); |
3467 __ j(not_equal, &done, Label::kNear); | 3467 __ j(not_equal, &done, Label::kNear); |
3468 STATIC_ASSERT(EQUAL == 0); | 3468 STATIC_ASSERT(EQUAL == 0); |
3469 STATIC_ASSERT(kSmiTag == 0); | 3469 STATIC_ASSERT(kSmiTag == 0); |
3470 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); | 3470 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); |
3471 __ bind(&done); | 3471 __ bind(&done); |
3472 __ ret(0); | 3472 __ ret(0); |
3473 | 3473 |
3474 __ bind(&miss); | 3474 __ bind(&miss); |
3475 GenerateMiss(masm); | 3475 GenerateMiss(masm); |
3476 } | 3476 } |
3477 | 3477 |
3478 | 3478 |
3479 void ICCompareStub::GenerateStrings(MacroAssembler* masm) { | 3479 void ICCompareStub::GenerateStrings(MacroAssembler* masm) { |
3480 ASSERT(state_ == CompareIC::STRING); | 3480 DCHECK(state_ == CompareIC::STRING); |
3481 Label miss; | 3481 Label miss; |
3482 | 3482 |
3483 bool equality = Token::IsEqualityOp(op_); | 3483 bool equality = Token::IsEqualityOp(op_); |
3484 | 3484 |
3485 // Registers containing left and right operands respectively. | 3485 // Registers containing left and right operands respectively. |
3486 Register left = edx; | 3486 Register left = edx; |
3487 Register right = eax; | 3487 Register right = eax; |
3488 Register tmp1 = ecx; | 3488 Register tmp1 = ecx; |
3489 Register tmp2 = ebx; | 3489 Register tmp2 = ebx; |
3490 Register tmp3 = edi; | 3490 Register tmp3 = edi; |
(...skipping 33 matching lines...)
3524 // non-equality compare, we still need to determine the order. We | 3524 // non-equality compare, we still need to determine the order. We |
3525 // also know they are both strings. | 3525 // also know they are both strings. |
3526 if (equality) { | 3526 if (equality) { |
3527 Label do_compare; | 3527 Label do_compare; |
3528 STATIC_ASSERT(kInternalizedTag == 0); | 3528 STATIC_ASSERT(kInternalizedTag == 0); |
3529 __ or_(tmp1, tmp2); | 3529 __ or_(tmp1, tmp2); |
3530 __ test(tmp1, Immediate(kIsNotInternalizedMask)); | 3530 __ test(tmp1, Immediate(kIsNotInternalizedMask)); |
3531 __ j(not_zero, &do_compare, Label::kNear); | 3531 __ j(not_zero, &do_compare, Label::kNear); |
3532 // Make sure eax is non-zero. At this point input operands are | 3532 // Make sure eax is non-zero. At this point input operands are |
3533 // guaranteed to be non-zero. | 3533 // guaranteed to be non-zero. |
3534 ASSERT(right.is(eax)); | 3534 DCHECK(right.is(eax)); |
3535 __ ret(0); | 3535 __ ret(0); |
3536 __ bind(&do_compare); | 3536 __ bind(&do_compare); |
3537 } | 3537 } |
3538 | 3538 |
3539 // Check that both strings are sequential ASCII. | 3539 // Check that both strings are sequential ASCII. |
3540 Label runtime; | 3540 Label runtime; |
3541 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime); | 3541 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime); |
3542 | 3542 |
3543 // Compare flat ASCII strings. Returns when done. | 3543 // Compare flat ASCII strings. Returns when done. |
3544 if (equality) { | 3544 if (equality) { |
(...skipping 15 matching lines...)
3560 } else { | 3560 } else { |
3561 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 3561 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
3562 } | 3562 } |
3563 | 3563 |
3564 __ bind(&miss); | 3564 __ bind(&miss); |
3565 GenerateMiss(masm); | 3565 GenerateMiss(masm); |
3566 } | 3566 } |
3567 | 3567 |
3568 | 3568 |
3569 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { | 3569 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { |
3570 ASSERT(state_ == CompareIC::OBJECT); | 3570 DCHECK(state_ == CompareIC::OBJECT); |
3571 Label miss; | 3571 Label miss; |
3572 __ mov(ecx, edx); | 3572 __ mov(ecx, edx); |
3573 __ and_(ecx, eax); | 3573 __ and_(ecx, eax); |
3574 __ JumpIfSmi(ecx, &miss, Label::kNear); | 3574 __ JumpIfSmi(ecx, &miss, Label::kNear); |
3575 | 3575 |
3576 __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx); | 3576 __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx); |
3577 __ j(not_equal, &miss, Label::kNear); | 3577 __ j(not_equal, &miss, Label::kNear); |
3578 __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx); | 3578 __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx); |
3579 __ j(not_equal, &miss, Label::kNear); | 3579 __ j(not_equal, &miss, Label::kNear); |
3580 | 3580 |
3581 ASSERT(GetCondition() == equal); | 3581 DCHECK(GetCondition() == equal); |
3582 __ sub(eax, edx); | 3582 __ sub(eax, edx); |
3583 __ ret(0); | 3583 __ ret(0); |
3584 | 3584 |
3585 __ bind(&miss); | 3585 __ bind(&miss); |
3586 GenerateMiss(masm); | 3586 GenerateMiss(masm); |
3587 } | 3587 } |
3588 | 3588 |
3589 | 3589 |
3590 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) { | 3590 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) { |
3591 Label miss; | 3591 Label miss; |
(...skipping 43 matching lines...)
3635 // the property. This function may return false negatives, so miss_label | 3635 // the property. This function may return false negatives, so miss_label |
3636 // must always call a backup property check that is complete. | 3636 // must always call a backup property check that is complete. |
3637 // This function is safe to call if the receiver has fast properties. | 3637 // This function is safe to call if the receiver has fast properties. |
3638 // Name must be a unique name and receiver must be a heap object. | 3638 // Name must be a unique name and receiver must be a heap object. |
3639 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, | 3639 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, |
3640 Label* miss, | 3640 Label* miss, |
3641 Label* done, | 3641 Label* done, |
3642 Register properties, | 3642 Register properties, |
3643 Handle<Name> name, | 3643 Handle<Name> name, |
3644 Register r0) { | 3644 Register r0) { |
3645 ASSERT(name->IsUniqueName()); | 3645 DCHECK(name->IsUniqueName()); |
3646 | 3646 |
3647 // If names of slots in range from 1 to kProbes - 1 for the hash value are | 3647 // If names of slots in range from 1 to kProbes - 1 for the hash value are |
3648 // not equal to the name and kProbes-th slot is not used (its name is the | 3648 // not equal to the name and kProbes-th slot is not used (its name is the |
3649 // undefined value), it guarantees the hash table doesn't contain the | 3649 // undefined value), it guarantees the hash table doesn't contain the |
3650 // property. It's true even if some slots represent deleted properties | 3650 // property. It's true even if some slots represent deleted properties |
3651 // (their names are the hole value). | 3651 // (their names are the hole value). |
3652 for (int i = 0; i < kInlinedProbes; i++) { | 3652 for (int i = 0; i < kInlinedProbes; i++) { |
3653 // Compute the masked index: (hash + i + i * i) & mask. | 3653 // Compute the masked index: (hash + i + i * i) & mask. |
3654 Register index = r0; | 3654 Register index = r0; |
3655 // Capacity is smi 2^n. | 3655 // Capacity is smi 2^n. |
3656 __ mov(index, FieldOperand(properties, kCapacityOffset)); | 3656 __ mov(index, FieldOperand(properties, kCapacityOffset)); |
3657 __ dec(index); | 3657 __ dec(index); |
3658 __ and_(index, | 3658 __ and_(index, |
3659 Immediate(Smi::FromInt(name->Hash() + | 3659 Immediate(Smi::FromInt(name->Hash() + |
3660 NameDictionary::GetProbeOffset(i)))); | 3660 NameDictionary::GetProbeOffset(i)))); |
3661 | 3661 |
3662 // Scale the index by multiplying by the entry size. | 3662 // Scale the index by multiplying by the entry size. |
3663 ASSERT(NameDictionary::kEntrySize == 3); | 3663 DCHECK(NameDictionary::kEntrySize == 3); |
3664 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. | 3664 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. |
3665 Register entity_name = r0; | 3665 Register entity_name = r0; |
3666 // Having undefined at this place means the name is not contained. | 3666 // Having undefined at this place means the name is not contained. |
3667 ASSERT_EQ(kSmiTagSize, 1); | 3667 DCHECK_EQ(kSmiTagSize, 1); |
3668 __ mov(entity_name, Operand(properties, index, times_half_pointer_size, | 3668 __ mov(entity_name, Operand(properties, index, times_half_pointer_size, |
3669 kElementsStartOffset - kHeapObjectTag)); | 3669 kElementsStartOffset - kHeapObjectTag)); |
3670 __ cmp(entity_name, masm->isolate()->factory()->undefined_value()); | 3670 __ cmp(entity_name, masm->isolate()->factory()->undefined_value()); |
3671 __ j(equal, done); | 3671 __ j(equal, done); |
3672 | 3672 |
3673 // Stop if found the property. | 3673 // Stop if found the property. |
3674 __ cmp(entity_name, Handle<Name>(name)); | 3674 __ cmp(entity_name, Handle<Name>(name)); |
3675 __ j(equal, miss); | 3675 __ j(equal, miss); |
3676 | 3676 |
3677 Label good; | 3677 Label good; |
(...skipping 23 matching lines...)
3701 // |done| label if a property with the given name is found leaving the | 3701 // |done| label if a property with the given name is found leaving the |
3702 // index into the dictionary in |r0|. Jump to the |miss| label | 3702 // index into the dictionary in |r0|. Jump to the |miss| label |
3703 // otherwise. | 3703 // otherwise. |
3704 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, | 3704 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, |
3705 Label* miss, | 3705 Label* miss, |
3706 Label* done, | 3706 Label* done, |
3707 Register elements, | 3707 Register elements, |
3708 Register name, | 3708 Register name, |
3709 Register r0, | 3709 Register r0, |
3710 Register r1) { | 3710 Register r1) { |
3711 ASSERT(!elements.is(r0)); | 3711 DCHECK(!elements.is(r0)); |
3712 ASSERT(!elements.is(r1)); | 3712 DCHECK(!elements.is(r1)); |
3713 ASSERT(!name.is(r0)); | 3713 DCHECK(!name.is(r0)); |
3714 ASSERT(!name.is(r1)); | 3714 DCHECK(!name.is(r1)); |
3715 | 3715 |
3716 __ AssertName(name); | 3716 __ AssertName(name); |
3717 | 3717 |
3718 __ mov(r1, FieldOperand(elements, kCapacityOffset)); | 3718 __ mov(r1, FieldOperand(elements, kCapacityOffset)); |
3719 __ shr(r1, kSmiTagSize); // convert smi to int | 3719 __ shr(r1, kSmiTagSize); // convert smi to int |
3720 __ dec(r1); | 3720 __ dec(r1); |
3721 | 3721 |
3722 // Generate an unrolled loop that performs a few probes before | 3722 // Generate an unrolled loop that performs a few probes before |
3723 // giving up. Measurements done on Gmail indicate that 2 probes | 3723 // giving up. Measurements done on Gmail indicate that 2 probes |
3724 // cover ~93% of loads from dictionaries. | 3724 // cover ~93% of loads from dictionaries. |
3725 for (int i = 0; i < kInlinedProbes; i++) { | 3725 for (int i = 0; i < kInlinedProbes; i++) { |
3726 // Compute the masked index: (hash + i + i * i) & mask. | 3726 // Compute the masked index: (hash + i + i * i) & mask. |
3727 __ mov(r0, FieldOperand(name, Name::kHashFieldOffset)); | 3727 __ mov(r0, FieldOperand(name, Name::kHashFieldOffset)); |
3728 __ shr(r0, Name::kHashShift); | 3728 __ shr(r0, Name::kHashShift); |
3729 if (i > 0) { | 3729 if (i > 0) { |
3730 __ add(r0, Immediate(NameDictionary::GetProbeOffset(i))); | 3730 __ add(r0, Immediate(NameDictionary::GetProbeOffset(i))); |
3731 } | 3731 } |
3732 __ and_(r0, r1); | 3732 __ and_(r0, r1); |
3733 | 3733 |
3734 // Scale the index by multiplying by the entry size. | 3734 // Scale the index by multiplying by the entry size. |
3735 ASSERT(NameDictionary::kEntrySize == 3); | 3735 DCHECK(NameDictionary::kEntrySize == 3); |
3736 __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3 | 3736 __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3 |
3737 | 3737 |
3738 // Check if the key is identical to the name. | 3738 // Check if the key is identical to the name. |
3739 __ cmp(name, Operand(elements, | 3739 __ cmp(name, Operand(elements, |
3740 r0, | 3740 r0, |
3741 times_4, | 3741 times_4, |
3742 kElementsStartOffset - kHeapObjectTag)); | 3742 kElementsStartOffset - kHeapObjectTag)); |
3743 __ j(equal, done); | 3743 __ j(equal, done); |
3744 } | 3744 } |
3745 | 3745 |
(...skipping 42 matching lines...)
3788 // (their names are the null value). | 3788 // (their names are the null value). |
3789 for (int i = kInlinedProbes; i < kTotalProbes; i++) { | 3789 for (int i = kInlinedProbes; i < kTotalProbes; i++) { |
3790 // Compute the masked index: (hash + i + i * i) & mask. | 3790 // Compute the masked index: (hash + i + i * i) & mask. |
3791 __ mov(scratch, Operand(esp, 2 * kPointerSize)); | 3791 __ mov(scratch, Operand(esp, 2 * kPointerSize)); |
3792 if (i > 0) { | 3792 if (i > 0) { |
3793 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i))); | 3793 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i))); |
3794 } | 3794 } |
3795 __ and_(scratch, Operand(esp, 0)); | 3795 __ and_(scratch, Operand(esp, 0)); |
3796 | 3796 |
3797 // Scale the index by multiplying by the entry size. | 3797 // Scale the index by multiplying by the entry size. |
3798 ASSERT(NameDictionary::kEntrySize == 3); | 3798 DCHECK(NameDictionary::kEntrySize == 3); |
3799 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. | 3799 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. |
3800 | 3800 |
3801 // Having undefined at this place means the name is not contained. | 3801 // Having undefined at this place means the name is not contained. |
3802 ASSERT_EQ(kSmiTagSize, 1); | 3802 DCHECK_EQ(kSmiTagSize, 1); |
3803 __ mov(scratch, Operand(dictionary_, | 3803 __ mov(scratch, Operand(dictionary_, |
3804 index_, | 3804 index_, |
3805 times_pointer_size, | 3805 times_pointer_size, |
3806 kElementsStartOffset - kHeapObjectTag)); | 3806 kElementsStartOffset - kHeapObjectTag)); |
3807 __ cmp(scratch, isolate()->factory()->undefined_value()); | 3807 __ cmp(scratch, isolate()->factory()->undefined_value()); |
3808 __ j(equal, &not_in_dictionary); | 3808 __ j(equal, &not_in_dictionary); |
3809 | 3809 |
3810 // Stop if found the property. | 3810 // Stop if found the property. |
3811 __ cmp(scratch, Operand(esp, 3 * kPointerSize)); | 3811 __ cmp(scratch, Operand(esp, 3 * kPointerSize)); |
3812 __ j(equal, &in_dictionary); | 3812 __ j(equal, &in_dictionary); |
(...skipping 347 matching lines...)
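A minimal sketch (not V8 code) of the probe sequence the NameDictionaryLookupStub routines above unroll. The comments describe it as (hash + i + i*i) & mask (the exact offsets come from NameDictionary::GetProbeOffset(i)), and each entry spans NameDictionary::kEntrySize == 3 fields, which is why the generated code scales the slot with lea(index, index, times_2, 0).

```cpp
#include <cstdint>

uint32_t ProbeSlot(uint32_t hash, uint32_t mask, uint32_t i) {
  // Capacity is a power of two, so masking keeps the probe inside the table.
  return (hash + i + i * i) & mask;
}

uint32_t ProbeFieldIndex(uint32_t hash, uint32_t mask, uint32_t i) {
  const uint32_t kEntrySize = 3;  // key, value, details per entry
  return ProbeSlot(hash, mask, i) * kEntrySize;  // index *= 3, as in the stub
}
```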
4160 __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); | 4160 __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); |
4161 __ push(eax); | 4161 __ push(eax); |
4162 | 4162 |
4163 // Retrieve our return address and use it to calculate the calling | 4163 // Retrieve our return address and use it to calculate the calling |
4164 // function's address. | 4164 // function's address. |
4165 __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); | 4165 __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); |
4166 __ sub(eax, Immediate(Assembler::kCallInstructionLength)); | 4166 __ sub(eax, Immediate(Assembler::kCallInstructionLength)); |
4167 __ push(eax); | 4167 __ push(eax); |
4168 | 4168 |
4169 // Call the entry hook. | 4169 // Call the entry hook. |
4170 ASSERT(isolate()->function_entry_hook() != NULL); | 4170 DCHECK(isolate()->function_entry_hook() != NULL); |
4171 __ call(FUNCTION_ADDR(isolate()->function_entry_hook()), | 4171 __ call(FUNCTION_ADDR(isolate()->function_entry_hook()), |
4172 RelocInfo::RUNTIME_ENTRY); | 4172 RelocInfo::RUNTIME_ENTRY); |
4173 __ add(esp, Immediate(2 * kPointerSize)); | 4173 __ add(esp, Immediate(2 * kPointerSize)); |
4174 | 4174 |
4175 // Restore ecx. | 4175 // Restore ecx. |
4176 __ pop(edx); | 4176 __ pop(edx); |
4177 __ pop(ecx); | 4177 __ pop(ecx); |
4178 __ pop(eax); | 4178 __ pop(eax); |
4179 | 4179 |
4180 __ ret(0); | 4180 __ ret(0); |
(...skipping 32 matching lines...)
4213 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, | 4213 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, |
4214 AllocationSiteOverrideMode mode) { | 4214 AllocationSiteOverrideMode mode) { |
4215 // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES) | 4215 // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES) |
4216 // edx - kind (if mode != DISABLE_ALLOCATION_SITES) | 4216 // edx - kind (if mode != DISABLE_ALLOCATION_SITES) |
4217 // eax - number of arguments | 4217 // eax - number of arguments |
4218 // edi - constructor? | 4218 // edi - constructor? |
4219 // esp[0] - return address | 4219 // esp[0] - return address |
4220 // esp[4] - last argument | 4220 // esp[4] - last argument |
4221 Label normal_sequence; | 4221 Label normal_sequence; |
4222 if (mode == DONT_OVERRIDE) { | 4222 if (mode == DONT_OVERRIDE) { |
4223 ASSERT(FAST_SMI_ELEMENTS == 0); | 4223 DCHECK(FAST_SMI_ELEMENTS == 0); |
4224 ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | 4224 DCHECK(FAST_HOLEY_SMI_ELEMENTS == 1); |
4225 ASSERT(FAST_ELEMENTS == 2); | 4225 DCHECK(FAST_ELEMENTS == 2); |
4226 ASSERT(FAST_HOLEY_ELEMENTS == 3); | 4226 DCHECK(FAST_HOLEY_ELEMENTS == 3); |
4227 ASSERT(FAST_DOUBLE_ELEMENTS == 4); | 4227 DCHECK(FAST_DOUBLE_ELEMENTS == 4); |
4228 ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); | 4228 DCHECK(FAST_HOLEY_DOUBLE_ELEMENTS == 5); |
4229 | 4229 |
4230 // is the low bit set? If so, we are holey and that is good. | 4230 // is the low bit set? If so, we are holey and that is good. |
4231 __ test_b(edx, 1); | 4231 __ test_b(edx, 1); |
4232 __ j(not_zero, &normal_sequence); | 4232 __ j(not_zero, &normal_sequence); |
4233 } | 4233 } |
4234 | 4234 |
4235 // look at the first argument | 4235 // look at the first argument |
4236 __ mov(ecx, Operand(esp, kPointerSize)); | 4236 __ mov(ecx, Operand(esp, kPointerSize)); |
4237 __ test(ecx, ecx); | 4237 __ test(ecx, ecx); |
4238 __ j(zero, &normal_sequence); | 4238 __ j(zero, &normal_sequence); |
(...skipping 406 matching lines...)
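A minimal sketch (not V8 code) of the elements-kind numbering the DCHECKs in CreateArrayDispatchOneArgument above pin down: packed kinds are even, their holey counterparts are the next odd value, so testing the low bit (test_b(edx, 1)) answers "is it holey?".

```cpp
enum ElementsKind {
  FAST_SMI_ELEMENTS = 0,
  FAST_HOLEY_SMI_ELEMENTS = 1,
  FAST_ELEMENTS = 2,
  FAST_HOLEY_ELEMENTS = 3,
  FAST_DOUBLE_ELEMENTS = 4,
  FAST_HOLEY_DOUBLE_ELEMENTS = 5
};

inline bool IsHoleyKind(ElementsKind kind) { return (kind & 1) != 0; }

inline ElementsKind GetHoleyKind(ElementsKind kind) {
  return static_cast<ElementsKind>(kind | 1);
}
```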
4645 Operand(ebp, 7 * kPointerSize), | 4645 Operand(ebp, 7 * kPointerSize), |
4646 NULL); | 4646 NULL); |
4647 } | 4647 } |
4648 | 4648 |
4649 | 4649 |
4650 #undef __ | 4650 #undef __ |
4651 | 4651 |
4652 } } // namespace v8::internal | 4652 } } // namespace v8::internal |
4653 | 4653 |
4654 #endif // V8_TARGET_ARCH_X87 | 4654 #endif // V8_TARGET_ARCH_X87 |