OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 531 matching lines...) |
542 if (destination == kCoreRegisters) { | 542 if (destination == kCoreRegisters) { |
543 __ vmov(dst1, dst2, double_dst); | 543 __ vmov(dst1, dst2, double_dst); |
544 } | 544 } |
545 } else { | 545 } else { |
546 Label fewer_than_20_useful_bits; | 546 Label fewer_than_20_useful_bits; |
547 // Expected output: | 547 // Expected output: |
548 // | dst2 | dst1 | | 548 // | dst2 | dst1 | |
549 // | s | exp | mantissa | | 549 // | s | exp | mantissa | |
550 | 550 |
551 // Check for zero. | 551 // Check for zero. |
552 __ cmp(int_scratch, Operand(0)); | 552 __ cmp(int_scratch, Operand::Zero()); |
553 __ mov(dst2, int_scratch); | 553 __ mov(dst2, int_scratch); |
554 __ mov(dst1, int_scratch); | 554 __ mov(dst1, int_scratch); |
555 __ b(eq, &done); | 555 __ b(eq, &done); |
556 | 556 |
557 // Preload the sign of the value. | 557 // Preload the sign of the value. |
558 __ and_(dst2, int_scratch, Operand(HeapNumber::kSignMask), SetCC); | 558 __ and_(dst2, int_scratch, Operand(HeapNumber::kSignMask), SetCC); |
559 // Get the absolute value of the object (as an unsigned integer). | 559 // Get the absolute value of the object (as an unsigned integer). |
560 __ rsb(int_scratch, int_scratch, Operand(0), SetCC, mi); | 560 __ rsb(int_scratch, int_scratch, Operand::Zero(), SetCC, mi); |
561 | 561 |
562 // Get mantissa[51:20]. | 562 // Get mantissa[51:20]. |
563 | 563 |
564 // Get the position of the first set bit. | 564 // Get the position of the first set bit. |
565 __ CountLeadingZeros(dst1, int_scratch, scratch2); | 565 __ CountLeadingZeros(dst1, int_scratch, scratch2); |
566 __ rsb(dst1, dst1, Operand(31)); | 566 __ rsb(dst1, dst1, Operand(31)); |
567 | 567 |
568 // Set the exponent. | 568 // Set the exponent. |
569 __ add(scratch2, dst1, Operand(HeapNumber::kExponentBias)); | 569 __ add(scratch2, dst1, Operand(HeapNumber::kExponentBias)); |
570 __ Bfi(dst2, scratch2, scratch2, | 570 __ Bfi(dst2, scratch2, scratch2, |
(...skipping 11 matching lines...) |
582 __ orr(dst2, dst2, Operand(int_scratch, LSR, scratch2)); | 582 __ orr(dst2, dst2, Operand(int_scratch, LSR, scratch2)); |
583 __ rsb(scratch2, scratch2, Operand(32)); | 583 __ rsb(scratch2, scratch2, Operand(32)); |
584 __ mov(dst1, Operand(int_scratch, LSL, scratch2)); | 584 __ mov(dst1, Operand(int_scratch, LSL, scratch2)); |
585 __ b(&done); | 585 __ b(&done); |
586 | 586 |
587 __ bind(&fewer_than_20_useful_bits); | 587 __ bind(&fewer_than_20_useful_bits); |
588 __ rsb(scratch2, dst1, Operand(HeapNumber::kMantissaBitsInTopWord)); | 588 __ rsb(scratch2, dst1, Operand(HeapNumber::kMantissaBitsInTopWord)); |
589 __ mov(scratch2, Operand(int_scratch, LSL, scratch2)); | 589 __ mov(scratch2, Operand(int_scratch, LSL, scratch2)); |
590 __ orr(dst2, dst2, scratch2); | 590 __ orr(dst2, dst2, scratch2); |
591 // Set dst1 to 0. | 591 // Set dst1 to 0. |
592 __ mov(dst1, Operand(0)); | 592 __ mov(dst1, Operand::Zero()); |
593 } | 593 } |
594 __ bind(&done); | 594 __ bind(&done); |
595 } | 595 } |
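For reference, here is a minimal C++ sketch of the integer-to-double bit construction this block performs with core registers only (no VFP). The constant values mirror the HeapNumber names used above; the helper name and the GCC/Clang __builtin_clz intrinsic are assumptions for illustration, not V8 API:

  #include <cstdint>

  // Build the two 32-bit words of an IEEE-754 double from an int32, the way
  // the assembly above does: sign, then exponent from the position of the
  // first set bit, then the mantissa split across the two words.
  void Int32ToDoubleBits(int32_t value, uint32_t* hi, uint32_t* lo) {
    const int kExponentBias = 1023;
    const int kMantissaBitsInTopWord = 20;
    if (value == 0) {  // +0.0 is the all-zero bit pattern.
      *hi = *lo = 0;
      return;
    }
    uint32_t sign = value < 0 ? 0x80000000u : 0;  // HeapNumber::kSignMask
    uint32_t mag = value < 0 ? -static_cast<uint32_t>(value)
                             : static_cast<uint32_t>(value);
    int top_bit = 31 - __builtin_clz(mag);  // rsb(dst1, dst1, Operand(31))
    mag ^= 1u << top_bit;                   // drop the implicit leading 1
    *hi = sign | (static_cast<uint32_t>(kExponentBias + top_bit)
                  << kMantissaBitsInTopWord);
    if (top_bit > kMantissaBitsInTopWord) {  // more than 20 useful bits
      int shift = top_bit - kMantissaBitsInTopWord;
      *hi |= mag >> shift;
      *lo = mag << (32 - shift);
    } else {                                 // fewer_than_20_useful_bits
      *hi |= mag << (kMantissaBitsInTopWord - top_bit);
      *lo = 0;
    }
  }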
596 | 596 |
597 | 597 |
598 void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm, | 598 void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm, |
599 Register object, | 599 Register object, |
600 Destination destination, | 600 Destination destination, |
601 DwVfpRegister double_dst, | 601 DwVfpRegister double_dst, |
602 Register dst1, | 602 Register dst1, |
(...skipping 47 matching lines...) |
650 } | 650 } |
651 | 651 |
652 } else { | 652 } else { |
653 ASSERT(!scratch1.is(object) && !scratch2.is(object)); | 653 ASSERT(!scratch1.is(object) && !scratch2.is(object)); |
654 // Load the double value into the destination registers. | 654 // Load the double value into the destination registers. |
655 __ Ldrd(dst1, dst2, FieldMemOperand(object, HeapNumber::kValueOffset)); | 655 __ Ldrd(dst1, dst2, FieldMemOperand(object, HeapNumber::kValueOffset)); |
656 | 656 |
657 // Check for 0 and -0. | 657 // Check for 0 and -0. |
658 __ bic(scratch1, dst1, Operand(HeapNumber::kSignMask)); | 658 __ bic(scratch1, dst1, Operand(HeapNumber::kSignMask)); |
659 __ orr(scratch1, scratch1, Operand(dst2)); | 659 __ orr(scratch1, scratch1, Operand(dst2)); |
660 __ cmp(scratch1, Operand(0)); | 660 __ cmp(scratch1, Operand::Zero()); |
661 __ b(eq, &done); | 661 __ b(eq, &done); |
662 | 662 |
663 // Check that the value can be exactly represented by a 32-bit integer. | 663 // Check that the value can be exactly represented by a 32-bit integer. |
664 // Jump to not_int32 if that's not the case. | 664 // Jump to not_int32 if that's not the case. |
665 DoubleIs32BitInteger(masm, dst1, dst2, scratch1, scratch2, not_int32); | 665 DoubleIs32BitInteger(masm, dst1, dst2, scratch1, scratch2, not_int32); |
666 | 666 |
667 // dst1 and dst2 were trashed. Reload the double value. | 667 // dst1 and dst2 were trashed. Reload the double value. |
668 __ Ldrd(dst1, dst2, FieldMemOperand(object, HeapNumber::kValueOffset)); | 668 __ Ldrd(dst1, dst2, FieldMemOperand(object, HeapNumber::kValueOffset)); |
669 } | 669 } |
670 | 670 |
(...skipping 52 matching lines...) |
723 __ vmov(dst, single_scratch); | 723 __ vmov(dst, single_scratch); |
724 | 724 |
725 } else { | 725 } else { |
726 // Load the double value in the destination registers. | 726 // Load the double value in the destination registers. |
727 __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset)); | 727 __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset)); |
728 __ ldr(scratch2, FieldMemOperand(object, HeapNumber::kMantissaOffset)); | 728 __ ldr(scratch2, FieldMemOperand(object, HeapNumber::kMantissaOffset)); |
729 | 729 |
730 // Check for 0 and -0. | 730 // Check for 0 and -0. |
731 __ bic(dst, scratch1, Operand(HeapNumber::kSignMask)); | 731 __ bic(dst, scratch1, Operand(HeapNumber::kSignMask)); |
732 __ orr(dst, scratch2, Operand(dst)); | 732 __ orr(dst, scratch2, Operand(dst)); |
733 __ cmp(dst, Operand(0)); | 733 __ cmp(dst, Operand::Zero()); |
734 __ b(eq, &done); | 734 __ b(eq, &done); |
735 | 735 |
736 DoubleIs32BitInteger(masm, scratch1, scratch2, dst, scratch3, not_int32); | 736 DoubleIs32BitInteger(masm, scratch1, scratch2, dst, scratch3, not_int32); |
737 | 737 |
738 // Register state after DoubleIs32BitInteger. | 738 // Register state after DoubleIs32BitInteger. |
739 // dst: mantissa[51:20]. | 739 // dst: mantissa[51:20]. |
740 // scratch2: 1 | 740 // scratch2: 1 |
741 | 741 |
742 // Shift back the higher bits of the mantissa. | 742 // Shift back the higher bits of the mantissa. |
743 __ mov(dst, Operand(dst, LSR, scratch3)); | 743 __ mov(dst, Operand(dst, LSR, scratch3)); |
744 // Set the implicit first bit. | 744 // Set the implicit first bit. |
745 __ rsb(scratch3, scratch3, Operand(32)); | 745 __ rsb(scratch3, scratch3, Operand(32)); |
746 __ orr(dst, dst, Operand(scratch2, LSL, scratch3)); | 746 __ orr(dst, dst, Operand(scratch2, LSL, scratch3)); |
747 // Set the sign. | 747 // Set the sign. |
748 __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset)); | 748 __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset)); |
749 __ tst(scratch1, Operand(HeapNumber::kSignMask)); | 749 __ tst(scratch1, Operand(HeapNumber::kSignMask)); |
750 __ rsb(dst, dst, Operand(0), LeaveCC, mi); | 750 __ rsb(dst, dst, Operand::Zero(), LeaveCC, mi); |
751 } | 751 } |
752 | 752 |
753 __ bind(&done); | 753 __ bind(&done); |
754 } | 754 } |
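The 0/-0 checks in the two loaders above rely on the IEEE-754 encoding: +0.0 and -0.0 differ only in the sign bit, and every other bit of both words is zero. A one-line sketch of the bic/orr/cmp sequence (the helper name is hypothetical):

  #include <cstdint>

  // True for +0.0 and -0.0: clear the sign bit of the high word, OR in the
  // low word, and test the result against zero.
  bool DoubleBitsAreZero(uint32_t hi, uint32_t lo) {
    const uint32_t kSignMask = 0x80000000u;  // HeapNumber::kSignMask
    return ((hi & ~kSignMask) | lo) == 0;
  }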
755 | 755 |
756 | 756 |
757 void FloatingPointHelper::DoubleIs32BitInteger(MacroAssembler* masm, | 757 void FloatingPointHelper::DoubleIs32BitInteger(MacroAssembler* masm, |
758 Register src1, | 758 Register src1, |
759 Register src2, | 759 Register src2, |
760 Register dst, | 760 Register dst, |
(...skipping 1656 matching lines...) |
2417 void BinaryOpStub::GenerateSmiCode( | 2417 void BinaryOpStub::GenerateSmiCode( |
2418 MacroAssembler* masm, | 2418 MacroAssembler* masm, |
2419 Label* use_runtime, | 2419 Label* use_runtime, |
2420 Label* gc_required, | 2420 Label* gc_required, |
2421 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { | 2421 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { |
2422 Label not_smis; | 2422 Label not_smis; |
2423 | 2423 |
2424 Register left = r1; | 2424 Register left = r1; |
2425 Register right = r0; | 2425 Register right = r0; |
2426 Register scratch1 = r7; | 2426 Register scratch1 = r7; |
2427 Register scratch2 = r9; | |
2428 | 2427 |
2429 // Perform combined smi check on both operands. | 2428 // Perform combined smi check on both operands. |
2430 __ orr(scratch1, left, Operand(right)); | 2429 __ orr(scratch1, left, Operand(right)); |
2431 STATIC_ASSERT(kSmiTag == 0); | 2430 STATIC_ASSERT(kSmiTag == 0); |
2432 __ JumpIfNotSmi(scratch1, ¬_smis); | 2431 __ JumpIfNotSmi(scratch1, ¬_smis); |
2433 | 2432 |
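The combined check works because kSmiTag is 0 and sits in the low bit: OR-ing the two operands leaves that bit clear only if it was clear in both. A sketch, assuming V8's 32-bit tagging scheme (the helper name is hypothetical):

  #include <cstdint>

  // A smi has a 0 tag bit; a heap object pointer has a 1. OR-ing the two
  // words lets one tag test cover both operands at once.
  bool BothAreSmis(uint32_t left, uint32_t right) {
    const uint32_t kSmiTagMask = 1;  // low tag bit
    return ((left | right) & kSmiTagMask) == 0;
  }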
2434 // If the smi-smi operation results in a smi, a return is generated. | 2433 // If the smi-smi operation results in a smi, a return is generated. |
2435 GenerateSmiSmiOperation(masm); | 2434 GenerateSmiSmiOperation(masm); |
2436 | 2435 |
2437 // If heap number results are possible, generate the result in an allocated | 2436 // If heap number results are possible, generate the result in an allocated |
(...skipping 173 matching lines...) |
2611 __ b(ne, &transition); | 2610 __ b(ne, &transition); |
2612 } | 2611 } |
2613 | 2612 |
2614 // Check if the result fits in a smi. | 2613 // Check if the result fits in a smi. |
2615 __ vmov(scratch1, single_scratch); | 2614 __ vmov(scratch1, single_scratch); |
2616 __ add(scratch2, scratch1, Operand(0x40000000), SetCC); | 2615 __ add(scratch2, scratch1, Operand(0x40000000), SetCC); |
2617 // If not, try to return a heap number. | 2616 // If not, try to return a heap number. |
2618 __ b(mi, &return_heap_number); | 2617 __ b(mi, &return_heap_number); |
2619 // Check for minus zero. Return heap number for minus zero. | 2618 // Check for minus zero. Return heap number for minus zero. |
2620 Label not_zero; | 2619 Label not_zero; |
2621 __ cmp(scratch1, Operand(0)); | 2620 __ cmp(scratch1, Operand::Zero()); |
2622 __ b(ne, ¬_zero); | 2621 __ b(ne, ¬_zero); |
2623 __ vmov(scratch2, d5.high()); | 2622 __ vmov(scratch2, d5.high()); |
2624 __ tst(scratch2, Operand(HeapNumber::kSignMask)); | 2623 __ tst(scratch2, Operand(HeapNumber::kSignMask)); |
2625 __ b(ne, &return_heap_number); | 2624 __ b(ne, &return_heap_number); |
2626 __ bind(¬_zero); | 2625 __ bind(¬_zero); |
2627 | 2626 |
2628 // Tag the result and return. | 2627 // Tag the result and return. |
2629 __ SmiTag(r0, scratch1); | 2628 __ SmiTag(r0, scratch1); |
2630 __ Ret(); | 2629 __ Ret(); |
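The fits-in-a-smi test above is a range-check idiom: adding 0x40000000 maps the 31-bit smi range [-2^30, 2^30 - 1] onto [0, 2^31 - 1], so the sign flag (ARM's "mi" condition) is set exactly when the value is out of range. A sketch of that test and of the minus-zero case, with hypothetical helper names:

  #include <cstdint>

  // In-range values become non-negative after the biased add; out-of-range
  // values wrap and set the sign bit, matching the SetCC/mi pair above.
  bool FitsInSmi(int32_t value) {
    uint32_t biased = static_cast<uint32_t>(value) + 0x40000000u;
    return static_cast<int32_t>(biased) >= 0;
  }

  // -0.0 truncates to integer 0, so a zero result must also inspect the
  // sign bit of the original double, as the d5.high() test above does.
  bool IsMinusZero(int32_t truncated, uint32_t double_hi_word) {
    return truncated == 0 && (double_hi_word & 0x80000000u) != 0;
  }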
2631 } else { | 2630 } else { |
(...skipping 471 matching lines...) |
3103 __ bind(&invalid_cache); | 3102 __ bind(&invalid_cache); |
3104 ExternalReference runtime_function = | 3103 ExternalReference runtime_function = |
3105 ExternalReference(RuntimeFunction(), masm->isolate()); | 3104 ExternalReference(RuntimeFunction(), masm->isolate()); |
3106 __ TailCallExternalReference(runtime_function, 1, 1); | 3105 __ TailCallExternalReference(runtime_function, 1, 1); |
3107 } else { | 3106 } else { |
3108 if (!CpuFeatures::IsSupported(VFP3)) UNREACHABLE(); | 3107 if (!CpuFeatures::IsSupported(VFP3)) UNREACHABLE(); |
3109 CpuFeatures::Scope scope(VFP3); | 3108 CpuFeatures::Scope scope(VFP3); |
3110 | 3109 |
3111 Label no_update; | 3110 Label no_update; |
3112 Label skip_cache; | 3111 Label skip_cache; |
3113 const Register heap_number_map = r5; | |
3114 | 3112 |
3115 // Call C function to calculate the result and update the cache. | 3113 // Call C function to calculate the result and update the cache. |
3116 // Register r0 holds precalculated cache entry address; preserve | 3114 // Register r0 holds precalculated cache entry address; preserve |
3117 // it on the stack and pop it into register cache_entry after the | 3115 // it on the stack and pop it into register cache_entry after the |
3118 // call. | 3116 // call. |
3119 __ push(cache_entry); | 3117 __ push(cache_entry); |
3120 GenerateCallCFunction(masm, scratch0); | 3118 GenerateCallCFunction(masm, scratch0); |
3121 __ GetCFunctionDoubleResult(d2); | 3119 __ GetCFunctionDoubleResult(d2); |
3122 | 3120 |
3123 // Try to update the cache. If we cannot allocate a | 3121 // Try to update the cache. If we cannot allocate a |
(...skipping 450 matching lines...) |
3574 __ Push(r8, r7, r6, r5); | 3572 __ Push(r8, r7, r6, r5); |
3575 | 3573 |
3576 // Set up the frame pointer for the frame to be pushed. | 3574 // Set up the frame pointer for the frame to be pushed. |
3577 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); | 3575 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); |
3578 | 3576 |
3579 // If this is the outermost JS call, set js_entry_sp value. | 3577 // If this is the outermost JS call, set js_entry_sp value. |
3580 Label non_outermost_js; | 3578 Label non_outermost_js; |
3581 ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate); | 3579 ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate); |
3582 __ mov(r5, Operand(ExternalReference(js_entry_sp))); | 3580 __ mov(r5, Operand(ExternalReference(js_entry_sp))); |
3583 __ ldr(r6, MemOperand(r5)); | 3581 __ ldr(r6, MemOperand(r5)); |
3584 __ cmp(r6, Operand(0)); | 3582 __ cmp(r6, Operand::Zero()); |
3585 __ b(ne, &non_outermost_js); | 3583 __ b(ne, &non_outermost_js); |
3586 __ str(fp, MemOperand(r5)); | 3584 __ str(fp, MemOperand(r5)); |
3587 __ mov(ip, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); | 3585 __ mov(ip, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); |
3588 Label cont; | 3586 Label cont; |
3589 __ b(&cont); | 3587 __ b(&cont); |
3590 __ bind(&non_outermost_js); | 3588 __ bind(&non_outermost_js); |
3591 __ mov(ip, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME))); | 3589 __ mov(ip, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME))); |
3592 __ bind(&cont); | 3590 __ bind(&cont); |
3593 __ push(ip); | 3591 __ push(ip); |
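In outline, the marker protocol above: js_entry_sp is zero until the outermost JS entry stores its frame pointer, and the pushed smi records which case applied so the exit path further down knows whether to clear the slot. A hedged sketch of that logic (names are illustrative, not V8 API):

  #include <cstdint>

  enum FrameMarker { INNER_JSENTRY_FRAME, OUTERMOST_JSENTRY_FRAME };

  // Only the first (outermost) entry finds js_entry_sp == 0 and claims it;
  // nested entries leave the slot untouched and record the INNER marker.
  FrameMarker EnterJS(uintptr_t* js_entry_sp, uintptr_t fp) {
    if (*js_entry_sp == 0) {  // cmp(r6, Operand::Zero()) above
      *js_entry_sp = fp;      // str(fp, MemOperand(r5))
      return OUTERMOST_JSENTRY_FRAME;
    }
    return INNER_JSENTRY_FRAME;
  }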
3594 | 3592 |
(...skipping 54 matching lines...) |
3649 | 3647 |
3650 // Unlink this frame from the handler chain. | 3648 // Unlink this frame from the handler chain. |
3651 __ PopTryHandler(); | 3649 __ PopTryHandler(); |
3652 | 3650 |
3653 __ bind(&exit); // r0 holds result | 3651 __ bind(&exit); // r0 holds result |
3654 // Check if the current stack frame is marked as the outermost JS frame. | 3652 // Check if the current stack frame is marked as the outermost JS frame. |
3655 Label non_outermost_js_2; | 3653 Label non_outermost_js_2; |
3656 __ pop(r5); | 3654 __ pop(r5); |
3657 __ cmp(r5, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); | 3655 __ cmp(r5, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); |
3658 __ b(ne, &non_outermost_js_2); | 3656 __ b(ne, &non_outermost_js_2); |
3659 __ mov(r6, Operand(0)); | 3657 __ mov(r6, Operand::Zero()); |
3660 __ mov(r5, Operand(ExternalReference(js_entry_sp))); | 3658 __ mov(r5, Operand(ExternalReference(js_entry_sp))); |
3661 __ str(r6, MemOperand(r5)); | 3659 __ str(r6, MemOperand(r5)); |
3662 __ bind(&non_outermost_js_2); | 3660 __ bind(&non_outermost_js_2); |
3663 | 3661 |
3664 // Restore the top frame descriptors from the stack. | 3662 // Restore the top frame descriptors from the stack. |
3665 __ pop(r3); | 3663 __ pop(r3); |
3666 __ mov(ip, | 3664 __ mov(ip, |
3667 Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate))); | 3665 Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate))); |
3668 __ str(r3, MemOperand(ip)); | 3666 __ str(r3, MemOperand(ip)); |
3669 | 3667 |
(...skipping 180 matching lines...) |
3850 if (!ReturnTrueFalseObject()) { | 3848 if (!ReturnTrueFalseObject()) { |
3851 if (HasArgsInRegisters()) { | 3849 if (HasArgsInRegisters()) { |
3852 __ Push(r0, r1); | 3850 __ Push(r0, r1); |
3853 } | 3851 } |
3854 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 3852 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
3855 } else { | 3853 } else { |
3856 __ EnterInternalFrame(); | 3854 __ EnterInternalFrame(); |
3857 __ Push(r0, r1); | 3855 __ Push(r0, r1); |
3858 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); | 3856 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); |
3859 __ LeaveInternalFrame(); | 3857 __ LeaveInternalFrame(); |
3860 __ cmp(r0, Operand(0)); | 3858 __ cmp(r0, Operand::Zero()); |
3861 __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq); | 3859 __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq); |
3862 __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne); | 3860 __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne); |
3863 __ Ret(HasArgsInRegisters() ? 0 : 2); | 3861 __ Ret(HasArgsInRegisters() ? 0 : 2); |
3864 } | 3862 } |
3865 } | 3863 } |
3866 | 3864 |
3867 | 3865 |
3868 Register InstanceofStub::left() { return r0; } | 3866 Register InstanceofStub::left() { return r0; } |
3869 | 3867 |
3870 | 3868 |
(...skipping 113 matching lines...) |
3984 __ mov(r1, Operand(r2), LeaveCC, gt); | 3982 __ mov(r1, Operand(r2), LeaveCC, gt); |
3985 | 3983 |
3986 __ bind(&try_allocate); | 3984 __ bind(&try_allocate); |
3987 | 3985 |
3988 // Compute the sizes of backing store, parameter map, and arguments object. | 3986 // Compute the sizes of backing store, parameter map, and arguments object. |
3989 // 1. Parameter map: has 2 extra words containing context and backing store. | 3987 // 1. Parameter map: has 2 extra words containing context and backing store. |
3990 const int kParameterMapHeaderSize = | 3988 const int kParameterMapHeaderSize = |
3991 FixedArray::kHeaderSize + 2 * kPointerSize; | 3989 FixedArray::kHeaderSize + 2 * kPointerSize; |
3992 // If there are no mapped parameters, we do not need the parameter_map. | 3990 // If there are no mapped parameters, we do not need the parameter_map. |
3993 __ cmp(r1, Operand(Smi::FromInt(0))); | 3991 __ cmp(r1, Operand(Smi::FromInt(0))); |
3994 __ mov(r9, Operand(0), LeaveCC, eq); | 3992 __ mov(r9, Operand::Zero(), LeaveCC, eq); |
3995 __ mov(r9, Operand(r1, LSL, 1), LeaveCC, ne); | 3993 __ mov(r9, Operand(r1, LSL, 1), LeaveCC, ne); |
3996 __ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne); | 3994 __ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne); |
3997 | 3995 |
3998 // 2. Backing store. | 3996 // 2. Backing store. |
3999 __ add(r9, r9, Operand(r2, LSL, 1)); | 3997 __ add(r9, r9, Operand(r2, LSL, 1)); |
4000 __ add(r9, r9, Operand(FixedArray::kHeaderSize)); | 3998 __ add(r9, r9, Operand(FixedArray::kHeaderSize)); |
4001 | 3999 |
4002 // 3. Arguments object. | 4000 // 3. Arguments object. |
4003 __ add(r9, r9, Operand(Heap::kArgumentsObjectSize)); | 4001 __ add(r9, r9, Operand(Heap::kArgumentsObjectSize)); |
4004 | 4002 |
4005 // Do the allocation of all three objects in one go. | 4003 // Do the allocation of all three objects in one go. |
4006 __ AllocateInNewSpace(r9, r0, r3, r4, &runtime, TAG_OBJECT); | 4004 __ AllocateInNewSpace(r9, r0, r3, r4, &runtime, TAG_OBJECT); |
4007 | 4005 |
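The size computation above, in plain C++: a tagged smi is the integer shifted left by one, so "Operand(r1, LSL, 1)" on a smi count already yields count * kPointerSize on a 32-bit target. The constant values below are illustrative stand-ins, not the real V8 constants:

  #include <cstddef>

  const int kPointerSize = 4;
  const int kFixedArrayHeaderSize = 8;   // illustrative value
  const int kArgumentsObjectSize = 20;   // illustrative value
  const int kParameterMapHeaderSize = kFixedArrayHeaderSize + 2 * kPointerSize;

  // Mirrors steps 1-3 above: optional parameter map, backing store, and the
  // arguments object itself, all allocated in one go.
  size_t ArgumentsAllocationSize(int mapped_count, int arg_count) {
    size_t size = 0;
    if (mapped_count > 0) {  // no parameter_map when nothing is mapped
      size += mapped_count * kPointerSize + kParameterMapHeaderSize;
    }
    size += arg_count * kPointerSize + kFixedArrayHeaderSize;
    return size + kArgumentsObjectSize;
  }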
4008 // r0 = address of new object(s) (tagged) | 4006 // r0 = address of new object(s) (tagged) |
4009 // r2 = argument count (tagged) | 4007 // r2 = argument count (tagged) |
4010 // Get the arguments boilerplate from the current (global) context into r4. | 4008 // Get the arguments boilerplate from the current (global) context into r4. |
4011 const int kNormalOffset = | 4009 const int kNormalOffset = |
4012 Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX); | 4010 Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX); |
4013 const int kAliasedOffset = | 4011 const int kAliasedOffset = |
4014 Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX); | 4012 Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX); |
4015 | 4013 |
4016 __ ldr(r4, MemOperand(r8, Context::SlotOffset(Context::GLOBAL_INDEX))); | 4014 __ ldr(r4, MemOperand(r8, Context::SlotOffset(Context::GLOBAL_INDEX))); |
4017 __ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset)); | 4015 __ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset)); |
4018 __ cmp(r1, Operand(0)); | 4016 __ cmp(r1, Operand::Zero()); |
4019 __ ldr(r4, MemOperand(r4, kNormalOffset), eq); | 4017 __ ldr(r4, MemOperand(r4, kNormalOffset), eq); |
4020 __ ldr(r4, MemOperand(r4, kAliasedOffset), ne); | 4018 __ ldr(r4, MemOperand(r4, kAliasedOffset), ne); |
4021 | 4019 |
4022 // r0 = address of new object (tagged) | 4020 // r0 = address of new object (tagged) |
4023 // r1 = mapped parameter count (tagged) | 4021 // r1 = mapped parameter count (tagged) |
4024 // r2 = argument count (tagged) | 4022 // r2 = argument count (tagged) |
4025 // r4 = address of boilerplate object (tagged) | 4023 // r4 = address of boilerplate object (tagged) |
4026 // Copy the JS object part. | 4024 // Copy the JS object part. |
4027 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { | 4025 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { |
4028 __ ldr(r3, FieldMemOperand(r4, i)); | 4026 __ ldr(r3, FieldMemOperand(r4, i)); |
(...skipping 1661 matching lines...) |
5690 Register scratch2, | 5688 Register scratch2, |
5691 Label* chars_not_equal) { | 5689 Label* chars_not_equal) { |
5692 // Change index to run from -length to -1 by adding length to string | 5690 // Change index to run from -length to -1 by adding length to string |
5693 // start. This means that the loop ends when index reaches zero, which | 5691 // start. This means that the loop ends when index reaches zero, which |
5694 // doesn't need an additional compare. | 5692 // doesn't need an additional compare. |
5695 __ SmiUntag(length); | 5693 __ SmiUntag(length); |
5696 __ add(scratch1, length, | 5694 __ add(scratch1, length, |
5697 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); | 5695 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
5698 __ add(left, left, Operand(scratch1)); | 5696 __ add(left, left, Operand(scratch1)); |
5699 __ add(right, right, Operand(scratch1)); | 5697 __ add(right, right, Operand(scratch1)); |
5700 __ rsb(length, length, Operand(0)); | 5698 __ rsb(length, length, Operand::Zero()); |
5701 Register index = length; // index = -length; | 5699 Register index = length; // index = -length; |
5702 | 5700 |
5703 // Compare loop. | 5701 // Compare loop. |
5704 Label loop; | 5702 Label loop; |
5705 __ bind(&loop); | 5703 __ bind(&loop); |
5706 __ ldrb(scratch1, MemOperand(left, index)); | 5704 __ ldrb(scratch1, MemOperand(left, index)); |
5707 __ ldrb(scratch2, MemOperand(right, index)); | 5705 __ ldrb(scratch2, MemOperand(right, index)); |
5708 __ cmp(scratch1, scratch2); | 5706 __ cmp(scratch1, scratch2); |
5709 __ b(ne, chars_not_equal); | 5707 __ b(ne, chars_not_equal); |
5710 __ add(index, index, Operand(1), SetCC); | 5708 __ add(index, index, Operand(1), SetCC); |
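This is the standard negative-index compare idiom: point both strings one past the end, run the index from -length toward zero, and let the flag-setting add double as the loop condition. A C++ sketch of the same loop (the helper name is hypothetical):

  #include <cstdint>
  #include <cstddef>

  // Compare 'length' bytes without a separate loop-bound compare: the index
  // reaching zero ends the loop, just as the SetCC add does above.
  bool CharsEqual(const uint8_t* left, const uint8_t* right, size_t length) {
    left += length;   // one past the end of each string
    right += length;
    for (ptrdiff_t i = -static_cast<ptrdiff_t>(length); i != 0; ++i) {
      if (left[i] != right[i]) return false;
    }
    return true;
  }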
(...skipping 837 matching lines...) |
6548 __ tst(entry_key, Operand(kIsSymbolMask)); | 6546 __ tst(entry_key, Operand(kIsSymbolMask)); |
6549 __ b(eq, &maybe_in_dictionary); | 6547 __ b(eq, &maybe_in_dictionary); |
6550 } | 6548 } |
6551 } | 6549 } |
6552 | 6550 |
6553 __ bind(&maybe_in_dictionary); | 6551 __ bind(&maybe_in_dictionary); |
6554 // If we are doing a negative lookup, then probing failure should be | 6552 // If we are doing a negative lookup, then probing failure should be |
6555 // treated as a lookup success. For a positive lookup, probing failure | 6553 // treated as a lookup success. For a positive lookup, probing failure |
6556 // should be treated as a lookup failure. | 6554 // should be treated as a lookup failure. |
6557 if (mode_ == POSITIVE_LOOKUP) { | 6555 if (mode_ == POSITIVE_LOOKUP) { |
6558 __ mov(result, Operand(0)); | 6556 __ mov(result, Operand::Zero()); |
6559 __ Ret(); | 6557 __ Ret(); |
6560 } | 6558 } |
6561 | 6559 |
6562 __ bind(&in_dictionary); | 6560 __ bind(&in_dictionary); |
6563 __ mov(result, Operand(1)); | 6561 __ mov(result, Operand(1)); |
6564 __ Ret(); | 6562 __ Ret(); |
6565 | 6563 |
6566 __ bind(¬_in_dictionary); | 6564 __ bind(¬_in_dictionary); |
6567 __ mov(result, Operand(0)); | 6565 __ mov(result, Operand::Zero()); |
6568 __ Ret(); | 6566 __ Ret(); |
6569 } | 6567 } |
6570 | 6568 |
6571 | 6569 |
6572 #undef __ | 6570 #undef __ |
6573 | 6571 |
6574 } } // namespace v8::internal | 6572 } } // namespace v8::internal |
6575 | 6573 |
6576 #endif // V8_TARGET_ARCH_ARM | 6574 #endif // V8_TARGET_ARCH_ARM |