| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 64 matching lines...) |
| 75 // as the map of the allocated object. | 75 // as the map of the allocated object. |
| 76 __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 76 __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 77 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset)); | 77 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset)); |
| 78 __ ldr(r2, MemOperand(r2, Context::SlotOffset(Context::FUNCTION_MAP_INDEX))); | 78 __ ldr(r2, MemOperand(r2, Context::SlotOffset(Context::FUNCTION_MAP_INDEX))); |
| 79 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); | 79 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 80 | 80 |
| 81 // Initialize the rest of the function. We don't have to update the | 81 // Initialize the rest of the function. We don't have to update the |
| 82 // write barrier because the allocated object is in new space. | 82 // write barrier because the allocated object is in new space. |
| 83 __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex); | 83 __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex); |
| 84 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex); | 84 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex); |
| 85 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); |
| 85 __ str(r1, FieldMemOperand(r0, JSObject::kPropertiesOffset)); | 86 __ str(r1, FieldMemOperand(r0, JSObject::kPropertiesOffset)); |
| 86 __ str(r1, FieldMemOperand(r0, JSObject::kElementsOffset)); | 87 __ str(r1, FieldMemOperand(r0, JSObject::kElementsOffset)); |
| 87 __ str(r2, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset)); | 88 __ str(r2, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset)); |
| 88 __ str(r3, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); | 89 __ str(r3, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); |
| 89 __ str(cp, FieldMemOperand(r0, JSFunction::kContextOffset)); | 90 __ str(cp, FieldMemOperand(r0, JSFunction::kContextOffset)); |
| 90 __ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); | 91 __ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); |
| 92 __ str(r4, FieldMemOperand(r0, JSFunction::kNextFunctionLinkOffset)); |
| 93 |
| 91 | 94 |
| 92 // Initialize the code pointer in the function to be the one | 95 // Initialize the code pointer in the function to be the one |
| 93 // found in the shared function info object. | 96 // found in the shared function info object. |
| 94 __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset)); | 97 __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset)); |
| 95 __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); | 98 __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 96 __ str(r3, FieldMemOperand(r0, JSFunction::kCodeEntryOffset)); | 99 __ str(r3, FieldMemOperand(r0, JSFunction::kCodeEntryOffset)); |
| 97 | 100 |
| 98 // Return result. The argument function info has been popped already. | 101 // Return result. The argument function info has been popped already. |
| 99 __ Ret(); | 102 __ Ret(); |
| 100 | 103 |
| (...skipping 980 matching lines...) |
| 1081 } | 1084 } |
| 1082 | 1085 |
| 1083 | 1086 |
| 1084 // This stub does not handle the inlined cases (Smis, Booleans, undefined). | 1087 // This stub does not handle the inlined cases (Smis, Booleans, undefined). |
| 1085 // The stub returns zero for false, and a non-zero value for true. | 1088 // The stub returns zero for false, and a non-zero value for true. |
| 1086 void ToBooleanStub::Generate(MacroAssembler* masm) { | 1089 void ToBooleanStub::Generate(MacroAssembler* masm) { |
| 1087 Label false_result; | 1090 Label false_result; |
| 1088 Label not_heap_number; | 1091 Label not_heap_number; |
| 1089 Register scratch = r7; | 1092 Register scratch = r7; |
| 1090 | 1093 |
| 1094 __ LoadRoot(ip, Heap::kNullValueRootIndex); |
| 1095 __ cmp(tos_, ip); |
| 1096 __ b(eq, &false_result); |
| 1097 |
| 1091 // HeapNumber => false iff +0, -0, or NaN. | 1098 // HeapNumber => false iff +0, -0, or NaN. |
| 1092 __ ldr(scratch, FieldMemOperand(tos_, HeapObject::kMapOffset)); | 1099 __ ldr(scratch, FieldMemOperand(tos_, HeapObject::kMapOffset)); |
| 1093 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | 1100 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); |
| 1094 __ cmp(scratch, ip); | 1101 __ cmp(scratch, ip); |
| 1095 __ b(&not_heap_number, ne); | 1102 __ b(&not_heap_number, ne); |
| 1096 | 1103 |
| 1097 __ sub(ip, tos_, Operand(kHeapObjectTag)); | 1104 __ sub(ip, tos_, Operand(kHeapObjectTag)); |
| 1098 __ vldr(d1, ip, HeapNumber::kValueOffset); | 1105 __ vldr(d1, ip, HeapNumber::kValueOffset); |
| 1099 __ vcmp(d1, 0.0); | 1106 __ vcmp(d1, 0.0); |
| 1100 __ vmrs(pc); | 1107 __ vmrs(pc); |
| (...skipping 1094 matching lines...) |
| 2195 1); | 2202 1); |
| 2196 } | 2203 } |
| 2197 | 2204 |
| 2198 | 2205 |
| 2199 Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) { | 2206 Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) { |
| 2200 GenericBinaryOpStub stub(key, type_info); | 2207 GenericBinaryOpStub stub(key, type_info); |
| 2201 return stub.GetCode(); | 2208 return stub.GetCode(); |
| 2202 } | 2209 } |
| 2203 | 2210 |
| 2204 | 2211 |
| 2212 Handle<Code> GetTypeRecordingBinaryOpStub(int key, |
| 2213 TRBinaryOpIC::TypeInfo type_info, |
| 2214 TRBinaryOpIC::TypeInfo result_type_info) { |
| 2215 UNIMPLEMENTED(); |
| 2216 return Handle<Code>::null(); |
| 2217 } |
| 2218 |
| 2219 |
| 2205 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { | 2220 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { |
| 2206 // Argument is a number and is on stack and in r0. | 2221 // Argument is a number and is on stack and in r0. |
| 2207 Label runtime_call; | 2222 Label runtime_call; |
| 2208 Label input_not_smi; | 2223 Label input_not_smi; |
| 2209 Label loaded; | 2224 Label loaded; |
| 2210 | 2225 |
| 2211 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { | 2226 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 2212 // Load argument and check if it is a smi. | 2227 // Load argument and check if it is a smi. |
| 2213 __ BranchOnNotSmi(r0, &input_not_smi); | 2228 __ BranchOnNotSmi(r0, &input_not_smi); |
| 2214 | 2229 |
| (...skipping 427 matching lines...) |
| 2642 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); | 2657 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); |
| 2643 // Lower 2 bits of r2 are 0 iff r0 has failure tag. | 2658 // Lower 2 bits of r2 are 0 iff r0 has failure tag. |
| 2644 __ add(r2, r0, Operand(1)); | 2659 __ add(r2, r0, Operand(1)); |
| 2645 __ tst(r2, Operand(kFailureTagMask)); | 2660 __ tst(r2, Operand(kFailureTagMask)); |
| 2646 __ b(eq, &failure_returned); | 2661 __ b(eq, &failure_returned); |
| 2647 | 2662 |
| 2648 // Exit C frame and return. | 2663 // Exit C frame and return. |
| 2649 // r0:r1: result | 2664 // r0:r1: result |
| 2650 // sp: stack pointer | 2665 // sp: stack pointer |
| 2651 // fp: frame pointer | 2666 // fp: frame pointer |
| 2652 __ LeaveExitFrame(); | 2667 __ LeaveExitFrame(save_doubles_); |
| 2653 | 2668 |
| 2654 // check if we should retry or throw exception | 2669 // check if we should retry or throw exception |
| 2655 Label retry; | 2670 Label retry; |
| 2656 __ bind(&failure_returned); | 2671 __ bind(&failure_returned); |
| 2657 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); | 2672 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); |
| 2658 __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); | 2673 __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); |
| 2659 __ b(eq, &retry); | 2674 __ b(eq, &retry); |
| 2660 | 2675 |
| 2661 // Special handling of out of memory exceptions. | 2676 // Special handling of out of memory exceptions. |
| 2662 Failure* out_of_memory = Failure::OutOfMemoryException(); | 2677 Failure* out_of_memory = Failure::OutOfMemoryException(); |
| (...skipping 28 matching lines...) |
| 2691 // cp: current context (C callee-saved) | 2706 // cp: current context (C callee-saved) |
| 2692 | 2707 |
| 2693 // Result returned in r0 or r0+r1 by default. | 2708 // Result returned in r0 or r0+r1 by default. |
| 2694 | 2709 |
| 2695 // NOTE: Invocations of builtins may return failure objects | 2710 // NOTE: Invocations of builtins may return failure objects |
| 2696 // instead of a proper result. The builtin entry handles | 2711 // instead of a proper result. The builtin entry handles |
| 2697 // this by performing a garbage collection and retrying the | 2712 // this by performing a garbage collection and retrying the |
| 2698 // builtin once. | 2713 // builtin once. |
| 2699 | 2714 |
| 2700 // Enter the exit frame that transitions from JavaScript to C++. | 2715 // Enter the exit frame that transitions from JavaScript to C++. |
| 2701 __ EnterExitFrame(); | 2716 __ EnterExitFrame(save_doubles_); |
| 2702 | 2717 |
| 2703 // r4: number of arguments (C callee-saved) | 2718 // r4: number of arguments (C callee-saved) |
| 2704 // r5: pointer to builtin function (C callee-saved) | 2719 // r5: pointer to builtin function (C callee-saved) |
| 2705 // r6: pointer to first argument (C callee-saved) | 2720 // r6: pointer to first argument (C callee-saved) |
| 2706 | 2721 |
| 2707 Label throw_normal_exception; | 2722 Label throw_normal_exception; |
| 2708 Label throw_termination_exception; | 2723 Label throw_termination_exception; |
| 2709 Label throw_out_of_memory_exception; | 2724 Label throw_out_of_memory_exception; |
| 2710 | 2725 |
| 2711 // Call into the runtime system. | 2726 // Call into the runtime system. |
| (...skipping 67 matching lines...) |
| 2779 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; | 2794 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; |
| 2780 __ mov(r7, Operand(Smi::FromInt(marker))); | 2795 __ mov(r7, Operand(Smi::FromInt(marker))); |
| 2781 __ mov(r6, Operand(Smi::FromInt(marker))); | 2796 __ mov(r6, Operand(Smi::FromInt(marker))); |
| 2782 __ mov(r5, Operand(ExternalReference(Isolate::k_c_entry_fp_address))); | 2797 __ mov(r5, Operand(ExternalReference(Isolate::k_c_entry_fp_address))); |
| 2783 __ ldr(r5, MemOperand(r5)); | 2798 __ ldr(r5, MemOperand(r5)); |
| 2784 __ Push(r8, r7, r6, r5); | 2799 __ Push(r8, r7, r6, r5); |
| 2785 | 2800 |
| 2786 // Setup frame pointer for the frame to be pushed. | 2801 // Setup frame pointer for the frame to be pushed. |
| 2787 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); | 2802 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); |
| 2788 | 2803 |
| 2804 #ifdef ENABLE_LOGGING_AND_PROFILING |
| 2805 // If this is the outermost JS call, set js_entry_sp value. |
| 2806 ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address); |
| 2807 __ mov(r5, Operand(ExternalReference(js_entry_sp))); |
| 2808 __ ldr(r6, MemOperand(r5)); |
| 2809 __ cmp(r6, Operand(0, RelocInfo::NONE)); |
| 2810 __ str(fp, MemOperand(r5), eq); |
| 2811 #endif |
| 2812 |
| 2789 // Call a faked try-block that does the invoke. | 2813 // Call a faked try-block that does the invoke. |
| 2790 __ bl(&invoke); | 2814 __ bl(&invoke); |
| 2791 | 2815 |
| 2792 // Caught exception: Store result (exception) in the pending | 2816 // Caught exception: Store result (exception) in the pending |
| 2793 // exception field in the JSEnv and return a failure sentinel. | 2817 // exception field in the JSEnv and return a failure sentinel. |
| 2794 // Coming in here the fp will be invalid because the PushTryHandler below | 2818 // Coming in here the fp will be invalid because the PushTryHandler below |
| 2795 // sets it to 0 to signal the existence of the JSEntry frame. | 2819 // sets it to 0 to signal the existence of the JSEntry frame. |
| 2796 __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address))); | 2820 __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address))); |
| 2797 __ str(r0, MemOperand(ip)); | 2821 __ str(r0, MemOperand(ip)); |
| 2798 __ mov(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception()))); | 2822 __ mov(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception()))); |
| (...skipping 42 matching lines...) |
| 2841 // Unlink this frame from the handler chain. When reading the | 2865 // Unlink this frame from the handler chain. When reading the |
| 2842 // address of the next handler, there is no need to use the address | 2866 // address of the next handler, there is no need to use the address |
| 2843 // displacement since the current stack pointer (sp) points directly | 2867 // displacement since the current stack pointer (sp) points directly |
| 2844 // to the stack handler. | 2868 // to the stack handler. |
| 2845 __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset)); | 2869 __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset)); |
| 2846 __ mov(ip, Operand(ExternalReference(Isolate::k_handler_address))); | 2870 __ mov(ip, Operand(ExternalReference(Isolate::k_handler_address))); |
| 2847 __ str(r3, MemOperand(ip)); | 2871 __ str(r3, MemOperand(ip)); |
| 2848 // No need to restore registers | 2872 // No need to restore registers |
| 2849 __ add(sp, sp, Operand(StackHandlerConstants::kSize)); | 2873 __ add(sp, sp, Operand(StackHandlerConstants::kSize)); |
| 2850 | 2874 |
| 2875 #ifdef ENABLE_LOGGING_AND_PROFILING |
| 2876 // If current FP value is the same as js_entry_sp value, it means that |
| 2877 // the current function is the outermost. |
| 2878 __ mov(r5, Operand(ExternalReference(js_entry_sp))); |
| 2879 __ ldr(r6, MemOperand(r5)); |
| 2880 __ cmp(fp, Operand(r6)); |
| 2881 __ mov(r6, Operand(0, RelocInfo::NONE), LeaveCC, eq); |
| 2882 __ str(r6, MemOperand(r5), eq); |
| 2883 #endif |
| 2851 | 2884 |
| 2852 __ bind(&exit); // r0 holds result | 2885 __ bind(&exit); // r0 holds result |
| 2853 // Restore the top frame descriptors from the stack. | 2886 // Restore the top frame descriptors from the stack. |
| 2854 __ pop(r3); | 2887 __ pop(r3); |
| 2855 __ mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address))); | 2888 __ mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address))); |
| 2856 __ str(r3, MemOperand(ip)); | 2889 __ str(r3, MemOperand(ip)); |
| 2857 | 2890 |
| 2858 // Reset the stack to the callee saved registers. | 2891 // Reset the stack to the callee saved registers. |
| 2859 __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); | 2892 __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); |
| 2860 | 2893 |
| (...skipping 570 matching lines...) |
| 3431 __ add(sp, sp, Operand(4 * kPointerSize)); | 3464 __ add(sp, sp, Operand(4 * kPointerSize)); |
| 3432 __ Ret(); | 3465 __ Ret(); |
| 3433 | 3466 |
| 3434 // Do the runtime call to execute the regexp. | 3467 // Do the runtime call to execute the regexp. |
| 3435 __ bind(&runtime); | 3468 __ bind(&runtime); |
| 3436 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 3469 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); |
| 3437 #endif // V8_INTERPRETED_REGEXP | 3470 #endif // V8_INTERPRETED_REGEXP |
| 3438 } | 3471 } |
| 3439 | 3472 |
| 3440 | 3473 |
| 3474 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { |
| 3475 const int kMaxInlineLength = 100; |
| 3476 Label slowcase; |
| 3477 Label done; |
| 3478 __ ldr(r1, MemOperand(sp, kPointerSize * 2)); |
| 3479 STATIC_ASSERT(kSmiTag == 0); |
| 3480 STATIC_ASSERT(kSmiTagSize == 1); |
| 3481 __ tst(r1, Operand(kSmiTagMask)); |
| 3482 __ b(ne, &slowcase); |
| 3483 __ cmp(r1, Operand(Smi::FromInt(kMaxInlineLength))); |
| 3484 __ b(hi, &slowcase); |
| 3485 // Smi-tagging is equivalent to multiplying by 2. |
| 3486 // Allocate RegExpResult followed by FixedArray with size in ebx. |
| 3487 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] |
| 3488 // Elements: [Map][Length][..elements..] |
| 3489 // Size of JSArray with two in-object properties and the header of a |
| 3490 // FixedArray. |
| 3491 int objects_size = |
| 3492 (JSRegExpResult::kSize + FixedArray::kHeaderSize) / kPointerSize; |
| 3493 __ mov(r5, Operand(r1, LSR, kSmiTagSize + kSmiShiftSize)); |
| 3494 __ add(r2, r5, Operand(objects_size)); |
| 3495 __ AllocateInNewSpace( |
| 3496 r2, // In: Size, in words. |
| 3497 r0, // Out: Start of allocation (tagged). |
| 3498 r3, // Scratch register. |
| 3499 r4, // Scratch register. |
| 3500 &slowcase, |
| 3501 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)); |
| 3502 // r0: Start of allocated area, object-tagged. |
| 3503 // r1: Number of elements in array, as smi. |
| 3504 // r5: Number of elements, untagged. |
| 3505 |
| 3506 // Set JSArray map to global.regexp_result_map(). |
| 3507 // Set empty properties FixedArray. |
| 3508 // Set elements to point to FixedArray allocated right after the JSArray. |
| 3509 // Interleave operations for better latency. |
| 3510 __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX)); |
| 3511 __ add(r3, r0, Operand(JSRegExpResult::kSize)); |
| 3512 __ mov(r4, Operand(FACTORY->empty_fixed_array())); |
| 3513 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset)); |
| 3514 __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset)); |
| 3515 __ ldr(r2, ContextOperand(r2, Context::REGEXP_RESULT_MAP_INDEX)); |
| 3516 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset)); |
| 3517 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 3518 |
| 3519 // Set input, index and length fields from arguments. |
| 3520 __ ldr(r1, MemOperand(sp, kPointerSize * 0)); |
| 3521 __ str(r1, FieldMemOperand(r0, JSRegExpResult::kInputOffset)); |
| 3522 __ ldr(r1, MemOperand(sp, kPointerSize * 1)); |
| 3523 __ str(r1, FieldMemOperand(r0, JSRegExpResult::kIndexOffset)); |
| 3524 __ ldr(r1, MemOperand(sp, kPointerSize * 2)); |
| 3525 __ str(r1, FieldMemOperand(r0, JSArray::kLengthOffset)); |
| 3526 |
| 3527 // Fill out the elements FixedArray. |
| 3528 // r0: JSArray, tagged. |
| 3529 // r3: FixedArray, tagged. |
| 3530 // r5: Number of elements in array, untagged. |
| 3531 |
| 3532 // Set map. |
| 3533 __ mov(r2, Operand(FACTORY->fixed_array_map())); |
| 3534 __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 3535 // Set FixedArray length. |
| 3536 __ mov(r6, Operand(r5, LSL, kSmiTagSize)); |
| 3537 __ str(r6, FieldMemOperand(r3, FixedArray::kLengthOffset)); |
| 3538 // Fill contents of fixed-array with the-hole. |
| 3539 __ mov(r2, Operand(FACTORY->the_hole_value())); |
| 3540 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3541 // Fill fixed array elements with hole. |
| 3542 // r0: JSArray, tagged. |
| 3543 // r2: the hole. |
| 3544 // r3: Start of elements in FixedArray. |
| 3545 // r5: Number of elements to fill. |
| 3546 Label loop; |
| 3547 __ tst(r5, Operand(r5)); |
| 3548 __ bind(&loop); |
| 3549 __ b(le, &done); // Jump if r1 is negative or zero. |
| 3550 __ sub(r5, r5, Operand(1), SetCC); |
| 3551 __ str(r2, MemOperand(r3, r5, LSL, kPointerSizeLog2)); |
| 3552 __ jmp(&loop); |
| 3553 |
| 3554 __ bind(&done); |
| 3555 __ add(sp, sp, Operand(3 * kPointerSize)); |
| 3556 __ Ret(); |
| 3557 |
| 3558 __ bind(&slowcase); |
| 3559 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); |
| 3560 } |
| 3561 |
| 3562 |
| 3441 void CallFunctionStub::Generate(MacroAssembler* masm) { | 3563 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 3442 Label slow; | 3564 Label slow; |
| 3443 | 3565 |
| 3444 // If the receiver might be a value (string, number or boolean) check for this | 3566 // If the receiver might be a value (string, number or boolean) check for this |
| 3445 // and box it if it is. | 3567 // and box it if it is. |
| 3446 if (ReceiverMightBeValue()) { | 3568 if (ReceiverMightBeValue()) { |
| 3447 // Get the receiver from the stack. | 3569 // Get the receiver from the stack. |
| 3448 // function, receiver [, arguments] | 3570 // function, receiver [, arguments] |
| 3449 Label receiver_is_value, receiver_is_js_object; | 3571 Label receiver_is_value, receiver_is_js_object; |
| 3450 __ ldr(r1, MemOperand(sp, argc_ * kPointerSize)); | 3572 __ ldr(r1, MemOperand(sp, argc_ * kPointerSize)); |
| (...skipping 1274 matching lines...) |
| 4725 __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3); | 4847 __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3); |
| 4726 __ add(sp, sp, Operand(2 * kPointerSize)); | 4848 __ add(sp, sp, Operand(2 * kPointerSize)); |
| 4727 __ Ret(); | 4849 __ Ret(); |
| 4728 | 4850 |
| 4729 // Just jump to runtime to add the two strings. | 4851 // Just jump to runtime to add the two strings. |
| 4730 __ bind(&string_add_runtime); | 4852 __ bind(&string_add_runtime); |
| 4731 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); | 4853 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); |
| 4732 } | 4854 } |
| 4733 | 4855 |
| 4734 | 4856 |
| 4857 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
| 4858 ASSERT(state_ == CompareIC::SMIS); |
| 4859 Label miss; |
| 4860 __ orr(r2, r1, r0); |
| 4861 __ tst(r2, Operand(kSmiTagMask)); |
| 4862 __ b(ne, &miss); |
| 4863 |
| 4864 if (GetCondition() == eq) { |
| 4865 // For equality we do not care about the sign of the result. |
| 4866 __ sub(r0, r0, r1, SetCC); |
| 4867 } else { |
| 4868 __ sub(r1, r1, r0, SetCC); |
| 4869 // Correct sign of result in case of overflow. |
| 4870 __ rsb(r1, r1, Operand(0), SetCC, vs); |
| 4871 __ mov(r0, r1); |
| 4872 } |
| 4873 __ Ret(); |
| 4874 |
| 4875 __ bind(&miss); |
| 4876 GenerateMiss(masm); |
| 4877 } |
| 4878 |
| 4879 |
| 4880 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { |
| 4881 ASSERT(state_ == CompareIC::HEAP_NUMBERS); |
| 4882 |
| 4883 Label generic_stub; |
| 4884 Label unordered; |
| 4885 Label miss; |
| 4886 __ and_(r2, r1, Operand(r0)); |
| 4887 __ tst(r2, Operand(kSmiTagMask)); |
| 4888 __ b(eq, &generic_stub); |
| 4889 |
| 4890 __ CompareObjectType(r0, r2, r2, HEAP_NUMBER_TYPE); |
| 4891 __ b(ne, &miss); |
| 4892 __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE); |
| 4893 __ b(ne, &miss); |
| 4894 |
| 4895 // Inlining the double comparison and falling back to the general compare |
| 4896 // stub if NaN is involved or VFP3 is unsupported. |
| 4897 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 4898 CpuFeatures::Scope scope(VFP3); |
| 4899 |
| 4900 // Load left and right operand |
| 4901 __ sub(r2, r1, Operand(kHeapObjectTag)); |
| 4902 __ vldr(d0, r2, HeapNumber::kValueOffset); |
| 4903 __ sub(r2, r0, Operand(kHeapObjectTag)); |
| 4904 __ vldr(d1, r2, HeapNumber::kValueOffset); |
| 4905 |
| 4906 // Compare operands |
| 4907 __ vcmp(d0, d1); |
| 4908 __ vmrs(pc); // Move vector status bits to normal status bits. |
| 4909 |
| 4910 // Don't base result on status bits when a NaN is involved. |
| 4911 __ b(vs, &unordered); |
| 4912 |
| 4913 // Return a result of -1, 0, or 1, based on status bits. |
| 4914 __ mov(r0, Operand(EQUAL), LeaveCC, eq); |
| 4915 __ mov(r0, Operand(LESS), LeaveCC, lt); |
| 4916 __ mov(r0, Operand(GREATER), LeaveCC, gt); |
| 4917 __ Ret(); |
| 4918 |
| 4919 __ bind(&unordered); |
| 4920 } |
| 4921 |
| 4922 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0); |
| 4923 __ bind(&generic_stub); |
| 4924 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 4925 |
| 4926 __ bind(&miss); |
| 4927 GenerateMiss(masm); |
| 4928 } |
| 4929 |
| 4930 |
| 4931 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { |
| 4932 ASSERT(state_ == CompareIC::OBJECTS); |
| 4933 Label miss; |
| 4934 __ and_(r2, r1, Operand(r0)); |
| 4935 __ tst(r2, Operand(kSmiTagMask)); |
| 4936 __ b(eq, &miss); |
| 4937 |
| 4938 __ CompareObjectType(r0, r2, r2, JS_OBJECT_TYPE); |
| 4939 __ b(ne, &miss); |
| 4940 __ CompareObjectType(r1, r2, r2, JS_OBJECT_TYPE); |
| 4941 __ b(ne, &miss); |
| 4942 |
| 4943 ASSERT(GetCondition() == eq); |
| 4944 __ sub(r0, r0, Operand(r1)); |
| 4945 __ Ret(); |
| 4946 |
| 4947 __ bind(&miss); |
| 4948 GenerateMiss(masm); |
| 4949 } |
| 4950 |
| 4951 |
| 4952 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { |
| 4953 __ Push(r1, r0); |
| 4954 __ push(lr); |
| 4955 |
| 4956 // Call the runtime system in a fresh internal frame. |
| 4957 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss)); |
| 4958 __ EnterInternalFrame(); |
| 4959 __ Push(r1, r0); |
| 4960 __ mov(ip, Operand(Smi::FromInt(op_))); |
| 4961 __ push(ip); |
| 4962 __ CallExternalReference(miss, 3); |
| 4963 __ LeaveInternalFrame(); |
| 4964 // Compute the entry point of the rewritten stub. |
| 4965 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 4966 // Restore registers. |
| 4967 __ pop(lr); |
| 4968 __ pop(r0); |
| 4969 __ pop(r1); |
| 4970 __ Jump(r2); |
| 4971 } |
| 4972 |
| 4973 |
| 4735 #undef __ | 4974 #undef __ |
| 4736 | 4975 |
| 4737 } } // namespace v8::internal | 4976 } } // namespace v8::internal |
| 4738 | 4977 |
| 4739 #endif // V8_TARGET_ARCH_ARM | 4978 #endif // V8_TARGET_ARCH_ARM |
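Side note on the smi comparison added in ICCompareStub::GenerateSmis above: a minimal, self-contained C++ sketch of why subtracting two tagged smis orders them directly. This is not V8's API; the helper names (SmiTag, SmiUntag, CompareSmis) are hypothetical, and it assumes 32-bit smis with a one-bit zero tag as on ARM in this revision, ignoring the overflow case the stub corrects with its vs-conditional rsb.

```cpp
// Sketch only: models the smi-tag arithmetic the stub relies on.
#include <cassert>
#include <cstdint>

const int kSmiTagSize = 1;  // low bit is the tag; smis use tag 0

int32_t SmiTag(int32_t value) { return value << kSmiTagSize; }
int32_t SmiUntag(int32_t smi) { return smi >> kSmiTagSize; }

// Because tagging is a left shift by one, (left_tagged - right_tagged) is the
// tagged difference of the untagged values, so its sign and zero-ness give the
// comparison result without untagging either operand.
int CompareSmis(int32_t left_tagged, int32_t right_tagged) {
  int32_t diff = left_tagged - right_tagged;  // assumes no overflow for simplicity
  if (diff == 0) return 0;   // EQUAL
  return diff < 0 ? -1 : 1;  // LESS / GREATER
}

int main() {
  assert(CompareSmis(SmiTag(3), SmiTag(7)) < 0);
  assert(CompareSmis(SmiTag(5), SmiTag(5)) == 0);
  assert(CompareSmis(SmiTag(-2), SmiTag(-9)) > 0);
  return 0;
}
```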