Chromium Code Reviews

Side by Side Diff: src/arm/code-stubs-arm.cc

Issue 6322008: Version 3.0.10... (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 9 years, 11 months ago
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 94 matching lines...)
105 __ bind(&gc); 105 __ bind(&gc);
106 __ LoadRoot(r4, Heap::kFalseValueRootIndex); 106 __ LoadRoot(r4, Heap::kFalseValueRootIndex);
107 __ Push(cp, r3, r4); 107 __ Push(cp, r3, r4);
108 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); 108 __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
109 } 109 }
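A note on the slow path just above: when inline allocation in new space fails, the stub pushes its operands and tail-calls the runtime, which can collect garbage and redo the work. A minimal C++ sketch of that fast-path/bailout shape, with hypothetical helper names (TryAllocateInNewSpace, RuntimeNewClosure) standing in for V8's internals:

#include <cstddef>
#include <cstdio>
#include <cstdlib>

void* TryAllocateInNewSpace(std::size_t) {
  return nullptr;                          // simulate "new space is full"
}

void* RuntimeNewClosure(std::size_t size) {  // Runtime::kNewClosure stand-in
  std::puts("slow path: runtime allocates (and may GC first)");
  return std::malloc(size);
}

void* FastNewClosure(std::size_t size) {
  if (void* obj = TryAllocateInNewSpace(size)) return obj;  // fast path
  return RuntimeNewClosure(size);          // the __ TailCallRuntime(...) above
}

int main() { std::free(FastNewClosure(32)); }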
110 110
111 111
112 void FastNewContextStub::Generate(MacroAssembler* masm) { 112 void FastNewContextStub::Generate(MacroAssembler* masm) {
113 // Try to allocate the context in new space. 113 // Try to allocate the context in new space.
114 Label gc; 114 Label gc;
115 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
116 115
117 // Attempt to allocate the context in new space. 116 // Attempt to allocate the context in new space.
118 __ AllocateInNewSpace(FixedArray::SizeFor(length), 117 __ AllocateInNewSpace(FixedArray::SizeFor(slots_),
119 r0, 118 r0,
120 r1, 119 r1,
121 r2, 120 r2,
122 &gc, 121 &gc,
123 TAG_OBJECT); 122 TAG_OBJECT);
124 123
125 // Load the function from the stack. 124 // Load the function from the stack.
126 __ ldr(r3, MemOperand(sp, 0)); 125 __ ldr(r3, MemOperand(sp, 0));
127 126
128 // Setup the object header. 127 // Setup the object header.
129 __ LoadRoot(r2, Heap::kContextMapRootIndex); 128 __ LoadRoot(r2, Heap::kContextMapRootIndex);
130 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); 129 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
131 __ mov(r2, Operand(Smi::FromInt(length))); 130 __ mov(r2, Operand(Smi::FromInt(slots_)));
132 __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset)); 131 __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
133 132
134 // Setup the fixed slots. 133 // Setup the fixed slots.
135 __ mov(r1, Operand(Smi::FromInt(0))); 134 __ mov(r1, Operand(Smi::FromInt(0)));
136 __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX))); 135 __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX)));
137 __ str(r0, MemOperand(r0, Context::SlotOffset(Context::FCONTEXT_INDEX))); 136 __ str(r0, MemOperand(r0, Context::SlotOffset(Context::FCONTEXT_INDEX)));
138 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX))); 137 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
139 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX))); 138 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX)));
140 139
141 // Copy the global object from the surrounding context. 140 // Copy the global object from the surrounding context.
142 __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); 141 __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
143 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX))); 142 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX)));
144 143
145 // Initialize the rest of the slots to undefined. 144 // Initialize the rest of the slots to undefined.
146 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 145 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
147 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { 146 for (int i = Context::MIN_CONTEXT_SLOTS; i < slots_; i++) {
148 __ str(r1, MemOperand(r0, Context::SlotOffset(i))); 147 __ str(r1, MemOperand(r0, Context::SlotOffset(i)));
149 } 148 }
150 149
151 // Remove the on-stack argument and return. 150 // Remove the on-stack argument and return.
152 __ mov(cp, r0); 151 __ mov(cp, r0);
153 __ pop(); 152 __ pop();
154 __ Ret(); 153 __ Ret();
155 154
156 // Need to collect. Call into runtime system. 155 // Need to collect. Call into runtime system.
157 __ bind(&gc); 156 __ bind(&gc);
(...skipping 2725 matching lines...)
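The FastNewContextStub hunk above changes the meaning of slots_: previously the stub added Context::MIN_CONTEXT_SLOTS itself, whereas now slots_ arrives with the fixed slots already counted, so FixedArray::SizeFor and the length field use it directly. A small sketch of the size arithmetic, with assumed values for the constants (kMinContextSlots and the FixedArray header size are illustrative, not quoted from V8):

#include <cassert>

const int kPointerSize = 4;                          // ARM32
const int kMinContextSlots = 5;                      // assumed Context::MIN_CONTEXT_SLOTS
const int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length words (assumed)

int SizeFor(int length) {                            // FixedArray::SizeFor analogue
  return kFixedArrayHeaderSize + length * kPointerSize;
}

int main() {
  int extra_slots = 3;
  // Old code: the stub added the fixed slots itself.
  int old_size = SizeFor(extra_slots + kMinContextSlots);
  // New code: slots_ arrives with the fixed slots already included.
  int slots_ = extra_slots + kMinContextSlots;
  assert(old_size == SizeFor(slots_));
  return 0;
}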
2883 // Restore callee-saved registers and return. 2882 // Restore callee-saved registers and return.
2884 #ifdef DEBUG 2883 #ifdef DEBUG
2885 if (FLAG_debug_code) { 2884 if (FLAG_debug_code) {
2886 __ mov(lr, Operand(pc)); 2885 __ mov(lr, Operand(pc));
2887 } 2886 }
2888 #endif 2887 #endif
2889 __ ldm(ia_w, sp, kCalleeSaved | pc.bit()); 2888 __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
2890 } 2889 }
2891 2890
2892 2891
2893 // Uses registers r0 to r4. Expected input is 2892 // Uses registers r0 to r4.
2894 // object in r0 (or at sp+1*kPointerSize) and function in 2893 // Expected input (depending on whether args are in registers or on the stack):
2895 // r1 (or at sp), depending on whether or not 2894 // * object: r0 or at sp + 1 * kPointerSize.
2896 // args_in_registers() is true. 2895 // * function: r1 or at sp.
2896 //
2897 // An inlined call site may have been generated before calling this stub.
2898 // In this case the offset to the inline site to patch is passed on the stack,
2899 // in the safepoint slot for register r4.
2900 // (See LCodeGen::DoInstanceOfKnownGlobal)
2897 void InstanceofStub::Generate(MacroAssembler* masm) { 2901 void InstanceofStub::Generate(MacroAssembler* masm) {
2902 // Call site inlining and patching implies arguments in registers.
2903 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
2904 // ReturnTrueFalse is only implemented for inlined call sites.
2905 ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());
2906
2898 // Fixed register usage throughout the stub: 2907 // Fixed register usage throughout the stub:
2899 const Register object = r0; // Object (lhs). 2908 const Register object = r0; // Object (lhs).
2900 const Register map = r3; // Map of the object. 2909 Register map = r3; // Map of the object.
2901 const Register function = r1; // Function (rhs). 2910 const Register function = r1; // Function (rhs).
2902 const Register prototype = r4; // Prototype of the function. 2911 const Register prototype = r4; // Prototype of the function.
2912 const Register inline_site = r9;
2903 const Register scratch = r2; 2913 const Register scratch = r2;
2914
2915 const int32_t kDeltaToLoadBoolResult = 3 * kPointerSize;
2916
2904 Label slow, loop, is_instance, is_not_instance, not_js_object; 2917 Label slow, loop, is_instance, is_not_instance, not_js_object;
2918
2905 if (!HasArgsInRegisters()) { 2919 if (!HasArgsInRegisters()) {
2906 __ ldr(object, MemOperand(sp, 1 * kPointerSize)); 2920 __ ldr(object, MemOperand(sp, 1 * kPointerSize));
2907 __ ldr(function, MemOperand(sp, 0)); 2921 __ ldr(function, MemOperand(sp, 0));
2908 } 2922 }
2909 2923
2910 // Check that the left hand is a JS object and load map. 2924 // Check that the left hand is a JS object and load map.
2911 __ BranchOnSmi(object, &not_js_object); 2925 __ BranchOnSmi(object, &not_js_object);
2912 __ IsObjectJSObjectType(object, map, scratch, &not_js_object); 2926 __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
2913 2927
2914 // Look up the function and the map in the instanceof cache. 2928 // If there is a call site cache don't look in the global cache, but do the
2915 Label miss; 2929 // real lookup and update the call site cache.
2916 __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex); 2930 if (!HasCallSiteInlineCheck()) {
2917 __ cmp(function, ip); 2931 Label miss;
2918 __ b(ne, &miss); 2932 __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex);
2919 __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex); 2933 __ cmp(function, ip);
2920 __ cmp(map, ip); 2934 __ b(ne, &miss);
2921 __ b(ne, &miss); 2935 __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex);
2922 __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); 2936 __ cmp(map, ip);
2923 __ Ret(HasArgsInRegisters() ? 0 : 2); 2937 __ b(ne, &miss);
2938 __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
2939 __ Ret(HasArgsInRegisters() ? 0 : 2);
2924 2940
2925 __ bind(&miss); 2941 __ bind(&miss);
2942 }
2943
2944 // Get the prototype of the function.
2926 __ TryGetFunctionPrototype(function, prototype, scratch, &slow); 2945 __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
2927 2946
2928 // Check that the function prototype is a JS object. 2947 // Check that the function prototype is a JS object.
2929 __ BranchOnSmi(prototype, &slow); 2948 __ BranchOnSmi(prototype, &slow);
2930 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); 2949 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
2931 2950
2932 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); 2951 // Update the global instanceof or call site inlined cache with the current
2933 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex); 2952 // map and function. The cached answer will be set when it is known below.
2953 if (!HasCallSiteInlineCheck()) {
2954 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
2955 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
2956 } else {
2957 ASSERT(HasArgsInRegisters());
2958 // Patch the (relocated) inlined map check.
2959
2960 // The offset was stored in r4 safepoint slot.
2961 // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal)
2962 __ ldr(scratch, MacroAssembler::SafepointRegisterSlot(r4));
2963 __ sub(inline_site, lr, scratch);
2964 // Get the map location in scratch and patch it.
2965 __ GetRelocatedValueLocation(inline_site, scratch);
2966 __ str(map, MemOperand(scratch));
2967 }
2934 2968
2935 // Register mapping: r3 is object map and r4 is function prototype. 2969 // Register mapping: r3 is object map and r4 is function prototype.
2936 // Get prototype of object into r2. 2970 // Get prototype of object into r2.
2937 __ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset)); 2971 __ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset));
2938 2972
2973 // We don't need map any more. Use it as a scratch register.
2974 Register scratch2 = map;
2975 map = no_reg;
2976
2939 // Loop through the prototype chain looking for the function prototype. 2977 // Loop through the prototype chain looking for the function prototype.
2978 __ LoadRoot(scratch2, Heap::kNullValueRootIndex);
2940 __ bind(&loop); 2979 __ bind(&loop);
2941 __ cmp(scratch, Operand(prototype)); 2980 __ cmp(scratch, Operand(prototype));
2942 __ b(eq, &is_instance); 2981 __ b(eq, &is_instance);
2943 __ LoadRoot(ip, Heap::kNullValueRootIndex); 2982 __ cmp(scratch, scratch2);
2944 __ cmp(scratch, ip);
2945 __ b(eq, &is_not_instance); 2983 __ b(eq, &is_not_instance);
2946 __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); 2984 __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
2947 __ ldr(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset)); 2985 __ ldr(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset));
2948 __ jmp(&loop); 2986 __ jmp(&loop);
2949 2987
2950 __ bind(&is_instance); 2988 __ bind(&is_instance);
2951 __ mov(r0, Operand(Smi::FromInt(0))); 2989 if (!HasCallSiteInlineCheck()) {
2952 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); 2990 __ mov(r0, Operand(Smi::FromInt(0)));
2991 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
2992 } else {
2993 // Patch the call site to return true.
2994 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
2995 __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
2996 // Get the boolean result location in scratch and patch it.
2997 __ GetRelocatedValueLocation(inline_site, scratch);
2998 __ str(r0, MemOperand(scratch));
2999
3000 if (!ReturnTrueFalseObject()) {
3001 __ mov(r0, Operand(Smi::FromInt(0)));
3002 }
3003 }
2953 __ Ret(HasArgsInRegisters() ? 0 : 2); 3004 __ Ret(HasArgsInRegisters() ? 0 : 2);
2954 3005
2955 __ bind(&is_not_instance); 3006 __ bind(&is_not_instance);
2956 __ mov(r0, Operand(Smi::FromInt(1))); 3007 if (!HasCallSiteInlineCheck()) {
2957 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); 3008 __ mov(r0, Operand(Smi::FromInt(1)));
3009 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
3010 } else {
3011 // Patch the call site to return false.
3012 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
3013 __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
3014 // Get the boolean result location in scratch and patch it.
3015 __ GetRelocatedValueLocation(inline_site, scratch);
3016 __ str(r0, MemOperand(scratch));
3017
3018 if (!ReturnTrueFalseObject()) {
3019 __ mov(r0, Operand(Smi::FromInt(1)));
3020 }
3021 }
2958 __ Ret(HasArgsInRegisters() ? 0 : 2); 3022 __ Ret(HasArgsInRegisters() ? 0 : 2);
2959 3023
2960 Label object_not_null, object_not_null_or_smi; 3024 Label object_not_null, object_not_null_or_smi;
2961 __ bind(&not_js_object); 3025 __ bind(&not_js_object);
2962 // Before null, smi and string value checks, check that the rhs is a function 3026 // Before null, smi and string value checks, check that the rhs is a function
2963 // as for a non-function rhs an exception needs to be thrown. 3027 // as for a non-function rhs an exception needs to be thrown.
2964 __ BranchOnSmi(function, &slow); 3028 __ BranchOnSmi(function, &slow);
2965 __ CompareObjectType(function, map, scratch, JS_FUNCTION_TYPE); 3029 __ CompareObjectType(function, scratch2, scratch, JS_FUNCTION_TYPE);
2966 __ b(ne, &slow); 3030 __ b(ne, &slow);
2967 3031
2968 // Null is not instance of anything. 3032 // Null is not instance of anything.
2969 __ cmp(scratch, Operand(Factory::null_value())); 3033 __ cmp(scratch, Operand(Factory::null_value()));
2970 __ b(ne, &object_not_null); 3034 __ b(ne, &object_not_null);
2971 __ mov(r0, Operand(Smi::FromInt(1))); 3035 __ mov(r0, Operand(Smi::FromInt(1)));
2972 __ Ret(HasArgsInRegisters() ? 0 : 2); 3036 __ Ret(HasArgsInRegisters() ? 0 : 2);
2973 3037
2974 __ bind(&object_not_null); 3038 __ bind(&object_not_null);
2975 // Smi values are not instances of anything. 3039 // Smi values are not instances of anything.
2976 __ BranchOnNotSmi(object, &object_not_null_or_smi); 3040 __ BranchOnNotSmi(object, &object_not_null_or_smi);
2977 __ mov(r0, Operand(Smi::FromInt(1))); 3041 __ mov(r0, Operand(Smi::FromInt(1)));
2978 __ Ret(HasArgsInRegisters() ? 0 : 2); 3042 __ Ret(HasArgsInRegisters() ? 0 : 2);
2979 3043
2980 __ bind(&object_not_null_or_smi); 3044 __ bind(&object_not_null_or_smi);
2981 // String values are not instances of anything. 3045 // String values are not instances of anything.
2982 __ IsObjectJSStringType(object, scratch, &slow); 3046 __ IsObjectJSStringType(object, scratch, &slow);
2983 __ mov(r0, Operand(Smi::FromInt(1))); 3047 __ mov(r0, Operand(Smi::FromInt(1)));
2984 __ Ret(HasArgsInRegisters() ? 0 : 2); 3048 __ Ret(HasArgsInRegisters() ? 0 : 2);
2985 3049
2986 // Slow-case. Tail call builtin. 3050 // Slow-case. Tail call builtin.
2987 __ bind(&slow); 3051 __ bind(&slow);
2988 if (HasArgsInRegisters()) { 3052 if (!ReturnTrueFalseObject()) {
3053 if (HasArgsInRegisters()) {
3054 __ Push(r0, r1);
3055 }
3056 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS);
3057 } else {
3058 __ EnterInternalFrame();
2989 __ Push(r0, r1); 3059 __ Push(r0, r1);
3060 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_JS);
3061 __ LeaveInternalFrame();
3062 __ cmp(r0, Operand(0));
3063 __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
3064 __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
3065 __ Ret(HasArgsInRegisters() ? 0 : 2);
2990 } 3066 }
2991 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS);
2992 } 3067 }
2993 3068
2994 3069
3070 Register InstanceofStub::left() { return r0; }
3071
3072
3073 Register InstanceofStub::right() { return r1; }
3074
3075
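The loop between the 'loop', 'is_instance', and 'is_not_instance' labels above walks the object's prototype chain until it either finds the function's prototype or falls off the end at null; the rewrite merely caches the null value in scratch2 instead of reloading the root on every iteration. The same walk in plain C++, with a stand-in Obj type where nullptr plays the role of the null value root:

struct Obj { Obj* prototype; };

bool IsInstance(Obj* object, Obj* function_prototype) {
  for (Obj* p = object->prototype; p != nullptr; p = p->prototype) {
    if (p == function_prototype) return true;  // is_instance
  }
  return false;                                // reached null: is_not_instance
}

int main() {
  Obj proto{nullptr};
  Obj obj{&proto};
  return IsInstance(&obj, &proto) ? 0 : 1;
}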
2995 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { 3076 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
2996 // The displacement is the offset of the last parameter (if any) 3077 // The displacement is the offset of the last parameter (if any)
2997 // relative to the frame pointer. 3078 // relative to the frame pointer.
2998 static const int kDisplacement = 3079 static const int kDisplacement =
2999 StandardFrameConstants::kCallerSPOffset - kPointerSize; 3080 StandardFrameConstants::kCallerSPOffset - kPointerSize;
3000 3081
3001 // Check that the key is a smi. 3082 // Check that the key is a smi.
3002 Label slow; 3083 Label slow;
3003 __ BranchOnNotSmi(r1, &slow); 3084 __ BranchOnNotSmi(r1, &slow);
3004 3085
(...skipping 691 matching lines...)
3696 return ConditionField::encode(static_cast<unsigned>(cc_) >> 28) 3777 return ConditionField::encode(static_cast<unsigned>(cc_) >> 28)
3697 | RegisterField::encode(lhs_.is(r0)) 3778 | RegisterField::encode(lhs_.is(r0))
3698 | StrictField::encode(strict_) 3779 | StrictField::encode(strict_)
3699 | NeverNanNanField::encode(cc_ == eq ? never_nan_nan_ : false) 3780 | NeverNanNanField::encode(cc_ == eq ? never_nan_nan_ : false)
3700 | IncludeNumberCompareField::encode(include_number_compare_) 3781 | IncludeNumberCompareField::encode(include_number_compare_)
3701 | IncludeSmiCompareField::encode(include_smi_compare_); 3782 | IncludeSmiCompareField::encode(include_smi_compare_);
3702 } 3783 }
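MinorKey above packs the stub's parameters into disjoint bit ranges of one unsigned key. A self-contained sketch of that BitField-style encoding; the shifts and widths here are illustrative, not the stub's actual layout:

#include <cstdint>

template <typename T, int kShift, int kSize>
struct BitField {
  static uint32_t encode(T value) {
    // Mask the value to its field width, then place it at its offset.
    return (static_cast<uint32_t>(value) & ((1u << kSize) - 1)) << kShift;
  }
};

using ConditionField = BitField<unsigned, 0, 4>;
using StrictField    = BitField<bool, 4, 1>;

uint32_t MinorKey(unsigned cc, bool strict) {
  return ConditionField::encode(cc) | StrictField::encode(strict);
}

int main() { return MinorKey(0xE, true) == 0x1E ? 0 : 1; }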
3703 3784
3704 3785
3705 // StringCharCodeAtGenerator 3786 // StringCharCodeAtGenerator
3706
3707 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { 3787 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
3708 Label flat_string; 3788 Label flat_string;
3709 Label ascii_string; 3789 Label ascii_string;
3710 Label got_char_code; 3790 Label got_char_code;
3711 3791
3712 // If the receiver is a smi trigger the non-string case. 3792 // If the receiver is a smi trigger the non-string case.
3713 __ BranchOnSmi(object_, receiver_not_string_); 3793 __ BranchOnSmi(object_, receiver_not_string_);
3714 3794
3715 // Fetch the instance type of the receiver into result register. 3795 // Fetch the instance type of the receiver into result register.
3716 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); 3796 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
(...skipping 1138 matching lines...)
4855 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3); 4935 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
4856 __ add(sp, sp, Operand(2 * kPointerSize)); 4936 __ add(sp, sp, Operand(2 * kPointerSize));
4857 __ Ret(); 4937 __ Ret();
4858 4938
4859 // Just jump to runtime to add the two strings. 4939 // Just jump to runtime to add the two strings.
4860 __ bind(&string_add_runtime); 4940 __ bind(&string_add_runtime);
4861 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); 4941 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
4862 } 4942 }
4863 4943
4864 4944
4945 void StringCharAtStub::Generate(MacroAssembler* masm) {
4946 // Expects two arguments (object, index) on the stack:
4947 // lr: return address
4948 // sp[0]: index
4949 // sp[4]: object
4950 Register object = r1;
4951 Register index = r0;
4952 Register scratch1 = r2;
4953 Register scratch2 = r3;
4954 Register result = r0;
4955
4956 // Get object and index from the stack.
4957 __ pop(index);
4958 __ pop(object);
4959
4960 Label need_conversion;
4961 Label index_out_of_range;
4962 Label done;
4963 StringCharAtGenerator generator(object,
4964 index,
4965 scratch1,
4966 scratch2,
4967 result,
4968 &need_conversion,
4969 &need_conversion,
4970 &index_out_of_range,
4971 STRING_INDEX_IS_NUMBER);
4972 generator.GenerateFast(masm);
4973 __ b(&done);
4974
4975 __ bind(&index_out_of_range);
4976 // When the index is out of range, the spec requires us to return
4977 // the empty string.
4978 __ LoadRoot(result, Heap::kEmptyStringRootIndex);
4979 __ jmp(&done);
4980
4981 __ bind(&need_conversion);
4982 // Move smi zero into the result register, which will trigger
4983 // conversion.
4984 __ mov(result, Operand(Smi::FromInt(0)));
4985 __ b(&done);
4986
4987 StubRuntimeCallHelper call_helper;
4988 generator.GenerateSlow(masm, call_helper);
4989
4990 __ bind(&done);
4991 __ Ret();
4992 }
4993
4994
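The new StringCharAtStub implements String.prototype.charAt-style access: a fast path reads the character, a conversion path handles non-smi indices, and an out-of-range index yields the empty string rather than an error. The observable behavior, sketched in C++:

#include <string>

std::string CharAt(const std::string& s, long index) {
  if (index < 0 || static_cast<std::string::size_type>(index) >= s.size())
    return "";                       // out of range: empty string, per spec
  return std::string(1, s[static_cast<std::string::size_type>(index)]);
}

int main() { return CharAt("abc", 7).empty() ? 0 : 1; }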
4865 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { 4995 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
4866 ASSERT(state_ == CompareIC::SMIS); 4996 ASSERT(state_ == CompareIC::SMIS);
4867 Label miss; 4997 Label miss;
4868 __ orr(r2, r1, r0); 4998 __ orr(r2, r1, r0);
4869 __ tst(r2, Operand(kSmiTagMask)); 4999 __ tst(r2, Operand(kSmiTagMask));
4870 __ b(ne, &miss); 5000 __ b(ne, &miss);
4871 5001
4872 if (GetCondition() == eq) { 5002 if (GetCondition() == eq) {
4873 // For equality we do not care about the sign of the result. 5003 // For equality we do not care about the sign of the result.
4874 __ sub(r0, r0, r1, SetCC); 5004 __ sub(r0, r0, r1, SetCC);
(...skipping 101 matching lines...)
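The first three instructions of GenerateSmis above check both operands with a single test: OR-ing the values and AND-ing with kSmiTagMask leaves zero only when both low tag bits are clear, i.e. both are smis. A sketch of that trick, assuming V8's 32-bit convention that smis carry a clear low bit and the integer payload shifted left by one:

#include <cstdint>

bool BothAreSmis(int32_t a, int32_t b) {
  const int32_t kSmiTagMask = 1;     // assumed 32-bit smi tagging: low bit clear
  return ((a | b) & kSmiTagMask) == 0;
}

int main() {
  int32_t four = 4 << 1;                      // smi: payload shifted left by one
  int32_t heap_ptr_like = (6 << 1) | 1;       // tagged pointer: low bit set
  return (BothAreSmis(four, four) && !BothAreSmis(four, heap_ptr_like)) ? 0 : 1;
}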
4976 __ pop(r1); 5106 __ pop(r1);
4977 __ Jump(r2); 5107 __ Jump(r2);
4978 } 5108 }
4979 5109
4980 5110
4981 #undef __ 5111 #undef __
4982 5112
4983 } } // namespace v8::internal 5113 } } // namespace v8::internal
4984 5114
4985 #endif // V8_TARGET_ARCH_ARM 5115 #endif // V8_TARGET_ARCH_ARM