OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM | 5 #if V8_TARGET_ARCH_ARM |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 2049 matching lines...) |
2060 | 2060 |
2061 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { | 2061 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { |
2062 // r1 - function | 2062 // r1 - function |
2063 // r3 - slot id | 2063 // r3 - slot id |
2064 // r2 - vector | 2064 // r2 - vector |
2065 // r4 - allocation site (loaded from vector[slot]) | 2065 // r4 - allocation site (loaded from vector[slot]) |
2066 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5); | 2066 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5); |
2067 __ cmp(r1, r5); | 2067 __ cmp(r1, r5); |
2068 __ b(ne, miss); | 2068 __ b(ne, miss); |
2069 | 2069 |
| 2070 __ mov(r0, Operand(arg_count())); |
| 2071 |
2070 // Increment the call count for monomorphic function calls. | 2072 // Increment the call count for monomorphic function calls. |
2071 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2073 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2072 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize)); | 2074 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize)); |
2073 __ ldr(r3, FieldMemOperand(r2, 0)); | 2075 __ ldr(r3, FieldMemOperand(r2, 0)); |
2074 __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); | 2076 __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); |
2075 __ str(r3, FieldMemOperand(r2, 0)); | 2077 __ str(r3, FieldMemOperand(r2, 0)); |
2076 | 2078 |
2077 __ mov(r2, r4); | 2079 __ mov(r2, r4); |
2078 __ mov(r3, r1); | 2080 __ mov(r3, r1); |
2079 if (argc_in_register()) { | 2081 ArrayConstructorStub stub(masm->isolate(), arg_count()); |
2080 // Pass a default ArgumentCountKey::Any since the argc is only available | 2082 __ TailCallStub(&stub); |
2081 // in r0. We do not have the actual count here. | |
2082 ArrayConstructorStub stub(masm->isolate()); | |
2083 __ TailCallStub(&stub); | |
2084 } else { | |
2085 // arg_count() is expected in r0 if the arg_count() >= 2 | |
2086 // (ArgumentCountKey::MORE_THAN_ONE). | |
2087 ArrayConstructorStub stub(masm->isolate(), arg_count()); | |
2088 __ TailCallStub(&stub); | |
2089 } | |
2090 } | 2083 } |
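The deleted argc_in_register() branch above collapses into a single path because the argument count now always travels to the stub in r0. A minimal standalone sketch of that convention, using a toy emitter (ToyMasm, MovR0 and TailCallArrayConstructor are illustrative stand-ins, not V8 API):

    #include <cstdio>

    // Toy model of the call-site convention: the caller unconditionally
    // materializes argc in r0 right before the tail call, so the stub never
    // has to ask whether the count is "already in a register".
    struct ToyMasm {
      int r0 = 0;                            // stands in for ARM r0
      void MovR0(int imm) { r0 = imm; }      // models __ mov(r0, Operand(imm))
      void TailCallArrayConstructor() {      // models __ TailCallStub(&stub)
        std::printf("ArrayConstructorStub sees argc = %d in r0\n", r0);
      }
    };

    int main() {
      ToyMasm masm;
      masm.MovR0(3);                         // __ mov(r0, Operand(arg_count()));
      masm.TailCallArrayConstructor();
      return 0;
    }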
2091 | 2084 |
2092 | 2085 |
2093 void CallICStub::Generate(MacroAssembler* masm) { | 2086 void CallICStub::Generate(MacroAssembler* masm) { |
2094 // r0 - number of arguments if argc_in_register() is true | |
2095 // r1 - function | 2087 // r1 - function |
2096 // r3 - slot id (Smi) | 2088 // r3 - slot id (Smi) |
2097 // r2 - vector | 2089 // r2 - vector |
2098 Label extra_checks_or_miss, call, call_function; | 2090 Label extra_checks_or_miss, call, call_function; |
2099 if (!argc_in_register()) { | 2091 int argc = arg_count(); |
2100 int argc = arg_count(); | 2092 ParameterCount actual(argc); |
2101 __ mov(r0, Operand(argc)); | |
2102 } | |
2103 | 2093 |
2104 // The checks. First, does r1 match the recorded monomorphic target? | 2094 // The checks. First, does r1 match the recorded monomorphic target? |
2105 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2095 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2106 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); | 2096 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
2107 | 2097 |
2108 // We don't know that we have a weak cell. We might have a private symbol | 2098 // We don't know that we have a weak cell. We might have a private symbol |
2109 // or an AllocationSite, but the memory is safe to examine. | 2099 // or an AllocationSite, but the memory is safe to examine. |
2110 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to | 2100 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to |
2111 // FixedArray. | 2101 // FixedArray. |
2112 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) | 2102 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) |
(...skipping 15 matching lines...) |
2128 __ JumpIfSmi(r1, &extra_checks_or_miss); | 2118 __ JumpIfSmi(r1, &extra_checks_or_miss); |
2129 | 2119 |
2130 // Increment the call count for monomorphic function calls. | 2120 // Increment the call count for monomorphic function calls. |
2131 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2121 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2132 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize)); | 2122 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize)); |
2133 __ ldr(r3, FieldMemOperand(r2, 0)); | 2123 __ ldr(r3, FieldMemOperand(r2, 0)); |
2134 __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); | 2124 __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); |
2135 __ str(r3, FieldMemOperand(r2, 0)); | 2125 __ str(r3, FieldMemOperand(r2, 0)); |
2136 | 2126 |
2137 __ bind(&call_function); | 2127 __ bind(&call_function); |
| 2128 __ mov(r0, Operand(argc)); |
2138 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), | 2129 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), |
2139 tail_call_mode()), | 2130 tail_call_mode()), |
2140 RelocInfo::CODE_TARGET); | 2131 RelocInfo::CODE_TARGET); |
2141 | 2132 |
2142 __ bind(&extra_checks_or_miss); | 2133 __ bind(&extra_checks_or_miss); |
2143 Label uninitialized, miss, not_allocation_site; | 2134 Label uninitialized, miss, not_allocation_site; |
2144 | 2135 |
2145 __ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex); | 2136 __ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex); |
2146 __ b(eq, &call); | 2137 __ b(eq, &call); |
2147 | 2138 |
(...skipping 19 matching lines...) |
2167 // We are going megamorphic. If the feedback is a JSFunction, it is fine | 2158 // We are going megamorphic. If the feedback is a JSFunction, it is fine |
2168 // to handle it here. More complex cases are dealt with in the runtime. | 2159 // to handle it here. More complex cases are dealt with in the runtime. |
2169 __ AssertNotSmi(r4); | 2160 __ AssertNotSmi(r4); |
2170 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE); | 2161 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE); |
2171 __ b(ne, &miss); | 2162 __ b(ne, &miss); |
2172 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2163 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2173 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); | 2164 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); |
2174 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); | 2165 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
2175 | 2166 |
2176 __ bind(&call); | 2167 __ bind(&call); |
| 2168 __ mov(r0, Operand(argc)); |
2177 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), | 2169 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), |
2178 RelocInfo::CODE_TARGET); | 2170 RelocInfo::CODE_TARGET); |
2179 | 2171 |
2180 __ bind(&uninitialized); | 2172 __ bind(&uninitialized); |
2181 | 2173 |
2182 // We are going monomorphic, provided we actually have a JSFunction. | 2174 // We are going monomorphic, provided we actually have a JSFunction. |
2183 __ JumpIfSmi(r1, &miss); | 2175 __ JumpIfSmi(r1, &miss); |
2184 | 2176 |
2185 // Go to the miss case if we do not have a function. | 2177 // Go to the miss case if we do not have a function. |
2186 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); | 2178 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); |
(...skipping 17 matching lines...) |
2204 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2196 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2205 __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize)); | 2197 __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize)); |
2206 | 2198 |
2207 // Store the function. Use a stub since we need a frame for allocation. | 2199 // Store the function. Use a stub since we need a frame for allocation. |
2208 // r2 - vector | 2200 // r2 - vector |
2209 // r3 - slot | 2201 // r3 - slot |
2210 // r1 - function | 2202 // r1 - function |
2211 { | 2203 { |
2212 FrameScope scope(masm, StackFrame::INTERNAL); | 2204 FrameScope scope(masm, StackFrame::INTERNAL); |
2213 CreateWeakCellStub create_stub(masm->isolate()); | 2205 CreateWeakCellStub create_stub(masm->isolate()); |
2214 __ SmiTag(r0); | |
2215 __ Push(r0); | |
2216 __ Push(r1); | 2206 __ Push(r1); |
2217 __ CallStub(&create_stub); | 2207 __ CallStub(&create_stub); |
2218 __ Pop(r1); | 2208 __ Pop(r1); |
2219 __ Pop(r0); | |
2220 __ SmiUntag(r0); | |
2221 } | 2209 } |
2222 | 2210 |
2223 __ jmp(&call_function); | 2211 __ jmp(&call_function); |
2224 | 2212 |
2225 // We are here because tracing is on or we encountered a MISS case we can't | 2213 // We are here because tracing is on or we encountered a MISS case we can't |
2226 // handle here. | 2214 // handle here. |
2227 __ bind(&miss); | 2215 __ bind(&miss); |
2228 GenerateMiss(masm); | 2216 GenerateMiss(masm); |
2229 | 2217 |
2230 __ jmp(&call); | 2218 __ jmp(&call); |
2231 } | 2219 } |
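Both stubs above bump the call count by adding Smi::FromInt(CallICNexus::kCallCountIncrement) straight onto the word loaded from the feedback slot. A hedged sketch of why a plain add is enough, assuming the 32-bit Smi encoding (value shifted left by one, tag bit zero) and an increment of 1; both constants are assumptions for illustration, not taken from this CL:

    #include <cassert>
    #include <cstdint>

    // 32-bit Smi encoding assumed here: payload in the upper 31 bits,
    // tag bit (0) in the lowest bit.
    int32_t SmiFromInt(int32_t value) { return value << 1; }
    int32_t SmiToInt(int32_t smi) { return smi >> 1; }

    int main() {
      const int kCallCountIncrement = 1;      // assumed value for illustration
      int32_t slot_word = SmiFromInt(7);      // call count currently 7
      // Adding two tagged Smis equals tagging the sum of their payloads, so
      // the stub can use a raw add on the loaded word and store it back.
      slot_word += SmiFromInt(kCallCountIncrement);
      assert(SmiToInt(slot_word) == 8);
      return 0;
    }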
2232 | 2220 |
2233 | 2221 |
2234 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 2222 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
2235 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 2223 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
2236 | 2224 |
2237 // Store the number of arguments that is required later. | |
2238 __ SmiTag(r0); | |
2239 __ Push(r0); | |
2240 | |
2241 // Push the function and the feedback info. | 2225 // Push the function and the feedback info. |
2242 __ Push(r1, r2, r3); | 2226 __ Push(r1, r2, r3); |
2243 | 2227 |
2244 // Call the entry. | 2228 // Call the entry. |
2245 __ CallRuntime(Runtime::kCallIC_Miss); | 2229 __ CallRuntime(Runtime::kCallIC_Miss); |
2246 | 2230 |
2247 // Move result to r1 and exit the internal frame. | 2231 // Move result to r1 and exit the internal frame. |
2248 __ mov(r1, r0); | 2232 __ mov(r1, r0); |
2249 | |
2250 // Restore back the number of arguments to r0. | |
2251 __ Pop(r0); | |
2252 __ SmiUntag(r0); | |
2253 } | 2233 } |
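The stubs above address the feedback vector with add(..., Operand::PointerOffsetFromSmiKey(r3)) and then load at FixedArray::kHeaderSize and kHeaderSize + kPointerSize. A rough sketch of that offset arithmetic, with constants assumed for a 32-bit build (4-byte pointers, two-word FixedArray header); FieldMemOperand additionally subtracts the heap-object tag, which is omitted here:

    #include <cstdint>
    #include <cstdio>

    constexpr int kPointerSize = 4;                          // assumed 32-bit build
    constexpr int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length

    int32_t SmiFromInt(int32_t n) { return n << 1; }

    // Models Operand::PointerOffsetFromSmiKey: a Smi index is already the
    // value shifted left by one, so one more shift gives index * kPointerSize.
    int32_t PointerOffsetFromSmiKey(int32_t smi) { return smi << 1; }

    int main() {
      int slot = 5;                                          // example slot id
      int32_t smi_slot = SmiFromInt(slot);
      // vector[slot]     holds the feedback (weak cell / AllocationSite / symbol)
      // vector[slot + 1] holds the call count as a Smi
      int feedback_off = kFixedArrayHeaderSize + PointerOffsetFromSmiKey(smi_slot);
      int count_off = feedback_off + kPointerSize;
      std::printf("feedback at +%d bytes, call count at +%d bytes\n",
                  feedback_off, count_off);
      return 0;
    }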
2254 | 2234 |
2255 | 2235 |
2256 // StringCharCodeAtGenerator | 2236 // StringCharCodeAtGenerator |
2257 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 2237 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
2258 // If the receiver is a smi trigger the non-string case. | 2238 // If the receiver is a smi trigger the non-string case. |
2259 if (check_mode_ == RECEIVER_IS_UNKNOWN) { | 2239 if (check_mode_ == RECEIVER_IS_UNKNOWN) { |
2260 __ JumpIfSmi(object_, receiver_not_string_); | 2240 __ JumpIfSmi(object_, receiver_not_string_); |
2261 | 2241 |
2262 // Fetch the instance type of the receiver into result register. | 2242 // Fetch the instance type of the receiver into result register. |
(...skipping 3369 matching lines...) |
5632 kStackUnwindSpace, NULL, return_value_operand, NULL); | 5612 kStackUnwindSpace, NULL, return_value_operand, NULL); |
5633 } | 5613 } |
5634 | 5614 |
5635 | 5615 |
5636 #undef __ | 5616 #undef __ |
5637 | 5617 |
5638 } // namespace internal | 5618 } // namespace internal |
5639 } // namespace v8 | 5619 } // namespace v8 |
5640 | 5620 |
5641 #endif // V8_TARGET_ARCH_ARM | 5621 #endif // V8_TARGET_ARCH_ARM |