OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM | 5 #if V8_TARGET_ARCH_ARM |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 2049 matching lines...)
2060 | 2060 |
2061 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { | 2061 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { |
2062 // r1 - function | 2062 // r1 - function |
2063 // r3 - slot id | 2063 // r3 - slot id |
2064 // r2 - vector | 2064 // r2 - vector |
2065 // r4 - allocation site (loaded from vector[slot]) | 2065 // r4 - allocation site (loaded from vector[slot]) |
2066 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5); | 2066 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5); |
2067 __ cmp(r1, r5); | 2067 __ cmp(r1, r5); |
2068 __ b(ne, miss); | 2068 __ b(ne, miss); |
2069 | 2069 |
2070 __ mov(r0, Operand(arg_count())); | |
2071 | |
2072 // Increment the call count for monomorphic function calls. | 2070 // Increment the call count for monomorphic function calls. |
2073 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2071 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2074 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize)); | 2072 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize)); |
2075 __ ldr(r3, FieldMemOperand(r2, 0)); | 2073 __ ldr(r3, FieldMemOperand(r2, 0)); |
2076 __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); | 2074 __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); |
2077 __ str(r3, FieldMemOperand(r2, 0)); | 2075 __ str(r3, FieldMemOperand(r2, 0)); |
2078 | 2076 |
2079 __ mov(r2, r4); | 2077 __ mov(r2, r4); |
2080 __ mov(r3, r1); | 2078 __ mov(r3, r1); |
2081 ArrayConstructorStub stub(masm->isolate(), arg_count()); | 2079 if (argc_in_register()) { |
2082 __ TailCallStub(&stub); | 2080 // The argument count is only available in r0, so the exact count is not |
| 2081 // known statically here; fall back to the default ArgumentCountKey::ANY stub. |
| 2082 ArrayConstructorStub stub(masm->isolate()); |
| 2083 __ TailCallStub(&stub); |
| 2084 } else { |
| 2085 // The statically known arg_count() is still expected in r0 when |
| 2086 // arg_count() >= 2 (ArgumentCountKey::MORE_THAN_ONE). |
| 2087 ArrayConstructorStub stub(masm->isolate(), arg_count()); |
| 2088 __ TailCallStub(&stub); |
| 2089 } |
2083 } | 2090 } |
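For context, a minimal sketch of the count-to-ArgumentCountKey mapping the comments above refer to. It is illustrative only; the enum class and helper name below are not V8's own, and the actual key is chosen when ArrayConstructorStub is constructed with (or without) an explicit count:

    // Illustrative mirror of the argument-count keys named in the comments above.
    enum class ArgumentCountKey { ANY, NONE, ONE, MORE_THAN_ONE };

    static ArgumentCountKey KeyForCount(int argument_count) {
      if (argument_count == 0) return ArgumentCountKey::NONE;
      if (argument_count == 1) return ArgumentCountKey::ONE;
      // For two or more arguments the count must also be passed in r0.
      return ArgumentCountKey::MORE_THAN_ONE;
    }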
2084 | 2091 |
2085 | 2092 |
2086 void CallICStub::Generate(MacroAssembler* masm) { | 2093 void CallICStub::Generate(MacroAssembler* masm) { |
| 2094 // r0 - number of arguments if argc_in_register() is true |
2087 // r1 - function | 2095 // r1 - function |
2088 // r3 - slot id (Smi) | 2096 // r3 - slot id (Smi) |
2089 // r2 - vector | 2097 // r2 - vector |
2090 Label extra_checks_or_miss, call, call_function; | 2098 Label extra_checks_or_miss, call, call_function; |
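| // The call targets used below (the Call/CallFunction builtins and the |
| // array constructor stub) take the argument count in r0, so materialize |
| // the statically known count once here if it is not already in r0. |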
2091 int argc = arg_count(); | 2099 if (!argc_in_register()) { |
2092 ParameterCount actual(argc); | 2100 int argc = arg_count(); |
| 2101 __ mov(r0, Operand(argc)); |
| 2102 } |
2093 | 2103 |
2094 // The checks. First, does r1 match the recorded monomorphic target? | 2104 // The checks. First, does r1 match the recorded monomorphic target? |
2095 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2105 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2096 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); | 2106 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
2097 | 2107 |
2098 // We don't know that we have a weak cell. We might have a private symbol | 2108 // We don't know that we have a weak cell. We might have a private symbol |
2099 // or an AllocationSite, but the memory is safe to examine. | 2109 // or an AllocationSite, but the memory is safe to examine. |
2100 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to | 2110 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to |
2101 // FixedArray. | 2111 // FixedArray. |
2102 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) | 2112 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) |
(...skipping 15 matching lines...)
2118 __ JumpIfSmi(r1, &extra_checks_or_miss); | 2128 __ JumpIfSmi(r1, &extra_checks_or_miss); |
2119 | 2129 |
2120 // Increment the call count for monomorphic function calls. | 2130 // Increment the call count for monomorphic function calls. |
2121 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2131 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2122 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize)); | 2132 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize)); |
2123 __ ldr(r3, FieldMemOperand(r2, 0)); | 2133 __ ldr(r3, FieldMemOperand(r2, 0)); |
2124 __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); | 2134 __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); |
2125 __ str(r3, FieldMemOperand(r2, 0)); | 2135 __ str(r3, FieldMemOperand(r2, 0)); |
2126 | 2136 |
2127 __ bind(&call_function); | 2137 __ bind(&call_function); |
2128 __ mov(r0, Operand(argc)); | |
2129 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), | 2138 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), |
2130 tail_call_mode()), | 2139 tail_call_mode()), |
2131 RelocInfo::CODE_TARGET); | 2140 RelocInfo::CODE_TARGET); |
2132 | 2141 |
2133 __ bind(&extra_checks_or_miss); | 2142 __ bind(&extra_checks_or_miss); |
2134 Label uninitialized, miss, not_allocation_site; | 2143 Label uninitialized, miss, not_allocation_site; |
2135 | 2144 |
2136 __ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex); | 2145 __ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex); |
2137 __ b(eq, &call); | 2146 __ b(eq, &call); |
2138 | 2147 |
(...skipping 19 matching lines...)
2158 // We are going megamorphic. If the feedback is a JSFunction, it is fine | 2167 // We are going megamorphic. If the feedback is a JSFunction, it is fine |
2159 // to handle it here. More complex cases are dealt with in the runtime. | 2168 // to handle it here. More complex cases are dealt with in the runtime. |
2160 __ AssertNotSmi(r4); | 2169 __ AssertNotSmi(r4); |
2161 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE); | 2170 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE); |
2162 __ b(ne, &miss); | 2171 __ b(ne, &miss); |
2163 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2172 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2164 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); | 2173 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); |
2165 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); | 2174 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
2166 | 2175 |
2167 __ bind(&call); | 2176 __ bind(&call); |
2168 __ mov(r0, Operand(argc)); | |
2169 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), | 2177 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), |
2170 RelocInfo::CODE_TARGET); | 2178 RelocInfo::CODE_TARGET); |
2171 | 2179 |
2172 __ bind(&uninitialized); | 2180 __ bind(&uninitialized); |
2173 | 2181 |
2174 // We are going monomorphic, provided we actually have a JSFunction. | 2182 // We are going monomorphic, provided we actually have a JSFunction. |
2175 __ JumpIfSmi(r1, &miss); | 2183 __ JumpIfSmi(r1, &miss); |
2176 | 2184 |
2177 // Goto miss case if we do not have a function. | 2185 // Goto miss case if we do not have a function. |
2178 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); | 2186 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); |
(...skipping 17 matching lines...)
2196 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2204 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2197 __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize)); | 2205 __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize)); |
2198 | 2206 |
2199 // Store the function. Use a stub since we need a frame for allocation. | 2207 // Store the function. Use a stub since we need a frame for allocation. |
2200 // r2 - vector | 2208 // r2 - vector |
2201 // r3 - slot | 2209 // r3 - slot |
2202 // r1 - function | 2210 // r1 - function |
2203 { | 2211 { |
2204 FrameScope scope(masm, StackFrame::INTERNAL); | 2212 FrameScope scope(masm, StackFrame::INTERNAL); |
2205 CreateWeakCellStub create_stub(masm->isolate()); | 2213 CreateWeakCellStub create_stub(masm->isolate()); |
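| // The stub call below allocates (hence the frame), so it may trigger a GC; |
| // Smi-tag the argument count so the value pushed on the stack remains a |
| // valid tagged value while it is live across the call. |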
| 2214 __ SmiTag(r0); |
| 2215 __ Push(r0); |
2206 __ Push(r1); | 2216 __ Push(r1); |
2207 __ CallStub(&create_stub); | 2217 __ CallStub(&create_stub); |
2208 __ Pop(r1); | 2218 __ Pop(r1); |
| 2219 __ Pop(r0); |
| 2220 __ SmiUntag(r0); |
2209 } | 2221 } |
2210 | 2222 |
2211 __ jmp(&call_function); | 2223 __ jmp(&call_function); |
2212 | 2224 |
2213 // We are here because tracing is on or we encountered a MISS case we can't | 2225 // We are here because tracing is on or we encountered a MISS case we can't |
2214 // handle here. | 2226 // handle here. |
2215 __ bind(&miss); | 2227 __ bind(&miss); |
2216 GenerateMiss(masm); | 2228 GenerateMiss(masm); |
2217 | 2229 |
2218 __ jmp(&call); | 2230 __ jmp(&call); |
2219 } | 2231 } |
2220 | 2232 |
2221 | 2233 |
2222 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 2234 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
2223 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 2235 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
2224 | 2236 |
| 2237 // Save the number of arguments; r0 is clobbered by the runtime call below. |
| 2238 __ SmiTag(r0); |
| 2239 __ Push(r0); |
| 2240 |
2225 // Push the receiver and the function and feedback info. | 2241 // Push the function and the feedback info (vector and slot). |
2226 __ Push(r1, r2, r3); | 2242 __ Push(r1, r2, r3); |
2227 | 2243 |
2228 // Call the entry. | 2244 // Call the entry. |
2229 __ CallRuntime(Runtime::kCallIC_Miss); | 2245 __ CallRuntime(Runtime::kCallIC_Miss); |
2230 | 2246 |
2231 // Move result to edi and exit the internal frame. | 2247 // Move the result to r1 and exit the internal frame. |
2232 __ mov(r1, r0); | 2248 __ mov(r1, r0); |
| 2249 |
| 2250 // Restore the number of arguments to r0. |
| 2251 __ Pop(r0); |
| 2252 __ SmiUntag(r0); |
2233 } | 2253 } |
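For reference, the SmiTag/SmiUntag pair used above to keep r0 alive across GC-capable calls amounts to a one-bit shift on 32-bit targets such as ARM: Smis keep the low bit clear while heap object pointers keep it set, so a tagged count on the stack is never misread as a pointer. A minimal standalone sketch, assuming the 32-bit Smi layout; the helper names are illustrative, not V8's own:

    #include <cstdint>

    // Tag: shift the 31-bit payload up by one bit, leaving tag bit 0 clear.
    static inline int32_t SmiTagValue(int32_t value) {
      return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
    }

    // Untag: arithmetic shift back down to recover the original value.
    static inline int32_t SmiUntagValue(int32_t smi) { return smi >> 1; }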
2234 | 2254 |
2235 | 2255 |
2236 // StringCharCodeAtGenerator | 2256 // StringCharCodeAtGenerator |
2237 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 2257 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
2238 // If the receiver is a smi trigger the non-string case. | 2258 // If the receiver is a smi trigger the non-string case. |
2239 if (check_mode_ == RECEIVER_IS_UNKNOWN) { | 2259 if (check_mode_ == RECEIVER_IS_UNKNOWN) { |
2240 __ JumpIfSmi(object_, receiver_not_string_); | 2260 __ JumpIfSmi(object_, receiver_not_string_); |
2241 | 2261 |
2242 // Fetch the instance type of the receiver into result register. | 2262 // Fetch the instance type of the receiver into result register. |
(...skipping 3369 matching lines...)
5612 kStackUnwindSpace, NULL, return_value_operand, NULL); | 5632 kStackUnwindSpace, NULL, return_value_operand, NULL); |
5613 } | 5633 } |
5614 | 5634 |
5615 | 5635 |
5616 #undef __ | 5636 #undef __ |
5617 | 5637 |
5618 } // namespace internal | 5638 } // namespace internal |
5619 } // namespace v8 | 5639 } // namespace v8 |
5620 | 5640 |
5621 #endif // V8_TARGET_ARCH_ARM | 5641 #endif // V8_TARGET_ARCH_ARM |