Chromium Code Reviews

Unified Diff: src/arm/code-stubs-arm.cc

Issue 196133017: Experimental parser: merge r19949 (Closed)
Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 9 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 147 matching lines...)
 void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
   static Register registers[] = { r1 };
   descriptor->register_param_count_ = 1;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ = NULL;
 }
 
 
+void StringLengthStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { r0, r2 };
+  descriptor->register_param_count_ = 2;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
+void KeyedStringLengthStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { r1, r0 };
+  descriptor->register_param_count_ = 2;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
   static Register registers[] = { r2, r1, r0 };
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
       FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure);
 }
 
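Note: the two descriptors added above are what move StringLengthStub and KeyedStringLengthStub onto the Hydrogen code-stub path (their hand-written ARM Generate bodies are deleted further down in this file). A descriptor just names the registers that carry the stub's parameters and its deoptimization handler. A minimal sketch of the pattern for a hypothetical stub (MyStub and its single r0 parameter are illustrative, not part of this CL):

    void MyStub::InitializeInterfaceDescriptor(
        Isolate* isolate,
        CodeStubInterfaceDescriptor* descriptor) {
      // The register array must outlive the descriptor, hence static storage.
      static Register registers[] = { r0 };
      descriptor->register_param_count_ = 1;
      descriptor->register_params_ = registers;
      // NULL: misses fall through to GenerateLightweightMiss (below) instead
      // of a dedicated deoptimization handler.
      descriptor->deoptimization_handler_ = NULL;
    }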
(...skipping 305 matching lines...)
 
 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
   // Update the static counter each time a new code stub is generated.
   Isolate* isolate = masm->isolate();
   isolate->counters()->code_stubs()->Increment();
 
   CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
   int param_count = descriptor->register_param_count_;
   {
     // Call the runtime system in a fresh internal frame.
-    FrameScope scope(masm, StackFrame::INTERNAL);
+    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
     ASSERT(descriptor->register_param_count_ == 0 ||
            r0.is(descriptor->register_params_[param_count - 1]));
     // Push arguments
     for (int i = 0; i < param_count; ++i) {
       __ push(descriptor->register_params_[i]);
     }
     ExternalReference miss = descriptor->miss_handler();
     __ CallExternalReference(miss, descriptor->register_param_count_);
   }
 
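The FrameScope to FrameAndConstantPoolScope swap here recurs throughout this CL (CEntryStub, InstanceofStub, GenerateRecordCallTarget, CallFunctionStub, ICCompareStub). On ARM with out-of-line constant pools the frame must also materialize and restore the constant pool pointer, and the new scope folds that into frame entry and exit. A hedged sketch of the usage pattern, reusing only names visible above:

    {
      // Builds an INTERNAL frame; with out-of-line constant pools enabled it
      // additionally saves and re-establishes the constant pool pointer.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ push(r1);                        // operands the callee needs
      __ CallExternalReference(miss, 1);  // 'miss' as obtained above
    }  // leaving the scope emits the frame (and constant pool) teardown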
(...skipping 1095 matching lines...)
 
   // Retrieve the pending exception.
   __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                        isolate)));
   __ ldr(r0, MemOperand(ip));
 
   // See if we just retrieved an OOM exception.
   JumpIfOOM(masm, r0, ip, throw_out_of_memory_exception);
 
   // Clear the pending exception.
-  __ mov(r3, Operand(isolate->factory()->the_hole_value()));
+  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
   __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                        isolate)));
   __ str(r3, MemOperand(ip));
 
   // Special handling of termination exceptions which are uncatchable
   // by javascript code.
-  __ cmp(r0, Operand(isolate->factory()->termination_exception()));
+  __ LoadRoot(r3, Heap::kTerminationExceptionRootIndex);
+  __ cmp(r0, r3);
   __ b(eq, throw_termination_exception);
 
   // Handle normal exception.
   __ jmp(throw_normal_exception);
 
   __ bind(&retry);  // pass last failure (r0) as parameter (r0) when retrying
 }
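Both replacements in this hunk swap an embedded heap constant for a root-list load: LoadRoot indexes the roots array through the dedicated root register at run time, so the stub no longer bakes a pointer to the-hole or to the termination exception into its code. The shape of the change, lifted straight from the hunk:

    // Before: handle embedded in the code object as a constant operand.
    __ cmp(r0, Operand(isolate->factory()->termination_exception()));
    // After: fetch the same value via the root register, then compare.
    __ LoadRoot(r3, Heap::kTerminationExceptionRootIndex);
    __ cmp(r0, r3);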
 
 
 void CEntryStub::Generate(MacroAssembler* masm) {
(...skipping 11 matching lines...)
   // NOTE: Invocations of builtins may return failure objects
   // instead of a proper result. The builtin entry handles
   // this by performing a garbage collection and retrying the
   // builtin once.
 
   // Compute the argv pointer in a callee-saved register.
   __ add(r6, sp, Operand(r0, LSL, kPointerSizeLog2));
   __ sub(r6, r6, Operand(kPointerSize));
 
   // Enter the exit frame that transitions from JavaScript to C++.
-  FrameScope scope(masm, StackFrame::MANUAL);
+  FrameAndConstantPoolScope scope(masm, StackFrame::MANUAL);
   __ EnterExitFrame(save_doubles_);
 
   // Set up argc and the builtin function in callee-saved registers.
   __ mov(r4, Operand(r0));
   __ mov(r5, Operand(r1));
 
   // r4: number of arguments (C callee-saved)
   // r5: pointer to builtin function (C callee-saved)
   // r6: pointer to first argument (C callee-saved)
 
(...skipping 232 matching lines...)
   __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
 }
 
 
 // Uses registers r0 to r4.
 // Expected input (depending on whether args are in registers or on the stack):
 // * object: r0 or at sp + 1 * kPointerSize.
 // * function: r1 or at sp.
 //
 // An inlined call site may have been generated before calling this stub.
-// In this case the offset to the inline site to patch is passed on the stack,
-// in the safepoint slot for register r4.
+// In this case the offset to the inline site to patch is passed in r5.
 // (See LCodeGen::DoInstanceOfKnownGlobal)
 void InstanceofStub::Generate(MacroAssembler* masm) {
   // Call site inlining and patching implies arguments in registers.
   ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
   // ReturnTrueFalse is only implemented for inlined call sites.
   ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());
 
   // Fixed register usage throughout the stub:
   const Register object = r0;  // Object (lhs).
   Register map = r3;  // Map of the object.
(...skipping 38 matching lines...)
 
   // Update the global instanceof or call site inlined cache with the current
   // map and function. The cached answer will be set when it is known below.
   if (!HasCallSiteInlineCheck()) {
     __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
     __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
   } else {
     ASSERT(HasArgsInRegisters());
     // Patch the (relocated) inlined map check.
 
-    // The offset was stored in r4 safepoint slot.
-    // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal)
-    __ LoadFromSafepointRegisterSlot(scratch, r4);
-    __ sub(inline_site, lr, scratch);
-    // Get the map location in scratch and patch it.
-    __ GetRelocatedValueLocation(inline_site, scratch);
-    __ ldr(scratch, MemOperand(scratch));
-    __ str(map, FieldMemOperand(scratch, Cell::kValueOffset));
+    // The offset was stored in r5
+    // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
+    const Register offset = r5;
+    __ sub(inline_site, lr, offset);
+    // Get the map location in r5 and patch it.
+    __ GetRelocatedValueLocation(inline_site, offset);
+    __ ldr(offset, MemOperand(offset));
+    __ str(map, FieldMemOperand(offset, Cell::kValueOffset));
   }
 
 
   // Register mapping: r3 is object map and r4 is function prototype.
   // Get prototype of object into r2.
   __ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset));
 
   // We don't need map any more. Use it as a scratch register.
   Register scratch2 = map;
   map = no_reg;
 
(...skipping 72 matching lines...)
 
   // Slow-case. Tail call builtin.
   __ bind(&slow);
   if (!ReturnTrueFalseObject()) {
     if (HasArgsInRegisters()) {
       __ Push(r0, r1);
     }
     __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
   } else {
     {
-      FrameScope scope(masm, StackFrame::INTERNAL);
+      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
       __ Push(r0, r1);
       __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
     }
     __ cmp(r0, Operand::Zero());
     __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
     __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
     __ Ret(HasArgsInRegisters() ? 0 : 2);
   }
 }
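Worth noting in the unchanged tail above: the boolean result is materialized without a branch, using ARM conditional execution. Exactly one of the two predicated loads retires, depending on the flags set by the cmp:

    __ cmp(r0, Operand::Zero());
    __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);   // r0 = true iff result == 0
    __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);  // r0 = false otherwise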
 
(...skipping 21 matching lines...)
     receiver = r0;
   }
 
   StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, r3, r4, &miss);
   __ bind(&miss);
   StubCompiler::TailCallBuiltin(
       masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
 }
 
 
-void StringLengthStub::Generate(MacroAssembler* masm) {
-  Label miss;
-  Register receiver;
-  if (kind() == Code::KEYED_LOAD_IC) {
-    // ----------- S t a t e -------------
-    //  -- lr    : return address
-    //  -- r0    : key
-    //  -- r1    : receiver
-    // -----------------------------------
-    __ cmp(r0, Operand(masm->isolate()->factory()->length_string()));
-    __ b(ne, &miss);
-    receiver = r1;
-  } else {
-    ASSERT(kind() == Code::LOAD_IC);
-    // ----------- S t a t e -------------
-    //  -- r2    : name
-    //  -- lr    : return address
-    //  -- r0    : receiver
-    //  -- sp[0] : receiver
-    // -----------------------------------
-    receiver = r0;
-  }
-
-  StubCompiler::GenerateLoadStringLength(masm, receiver, r3, r4, &miss);
-
-  __ bind(&miss);
-  StubCompiler::TailCallBuiltin(
-      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
-}
-
-
 void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
   // This accepts as a receiver anything JSArray::SetElementsLength accepts
   // (currently anything except for external arrays which means anything with
   // elements of FixedArray type). Value must be a number, but only smis are
   // accepted as the most common case.
   Label miss;
 
   Register receiver;
   Register value;
   if (kind() == Code::KEYED_STORE_IC) {
(...skipping 108 matching lines...)
   __ Jump(lr);
 
   // Slow-case: Handle non-smi or out-of-bounds access to arguments
   // by calling the runtime system.
   __ bind(&slow);
   __ push(r1);
   __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
 }
 
 
-void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
+void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
   // sp[0] : number of parameters
   // sp[4] : receiver displacement
   // sp[8] : function
 
   // Check if the calling frame is an arguments adaptor frame.
   Label runtime;
   __ ldr(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   __ ldr(r2, MemOperand(r3, StandardFrameConstants::kContextOffset));
   __ cmp(r2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   __ b(ne, &runtime);
 
   // Patch the arguments.length and the parameters pointer in the current frame.
   __ ldr(r2, MemOperand(r3, ArgumentsAdaptorFrameConstants::kLengthOffset));
   __ str(r2, MemOperand(sp, 0 * kPointerSize));
   __ add(r3, r3, Operand(r2, LSL, 1));
   __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
   __ str(r3, MemOperand(sp, 1 * kPointerSize));
 
   __ bind(&runtime);
   __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
 }
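"Sloppy" is V8's new name for non-strict mode, hence the renames in this and the following hunks (GenerateNewSloppySlow/Fast, kSloppyArgumentsObjectSize, and friends); the logic itself is untouched here. One detail worth calling out in the patch above: r2 holds the argument count as a smi, and a smi on 32-bit ARM is the value shifted left by one, so Operand(r2, LSL, 1) shifts it once more, yielding count * 4 = count * kPointerSize. In sketch form (annotation, not code from this CL):

    // What the adaptor-frame patch computes (r3 = caller/adaptor frame pointer):
    //   count  = adaptor frame's stored argument count            (smi in r2)
    //   sp[0]  = count;                                           // arguments.length
    //   sp[4]  = caller_fp + count * kPointerSize
    //            + StandardFrameConstants::kCallerSPOffset;       // first parameter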
 
 
-void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
+void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   // Stack layout:
   //  sp[0] : number of parameters (tagged)
   //  sp[4] : address of receiver argument
   //  sp[8] : function
   // Registers used over whole function:
   //  r6 : allocated object (tagged)
   //  r9 : mapped parameter count (tagged)
 
   __ ldr(r1, MemOperand(sp, 0 * kPointerSize));
   // r1 = parameter count (tagged)
(...skipping 33 matching lines...)
   __ cmp(r1, Operand(Smi::FromInt(0)));
   __ mov(r9, Operand::Zero(), LeaveCC, eq);
   __ mov(r9, Operand(r1, LSL, 1), LeaveCC, ne);
   __ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne);
 
   // 2. Backing store.
   __ add(r9, r9, Operand(r2, LSL, 1));
   __ add(r9, r9, Operand(FixedArray::kHeaderSize));
 
   // 3. Arguments object.
-  __ add(r9, r9, Operand(Heap::kArgumentsObjectSize));
+  __ add(r9, r9, Operand(Heap::kSloppyArgumentsObjectSize));
 
   // Do the allocation of all three objects in one go.
   __ Allocate(r9, r0, r3, r4, &runtime, TAG_OBJECT);
 
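The three-part size accumulated in r9 feeds a single Allocate call, so the parameter map, the backing store, and the arguments object itself come out of new space contiguously. A hedged restatement in plain C++ (the helper function is for illustration; only the constant names used above come from the CL):

    // Sketch: total allocation for one sloppy arguments object.
    int SloppyArgumentsAllocationSize(int mapped_count, int arg_count) {
      int size = 0;
      if (mapped_count > 0) {                      // 1. Parameter map.
        size += mapped_count * kPointerSize + kParameterMapHeaderSize;
      }
      size += arg_count * kPointerSize +           // 2. Backing store.
              FixedArray::kHeaderSize;
      size += Heap::kSloppyArgumentsObjectSize;    // 3. The JSObject itself.
      return size;
    }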
   // r0 = address of new object(s) (tagged)
   // r2 = argument count (tagged)
   // Get the arguments boilerplate from the current native context into r4.
   const int kNormalOffset =
-      Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
+      Context::SlotOffset(Context::SLOPPY_ARGUMENTS_BOILERPLATE_INDEX);
   const int kAliasedOffset =
       Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX);
 
   __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ ldr(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
   __ cmp(r1, Operand::Zero());
   __ ldr(r4, MemOperand(r4, kNormalOffset), eq);
   __ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
 
   // r0 = address of new object (tagged)
(...skipping 15 matching lines...)
 
   // Use the length (smi tagged) and set that as an in-object property too.
   STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
   const int kLengthOffset = JSObject::kHeaderSize +
                             Heap::kArgumentsLengthIndex * kPointerSize;
   __ str(r2, FieldMemOperand(r0, kLengthOffset));
 
   // Set up the elements pointer in the allocated arguments object.
   // If we allocated a parameter map, r4 will point there, otherwise
   // it will point to the backing store.
-  __ add(r4, r0, Operand(Heap::kArgumentsObjectSize));
+  __ add(r4, r0, Operand(Heap::kSloppyArgumentsObjectSize));
   __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
 
   // r0 = address of new object (tagged)
   // r1 = mapped parameter count (tagged)
   // r2 = argument count (tagged)
   // r4 = address of parameter map or backing store (tagged)
   // Initialize parameter map. If there are no mapped arguments, we're done.
   Label skip_parameter_map;
   __ cmp(r1, Operand(Smi::FromInt(0)));
   // Move backing store address to r3, because it is
   // expected there when filling in the unmapped arguments.
   __ mov(r3, r4, LeaveCC, eq);
   __ b(eq, &skip_parameter_map);
 
-  __ LoadRoot(r6, Heap::kNonStrictArgumentsElementsMapRootIndex);
+  __ LoadRoot(r6, Heap::kSloppyArgumentsElementsMapRootIndex);
   __ str(r6, FieldMemOperand(r4, FixedArray::kMapOffset));
   __ add(r6, r1, Operand(Smi::FromInt(2)));
   __ str(r6, FieldMemOperand(r4, FixedArray::kLengthOffset));
   __ str(cp, FieldMemOperand(r4, FixedArray::kHeaderSize + 0 * kPointerSize));
   __ add(r6, r4, Operand(r1, LSL, 1));
   __ add(r6, r6, Operand(kParameterMapHeaderSize));
   __ str(r6, FieldMemOperand(r4, FixedArray::kHeaderSize + 1 * kPointerSize));
 
   // Copy the parameter slots and the holes in the arguments.
   // We need to fill in mapped_parameter_count slots. They index the context,
   // where parameters are stored in reverse order, at
   //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
   // The mapped parameter thus need to get indices
   //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
   //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
   // We loop from right to left.
   Label parameters_loop, parameters_test;
   __ mov(r6, r1);
   __ ldr(r9, MemOperand(sp, 0 * kPointerSize));
   __ add(r9, r9, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
   __ sub(r9, r9, Operand(r1));
   __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
   __ add(r3, r4, Operand(r6, LSL, 1));
   __ add(r3, r3, Operand(kParameterMapHeaderSize));
 
   // r6 = loop variable (tagged)
   // r1 = mapping index (tagged)
   // r3 = address of backing store (tagged)
   // r4 = address of parameter map (tagged), which is also the address of new
-  //      object + Heap::kArgumentsObjectSize (tagged)
+  //      object + Heap::kSloppyArgumentsObjectSize (tagged)
   // r0 = temporary scratch (a.o., for address calculation)
   // r5 = the hole value
   __ jmp(&parameters_test);
 
   __ bind(&parameters_loop);
   __ sub(r6, r6, Operand(Smi::FromInt(1)));
   __ mov(r0, Operand(r6, LSL, 1));
   __ add(r0, r0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
   __ str(r9, MemOperand(r4, r0));
   __ sub(r0, r0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
   __ str(r5, MemOperand(r3, r0));
   __ add(r9, r9, Operand(Smi::FromInt(1)));
   __ bind(&parameters_test);
   __ cmp(r6, Operand(Smi::FromInt(0)));
   __ b(ne, &parameters_loop);
 
   // Restore r0 = new object (tagged)
-  __ sub(r0, r4, Operand(Heap::kArgumentsObjectSize));
+  __ sub(r0, r4, Operand(Heap::kSloppyArgumentsObjectSize));
 
   __ bind(&skip_parameter_map);
   // r0 = address of new object (tagged)
   // r2 = argument count (tagged)
   // r3 = address of backing store (tagged)
   // r5 = scratch
   // Copy arguments header and remaining slots (if there are any).
   __ LoadRoot(r5, Heap::kFixedArrayMapRootIndex);
   __ str(r5, FieldMemOperand(r3, FixedArray::kMapOffset));
   __ str(r2, FieldMemOperand(r3, FixedArray::kLengthOffset));
(...skipping 52 matching lines...)
   __ str(r3, MemOperand(sp, 1 * kPointerSize));
 
   // Try the new space allocation. Start out with computing the size
   // of the arguments object and the elements array in words.
   Label add_arguments_object;
   __ bind(&try_allocate);
   __ SmiUntag(r1, SetCC);
   __ b(eq, &add_arguments_object);
   __ add(r1, r1, Operand(FixedArray::kHeaderSize / kPointerSize));
   __ bind(&add_arguments_object);
-  __ add(r1, r1, Operand(Heap::kArgumentsObjectSizeStrict / kPointerSize));
+  __ add(r1, r1, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize));
 
   // Do the allocation of both objects in one go.
   __ Allocate(r1, r0, r2, r3, &runtime,
               static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
 
   // Get the arguments boilerplate from the current native context.
   __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ ldr(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
   __ ldr(r4, MemOperand(r4, Context::SlotOffset(
-      Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX)));
+      Context::STRICT_ARGUMENTS_BOILERPLATE_INDEX)));
 
   // Copy the JS object part.
   __ CopyFields(r0, r4, d0, JSObject::kHeaderSize / kPointerSize);
 
   // Get the length (smi tagged) and set that as an in-object property too.
   STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
   __ ldr(r1, MemOperand(sp, 0 * kPointerSize));
   __ str(r1, FieldMemOperand(r0, JSObject::kHeaderSize +
                                  Heap::kArgumentsLengthIndex * kPointerSize));
 
   // If there are no actual arguments, we're done.
   Label done;
   __ cmp(r1, Operand::Zero());
   __ b(eq, &done);
 
   // Get the parameters pointer from the stack.
   __ ldr(r2, MemOperand(sp, 1 * kPointerSize));
 
   // Set up the elements pointer in the allocated arguments object and
   // initialize the header in the elements fixed array.
-  __ add(r4, r0, Operand(Heap::kArgumentsObjectSizeStrict));
+  __ add(r4, r0, Operand(Heap::kStrictArgumentsObjectSize));
   __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
   __ LoadRoot(r3, Heap::kFixedArrayMapRootIndex);
   __ str(r3, FieldMemOperand(r4, FixedArray::kMapOffset));
   __ str(r1, FieldMemOperand(r4, FixedArray::kLengthOffset));
   __ SmiUntag(r1);
 
   // Copy the fixed array slots.
   Label loop;
   // Set up r4 to point to the first array slot.
   __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
(...skipping 445 matching lines...)
   // Cache the called function in a feedback vector slot. Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
   // megamorphic.
   // r0 : number of arguments to the construct function
   // r1 : the function to call
   // r2 : Feedback vector
   // r3 : slot in feedback vector (Smi)
   Label initialize, done, miss, megamorphic, not_array_function;
 
   ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
-            masm->isolate()->heap()->undefined_value());
+            masm->isolate()->heap()->megamorphic_symbol());
   ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
-            masm->isolate()->heap()->the_hole_value());
+            masm->isolate()->heap()->uninitialized_symbol());
 
   // Load the cache state into r4.
   __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
   __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));
 
   // A monomorphic cache hit or an already megamorphic state: invoke the
   // function without changing the state.
   __ cmp(r4, r1);
   __ b(eq, &done);
 
   // If we came here, we need to see if we are the array function.
   // If we didn't have a matching function, and we didn't find the megamorph
   // sentinel, then we have in the slot either some other function or an
   // AllocationSite. Do a map check on the object in ecx.
   __ ldr(r5, FieldMemOperand(r4, 0));
   __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
   __ b(ne, &miss);
 
   // Make sure the function is the Array() function
   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
   __ cmp(r1, r4);
   __ b(ne, &megamorphic);
   __ jmp(&done);
 
   __ bind(&miss);
 
   // A monomorphic miss (i.e, here the cache is not uninitialized) goes
   // megamorphic.
-  __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
+  __ CompareRoot(r4, Heap::kUninitializedSymbolRootIndex);
   __ b(eq, &initialize);
   // MegamorphicSentinel is an immortal immovable object (undefined) so no
   // write-barrier is needed.
   __ bind(&megamorphic);
   __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
-  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex);
   __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
   __ jmp(&done);
 
   // An uninitialized cache is patched with the function or sentinel to
   // indicate the ElementsKind if function is the Array constructor.
   __ bind(&initialize);
   // Make sure the function is the Array() function
   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
   __ cmp(r1, r4);
   __ b(ne, &not_array_function);
 
   // The target function is the Array constructor,
   // Create an AllocationSite if we don't already have it, store it in the slot.
   {
-    FrameScope scope(masm, StackFrame::INTERNAL);
+    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
 
     // Arguments register must be smi-tagged to call out.
     __ SmiTag(r0);
     __ Push(r3, r2, r1, r0);
 
     CreateAllocationSiteStub create_stub;
     __ CallStub(&create_stub);
 
     __ Pop(r3, r2, r1, r0);
     __ SmiUntag(r0);
(...skipping 11 matching lines...)
                       EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   __ Pop(r4, r2, r1);
 
   __ bind(&done);
 }
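This CL also picks up the sentinel change for call-site feedback: the megamorphic and uninitialized states are now dedicated symbols (megamorphic_symbol, uninitialized_symbol) instead of undefined and the-hole, which makes the sentinels unambiguous while keeping the no-write-barrier trick valid. The slot protocol, condensed from the function above (sketch, not new code):

    // Feedback-vector slot states (r4 = loaded cache state):
    //   uninitialized_symbol -> nothing recorded yet; patch in function/site
    //   a JSFunction         -> monomorphic call site
    //   an AllocationSite    -> monomorphic Array() construct site
    //   megamorphic_symbol   -> polymorphic; stop recording
    __ CompareRoot(r4, Heap::kUninitializedSymbolRootIndex);
    __ b(eq, &initialize);
    // Both symbols are immortal and immovable, so storing one into the
    // vector needs no write barrier.
    __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex);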
 
 
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // r1 : the function to call
   // r2 : feedback vector
-  // r3 : (only if r2 is not undefined) slot in feedback vector (Smi)
+  // r3 : (only if r2 is not the megamorphic symbol) slot in feedback
+  //      vector (Smi)
   Label slow, non_function, wrap, cont;
 
   if (NeedsChecks()) {
     // Check that the function is really a JavaScript function.
     // r1: pushed function (to be verified)
     __ JumpIfSmi(r1, &non_function);
 
     // Goto slow case if we do not have a function.
     __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
     __ b(ne, &slow);
(...skipping 14 matching lines...)
     __ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset));
     __ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                              kSmiTagSize)));
     __ b(ne, &cont);
 
     // Do not transform the receiver for native (Compilerhints already in r3).
     __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
     __ b(ne, &cont);
   }
 
-  // Compute the receiver in non-strict mode.
+  // Compute the receiver in sloppy mode.
   __ ldr(r3, MemOperand(sp, argc_ * kPointerSize));
 
   if (NeedsChecks()) {
     __ JumpIfSmi(r3, &wrap);
     __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
     __ b(lt, &wrap);
   } else {
     __ jmp(&wrap);
   }
 
   __ bind(&cont);
   }
   __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper());
 
   if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ bind(&slow);
     if (RecordCallTarget()) {
       // If there is a call target cache, mark it megamorphic in the
       // non-function case. MegamorphicSentinel is an immortal immovable
-      // object (undefined) so no write barrier is needed.
+      // object (megamorphic symbol) so no write barrier is needed.
       ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
-                masm->isolate()->heap()->undefined_value());
+                masm->isolate()->heap()->megamorphic_symbol());
       __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
-      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+      __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex);
       __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize));
     }
     // Check for function proxy.
     __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE));
     __ b(ne, &non_function);
     __ push(r1);  // put proxy as additional argument
     __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE32));
     __ mov(r2, Operand::Zero());
     __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
     {
       Handle<Code> adaptor =
           masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
       __ Jump(adaptor, RelocInfo::CODE_TARGET);
     }
 
     // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
     // of the original receiver from the call site).
     __ bind(&non_function);
     __ str(r1, MemOperand(sp, argc_ * kPointerSize));
     __ mov(r0, Operand(argc_));  // Set up the number of arguments.
     __ mov(r2, Operand::Zero());
     __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
     __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
             RelocInfo::CODE_TARGET);
   }
 
   if (CallAsMethod()) {
     __ bind(&wrap);
     // Wrap the receiver and patch it back onto the stack.
-    { FrameScope frame_scope(masm, StackFrame::INTERNAL);
+    { FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
       __ Push(r1, r3);
       __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
       __ pop(r1);
     }
     __ str(r0, MemOperand(sp, argc_ * kPointerSize));
     __ jmp(&cont);
   }
 }
 
 
 void CallConstructStub::Generate(MacroAssembler* masm) {
   // r0 : number of arguments
   // r1 : the function to call
   // r2 : feedback vector
-  // r3 : (only if r2 is not undefined) slot in feedback vector (Smi)
+  // r3 : (only if r2 is not the megamorphic symbol) slot in feedback
+  //      vector (Smi)
   Label slow, non_function_call;
 
   // Check that the function is not a smi.
   __ JumpIfSmi(r1, &non_function_call);
   // Check that the function is a JSFunction.
   __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
   __ b(ne, &slow);
 
   if (RecordCallTarget()) {
     GenerateRecordCallTarget(masm);
(...skipping 1248 matching lines...)
 }
 
 
 
 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
   {
     // Call the runtime system in a fresh internal frame.
     ExternalReference miss =
         ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
 
-    FrameScope scope(masm, StackFrame::INTERNAL);
+    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
     __ Push(r1, r0);
     __ Push(lr, r1, r0);
     __ mov(ip, Operand(Smi::FromInt(op_)));
     __ push(ip);
     __ CallExternalReference(miss, 3);
     // Compute the entry point of the rewritten stub.
     __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
     // Restore registers.
     __ pop(lr);
     __ Pop(r1, r0);
(...skipping 344 matching lines...)
     __ CheckPageFlag(regs_.object(),
                      regs_.scratch0(),
                      1 << MemoryChunk::SCAN_ON_SCAVENGE,
                      ne,
                      &dont_need_remembered_set);
 
     // First notify the incremental marker if necessary, then update the
     // remembered set.
     CheckNeedsToInformIncrementalMarker(
         masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
-    InformIncrementalMarker(masm, mode);
+    InformIncrementalMarker(masm);
     regs_.Restore(masm);
     __ RememberedSetHelper(object_,
                            address_,
                            value_,
                            save_fp_regs_mode_,
                            MacroAssembler::kReturnAtEnd);
 
     __ bind(&dont_need_remembered_set);
   }
 
   CheckNeedsToInformIncrementalMarker(
       masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
-  InformIncrementalMarker(masm, mode);
+  InformIncrementalMarker(masm);
   regs_.Restore(masm);
   __ Ret();
 }
 
 
-void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
+void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
   regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
   int argument_count = 3;
   __ PrepareCallCFunction(argument_count, regs_.scratch0());
   Register address =
       r0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
   ASSERT(!address.is(regs_.object()));
   ASSERT(!address.is(r0));
   __ Move(address, regs_.address());
   __ Move(r0, regs_.object());
   __ Move(r1, address);
   __ mov(r2, Operand(ExternalReference::isolate_address(masm->isolate())));
 
   AllowExternalCallThatCantCauseGC scope(masm);
-  if (mode == INCREMENTAL_COMPACTION) {
-    __ CallCFunction(
-        ExternalReference::incremental_evacuation_record_write_function(
-            masm->isolate()),
-        argument_count);
-  } else {
-    ASSERT(mode == INCREMENTAL);
-    __ CallCFunction(
-        ExternalReference::incremental_marking_record_write_function(
-            masm->isolate()),
-        argument_count);
-  }
+  __ CallCFunction(
+      ExternalReference::incremental_marking_record_write_function(
+          masm->isolate()),
+      argument_count);
   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
 }
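Upstream merged the separate incremental-marking and incremental-evacuation record-write entry points into a single C function, so the Mode parameter became dead and both call sites above now go through incremental_marking_record_write_function. As far as I can tell, the C-side entry point this external reference resolves to has the following shape (signature inferred from the three arguments marshalled above, not quoted from this CL):

    // r0 -> object being written into, r1 -> slot address, r2 -> isolate.
    void IncrementalMarking::RecordWriteFromCode(HeapObject* obj,
                                                 Object** slot,
                                                 Isolate* isolate);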
 
 
 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
     MacroAssembler* masm,
     OnNoNeedToInformIncrementalMarker on_no_need,
     Mode mode) {
   Label on_black;
   Label need_incremental;
(...skipping 394 matching lines...)
5290 } else { 5273 } else {
5291 UNREACHABLE(); 5274 UNREACHABLE();
5292 } 5275 }
5293 } 5276 }
5294 5277
5295 5278
5296 void ArrayConstructorStub::Generate(MacroAssembler* masm) { 5279 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
5297 // ----------- S t a t e ------------- 5280 // ----------- S t a t e -------------
5298 // -- r0 : argc (only if argument_count_ == ANY) 5281 // -- r0 : argc (only if argument_count_ == ANY)
5299 // -- r1 : constructor 5282 // -- r1 : constructor
5300 // -- r2 : feedback vector (fixed array or undefined) 5283 // -- r2 : feedback vector (fixed array or megamorphic symbol)
5301 // -- r3 : slot index (if r2 is fixed array) 5284 // -- r3 : slot index (if r2 is fixed array)
5302 // -- sp[0] : return address 5285 // -- sp[0] : return address
5303 // -- sp[4] : last argument 5286 // -- sp[4] : last argument
5304 // ----------------------------------- 5287 // -----------------------------------
5288
5289 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
5290 masm->isolate()->heap()->megamorphic_symbol());
5291
5305 if (FLAG_debug_code) { 5292 if (FLAG_debug_code) {
5306 // The array construct code is only set for the global and natives 5293 // The array construct code is only set for the global and natives
5307 // builtin Array functions, which always have maps. 5294 // builtin Array functions, which always have maps.
5308 5295
5309 // Initial map for the builtin Array function should be a map. 5296 // Initial map for the builtin Array function should be a map.
5310 __ ldr(r4, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); 5297 __ ldr(r4, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
5311 // The Smi-tag test detects both a NULL and a Smi. 5298 // The Smi-tag test detects both a NULL and a Smi.
5312 __ tst(r4, Operand(kSmiTagMask)); 5299 __ tst(r4, Operand(kSmiTagMask));
5313 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); 5300 __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
5314 __ CompareObjectType(r4, r4, r5, MAP_TYPE); 5301 __ CompareObjectType(r4, r4, r5, MAP_TYPE);
5315 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); 5302 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
5316 5303
5317 // We should either have undefined in r2 or a valid fixed array. 5304 // We should either have the megamorphic symbol in r2 or a valid
5305 // fixed array.
5318 Label okay_here; 5306 Label okay_here;
5319 Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map(); 5307 Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map();
5320 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); 5308 __ CompareRoot(r2, Heap::kMegamorphicSymbolRootIndex);
5321 __ b(eq, &okay_here); 5309 __ b(eq, &okay_here);
5322 __ ldr(r4, FieldMemOperand(r2, 0)); 5310 __ ldr(r4, FieldMemOperand(r2, 0));
5323 __ cmp(r4, Operand(fixed_array_map)); 5311 __ cmp(r4, Operand(fixed_array_map));
5324 __ Assert(eq, kExpectedFixedArrayInRegisterR2); 5312 __ Assert(eq, kExpectedFixedArrayInRegisterR2);
5325 5313
5326 // r3 should be a smi if we don't have undefined in r2 5314 // r3 should be a smi if we don't have the megamorphic symbol in r2
5327 __ AssertSmi(r3); 5315 __ AssertSmi(r3);
5328 5316
5329 __ bind(&okay_here); 5317 __ bind(&okay_here);
5330 } 5318 }
5331 5319
5332 Label no_info; 5320 Label no_info;
5333 // Get the elements kind and case on that. 5321 // Get the elements kind and case on that.
5334 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); 5322 __ CompareRoot(r2, Heap::kMegamorphicSymbolRootIndex);
5335 __ b(eq, &no_info); 5323 __ b(eq, &no_info);
5336 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); 5324 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
5337 __ ldr(r2, FieldMemOperand(r2, FixedArray::kHeaderSize)); 5325 __ ldr(r2, FieldMemOperand(r2, FixedArray::kHeaderSize));
5338 5326
5339 // If the feedback vector is undefined, or contains anything other than an 5327 // If the feedback vector is the megamorphic sentinel, or contains anything other than an
5340 // AllocationSite, call an array constructor that doesn't use AllocationSites. 5328 // AllocationSite, call an array constructor that doesn't use AllocationSites.
5341 __ ldr(r4, FieldMemOperand(r2, 0)); 5329 __ ldr(r4, FieldMemOperand(r2, 0));
5342 __ CompareRoot(r4, Heap::kAllocationSiteMapRootIndex); 5330 __ CompareRoot(r4, Heap::kAllocationSiteMapRootIndex);
5343 __ b(ne, &no_info); 5331 __ b(ne, &no_info);
5344 5332
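[Editor's note] Summarizing the dispatch the assembly above implements: r2 is first compared against the megamorphic sentinel (previously undefined), and only if it is a real feedback vector is the slot loaded and its map checked against the AllocationSite map. A rough C++ equivalent, as a sketch only — it assumes the internal Object/FixedArray/AllocationSite types from src/objects.h and is not V8 source:

    // Hypothetical helper mirroring the checks in ArrayConstructorStub::Generate.
    AllocationSite* FindAllocationSite(Isolate* isolate,
                                       Object* feedback_vector, int slot) {
      // Megamorphic sentinel (was: undefined) means no usable feedback.
      if (feedback_vector == isolate->heap()->megamorphic_symbol()) return NULL;
      // Otherwise r2 holds a FixedArray; index it by the smi slot in r3.
      Object* feedback = FixedArray::cast(feedback_vector)->get(slot);
      // Anything other than an AllocationSite falls back to the generic
      // array constructor that ignores allocation sites.
      if (!feedback->IsAllocationSite()) return NULL;
      return AllocationSite::cast(feedback);
    }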
(...skipping 146 matching lines...)
5491 // holder 5479 // holder
5492 __ push(holder); 5480 __ push(holder);
5493 5481
5494 // Prepare arguments. 5482 // Prepare arguments.
5495 __ mov(scratch, sp); 5483 __ mov(scratch, sp);
5496 5484
5497 // Allocate the v8::Arguments structure in the arguments' space since 5485 // Allocate the v8::Arguments structure in the arguments' space since
5498 // it's not controlled by GC. 5486 // it's not controlled by GC.
5499 const int kApiStackSpace = 4; 5487 const int kApiStackSpace = 4;
5500 5488
5501 FrameScope frame_scope(masm, StackFrame::MANUAL); 5489 FrameAndConstantPoolScope frame_scope(masm, StackFrame::MANUAL);
5502 __ EnterExitFrame(false, kApiStackSpace); 5490 __ EnterExitFrame(false, kApiStackSpace);
5503 5491
5504 ASSERT(!api_function_address.is(r0) && !scratch.is(r0)); 5492 ASSERT(!api_function_address.is(r0) && !scratch.is(r0));
5505 // r0 = FunctionCallbackInfo& 5493 // r0 = FunctionCallbackInfo&
5506 // The arguments area sits just above the return address. 5494 // The arguments area sits just above the return address.
5507 __ add(r0, sp, Operand(1 * kPointerSize)); 5495 __ add(r0, sp, Operand(1 * kPointerSize));
5508 // FunctionCallbackInfo::implicit_args_ 5496 // FunctionCallbackInfo::implicit_args_
5509 __ str(scratch, MemOperand(r0, 0 * kPointerSize)); 5497 __ str(scratch, MemOperand(r0, 0 * kPointerSize));
5510 // FunctionCallbackInfo::values_ 5498 // FunctionCallbackInfo::values_
5511 __ add(ip, scratch, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize)); 5499 __ add(ip, scratch, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize));
(...skipping 39 matching lines...)
5551 // -- ... 5539 // -- ...
5552 // -- r2 : api_function_address 5540 // -- r2 : api_function_address
5553 // ----------------------------------- 5541 // -----------------------------------
5554 5542
5555 Register api_function_address = r2; 5543 Register api_function_address = r2;
5556 5544
5557 __ mov(r0, sp); // r0 = Handle<Name> 5545 __ mov(r0, sp); // r0 = Handle<Name>
5558 __ add(r1, r0, Operand(1 * kPointerSize)); // r1 = PCA 5546 __ add(r1, r0, Operand(1 * kPointerSize)); // r1 = PCA
5559 5547
5560 const int kApiStackSpace = 1; 5548 const int kApiStackSpace = 1;
5561 FrameScope frame_scope(masm, StackFrame::MANUAL); 5549 FrameAndConstantPoolScope frame_scope(masm, StackFrame::MANUAL);
5562 __ EnterExitFrame(false, kApiStackSpace); 5550 __ EnterExitFrame(false, kApiStackSpace);
5563 5551
5564 // Create PropertyAccessorInfo instance on the stack above the exit frame with 5552 // Create PropertyAccessorInfo instance on the stack above the exit frame with
5565 // r1 (internal::Object** args_) as the data. 5553 // r1 (internal::Object** args_) as the data.
5566 __ str(r1, MemOperand(sp, 1 * kPointerSize)); 5554 __ str(r1, MemOperand(sp, 1 * kPointerSize));
5567 __ add(r1, sp, Operand(1 * kPointerSize)); // r1 = AccessorInfo& 5555 __ add(r1, sp, Operand(1 * kPointerSize)); // r1 = AccessorInfo&
5568 5556
5569 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1; 5557 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
5570 5558
5571 Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); 5559 Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
5572 ExternalReference::Type thunk_type = 5560 ExternalReference::Type thunk_type =
5573 ExternalReference::PROFILING_GETTER_CALL; 5561 ExternalReference::PROFILING_GETTER_CALL;
5574 ApiFunction thunk_fun(thunk_address); 5562 ApiFunction thunk_fun(thunk_address);
5575 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type, 5563 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
5576 masm->isolate()); 5564 masm->isolate());
5577 __ CallApiFunctionAndReturn(api_function_address, 5565 __ CallApiFunctionAndReturn(api_function_address,
5578 thunk_ref, 5566 thunk_ref,
5579 kStackUnwindSpace, 5567 kStackUnwindSpace,
5580 MemOperand(fp, 6 * kPointerSize), 5568 MemOperand(fp, 6 * kPointerSize),
5581 NULL); 5569 NULL);
5582 } 5570 }
5583 5571
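[Editor's note] For reference, the exit-frame setup above ends with CallApiFunctionAndReturn dispatching (via the profiling thunk) to an accessor getter with V8's public callback signature: r0 carries the property name handle and r1 the PropertyCallbackInfo arguments. A minimal embedder-side sketch against the v8.h API of this era — the function name and returned string are chosen purely for illustration:

    #include <v8.h>

    // Sketch only: a getter of the shape this stub ultimately invokes.
    static void ExampleGetter(v8::Local<v8::String> property,
                              const v8::PropertyCallbackInfo<v8::Value>& info) {
      // Hand a value back to the JavaScript caller via the return-value slot.
      info.GetReturnValue().Set(
          v8::String::NewFromUtf8(info.GetIsolate(), "example"));
    }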
5584 5572
5585 #undef __ 5573 #undef __
5586 5574
5587 } } // namespace v8::internal 5575 } } // namespace v8::internal
5588 5576
5589 #endif // V8_TARGET_ARCH_ARM 5577 #endif // V8_TARGET_ARCH_ARM