Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(590)

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 7024047: [Arguments] Port fast arguments creation stubs to X64 and ARM. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/arguments
Patch Set: Better ARM code. Created 9 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« src/arm/code-stubs-arm.cc ('K') | « src/objects-printer.cc ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 1941 matching lines...) Expand 10 before | Expand all | Expand 10 after
1952 // Slow-case: Handle non-smi or out-of-bounds access to arguments 1952 // Slow-case: Handle non-smi or out-of-bounds access to arguments
1953 // by calling the runtime system. 1953 // by calling the runtime system.
1954 __ bind(&slow); 1954 __ bind(&slow);
1955 __ pop(rbx); // Return address. 1955 __ pop(rbx); // Return address.
1956 __ push(rdx); 1956 __ push(rdx);
1957 __ push(rbx); 1957 __ push(rbx);
1958 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); 1958 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
1959 } 1959 }
1960 1960
1961 1961
1962 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
1963 // rsp[0] : return address
1964 // rsp[8] : number of parameters (tagged)
1965 // rsp[16] : receiver displacement
1966 // rsp[24] : function
1967
1968 Factory* factory = masm->isolate()->factory();
1969
1970 // rbx = parameter count (untagged)
Lasse Reichstein 2011/06/07 12:20:19 I would move this comment to after the code that e
Karl Klose 2011/06/10 11:32:22 Done.
1971 __ movq(rbx, Operand(rsp, 1 * kPointerSize));
1972 __ SmiToInteger64(rbx, rbx);
Lasse Reichstein 2011/06/07 12:20:19 There is a SmiToInteger64(Register,Operand) macro
Karl Klose 2011/06/10 11:32:22 Done.
1973
1974 // Check if the calling frame is an arguments adaptor frame.
1975 Label runtime;
1976 Label adaptor_frame, try_allocate;
1977 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
1978 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
1979 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1980 __ j(equal, &adaptor_frame);
1981
1982 // No adaptor, parameter count = argument count.
1983 __ movq(rcx, rbx);
1984 __ jmp(&try_allocate, Label::kNear);
1985
1986 // We have an adaptor frame. Patch the parameters pointer.
1987 __ bind(&adaptor_frame);
1988 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
1989 __ SmiToInteger64(rcx, rcx);
Lasse Reichstein 2011/06/07 12:20:19 Ditto here.
1990 __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
1991 StandardFrameConstants::kCallerSPOffset));
1992 __ movq(Operand(rsp, 2 * kPointerSize), rdx);
1993
1994 // rbx = parameter count (untagged)
1995 // rcx = argument count (untagged)
1996 // rsp[8] = parameter count (tagged)
1997 // rsp[16] = address of receiver argument
1998 // Compute the mapped parameter count = min(rbx, rcx) in rbx.
1999 __ cmpq(rbx, rcx);
2000 __ j(less_equal, &try_allocate, Label::kNear);
2001 __ movq(rbx, rcx);
2002
2003 __ bind(&try_allocate);
2004
2005 // Save mapped parameter count.
2006 __ push(rbx);
2007
2008 // Compute the sizes of backing store, parameter map, and arguments object.
2009 // 1. Parameter map, has 2 extra words containing context and backing store.
2010 const int kParameterMapHeaderSize =
2011 FixedArray::kHeaderSize + 2 * kPointerSize;
2012 Label no_parameter_map;
2013 __ testq(rbx, rbx);
Lasse Reichstein 2011/06/07 12:20:19 You could get away with using just testl, since rb
Karl Klose 2011/06/10 11:32:22 I will keep the current code.
2014 __ j(zero, &no_parameter_map, Label::kNear);
2015 __ lea(rbx, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
2016 __ bind(&no_parameter_map);
2017
2018 // 2. Backing store.
2019 __ lea(rbx, Operand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize));
2020
2021 // 3. Arguments object.
2022 __ addq(rbx, Immediate(Heap::kArgumentsObjectSize));
2023
2024 // Do the allocation of all three objects in one go.
2025 __ AllocateInNewSpace(rbx, rax, rdx, rdi, &runtime, TAG_OBJECT);
2026
2027 // rax = address of new object(s) (tagged)
2028 // rcx = argument count (untagged)
2029 // rsp[0] = mapped parameter count (tagged)
2030 // rsp[16] = parameter count (tagged)
2031 // rsp[24] = address of receiver argument
2032 // Get the arguments boilerplate from the current (global) context into rdi.
2033 Label has_mapped_parameters, copy;
2034 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2035 __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
2036 __ movq(rbx, Operand(rsp, 0 * kPointerSize));
2037 __ SmiToInteger64(rbx, rbx);
Lasse Reichstein 2011/06/07 12:20:19 And another 64-bit load.
Karl Klose 2011/06/10 11:32:22 Done.
2038 __ testq(rbx, rbx);
2039 __ j(not_zero, &has_mapped_parameters, Label::kNear);
2040
2041 const int kIndex = Context::ARGUMENTS_BOILERPLATE_INDEX;
2042 __ movq(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
2043 __ jmp(&copy, Label::kNear);
2044
2045 const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX;
2046 __ bind(&has_mapped_parameters);
2047 __ movq(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex)));
2048 __ bind(&copy);
2049
2050 // rax = address of new object (tagged)
2051 // rbx = mapped parameter count (untagged)
2052 // rcx = argument count (untagged)
2053 // rdi = address of boilerplate object (tagged)
2054 // rsp[0] = mapped parameter count (tagged)
2055 // rsp[16] = parameter count (tagged)
2056 // rsp[24] = address of receiver argument
2057 // Copy the JS object part.
2058 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
2059 __ movq(rdx, FieldOperand(rdi, i));
2060 __ movq(FieldOperand(rax, i), rdx);
2061 }
2062
2063 // Setup the callee in-object property.
2064 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
2065 __ movq(rdx, Operand(rsp, 4 * kPointerSize));
2066 __ movq(FieldOperand(rax, JSObject::kHeaderSize +
2067 Heap::kArgumentsCalleeIndex * kPointerSize),
2068 rdx);
2069
2070 // Use the length (smi tagged) and set that as an in-object property too.
2071 // Note: rcx is tagged from here on.
2072 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
2073 __ Integer64PlusConstantToSmi(rcx, rcx, 0);
Lasse Reichstein 2011/06/07 12:20:19 Just use Integer32ToSmi when the constant is zero.
Karl Klose 2011/06/10 11:32:22 Done.
2074 __ movq(FieldOperand(rax, JSObject::kHeaderSize +
2075 Heap::kArgumentsLengthIndex * kPointerSize),
2076 rcx);
2077
2078 // Setup the elements pointer in the allocated arguments object.
2079 // If we allocated a parameter map, edi will point there, otherwise to the
2080 // backing store.
2081 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
2082 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
2083
2084 // rax = address of new object (tagged)
2085 // rbx = mapped parameter count (untagged)
2086 // rcx = argument count (tagged)
2087 // rdi = address of parameter map or backing store (tagged)
2088 // rsp[0] = mapped parameter count (tagged)
2089 // rsp[16] = parameter count (tagged)
2090 // rsp[24] = address of receiver argument
2091 // Free a register.
2092 __ push(rax);
2093
2094 // Initialize parameter map. If there are no mapped arguments, we're done.
2095 Label skip_parameter_map;
2096 __ testq(rbx, rbx);
2097 __ j(zero, &skip_parameter_map);
2098
2099 __ LoadRoot(kScratchRegister, Heap::kNonStrictArgumentsElementsMapRootIndex);
2100 // rbx contains the untagged argument count. Add 2 and tag to write.
2101 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
2102 __ Integer64PlusConstantToSmi(rax, rbx, 2);
2103 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rax);
2104 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
2105 __ lea(rax, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
2106 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), rax);
2107
2108 // Copy the parameter slots and the holes in the arguments.
2109 // We need to fill in mapped_parameter_count slots. They index the context,
2110 // where parameters are stored in reverse order, at
2111 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
2112 // The mapped parameters thus need to get indices
2113 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
2114 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
2115 // We loop from right to left.
2116 Label parameters_loop, parameters_test;
2117 __ push(rcx);
2118 // Load tagged parameter count into rax.
2119 __ movq(rax, Operand(rsp, 2 * kPointerSize));
2120 __ Move(rbx, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
2121 __ addq(rbx, Operand(rsp, 4 * kPointerSize));
2122 __ subq(rbx, rax);
2123 __ Move(rcx, factory->the_hole_value());
2124 __ movq(rdx, rdi);
2125 __ SmiToInteger64(kScratchRegister, rax);
2126 __ lea(rdi, Operand(rdi, kScratchRegister,
2127 times_pointer_size,
2128 kParameterMapHeaderSize));
2129 // rax = loop variable (tagged)
2130 // rbx = mapping index (tagged)
2131 // rcx = the hole value
2132 // rdx = address of parameter map (tagged)
2133 // rdi = address of backing store (tagged)
2134 // rsp[0] = argument count (tagged)
2135 // rsp[8] = address of new object (tagged)
2136 // rsp[16] = mapped parameter count (tagged)
2137 // rsp[32] = parameter count (tagged)
2138 // rsp[40] = address of receiver argument
2139 __ jmp(&parameters_test, Label::kNear);
2140
2141 __ bind(&parameters_loop);
2142 __ SmiSubConstant(rax, rax, Smi::FromInt(1));
2143 __ SmiToInteger64(kScratchRegister, rax);
2144 __ movq(FieldOperand(rdx, kScratchRegister,
2145 times_pointer_size,
2146 kParameterMapHeaderSize),
2147 rbx);
2148 __ movq(FieldOperand(rdi, kScratchRegister,
2149 times_pointer_size,
2150 FixedArray::kHeaderSize),
2151 rcx);
2152 __ SmiAddConstant(rbx, rbx, Smi::FromInt(1));
2153 __ bind(&parameters_test);
2154 __ testq(rax, rax);
Lasse Reichstein 2011/06/07 12:20:19 If rax is a smi, use SmiTest(rax); for readability
Karl Klose 2011/06/10 11:32:22 Done.
2155 __ j(not_zero, &parameters_loop, Label::kNear);
2156 __ pop(rcx);
2157
2158 __ bind(&skip_parameter_map);
2159
2160 // rcx = argument count (tagged)
2161 // rdi = address of backing store (tagged)
2162 // rsp[0] = address of new object (tagged)
2163 // rsp[8] = mapped parameter count (tagged)
2164 // rsp[24] = parameter count (tagged)
2165 // rsp[32] = address of receiver argument
2166 // Copy arguments header and remaining slots (if there are any).
2167 __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
2168 factory->fixed_array_map());
2169 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
2170
2171 Label arguments_loop, arguments_test;
2172 __ movq(rbx, Operand(rsp, 1 * kPointerSize));
2173 __ movq(rdx, Operand(rsp, 4 * kPointerSize));
2174 // Untag rcx and rbx for the loop below.
2175 __ SmiToInteger64(rcx, rcx);
2176 __ SmiToInteger64(rbx, rbx);
2177 __ lea(kScratchRegister, Operand(rbx, times_pointer_size, 0));
2178 __ subq(rdx, kScratchRegister);
2179 __ jmp(&arguments_test, Label::kNear);
2180
2181 __ bind(&arguments_loop);
2182 __ subq(rdx, Immediate(kPointerSize));
2183 __ movq(rax, Operand(rdx, 0));
2184 __ movq(FieldOperand(rdi, rbx,
2185 times_pointer_size,
2186 FixedArray::kHeaderSize),
2187 rax);
2188 __ addq(rbx, Immediate(1));
2189
2190 __ bind(&arguments_test);
2191 __ cmpq(rbx, rcx);
2192 __ j(less, &arguments_loop, Label::kNear);
2193
2194 // Restore.
2195 __ pop(rax); // Address of arguments object.
2196 __ pop(rbx); // Parameter count.
2197
2198 // Return and remove the on-stack parameters.
2199 __ ret(3 * kPointerSize);
2200
2201 // Do the runtime call to allocate the arguments object.
2202 // rcx = argument count (untagged)
2203 __ bind(&runtime);
2204 __ pop(rax); // Remove saved parameter count.
2205 __ Integer64PlusConstantToSmi(rcx, rcx, 0);
Lasse Reichstein 2011/06/07 12:20:19 Integer32ToSmi.
Karl Klose 2011/06/10 11:32:22 Done.
2206 __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count.
2207 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
2208 }
2209
2210
2211 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
2212 // rsp[0] : return address
2213 // rsp[8] : number of parameters
2214 // rsp[16] : receiver displacement
2215 // rsp[24] : function
2216
2217 // Check if the calling frame is an arguments adaptor frame.
2218 Label runtime;
2219 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2220 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
2221 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2222 __ j(not_equal, &runtime);
2223
2224 // Patch the arguments.length and the parameters pointer.
2225 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2226 __ movq(Operand(rsp, 1 * kPointerSize), rcx);
2227 __ SmiToInteger64(rcx, rcx);
2228 __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
2229 StandardFrameConstants::kCallerSPOffset));
2230 __ movq(Operand(rsp, 2 * kPointerSize), rdx);
2231
2232 __ bind(&runtime);
2233 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
2234 }
2235
2236
1962 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { 2237 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
1963 UNIMPLEMENTED();
1964 }
1965
1966
1967 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
1968 UNIMPLEMENTED();
1969 }
1970
1971
1972 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
1973 // rsp[0] : return address 2238 // rsp[0] : return address
1974 // rsp[8] : number of parameters 2239 // rsp[8] : number of parameters
1975 // rsp[16] : receiver displacement 2240 // rsp[16] : receiver displacement
1976 // rsp[24] : function 2241 // rsp[24] : function
1977 2242
1978 // The displacement is used for skipping the return address and the
1979 // frame pointer on the stack. It is the offset of the last
1980 // parameter (if any) relative to the frame pointer.
1981 static const int kDisplacement = 2 * kPointerSize;
1982
1983 // Check if the calling frame is an arguments adaptor frame. 2243 // Check if the calling frame is an arguments adaptor frame.
1984 Label adaptor_frame, try_allocate, runtime; 2244 Label adaptor_frame, try_allocate, runtime;
1985 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2245 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
1986 __ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset), 2246 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
1987 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 2247 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1988 __ j(equal, &adaptor_frame); 2248 __ j(equal, &adaptor_frame);
1989 2249
1990 // Get the length from the frame. 2250 // Get the length from the frame.
1991 __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize)); 2251 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
2252 __ SmiToInteger64(rcx, rcx);
1992 __ jmp(&try_allocate); 2253 __ jmp(&try_allocate);
1993 2254
1994 // Patch the arguments.length and the parameters pointer. 2255 // Patch the arguments.length and the parameters pointer.
1995 __ bind(&adaptor_frame); 2256 __ bind(&adaptor_frame);
1996 __ SmiToInteger32(rcx, 2257 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
1997 Operand(rdx, 2258 __ movq(Operand(rsp, 1 * kPointerSize), rcx);
1998 ArgumentsAdaptorFrameConstants::kLengthOffset)); 2259 __ SmiToInteger64(rcx, rcx);
1999 // Space on stack must already hold a smi. 2260 __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
2000 __ Integer32ToSmiField(Operand(rsp, 1 * kPointerSize), rcx); 2261 StandardFrameConstants::kCallerSPOffset));
2001 // Do not clobber the length index for the indexing operation since
2002 // it is used to compute the size for allocation later.
2003 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, kDisplacement));
2004 __ movq(Operand(rsp, 2 * kPointerSize), rdx); 2262 __ movq(Operand(rsp, 2 * kPointerSize), rdx);
2005 2263
2006 // Try the new space allocation. Start out with computing the size of 2264 // Try the new space allocation. Start out with computing the size of
2007 // the arguments object and the elements array. 2265 // the arguments object and the elements array.
2008 Label add_arguments_object; 2266 Label add_arguments_object;
2009 __ bind(&try_allocate); 2267 __ bind(&try_allocate);
2010 if (type_ == NEW_NON_STRICT_SLOW || type_ == NEW_NON_STRICT_FAST) { 2268 __ testq(rcx, rcx);
2011 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); 2269 __ j(zero, &add_arguments_object, Label::kNear);
2012 } else { 2270 __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
2013 __ testl(rcx, rcx); 2271 __ bind(&add_arguments_object);
2014 __ j(zero, &add_arguments_object); 2272 __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict));
2015 __ leal(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize)); 2273
2016 __ bind(&add_arguments_object); 2274 // Do the allocation of both objects in one go.
2017 __ addl(rcx, Immediate(Heap::kArgumentsObjectSize)); 2275 __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
2018 2276
2019 // Do the allocation of both objects in one go. 2277 // Get the arguments boilerplate from the current (global) context.
2020 __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT); 2278 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2021 2279 __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
2022 // Get the arguments boilerplate from the current (global) context. 2280 const int offset =
2023 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); 2281 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
2024 __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset)); 2282 __ movq(rdi, Operand(rdi, offset));
2025 __ movq(rdi, Operand(rdi, Context::SlotOffset( 2283
2026 Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX))); 2284 // Copy the JS object part.
2027 2285 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
2028 // Copy the JS object part. 2286 __ movq(rbx, FieldOperand(rdi, i));
2029 STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize); 2287 __ movq(FieldOperand(rax, i), rbx);
2030 __ movq(kScratchRegister, FieldOperand(rdi, 0 * kPointerSize));
2031 __ movq(rdx, FieldOperand(rdi, 1 * kPointerSize));
2032 __ movq(rbx, FieldOperand(rdi, 2 * kPointerSize));
2033 __ movq(FieldOperand(rax, 0 * kPointerSize), kScratchRegister);
2034 __ movq(FieldOperand(rax, 1 * kPointerSize), rdx);
2035 __ movq(FieldOperand(rax, 2 * kPointerSize), rbx);
2036
2037 if (type_ == NEW_NON_STRICT_SLOW || type_ == NEW_NON_STRICT_FAST) {
2038 // Setup the callee in-object property.
2039 ASSERT(Heap::kArgumentsCalleeIndex == 1);
2040 __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
2041 __ movq(FieldOperand(rax, JSObject::kHeaderSize +
2042 Heap::kArgumentsCalleeIndex * kPointerSize),
2043 kScratchRegister);
2044 }
2045
2046 // Get the length (smi tagged) and set that as an in-object property too.
2047 ASSERT(Heap::kArgumentsLengthIndex == 0);
2048 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
2049 __ movq(FieldOperand(rax, JSObject::kHeaderSize +
2050 Heap::kArgumentsLengthIndex * kPointerSize),
2051 rcx);
2052
2053 // If there are no actual arguments, we're done.
2054 Label done;
2055 __ SmiTest(rcx);
2056 __ j(zero, &done);
2057
2058 // Get the parameters pointer from the stack and untag the length.
2059 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
2060
2061 // Setup the elements pointer in the allocated arguments object and
2062 // initialize the header in the elements fixed array.
2063 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
2064 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
2065 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
2066 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
2067 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
2068 __ SmiToInteger32(rcx, rcx); // Untag length for the loop below.
2069
2070 // Copy the fixed array slots.
2071 Label loop;
2072 __ bind(&loop);
2073 // -1 ~ skip the receiver.
2074 __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize));
2075 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister);
2076 __ addq(rdi, Immediate(kPointerSize));
2077 __ subq(rdx, Immediate(kPointerSize));
2078 __ decl(rcx);
2079 __ j(not_zero, &loop);
2080
2081 // Return and remove the on-stack parameters.
2082 __ bind(&done);
2083 __ ret(3 * kPointerSize);
2084
2085 // Do the runtime call to allocate the arguments object.
2086 __ bind(&runtime);
2087 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
2088 } 2288 }
2289
2290 // Get the length (smi tagged) and set that as an in-object property too.
2291 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
2292 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
2293 __ movq(FieldOperand(rax, JSObject::kHeaderSize +
2294 Heap::kArgumentsLengthIndex * kPointerSize),
2295 rcx);
2296
2297 // If there are no actual arguments, we're done.
2298 Label done;
2299 __ testq(rcx, rcx);
2300 __ j(zero, &done);
2301
2302 // Get the parameters pointer from the stack.
2303 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
2304
2305 // Setup the elements pointer in the allocated arguments object and
2306 // initialize the header in the elements fixed array.
2307 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict));
2308 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
2309 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
2310 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
2311
2312
2313 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
2314 // Untag the length for the loop below.
2315 __ SmiToInteger64(rcx, rcx);
Lasse Reichstein 2011/06/07 12:20:19 It seems rcx could be just a 32-bit value here. It
2316
2317 // Copy the fixed array slots.
2318 Label loop;
2319 __ bind(&loop);
2320 __ movq(rbx, Operand(rdx, -1 * kPointerSize)); // Skip receiver.
2321 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), rbx);
2322 __ addq(rdi, Immediate(kPointerSize));
2323 __ subq(rdx, Immediate(kPointerSize));
2324 __ decq(rcx);
2325 __ j(not_zero, &loop);
2326
2327 // Return and remove the on-stack parameters.
2328 __ bind(&done);
2329 __ ret(3 * kPointerSize);
2330
2331 // Do the runtime call to allocate the arguments object.
2332 __ bind(&runtime);
2333 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
2089 } 2334 }
2090 2335
2091 2336
2092 void RegExpExecStub::Generate(MacroAssembler* masm) { 2337 void RegExpExecStub::Generate(MacroAssembler* masm) {
2093 // Just jump directly to runtime if native RegExp is not selected at compile 2338 // Just jump directly to runtime if native RegExp is not selected at compile
2094 // time or if regexp entry in generated code is turned off runtime switch or 2339 // time or if regexp entry in generated code is turned off runtime switch or
2095 // at compilation. 2340 // at compilation.
2096 #ifdef V8_INTERPRETED_REGEXP 2341 #ifdef V8_INTERPRETED_REGEXP
2097 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); 2342 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
2098 #else // V8_INTERPRETED_REGEXP 2343 #else // V8_INTERPRETED_REGEXP
(...skipping 3043 matching lines...) Expand 10 before | Expand all | Expand 10 after
5142 __ Drop(1); 5387 __ Drop(1);
5143 __ ret(2 * kPointerSize); 5388 __ ret(2 * kPointerSize);
5144 } 5389 }
5145 5390
5146 5391
5147 #undef __ 5392 #undef __
5148 5393
5149 } } // namespace v8::internal 5394 } } // namespace v8::internal
5150 5395
5151 #endif // V8_TARGET_ARCH_X64 5396 #endif // V8_TARGET_ARCH_X64
OLDNEW
« src/arm/code-stubs-arm.cc ('K') | « src/objects-printer.cc ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698