Chromium Code Reviews

Unified Diff: src/x64/code-stubs-x64.cc

Issue 7172030: Revert "Merge arguments branch to bleeding merge." (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 6 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 1927 matching lines...)
   // Slow-case: Handle non-smi or out-of-bounds access to arguments
   // by calling the runtime system.
   __ bind(&slow);
   __ pop(rbx);  // Return address.
   __ push(rdx);
   __ push(rbx);
   __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
 }


-void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
-  // Stack layout:
-  //  rsp[0] : return address
-  //  rsp[8] : number of parameters (tagged)
-  //  rsp[16] : receiver displacement
-  //  rsp[24] : function
-  // Registers used over the whole function:
-  //  rbx: the mapped parameter count (untagged)
-  //  rax: the allocated object (tagged).
-
-  Factory* factory = masm->isolate()->factory();
-
-  __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize));
-  // rbx = parameter count (untagged)
-
-  // Check if the calling frame is an arguments adaptor frame.
-  Label runtime;
-  Label adaptor_frame, try_allocate;
-  __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
-  __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
-  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
-  __ j(equal, &adaptor_frame);
-
-  // No adaptor, parameter count = argument count.
-  __ movq(rcx, rbx);
-  __ jmp(&try_allocate, Label::kNear);
-
-  // We have an adaptor frame. Patch the parameters pointer.
-  __ bind(&adaptor_frame);
-  __ SmiToInteger64(rcx,
-                    Operand(rdx,
-                            ArgumentsAdaptorFrameConstants::kLengthOffset));
-  __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
-                      StandardFrameConstants::kCallerSPOffset));
-  __ movq(Operand(rsp, 2 * kPointerSize), rdx);
-
-  // rbx = parameter count (untagged)
-  // rcx = argument count (untagged)
-  // Compute the mapped parameter count = min(rbx, rcx) in rbx.
-  __ cmpq(rbx, rcx);
-  __ j(less_equal, &try_allocate, Label::kNear);
-  __ movq(rbx, rcx);
-
-  __ bind(&try_allocate);
-
-  // Compute the sizes of backing store, parameter map, and arguments object.
-  // 1. Parameter map, has 2 extra words containing context and backing store.
-  const int kParameterMapHeaderSize =
-      FixedArray::kHeaderSize + 2 * kPointerSize;
-  Label no_parameter_map;
-  __ testq(rbx, rbx);
-  __ j(zero, &no_parameter_map, Label::kNear);
-  __ lea(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
-  __ bind(&no_parameter_map);
-
-  // 2. Backing store.
-  __ lea(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));
-
-  // 3. Arguments object.
-  __ addq(r8, Immediate(Heap::kArgumentsObjectSize));
-
-  // Do the allocation of all three objects in one go.
-  __ AllocateInNewSpace(r8, rax, rdx, rdi, &runtime, TAG_OBJECT);
-
-  // rax = address of new object(s) (tagged)
-  // rcx = argument count (untagged)
-  // Get the arguments boilerplate from the current (global) context into rdi.
-  Label has_mapped_parameters, copy;
-  __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
-  __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
-  __ testq(rbx, rbx);
-  __ j(not_zero, &has_mapped_parameters, Label::kNear);
-
-  const int kIndex = Context::ARGUMENTS_BOILERPLATE_INDEX;
-  __ movq(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
-  __ jmp(&copy, Label::kNear);
-
-  const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX;
-  __ bind(&has_mapped_parameters);
-  __ movq(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex)));
-  __ bind(&copy);
-
-  // rax = address of new object (tagged)
-  // rbx = mapped parameter count (untagged)
-  // rcx = argument count (untagged)
-  // rdi = address of boilerplate object (tagged)
-  // Copy the JS object part.
-  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
-    __ movq(rdx, FieldOperand(rdi, i));
-    __ movq(FieldOperand(rax, i), rdx);
-  }
-
-  // Setup the callee in-object property.
-  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
-  __ movq(rdx, Operand(rsp, 3 * kPointerSize));
-  __ movq(FieldOperand(rax, JSObject::kHeaderSize +
-                            Heap::kArgumentsCalleeIndex * kPointerSize),
-          rdx);
-
-  // Use the length (smi tagged) and set that as an in-object property too.
-  // Note: rcx is tagged from here on.
-  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
-  __ Integer32ToSmi(rcx, rcx);
-  __ movq(FieldOperand(rax, JSObject::kHeaderSize +
-                            Heap::kArgumentsLengthIndex * kPointerSize),
-          rcx);
-
-  // Setup the elements pointer in the allocated arguments object.
-  // If we allocated a parameter map, rdi will point there, otherwise to the
-  // backing store.
-  __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
-  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
-
-  // rax = address of new object (tagged)
-  // rbx = mapped parameter count (untagged)
-  // rcx = argument count (tagged)
-  // rdi = address of parameter map or backing store (tagged)
-
-  // Initialize parameter map. If there are no mapped arguments, we're done.
-  Label skip_parameter_map;
-  __ testq(rbx, rbx);
-  __ j(zero, &skip_parameter_map);
-
-  __ LoadRoot(kScratchRegister, Heap::kNonStrictArgumentsElementsMapRootIndex);
-  // rbx contains the untagged argument count. Add 2 and tag to write.
-  __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
-  __ Integer64PlusConstantToSmi(r9, rbx, 2);
-  __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
-  __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
-  __ lea(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
-  __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);
-
-  // Copy the parameter slots and the holes in the arguments.
-  // We need to fill in mapped_parameter_count slots. They index the context,
-  // where parameters are stored in reverse order, at
-  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS + parameter_count - 1
-  // The mapped parameters thus need to get indices
-  //   MIN_CONTEXT_SLOTS + parameter_count - 1 ..
-  //       MIN_CONTEXT_SLOTS + parameter_count - mapped_parameter_count
-  // We loop from right to left.
-  Label parameters_loop, parameters_test;
-
-  // Load tagged parameter count into r9.
-  __ movq(r9, Operand(rsp, 1 * kPointerSize));
-  __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
-  __ addq(r8, Operand(rsp, 3 * kPointerSize));
-  __ subq(r8, r9);
-  __ Move(r11, factory->the_hole_value());
-  __ movq(rdx, rdi);
-  __ SmiToInteger64(kScratchRegister, r9);
-  __ lea(rdi, Operand(rdi, kScratchRegister,
-                      times_pointer_size,
-                      kParameterMapHeaderSize));
-  // r9 = loop variable (tagged)
-  // r8 = mapping index (tagged)
-  // r11 = the hole value
-  // rdx = address of parameter map (tagged)
-  // rdi = address of backing store (tagged)
-  __ jmp(&parameters_test, Label::kNear);
-
-  __ bind(&parameters_loop);
-  __ SmiSubConstant(r9, r9, Smi::FromInt(1));
-  __ SmiToInteger64(kScratchRegister, r9);
-  __ movq(FieldOperand(rdx, kScratchRegister,
-                       times_pointer_size,
-                       kParameterMapHeaderSize),
-          r8);
-  __ movq(FieldOperand(rdi, kScratchRegister,
-                       times_pointer_size,
-                       FixedArray::kHeaderSize),
-          r11);
-  __ SmiAddConstant(r8, r8, Smi::FromInt(1));
-  __ bind(&parameters_test);
-  __ SmiTest(r9);
-  __ j(not_zero, &parameters_loop, Label::kNear);
-
-  __ bind(&skip_parameter_map);
-
-  // rcx = argument count (tagged)
-  // rdi = address of backing store (tagged)
-  // Copy arguments header and remaining slots (if there are any).
-  __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
-          factory->fixed_array_map());
-  __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
-
-  Label arguments_loop, arguments_test;
-  __ movq(r8, rbx);
-  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
-  // Untag rcx and r8 for the loop below.
-  __ SmiToInteger64(rcx, rcx);
-  __ SmiToInteger64(r8, r8);
-  __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0));
-  __ subq(rdx, kScratchRegister);
-  __ jmp(&arguments_test, Label::kNear);
-
-  __ bind(&arguments_loop);
-  __ subq(rdx, Immediate(kPointerSize));
-  __ movq(r9, Operand(rdx, 0));
-  __ movq(FieldOperand(rdi, r8,
-                       times_pointer_size,
-                       FixedArray::kHeaderSize),
-          r9);
-  __ addq(r8, Immediate(1));
-
-  __ bind(&arguments_test);
-  __ cmpq(r8, rcx);
-  __ j(less, &arguments_loop, Label::kNear);
-
-  // Return and remove the on-stack parameters.
-  __ ret(3 * kPointerSize);
-
-  // Do the runtime call to allocate the arguments object.
-  // rcx = argument count (untagged)
-  __ bind(&runtime);
-  __ Integer32ToSmi(rcx, rcx);
-  __ movq(Operand(rsp, 1 * kPointerSize), rcx);  // Patch argument count.
-  __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
-}
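
The index arithmetic in the parameter-map comment above is easier to follow with concrete numbers. The following standalone C++ sketch (not V8 code) mirrors it: kMinContextSlots stands in for Context::MIN_CONTEXT_SLOTS (its concrete value here is assumed for illustration), and both function names are invented.

#include <algorithm>
#include <cstdio>

static const int kMinContextSlots = 5;  // Assumed stand-in value.

// Context index that parameter map slot `i` (0-based, filled right to left
// by the stub) receives for a function with `parameter_count` formals.
int ContextIndexForSlot(int parameter_count, int i) {
  // Parameters live in the context in reverse order, so slot 0 of the map
  // refers to the highest context index and each following slot to the
  // index below it.
  return kMinContextSlots + parameter_count - 1 - i;
}

int main() {
  const int parameter_count = 3;
  const int argument_count = 2;
  // Only min(parameter_count, argument_count) slots are mapped.
  const int mapped_count = std::min(parameter_count, argument_count);
  for (int i = 0; i < mapped_count; i++) {
    // With the values above: slot 0 -> 7, slot 1 -> 6.
    std::printf("map slot %d -> context slot %d\n",
                i, ContextIndexForSlot(parameter_count, i));
  }
  return 0;
}

With three formal parameters and two actual arguments, the two mapped slots receive context indices 7 and 6, matching the right-to-left fill in the stub.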
-
-
-void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
-  // rsp[0] : return address
-  // rsp[8] : number of parameters
-  // rsp[16] : receiver displacement
-  // rsp[24] : function
-
-  // Check if the calling frame is an arguments adaptor frame.
-  Label runtime;
-  __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
-  __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
-  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
-  __ j(not_equal, &runtime);
-
-  // Patch the arguments.length and the parameters pointer.
-  __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
-  __ movq(Operand(rsp, 1 * kPointerSize), rcx);
-  __ SmiToInteger64(rcx, rcx);
-  __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
-                      StandardFrameConstants::kCallerSPOffset));
-  __ movq(Operand(rsp, 2 * kPointerSize), rdx);
-
-  __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
-}
-
-
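Both GenerateNewNonStrictSlow above and GenerateNewObject below recompute the parameters pointer for an adaptor frame as frame pointer + StandardFrameConstants::kCallerSPOffset + length * kPointerSize. A minimal sketch of that address arithmetic follows, assuming the x64 layout in which a return address and a saved frame pointer sit between the frame pointer and the caller's stack pointer; the constant value and the helper name are illustrative, not V8's.

#include <cstdint>
#include <cstdio>

static const int kPointerSize = 8;
// Assumed: two words (saved fp, return address) above the frame pointer.
static const int kCallerSPOffset = 2 * kPointerSize;

// Roughly what the `lea` computes: the parameters pointer for a frame with
// `argument_count` actual arguments.
uintptr_t ParametersPointer(uintptr_t adaptor_fp, int argument_count) {
  return adaptor_fp + kCallerSPOffset + argument_count * kPointerSize;
}

int main() {
  // Example: an adaptor frame at 0x7fff0000 with 4 actual arguments puts
  // the parameters pointer 0x30 bytes above the frame pointer.
  std::printf("0x%llx\n",
              (unsigned long long)ParametersPointer(0x7fff0000u, 4));
  return 0;
}
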
-void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
+void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
   // rsp[0] : return address
   // rsp[8] : number of parameters
   // rsp[16] : receiver displacement
   // rsp[24] : function

+  // The displacement is used for skipping the return address and the
+  // frame pointer on the stack. It is the offset of the last
+  // parameter (if any) relative to the frame pointer.
+  static const int kDisplacement = 2 * kPointerSize;
+
   // Check if the calling frame is an arguments adaptor frame.
   Label adaptor_frame, try_allocate, runtime;
   __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
-  __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
-  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+  __ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
+         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(equal, &adaptor_frame);

   // Get the length from the frame.
-  __ movq(rcx, Operand(rsp, 1 * kPointerSize));
-  __ SmiToInteger64(rcx, rcx);
+  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
   __ jmp(&try_allocate);

   // Patch the arguments.length and the parameters pointer.
   __ bind(&adaptor_frame);
-  __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
-  __ movq(Operand(rsp, 1 * kPointerSize), rcx);
-  __ SmiToInteger64(rcx, rcx);
-  __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
-                      StandardFrameConstants::kCallerSPOffset));
+  __ SmiToInteger32(rcx,
+                    Operand(rdx,
+                            ArgumentsAdaptorFrameConstants::kLengthOffset));
+  // Space on stack must already hold a smi.
+  __ Integer32ToSmiField(Operand(rsp, 1 * kPointerSize), rcx);
+  // Do not clobber the length index for the indexing operation since
+  // it is used to compute the size for allocation later.
+  __ lea(rdx, Operand(rdx, rcx, times_pointer_size, kDisplacement));
   __ movq(Operand(rsp, 2 * kPointerSize), rdx);

   // Try the new space allocation. Start out with computing the size of
   // the arguments object and the elements array.
   Label add_arguments_object;
   __ bind(&try_allocate);
-  __ testq(rcx, rcx);
-  __ j(zero, &add_arguments_object, Label::kNear);
-  __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
+  __ testl(rcx, rcx);
+  __ j(zero, &add_arguments_object);
+  __ leal(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
   __ bind(&add_arguments_object);
-  __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict));
+  __ addl(rcx, Immediate(GetArgumentsObjectSize()));

   // Do the allocation of both objects in one go.
   __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);

   // Get the arguments boilerplate from the current (global) context.
   __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
   __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
-  const int offset =
-      Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
-  __ movq(rdi, Operand(rdi, offset));
+  __ movq(rdi, Operand(rdi,
+                       Context::SlotOffset(GetArgumentsBoilerplateIndex())));

   // Copy the JS object part.
-  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
-    __ movq(rbx, FieldOperand(rdi, i));
-    __ movq(FieldOperand(rax, i), rbx);
+  STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
+  __ movq(kScratchRegister, FieldOperand(rdi, 0 * kPointerSize));
+  __ movq(rdx, FieldOperand(rdi, 1 * kPointerSize));
+  __ movq(rbx, FieldOperand(rdi, 2 * kPointerSize));
+  __ movq(FieldOperand(rax, 0 * kPointerSize), kScratchRegister);
+  __ movq(FieldOperand(rax, 1 * kPointerSize), rdx);
+  __ movq(FieldOperand(rax, 2 * kPointerSize), rbx);
+
+  if (type_ == NEW_NON_STRICT) {
+    // Setup the callee in-object property.
+    ASSERT(Heap::kArgumentsCalleeIndex == 1);
+    __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
+    __ movq(FieldOperand(rax, JSObject::kHeaderSize +
+                              Heap::kArgumentsCalleeIndex * kPointerSize),
+            kScratchRegister);
   }

   // Get the length (smi tagged) and set that as an in-object property too.
-  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
+  ASSERT(Heap::kArgumentsLengthIndex == 0);
   __ movq(rcx, Operand(rsp, 1 * kPointerSize));
   __ movq(FieldOperand(rax, JSObject::kHeaderSize +
                             Heap::kArgumentsLengthIndex * kPointerSize),
           rcx);

   // If there are no actual arguments, we're done.
   Label done;
-  __ testq(rcx, rcx);
+  __ SmiTest(rcx);
   __ j(zero, &done);

-  // Get the parameters pointer from the stack.
+  // Get the parameters pointer from the stack and untag the length.
   __ movq(rdx, Operand(rsp, 2 * kPointerSize));

   // Setup the elements pointer in the allocated arguments object and
   // initialize the header in the elements fixed array.
-  __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict));
+  __ lea(rdi, Operand(rax, GetArgumentsObjectSize()));
   __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
   __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
   __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
-
-
   __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
-  // Untag the length for the loop below.
-  __ SmiToInteger64(rcx, rcx);
+  __ SmiToInteger32(rcx, rcx);  // Untag length for the loop below.

   // Copy the fixed array slots.
   Label loop;
   __ bind(&loop);
-  __ movq(rbx, Operand(rdx, -1 * kPointerSize));  // Skip receiver.
-  __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), rbx);
+  __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize));  // Skip receiver.
+  __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister);
   __ addq(rdi, Immediate(kPointerSize));
   __ subq(rdx, Immediate(kPointerSize));
-  __ decq(rcx);
+  __ decl(rcx);
   __ j(not_zero, &loop);

   // Return and remove the on-stack parameters.
   __ bind(&done);
   __ ret(3 * kPointerSize);

   // Do the runtime call to allocate the arguments object.
   __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
+  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
 }
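
Several changes in GenerateNewObject swap 64-bit operations (testq, decq, SmiToInteger64) for their 32-bit forms (testl, decl, SmiToInteger32). This is sound because V8 on x64 stores a smi's 32-bit payload in the upper half of a 64-bit word, with the low bits zero (which doubles as the smi tag), so an untagged count always fits in 32 bits. A minimal model of that tagging scheme follows; the helpers are illustrative stand-ins, not V8's implementation.

#include <cstdint>
#include <cstdio>

int64_t Integer32ToSmi(int32_t value) {
  // Tag: shift the 32-bit payload into the high half of the word.
  return static_cast<int64_t>(value) << 32;
}

int32_t SmiToInteger32(int64_t smi) {
  // Untag: shift the payload back down into the low 32 bits.
  return static_cast<int32_t>(smi >> 32);
}

int main() {
  int64_t tagged = Integer32ToSmi(42);  // 42 << 32 = 0x2a00000000
  std::printf("tagged:   0x%llx\n", (unsigned long long)tagged);
  std::printf("untagged: %d\n", SmiToInteger32(tagged));  // 42 again
  return 0;
}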


 void RegExpExecStub::Generate(MacroAssembler* masm) {
   // Just jump directly to runtime if native RegExp is not selected at compile
   // time, or if the regexp entry in generated code is turned off by a runtime
   // switch or at compilation.
 #ifdef V8_INTERPRETED_REGEXP
   __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
 #else  // V8_INTERPRETED_REGEXP
(...skipping 3043 matching lines...)
   __ Drop(1);
   __ ret(2 * kPointerSize);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64