Chromium Code Reviews

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 137403009: Adding a type vector to replace type cells. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed feedback. Created 6 years, 10 months ago
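For context on the change itself: the old code gave each call site its own type cell (a Cell whose value held the feedback), while this patch moves that feedback into a per-function vector indexed by a slot number passed alongside the vector (rbx holds the vector, rdx the slot). A minimal sketch of the two shapes, using hypothetical stand-in types rather than V8's real classes:

```cpp
// Illustrative sketch only (not part of the patch); names are hypothetical.
#include <vector>

struct HeapObjectStub {};  // stand-in for a V8 heap object

// Old model: every call site owns one Cell holding its feedback value
// (a JSFunction, an AllocationSite, or a sentinel).
struct CellSketch {
  HeapObjectStub* value;
};

// New model: one FixedArray-like vector per function; each call site is
// assigned an integer slot into that shared vector.
struct FeedbackVectorSketch {
  std::vector<HeapObjectStub*> slots;
  HeapObjectStub* Get(int slot) const { return slots[slot]; }
  void Set(int slot, HeapObjectStub* v) { slots[slot] = v; }
};
```

The diff below is the x64 port of that change: each place that previously read or wrote Cell::kValueOffset now indexes into the vector at FixedArray::kHeaderSize plus the slot offset.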
OLD | NEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 88 matching lines...)
99 descriptor->register_param_count_ = 4; 99 descriptor->register_param_count_ = 4;
100 descriptor->register_params_ = registers; 100 descriptor->register_params_ = registers;
101 descriptor->deoptimization_handler_ = 101 descriptor->deoptimization_handler_ =
102 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; 102 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry;
103 } 103 }
104 104
105 105
106 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( 106 void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
107 Isolate* isolate, 107 Isolate* isolate,
108 CodeStubInterfaceDescriptor* descriptor) { 108 CodeStubInterfaceDescriptor* descriptor) {
109 static Register registers[] = { rbx }; 109 static Register registers[] = { rbx, rdx };
110 descriptor->register_param_count_ = 1; 110 descriptor->register_param_count_ = 2;
111 descriptor->register_params_ = registers; 111 descriptor->register_params_ = registers;
112 descriptor->deoptimization_handler_ = NULL; 112 descriptor->deoptimization_handler_ = NULL;
113 } 113 }
114 114
115 115
116 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( 116 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
117 Isolate* isolate, 117 Isolate* isolate,
118 CodeStubInterfaceDescriptor* descriptor) { 118 CodeStubInterfaceDescriptor* descriptor) {
119 static Register registers[] = { rdx, rax }; 119 static Register registers[] = { rdx, rax };
120 descriptor->register_param_count_ = 2; 120 descriptor->register_param_count_ = 2;
(...skipping 2033 matching lines...)
2154 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) 2154 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
2155 // tagged as a small integer. 2155 // tagged as a small integer.
2156 __ InvokeBuiltin(builtin, JUMP_FUNCTION); 2156 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
2157 2157
2158 __ bind(&miss); 2158 __ bind(&miss);
2159 GenerateMiss(masm); 2159 GenerateMiss(masm);
2160 } 2160 }
2161 2161
2162 2162
2163 static void GenerateRecordCallTarget(MacroAssembler* masm) { 2163 static void GenerateRecordCallTarget(MacroAssembler* masm) {
2164 // Cache the called function in a global property cell. Cache states 2164 // Cache the called function in a feedback vector slot. Cache states
2165 // are uninitialized, monomorphic (indicated by a JSFunction), and 2165 // are uninitialized, monomorphic (indicated by a JSFunction), and
2166 // megamorphic. 2166 // megamorphic.
2167 // rax : number of arguments to the construct function 2167 // rax : number of arguments to the construct function
2168 // rbx : cache cell for call target 2168 // rbx : Feedback vector
2169 // rdx : slot in feedback vector (Smi)
2169 // rdi : the function to call 2170 // rdi : the function to call
2170 Isolate* isolate = masm->isolate(); 2171 Isolate* isolate = masm->isolate();
2171 Label initialize, done, miss, megamorphic, not_array_function; 2172 Label initialize, done, miss, megamorphic, not_array_function,
2173 done_no_smi_convert;
2172 2174
2173 // Load the cache state into rcx. 2175 // Load the cache state into rcx.
2174 __ movp(rcx, FieldOperand(rbx, Cell::kValueOffset)); 2176 __ SmiToInteger32(rdx, rdx);
2177 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
2178 FixedArray::kHeaderSize));
2175 2179
2176 // A monomorphic cache hit or an already megamorphic state: invoke the 2180 // A monomorphic cache hit or an already megamorphic state: invoke the
2177 // function without changing the state. 2181 // function without changing the state.
2178 __ cmpq(rcx, rdi); 2182 __ cmpq(rcx, rdi);
2179 __ j(equal, &done); 2183 __ j(equal, &done);
2180 __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate)); 2184 __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate));
2181 __ j(equal, &done); 2185 __ j(equal, &done);
2182 2186
2183 // If we came here, we need to see if we are the array function. 2187 // If we came here, we need to see if we are the array function.
2184 // If we didn't have a matching function, and we didn't find the megamorph 2188 // If we didn't have a matching function, and we didn't find the megamorph
2185 // sentinel, then we have in the cell either some other function or an 2189 // sentinel, then we have in the slot either some other function or an
2186 // AllocationSite. Do a map check on the object in rcx. 2190 // AllocationSite. Do a map check on the object in rcx.
2187 Handle<Map> allocation_site_map = 2191 Handle<Map> allocation_site_map =
2188 masm->isolate()->factory()->allocation_site_map(); 2192 masm->isolate()->factory()->allocation_site_map();
2189 __ Cmp(FieldOperand(rcx, 0), allocation_site_map); 2193 __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
2190 __ j(not_equal, &miss); 2194 __ j(not_equal, &miss);
2191 2195
2192 // Make sure the function is the Array() function 2196 // Make sure the function is the Array() function
2193 __ LoadArrayFunction(rcx); 2197 __ LoadArrayFunction(rcx);
2194 __ cmpq(rdi, rcx); 2198 __ cmpq(rdi, rcx);
2195 __ j(not_equal, &megamorphic); 2199 __ j(not_equal, &megamorphic);
2196 __ jmp(&done); 2200 __ jmp(&done);
2197 2201
2198 __ bind(&miss); 2202 __ bind(&miss);
2199 2203
2200 // A monomorphic miss (i.e, here the cache is not uninitialized) goes 2204 // A monomorphic miss (i.e, here the cache is not uninitialized) goes
2201 // megamorphic. 2205 // megamorphic.
2202 __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate)); 2206 __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate));
2203 __ j(equal, &initialize); 2207 __ j(equal, &initialize);
2204 // MegamorphicSentinel is an immortal immovable object (undefined) so no 2208 // MegamorphicSentinel is an immortal immovable object (undefined) so no
2205 // write-barrier is needed. 2209 // write-barrier is needed.
2206 __ bind(&megamorphic); 2210 __ bind(&megamorphic);
2207 __ Move(FieldOperand(rbx, Cell::kValueOffset), 2211 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
2208 TypeFeedbackCells::MegamorphicSentinel(isolate)); 2212 TypeFeedbackInfo::MegamorphicSentinel(isolate));
2209 __ jmp(&done); 2213 __ jmp(&done);
2210 2214
2211 // An uninitialized cache is patched with the function or sentinel to 2215 // An uninitialized cache is patched with the function or sentinel to
2212 // indicate the ElementsKind if function is the Array constructor. 2216 // indicate the ElementsKind if function is the Array constructor.
2213 __ bind(&initialize); 2217 __ bind(&initialize);
2214 // Make sure the function is the Array() function 2218 // Make sure the function is the Array() function
2215 __ LoadArrayFunction(rcx); 2219 __ LoadArrayFunction(rcx);
2216 __ cmpq(rdi, rcx); 2220 __ cmpq(rdi, rcx);
2217 __ j(not_equal, &not_array_function); 2221 __ j(not_equal, &not_array_function);
2218 2222
2219 // The target function is the Array constructor, 2223 // The target function is the Array constructor,
2220 // Create an AllocationSite if we don't already have it, store it in the cell 2224 // Create an AllocationSite if we don't already have it, store it in the slot.
2221 { 2225 {
2222 FrameScope scope(masm, StackFrame::INTERNAL); 2226 FrameScope scope(masm, StackFrame::INTERNAL);
2223 2227
2224 // Arguments register must be smi-tagged to call out. 2228 // Arguments register must be smi-tagged to call out.
2225 __ Integer32ToSmi(rax, rax); 2229 __ Integer32ToSmi(rax, rax);
2226 __ push(rax); 2230 __ push(rax);
2227 __ push(rdi); 2231 __ push(rdi);
2232 __ Integer32ToSmi(rdx, rdx);
2233 __ push(rdx);
2228 __ push(rbx); 2234 __ push(rbx);
2229 2235
2230 CreateAllocationSiteStub create_stub; 2236 CreateAllocationSiteStub create_stub;
2231 __ CallStub(&create_stub); 2237 __ CallStub(&create_stub);
2232 2238
2233 __ pop(rbx); 2239 __ pop(rbx);
2240 __ pop(rdx);
2234 __ pop(rdi); 2241 __ pop(rdi);
2235 __ pop(rax); 2242 __ pop(rax);
2236 __ SmiToInteger32(rax, rax); 2243 __ SmiToInteger32(rax, rax);
2237 } 2244 }
2238 __ jmp(&done); 2245 __ jmp(&done_no_smi_convert);
2239 2246
2240 __ bind(&not_array_function); 2247 __ bind(&not_array_function);
2241 __ movp(FieldOperand(rbx, Cell::kValueOffset), rdi); 2248 __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
2242 // No need for a write barrier here - cells are rescanned. 2249 rdi);
2250
2251 // We won't need rdx or rbx anymore, just save rdi
2252 __ push(rdi);
2253 __ push(rbx);
2254 __ push(rdx);
2255 __ RecordWriteArray(rbx, rdi, rdx, kDontSaveFPRegs,
2256 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
2257 __ pop(rdx);
2258 __ pop(rbx);
2259 __ pop(rdi);
2243 2260
2244 __ bind(&done); 2261 __ bind(&done);
2262 __ Integer32ToSmi(rdx, rdx);
2263
2264 __ bind(&done_no_smi_convert);
2245 } 2265 }
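The addressing pattern used throughout the new code, FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize) after an SmiToInteger32 on rdx, resolves to the slot's field inside the feedback vector. A sketch of the arithmetic, with the usual x64 constants assumed (kPointerSize = 8, kHeapObjectTag = 1, FixedArray::kHeaderSize = 16); treat the exact values as assumptions, not part of the patch:

```cpp
// Sketch only: effective address of feedback slot `slot` in a vector whose
// tagged pointer is `vector_addr`, mirroring FieldOperand(rbx, rdx,
// times_pointer_size, FixedArray::kHeaderSize) with rdx already untagged.
#include <cstdint>

int64_t FeedbackSlotAddress(int64_t vector_addr, int32_t slot) {
  const int kPointerSize = 8;            // x64 (assumed)
  const int kHeapObjectTag = 1;          // heap pointers carry tag bit 1 (assumed)
  const int kFixedArrayHeaderSize = 16;  // map + length fields (assumed)
  // FieldOperand subtracts the tag so the untagged object base is used.
  return vector_addr - kHeapObjectTag + kFixedArrayHeaderSize +
         static_cast<int64_t>(slot) * kPointerSize;
}
```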
2246 2266
2247 2267
2248 void CallFunctionStub::Generate(MacroAssembler* masm) { 2268 void CallFunctionStub::Generate(MacroAssembler* masm) {
2249 // rbx : cache cell for call target 2269 // rbx : feedback vector
2270 // rdx : (only if rbx is not undefined) slot in feedback vector (Smi)
2250 // rdi : the function to call 2271 // rdi : the function to call
2251 Isolate* isolate = masm->isolate(); 2272 Isolate* isolate = masm->isolate();
2252 Label slow, non_function, wrap, cont; 2273 Label slow, non_function, wrap, cont;
2253 StackArgumentsAccessor args(rsp, argc_); 2274 StackArgumentsAccessor args(rsp, argc_);
2254 2275
2255 if (NeedsChecks()) { 2276 if (NeedsChecks()) {
2256 // Check that the function really is a JavaScript function. 2277 // Check that the function really is a JavaScript function.
2257 __ JumpIfSmi(rdi, &non_function); 2278 __ JumpIfSmi(rdi, &non_function);
2258 2279
2259 // Goto slow case if we do not have a function. 2280 // Goto slow case if we do not have a function.
(...skipping 16 matching lines...)
2276 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); 2297 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
2277 __ j(not_equal, &cont); 2298 __ j(not_equal, &cont);
2278 2299
2279 // Do not transform the receiver for natives. 2300 // Do not transform the receiver for natives.
2280 // SharedFunctionInfo is already loaded into rcx. 2301 // SharedFunctionInfo is already loaded into rcx.
2281 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset), 2302 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
2282 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); 2303 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
2283 __ j(not_equal, &cont); 2304 __ j(not_equal, &cont);
2284 } 2305 }
2285 2306
2307
2286 // Load the receiver from the stack. 2308 // Load the receiver from the stack.
2287 __ movp(rax, args.GetReceiverOperand()); 2309 __ movp(rax, args.GetReceiverOperand());
2288 2310
2289 if (NeedsChecks()) { 2311 if (NeedsChecks()) {
2290 __ JumpIfSmi(rax, &wrap); 2312 __ JumpIfSmi(rax, &wrap);
2291 2313
2292 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); 2314 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
2293 __ j(below, &wrap); 2315 __ j(below, &wrap);
2294 } else { 2316 } else {
2295 __ jmp(&wrap); 2317 __ jmp(&wrap);
2296 } 2318 }
2297 2319
2298 __ bind(&cont); 2320 __ bind(&cont);
2299 } 2321 }
2300 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); 2322 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
2301 2323
2302 if (NeedsChecks()) { 2324 if (NeedsChecks()) {
2303 // Slow-case: Non-function called. 2325 // Slow-case: Non-function called.
2304 __ bind(&slow); 2326 __ bind(&slow);
2305 if (RecordCallTarget()) { 2327 if (RecordCallTarget()) {
2306 // If there is a call target cache, mark it megamorphic in the 2328 // If there is a call target cache, mark it megamorphic in the
2307 // non-function case. MegamorphicSentinel is an immortal immovable 2329 // non-function case. MegamorphicSentinel is an immortal immovable
2308 // object (undefined) so no write barrier is needed. 2330 // object (undefined) so no write barrier is needed.
2309 __ Move(FieldOperand(rbx, Cell::kValueOffset), 2331 __ SmiToInteger32(rdx, rdx);
2310 TypeFeedbackCells::MegamorphicSentinel(isolate)); 2332 __ Move(FieldOperand(rbx, rdx, times_pointer_size,
2333 FixedArray::kHeaderSize),
2334 TypeFeedbackInfo::MegamorphicSentinel(isolate));
2335 __ Integer32ToSmi(rdx, rdx);
2311 } 2336 }
2312 // Check for function proxy. 2337 // Check for function proxy.
2313 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); 2338 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
2314 __ j(not_equal, &non_function); 2339 __ j(not_equal, &non_function);
2315 __ PopReturnAddressTo(rcx); 2340 __ PopReturnAddressTo(rcx);
2316 __ push(rdi); // put proxy as additional argument under return address 2341 __ push(rdi); // put proxy as additional argument under return address
2317 __ PushReturnAddressFrom(rcx); 2342 __ PushReturnAddressFrom(rcx);
2318 __ Set(rax, argc_ + 1); 2343 __ Set(rax, argc_ + 1);
2319 __ Set(rbx, 0); 2344 __ Set(rbx, 0);
2320 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); 2345 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
(...skipping 25 matching lines...)
2346 __ pop(rdi); 2371 __ pop(rdi);
2347 } 2372 }
2348 __ movp(args.GetReceiverOperand(), rax); 2373 __ movp(args.GetReceiverOperand(), rax);
2349 __ jmp(&cont); 2374 __ jmp(&cont);
2350 } 2375 }
2351 } 2376 }
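Taken together, GenerateRecordCallTarget and this slow path implement a small state machine per feedback slot: uninitialized, monomorphic (a JSFunction or AllocationSite), and megamorphic (the undefined sentinel, which needs no write barrier). A rough sketch of those transitions, as a hypothetical helper rather than anything in V8:

```cpp
// Sketch of the call-target feedback transitions; not V8 API.
enum class FeedbackState { kUninitialized, kMonomorphic, kMegamorphic };

FeedbackState NextState(FeedbackState current, bool same_target_as_recorded) {
  switch (current) {
    case FeedbackState::kUninitialized:
      return FeedbackState::kMonomorphic;  // record the first target seen
    case FeedbackState::kMonomorphic:
      // A different target (or a non-function call) degrades the slot to
      // megamorphic; the same target keeps it monomorphic.
      return same_target_as_recorded ? FeedbackState::kMonomorphic
                                     : FeedbackState::kMegamorphic;
    case FeedbackState::kMegamorphic:
      return FeedbackState::kMegamorphic;  // terminal state
  }
  return current;
}
```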
2352 2377
2353 2378
2354 void CallConstructStub::Generate(MacroAssembler* masm) { 2379 void CallConstructStub::Generate(MacroAssembler* masm) {
2355 // rax : number of arguments 2380 // rax : number of arguments
2356 // rbx : cache cell for call target 2381 // rbx : feedback vector
2382 // rdx : (only if rbx is not undefined) slot in feedback vector (Smi)
2357 // rdi : constructor function 2383 // rdi : constructor function
2358 Label slow, non_function_call; 2384 Label slow, non_function_call;
2359 2385
2360 // Check that function is not a smi. 2386 // Check that function is not a smi.
2361 __ JumpIfSmi(rdi, &non_function_call); 2387 __ JumpIfSmi(rdi, &non_function_call);
2362 // Check that function is a JSFunction. 2388 // Check that function is a JSFunction.
2363 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 2389 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2364 __ j(not_equal, &slow); 2390 __ j(not_equal, &slow);
2365 2391
2366 if (RecordCallTarget()) { 2392 if (RecordCallTarget()) {
(...skipping 2493 matching lines...)
4860 ArraySingleArgumentConstructorStub stub_holey(holey_initial, 4886 ArraySingleArgumentConstructorStub stub_holey(holey_initial,
4861 DISABLE_ALLOCATION_SITES); 4887 DISABLE_ALLOCATION_SITES);
4862 __ TailCallStub(&stub_holey); 4888 __ TailCallStub(&stub_holey);
4863 4889
4864 __ bind(&normal_sequence); 4890 __ bind(&normal_sequence);
4865 ArraySingleArgumentConstructorStub stub(initial, 4891 ArraySingleArgumentConstructorStub stub(initial,
4866 DISABLE_ALLOCATION_SITES); 4892 DISABLE_ALLOCATION_SITES);
4867 __ TailCallStub(&stub); 4893 __ TailCallStub(&stub);
4868 } else if (mode == DONT_OVERRIDE) { 4894 } else if (mode == DONT_OVERRIDE) {
4869 // We are going to create a holey array, but our kind is non-holey. 4895 // We are going to create a holey array, but our kind is non-holey.
4870 // Fix kind and retry (only if we have an allocation site in the cell). 4896 // Fix kind and retry (only if we have an allocation site in the slot).
4871 __ incl(rdx); 4897 __ incl(rdx);
4872 4898
4873 if (FLAG_debug_code) { 4899 if (FLAG_debug_code) {
4874 Handle<Map> allocation_site_map = 4900 Handle<Map> allocation_site_map =
4875 masm->isolate()->factory()->allocation_site_map(); 4901 masm->isolate()->factory()->allocation_site_map();
4876 __ Cmp(FieldOperand(rbx, 0), allocation_site_map); 4902 __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
4877 __ Assert(equal, kExpectedAllocationSite); 4903 __ Assert(equal, kExpectedAllocationSite);
4878 } 4904 }
4879 4905
4880 // Save the resulting elements kind in type info. We can't just store r3 4906 // Save the resulting elements kind in type info. We can't just store r3
(...skipping 89 matching lines...)
4970 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode); 4996 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4971 } else { 4997 } else {
4972 UNREACHABLE(); 4998 UNREACHABLE();
4973 } 4999 }
4974 } 5000 }
4975 5001
4976 5002
4977 void ArrayConstructorStub::Generate(MacroAssembler* masm) { 5003 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
4978 // ----------- S t a t e ------------- 5004 // ----------- S t a t e -------------
4979 // -- rax : argc 5005 // -- rax : argc
4980 // -- rbx : type info cell 5006 // -- rbx : feedback vector (fixed array or undefined)
5007 // -- rdx : slot index (if ebx is fixed array)
4981 // -- rdi : constructor 5008 // -- rdi : constructor
4982 // -- rsp[0] : return address 5009 // -- rsp[0] : return address
4983 // -- rsp[8] : last argument 5010 // -- rsp[8] : last argument
4984 // ----------------------------------- 5011 // -----------------------------------
4985 Handle<Object> undefined_sentinel( 5012 Handle<Object> undefined_sentinel(
4986 masm->isolate()->heap()->undefined_value(), 5013 masm->isolate()->heap()->undefined_value(),
4987 masm->isolate()); 5014 masm->isolate());
4988 5015
4989 if (FLAG_debug_code) { 5016 if (FLAG_debug_code) {
4990 // The array construct code is only set for the global and natives 5017 // The array construct code is only set for the global and natives
4991 // builtin Array functions which always have maps. 5018 // builtin Array functions which always have maps.
4992 5019
4993 // Initial map for the builtin Array function should be a map. 5020 // Initial map for the builtin Array function should be a map.
4994 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 5021 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
4995 // Will both indicate a NULL and a Smi. 5022 // Will both indicate a NULL and a Smi.
4996 STATIC_ASSERT(kSmiTag == 0); 5023 STATIC_ASSERT(kSmiTag == 0);
4997 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); 5024 Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
4998 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); 5025 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
4999 __ CmpObjectType(rcx, MAP_TYPE, rcx); 5026 __ CmpObjectType(rcx, MAP_TYPE, rcx);
5000 __ Check(equal, kUnexpectedInitialMapForArrayFunction); 5027 __ Check(equal, kUnexpectedInitialMapForArrayFunction);
5001 5028
5002 // We should either have undefined in rbx or a valid cell 5029 // We should either have undefined in rbx or a valid fixed array.
5003 Label okay_here; 5030 Label okay_here;
5004 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); 5031 Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map();
5005 __ Cmp(rbx, undefined_sentinel); 5032 __ Cmp(rbx, undefined_sentinel);
5006 __ j(equal, &okay_here); 5033 __ j(equal, &okay_here);
5007 __ Cmp(FieldOperand(rbx, 0), cell_map); 5034 __ Cmp(FieldOperand(rbx, 0), fixed_array_map);
5008 __ Assert(equal, kExpectedPropertyCellInRegisterRbx); 5035 __ Assert(equal, kExpectedFixedArrayInRegisterRbx);
5036
5037 // rdx should be a smi if we don't have undefined in rbx.
5038 __ AssertSmi(rdx);
5039
5009 __ bind(&okay_here); 5040 __ bind(&okay_here);
5010 } 5041 }
5011 5042
5012 Label no_info; 5043 Label no_info;
5013 // If the type cell is undefined, or contains anything other than an 5044 // If the feedback slot is undefined, or contains anything other than an
5014 // AllocationSite, call an array constructor that doesn't use AllocationSites. 5045 // AllocationSite, call an array constructor that doesn't use AllocationSites.
5015 __ Cmp(rbx, undefined_sentinel); 5046 __ Cmp(rbx, undefined_sentinel);
5016 __ j(equal, &no_info); 5047 __ j(equal, &no_info);
5017 __ movp(rbx, FieldOperand(rbx, Cell::kValueOffset)); 5048 __ SmiToInteger32(rdx, rdx);
5049 __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
5050 FixedArray::kHeaderSize));
5051 __ Integer32ToSmi(rdx, rdx);
5018 __ Cmp(FieldOperand(rbx, 0), 5052 __ Cmp(FieldOperand(rbx, 0),
5019 masm->isolate()->factory()->allocation_site_map()); 5053 masm->isolate()->factory()->allocation_site_map());
5020 __ j(not_equal, &no_info); 5054 __ j(not_equal, &no_info);
5021 5055
5022 // Only look at the lower 16 bits of the transition info. 5056 // Only look at the lower 16 bits of the transition info.
5023 __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset)); 5057 __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
5024 __ SmiToInteger32(rdx, rdx); 5058 __ SmiToInteger32(rdx, rdx);
5025 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 5059 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
5026 __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask)); 5060 __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
5027 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); 5061 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
(...skipping 36 matching lines...)
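The Array constructor path above reads the AllocationSite out of the feedback slot and then masks the low bits of its transition info to recover the ElementsKind (the STATIC_ASSERT guarantees the bit field starts at bit 0). A sketch of that decoding, with an assumed mask width since the real value comes from AllocationSite::ElementsKindBits:

```cpp
// Sketch only: recover the ElementsKind from the low bits of the
// AllocationSite transition info, mirroring the and_ above.
#include <cstdint>

int DecodeElementsKind(int32_t transition_info) {
  const int kElementsKindShift = 0;        // matches the STATIC_ASSERT
  const int32_t kElementsKindMask = 0x1F;  // assumed width; illustrative only
  return (transition_info >> kElementsKindShift) & kElementsKindMask;
}
```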
5064 5098
5065 __ bind(&not_one_case); 5099 __ bind(&not_one_case);
5066 InternalArrayNArgumentsConstructorStub stubN(kind); 5100 InternalArrayNArgumentsConstructorStub stubN(kind);
5067 __ TailCallStub(&stubN); 5101 __ TailCallStub(&stubN);
5068 } 5102 }
5069 5103
5070 5104
5071 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { 5105 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
5072 // ----------- S t a t e ------------- 5106 // ----------- S t a t e -------------
5073 // -- rax : argc 5107 // -- rax : argc
5074 // -- rbx : type info cell
5075 // -- rdi : constructor 5108 // -- rdi : constructor
5076 // -- rsp[0] : return address 5109 // -- rsp[0] : return address
5077 // -- rsp[8] : last argument 5110 // -- rsp[8] : last argument
5078 // ----------------------------------- 5111 // -----------------------------------
5079 5112
5080 if (FLAG_debug_code) { 5113 if (FLAG_debug_code) {
5081 // The array construct code is only set for the global and natives 5114 // The array construct code is only set for the global and natives
5082 // builtin Array functions which always have maps. 5115 // builtin Array functions which always have maps.
5083 5116
5084 // Initial map for the builtin Array function should be a map. 5117 // Initial map for the builtin Array function should be a map.
(...skipping 209 matching lines...)
5294 return_value_operand, 5327 return_value_operand,
5295 NULL); 5328 NULL);
5296 } 5329 }
5297 5330
5298 5331
5299 #undef __ 5332 #undef __
5300 5333
5301 } } // namespace v8::internal 5334 } } // namespace v8::internal
5302 5335
5303 #endif // V8_TARGET_ARCH_X64 5336 #endif // V8_TARGET_ARCH_X64