Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(339)

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 132963012: Pretenure call new support. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Comment response. Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after
96 Isolate* isolate, 96 Isolate* isolate,
97 CodeStubInterfaceDescriptor* descriptor) { 97 CodeStubInterfaceDescriptor* descriptor) {
98 static Register registers[] = { rax, rbx, rcx, rdx }; 98 static Register registers[] = { rax, rbx, rcx, rdx };
99 descriptor->register_param_count_ = 4; 99 descriptor->register_param_count_ = 4;
100 descriptor->register_params_ = registers; 100 descriptor->register_params_ = registers;
101 descriptor->deoptimization_handler_ = 101 descriptor->deoptimization_handler_ =
102 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; 102 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry;
103 } 103 }
104 104
105 105
106 void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
107 Isolate* isolate,
108 CodeStubInterfaceDescriptor* descriptor) {
109 static Register registers[] = { rbx, rdx };
110 descriptor->register_param_count_ = 2;
111 descriptor->register_params_ = registers;
112 descriptor->deoptimization_handler_ = NULL;
113 }
114
115
116 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( 106 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
117 Isolate* isolate, 107 Isolate* isolate,
118 CodeStubInterfaceDescriptor* descriptor) { 108 CodeStubInterfaceDescriptor* descriptor) {
119 static Register registers[] = { rdx, rax }; 109 static Register registers[] = { rdx, rax };
120 descriptor->register_param_count_ = 2; 110 descriptor->register_param_count_ = 2;
121 descriptor->register_params_ = registers; 111 descriptor->register_params_ = registers;
122 descriptor->deoptimization_handler_ = 112 descriptor->deoptimization_handler_ =
123 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); 113 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
124 } 114 }
125 115
(...skipping 2036 matching lines...) Expand 10 before | Expand all | Expand 10 after
2162 2152
2163 static void GenerateRecordCallTarget(MacroAssembler* masm) { 2153 static void GenerateRecordCallTarget(MacroAssembler* masm) {
2164 // Cache the called function in a feedback vector slot. Cache states 2154 // Cache the called function in a feedback vector slot. Cache states
2165 // are uninitialized, monomorphic (indicated by a JSFunction), and 2155 // are uninitialized, monomorphic (indicated by a JSFunction), and
2166 // megamorphic. 2156 // megamorphic.
2167 // rax : number of arguments to the construct function 2157 // rax : number of arguments to the construct function
2168 // rbx : Feedback vector 2158 // rbx : Feedback vector
2169 // rdx : slot in feedback vector (Smi) 2159 // rdx : slot in feedback vector (Smi)
2170 // rdi : the function to call 2160 // rdi : the function to call
2171 Isolate* isolate = masm->isolate(); 2161 Isolate* isolate = masm->isolate();
2172 Label initialize, done, miss, megamorphic, not_array_function, 2162 Label initialize, done, miss, megamorphic, not_array_function;
2173 done_no_smi_convert;
2174 2163
2175 // Load the cache state into rcx. 2164 // Load the cache state into rcx.
2176 __ SmiToInteger32(rdx, rdx); 2165 __ SmiToInteger32(rdx, rdx);
2177 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size, 2166 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
2178 FixedArray::kHeaderSize)); 2167 FixedArray::kHeaderSize));
2179 2168
2180 // A monomorphic cache hit or an already megamorphic state: invoke the 2169 // A monomorphic cache hit or an already megamorphic state: invoke the
2181 // function without changing the state. 2170 // function without changing the state.
2182 __ cmpq(rcx, rdi); 2171 __ cmpq(rcx, rdi);
2183 __ j(equal, &done); 2172 __ j(equal, &done);
2184 __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate)); 2173 __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate));
2185 __ j(equal, &done); 2174 __ j(equal, &done);
2186 2175
2187 // If we came here, we need to see if we are the array function.
 2188   // If we didn't have a matching function, and we didn't find the megamorphic
2189 // sentinel, then we have in the slot either some other function or an
2190 // AllocationSite. Do a map check on the object in rcx.
2191 Handle<Map> allocation_site_map =
2192 masm->isolate()->factory()->allocation_site_map();
2193 __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
2194 __ j(not_equal, &miss);
2195
2196 // Make sure the function is the Array() function
2197 __ LoadArrayFunction(rcx);
2198 __ cmpq(rdi, rcx);
2199 __ j(not_equal, &megamorphic);
2200 __ jmp(&done);
2201
2202 __ bind(&miss); 2176 __ bind(&miss);
2203 2177
2204 // A monomorphic miss (i.e, here the cache is not uninitialized) goes 2178 // A monomorphic miss (i.e, here the cache is not uninitialized) goes
2205 // megamorphic. 2179 // megamorphic.
2206 __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate)); 2180 __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate));
2207 __ j(equal, &initialize); 2181 __ j(equal, &initialize);
2208 // MegamorphicSentinel is an immortal immovable object (undefined) so no 2182 // MegamorphicSentinel is an immortal immovable object (undefined) so no
2209 // write-barrier is needed. 2183 // write-barrier is needed.
2210 __ bind(&megamorphic); 2184 __ bind(&megamorphic);
2211 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), 2185 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
2212 TypeFeedbackInfo::MegamorphicSentinel(isolate)); 2186 TypeFeedbackInfo::MegamorphicSentinel(isolate));
2213 __ jmp(&done); 2187 __ jmp(&done);
2214 2188
2215 // An uninitialized cache is patched with the function or sentinel to 2189 // An uninitialized cache is patched with the function or sentinel to
2216 // indicate the ElementsKind if function is the Array constructor. 2190 // indicate the ElementsKind if function is the Array constructor.
2217 __ bind(&initialize); 2191 __ bind(&initialize);
2218 // Make sure the function is the Array() function
2219 __ LoadArrayFunction(rcx);
2220 __ cmpq(rdi, rcx);
2221 __ j(not_equal, &not_array_function);
2222
2223 // The target function is the Array constructor,
2224 // Create an AllocationSite if we don't already have it, store it in the slot.
2225 {
2226 FrameScope scope(masm, StackFrame::INTERNAL);
2227
2228 // Arguments register must be smi-tagged to call out.
2229 __ Integer32ToSmi(rax, rax);
2230 __ push(rax);
2231 __ push(rdi);
2232 __ Integer32ToSmi(rdx, rdx);
2233 __ push(rdx);
2234 __ push(rbx);
2235
2236 CreateAllocationSiteStub create_stub;
2237 __ CallStub(&create_stub);
2238
2239 __ pop(rbx);
2240 __ pop(rdx);
2241 __ pop(rdi);
2242 __ pop(rax);
2243 __ SmiToInteger32(rax, rax);
2244 }
2245 __ jmp(&done_no_smi_convert);
2246
2247 __ bind(&not_array_function);
2248 __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), 2192 __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
2249 rdi); 2193 rdi);
2250 2194
2251 // We won't need rdx or rbx anymore, just save rdi 2195 // We won't need rdx or rbx anymore, just save rdi
2252 __ push(rdi); 2196 __ push(rdi);
2253 __ push(rbx); 2197 __ push(rbx);
2254 __ push(rdx); 2198 __ push(rdx);
2255 __ RecordWriteArray(rbx, rdi, rdx, kDontSaveFPRegs, 2199 __ RecordWriteArray(rbx, rdi, rdx, kDontSaveFPRegs,
2256 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); 2200 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
2257 __ pop(rdx); 2201 __ pop(rdx);
2258 __ pop(rbx); 2202 __ pop(rbx);
2259 __ pop(rdi); 2203 __ pop(rdi);
2260 2204
2261 __ bind(&done); 2205 __ bind(&done);
2262 __ Integer32ToSmi(rdx, rdx); 2206 __ Integer32ToSmi(rdx, rdx);
2263
2264 __ bind(&done_no_smi_convert);
2265 } 2207 }
2266 2208
2267 2209
2268 void CallFunctionStub::Generate(MacroAssembler* masm) { 2210 void CallFunctionStub::Generate(MacroAssembler* masm) {
2269 // rbx : feedback vector 2211 // rbx : feedback vector
2270 // rdx : (only if rbx is not undefined) slot in feedback vector (Smi) 2212 // rdx : (only if rbx is not undefined) slot in feedback vector (Smi)
2271 // rdi : the function to call 2213 // rdi : the function to call
2272 Isolate* isolate = masm->isolate(); 2214 Isolate* isolate = masm->isolate();
2273 Label slow, non_function, wrap, cont; 2215 Label slow, non_function, wrap, cont;
2274 StackArgumentsAccessor args(rsp, argc_); 2216 StackArgumentsAccessor args(rsp, argc_);
2275 2217
2276 if (NeedsChecks()) { 2218 if (NeedsChecks()) {
2277 // Check that the function really is a JavaScript function. 2219 // Check that the function really is a JavaScript function.
2278 __ JumpIfSmi(rdi, &non_function); 2220 __ JumpIfSmi(rdi, &non_function);
2279 2221
2280 // Goto slow case if we do not have a function. 2222 // Goto slow case if we do not have a function.
2281 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 2223 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2282 __ j(not_equal, &slow); 2224 __ j(not_equal, &slow);
2283 2225
2284 if (RecordCallTarget()) { 2226 if (RecordCallTarget()) {
2285 GenerateRecordCallTarget(masm); 2227 GenerateRecordCallTarget(masm);
2228 // Type information was updated. Because we may call Array, which
 2229     // expects either undefined or an AllocationSite in rbx we need
 2230     // to set rbx to undefined.
2231 __ Move(rbx, handle(isolate->heap()->undefined_value()));
2286 } 2232 }
2287 } 2233 }
2288 2234
2289 // Fast-case: Just invoke the function. 2235 // Fast-case: Just invoke the function.
2290 ParameterCount actual(argc_); 2236 ParameterCount actual(argc_);
2291 2237
2292 if (CallAsMethod()) { 2238 if (CallAsMethod()) {
2293 if (NeedsChecks()) { 2239 if (NeedsChecks()) {
2294 // Do not transform the receiver for strict mode functions. 2240 // Do not transform the receiver for strict mode functions.
2295 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 2241 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
(...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after
2384 Label slow, non_function_call; 2330 Label slow, non_function_call;
2385 2331
2386 // Check that function is not a smi. 2332 // Check that function is not a smi.
2387 __ JumpIfSmi(rdi, &non_function_call); 2333 __ JumpIfSmi(rdi, &non_function_call);
2388 // Check that function is a JSFunction. 2334 // Check that function is a JSFunction.
2389 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 2335 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2390 __ j(not_equal, &slow); 2336 __ j(not_equal, &slow);
2391 2337
2392 if (RecordCallTarget()) { 2338 if (RecordCallTarget()) {
2393 GenerateRecordCallTarget(masm); 2339 GenerateRecordCallTarget(masm);
 2340     // Put the AllocationSite from the feedback vector into rbx.
2341 // By adding kPointerSize we encode that we know the AllocationSite
2342 // entry is at the feedback vector slot given by rdx + 1.
2343 __ SmiToInteger32(rdx, rdx);
2344 __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
2345 FixedArray::kHeaderSize + kPointerSize));
2346 __ AssertUndefinedOrAllocationSite(rbx);
2394 } 2347 }
2395 2348
2396 // Jump to the function-specific construct stub. 2349 // Jump to the function-specific construct stub.
2397 Register jmp_reg = rcx; 2350 Register jmp_reg = rcx;
2398 __ movp(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 2351 __ movp(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2399 __ movp(jmp_reg, FieldOperand(jmp_reg, 2352 __ movp(jmp_reg, FieldOperand(jmp_reg,
2400 SharedFunctionInfo::kConstructStubOffset)); 2353 SharedFunctionInfo::kConstructStubOffset));
2401 __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize)); 2354 __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
2402 __ jmp(jmp_reg); 2355 __ jmp(jmp_reg);
2403 2356
(...skipping 21 matching lines...) Expand all
2425 return false; 2378 return false;
2426 } 2379 }
2427 2380
2428 2381
2429 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { 2382 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2430 CEntryStub::GenerateAheadOfTime(isolate); 2383 CEntryStub::GenerateAheadOfTime(isolate);
2431 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); 2384 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
2432 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); 2385 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
2433 // It is important that the store buffer overflow stubs are generated first. 2386 // It is important that the store buffer overflow stubs are generated first.
2434 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); 2387 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
2435 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
2436 BinaryOpICStub::GenerateAheadOfTime(isolate); 2388 BinaryOpICStub::GenerateAheadOfTime(isolate);
2437 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); 2389 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
2438 } 2390 }
2439 2391
2440 2392
2441 void CodeStub::GenerateFPStubs(Isolate* isolate) { 2393 void CodeStub::GenerateFPStubs(Isolate* isolate) {
2442 } 2394 }
2443 2395
2444 2396
2445 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { 2397 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
(...skipping 2550 matching lines...) Expand 10 before | Expand all | Expand 10 after
4996 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode); 4948 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4997 } else { 4949 } else {
4998 UNREACHABLE(); 4950 UNREACHABLE();
4999 } 4951 }
5000 } 4952 }
5001 4953
5002 4954
5003 void ArrayConstructorStub::Generate(MacroAssembler* masm) { 4955 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
5004 // ----------- S t a t e ------------- 4956 // ----------- S t a t e -------------
5005 // -- rax : argc 4957 // -- rax : argc
5006 // -- rbx : feedback vector (fixed array or undefined) 4958 // -- rbx : AllocationSite or undefined
5007 // -- rdx : slot index (if ebx is fixed array)
5008 // -- rdi : constructor 4959 // -- rdi : constructor
5009 // -- rsp[0] : return address 4960 // -- rsp[0] : return address
5010 // -- rsp[8] : last argument 4961 // -- rsp[8] : last argument
5011 // ----------------------------------- 4962 // -----------------------------------
5012 Handle<Object> undefined_sentinel( 4963 Handle<Object> undefined_sentinel(
5013 masm->isolate()->heap()->undefined_value(), 4964 masm->isolate()->heap()->undefined_value(),
5014 masm->isolate()); 4965 masm->isolate());
5015 4966
5016 if (FLAG_debug_code) { 4967 if (FLAG_debug_code) {
5017 // The array construct code is only set for the global and natives 4968 // The array construct code is only set for the global and natives
5018 // builtin Array functions which always have maps. 4969 // builtin Array functions which always have maps.
5019 4970
5020 // Initial map for the builtin Array function should be a map. 4971 // Initial map for the builtin Array function should be a map.
5021 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 4972 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
5022 // Will both indicate a NULL and a Smi. 4973 // Will both indicate a NULL and a Smi.
5023 STATIC_ASSERT(kSmiTag == 0); 4974 STATIC_ASSERT(kSmiTag == 0);
5024 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); 4975 Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
5025 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); 4976 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
5026 __ CmpObjectType(rcx, MAP_TYPE, rcx); 4977 __ CmpObjectType(rcx, MAP_TYPE, rcx);
5027 __ Check(equal, kUnexpectedInitialMapForArrayFunction); 4978 __ Check(equal, kUnexpectedInitialMapForArrayFunction);
5028 4979
5029 // We should either have undefined in rbx or a valid fixed array. 4980 // We should either have undefined in rbx or a valid AllocationSite
5030 Label okay_here; 4981 __ AssertUndefinedOrAllocationSite(rbx);
5031 Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map();
5032 __ Cmp(rbx, undefined_sentinel);
5033 __ j(equal, &okay_here);
5034 __ Cmp(FieldOperand(rbx, 0), fixed_array_map);
5035 __ Assert(equal, kExpectedFixedArrayInRegisterRbx);
5036
5037 // rdx should be a smi if we don't have undefined in rbx.
5038 __ AssertSmi(rdx);
5039
5040 __ bind(&okay_here);
5041 } 4982 }
5042 4983
5043 Label no_info; 4984 Label no_info;
5044 // If the feedback slot is undefined, or contains anything other than an 4985 // If the feedback slot is undefined, or contains anything other than an
5045 // AllocationSite, call an array constructor that doesn't use AllocationSites. 4986 // AllocationSite, call an array constructor that doesn't use AllocationSites.
5046 __ Cmp(rbx, undefined_sentinel); 4987 __ Cmp(rbx, undefined_sentinel);
5047 __ j(equal, &no_info); 4988 __ j(equal, &no_info);
5048 __ SmiToInteger32(rdx, rdx);
5049 __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
5050 FixedArray::kHeaderSize));
5051 __ Integer32ToSmi(rdx, rdx);
5052 __ Cmp(FieldOperand(rbx, 0),
5053 masm->isolate()->factory()->allocation_site_map());
5054 __ j(not_equal, &no_info);
5055 4989
5056 // Only look at the lower 16 bits of the transition info. 4990 // Only look at the lower 16 bits of the transition info.
5057 __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset)); 4991 __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
5058 __ SmiToInteger32(rdx, rdx); 4992 __ SmiToInteger32(rdx, rdx);
5059 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 4993 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
5060 __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask)); 4994 __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
5061 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); 4995 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
5062 4996
5063 __ bind(&no_info); 4997 __ bind(&no_info);
5064 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); 4998 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
(...skipping 262 matching lines...) Expand 10 before | Expand all | Expand 10 after
5327 return_value_operand, 5261 return_value_operand,
5328 NULL); 5262 NULL);
5329 } 5263 }
5330 5264
5331 5265
5332 #undef __ 5266 #undef __
5333 5267
5334 } } // namespace v8::internal 5268 } } // namespace v8::internal
5335 5269
5336 #endif // V8_TARGET_ARCH_X64 5270 #endif // V8_TARGET_ARCH_X64
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698