Chromium Code Reviews

Side by Side Diff: src/x64/builtins-x64.cc

Issue 8139027: Version 3.6.5 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 9 years, 2 months ago
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 61 matching lines...)
72 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1); 72 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
73 } 73 }
74 74
75 75
76 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { 76 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
77 // ----------- S t a t e ------------- 77 // ----------- S t a t e -------------
78 // -- rax: number of arguments 78 // -- rax: number of arguments
79 // -- rdi: constructor function 79 // -- rdi: constructor function
80 // ----------------------------------- 80 // -----------------------------------
81 81
82 Label non_function_call; 82 Label slow, non_function_call;
83 // Check that function is not a smi. 83 // Check that function is not a smi.
84 __ JumpIfSmi(rdi, &non_function_call); 84 __ JumpIfSmi(rdi, &non_function_call);
85 // Check that function is a JSFunction. 85 // Check that function is a JSFunction.
86 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 86 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
87 __ j(not_equal, &non_function_call); 87 __ j(not_equal, &slow);
88 88
89 // Jump to the function-specific construct stub. 89 // Jump to the function-specific construct stub.
90 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 90 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
91 __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset)); 91 __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
92 __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize)); 92 __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
93 __ jmp(rbx); 93 __ jmp(rbx);
94 94
95 // rdi: called object 95 // rdi: called object
96 // rax: number of arguments 96 // rax: number of arguments
97 // rcx: object map
98 Label do_call;
99 __ bind(&slow);
100 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
101 __ j(not_equal, &non_function_call);
102 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
103 __ jmp(&do_call);
104
97 __ bind(&non_function_call); 105 __ bind(&non_function_call);
106 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
107 __ bind(&do_call);
98 // Set expected number of arguments to zero (not changing rax). 108 // Set expected number of arguments to zero (not changing rax).
99 __ Set(rbx, 0); 109 __ Set(rbx, 0);
100 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
101 __ SetCallKind(rcx, CALL_AS_METHOD); 110 __ SetCallKind(rcx, CALL_AS_METHOD);
102 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 111 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
103 RelocInfo::CODE_TARGET); 112 RelocInfo::CODE_TARGET);
104 } 113 }
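
Note: the revised Generate_JSConstructCall adds a slow path so that a JSFunctionProxy used as a constructor is routed to the new CALL_FUNCTION_PROXY_AS_CONSTRUCTOR builtin instead of falling into the generic non-function path. The following stand-alone C++ sketch is illustrative only (the enum and function names are not V8 code); it restates the dispatch order the builtin now emits.

    #include <cstdio>

    enum class CalleeKind { kSmi, kJSFunction, kJSFunctionProxy, kOther };

    // Mirrors the checks above: smi -> non-function builtin, JSFunction ->
    // function-specific construct stub, JSFunctionProxy -> proxy builtin,
    // anything else -> non-function builtin.
    const char* DispatchConstructCall(CalleeKind kind) {
      switch (kind) {
        case CalleeKind::kJSFunction:
          return "jump to function-specific construct stub";
        case CalleeKind::kJSFunctionProxy:
          return "GetBuiltinEntry(CALL_FUNCTION_PROXY_AS_CONSTRUCTOR)";
        case CalleeKind::kSmi:
        case CalleeKind::kOther:
        default:
          return "GetBuiltinEntry(CALL_NON_FUNCTION_AS_CONSTRUCTOR)";
      }
    }

    int main() {
      std::printf("%s\n", DispatchConstructCall(CalleeKind::kJSFunctionProxy));
      return 0;
    }

In both non-JSFunction cases the expected argument count is set to zero and control transfers to the ArgumentsAdaptorTrampoline, as the diff above shows.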
105 114
106 115
107 static void Generate_JSConstructStubHelper(MacroAssembler* masm, 116 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
108 bool is_api_function, 117 bool is_api_function,
109 bool count_constructions) { 118 bool count_constructions) {
110 // Should never count constructions for api objects. 119 // Should never count constructions for api objects.
111 ASSERT(!is_api_function || !count_constructions); 120 ASSERT(!is_api_function || !count_constructions);
112 121
113 // Enter a construct frame. 122 // Enter a construct frame.
114 __ EnterConstructFrame(); 123 {
115 124 FrameScope scope(masm, StackFrame::CONSTRUCT);
116 // Store a smi-tagged arguments count on the stack. 125
117 __ Integer32ToSmi(rax, rax); 126 // Store a smi-tagged arguments count on the stack.
118 __ push(rax); 127 __ Integer32ToSmi(rax, rax);
119 128 __ push(rax);
120 // Push the function to invoke on the stack. 129
121 __ push(rdi); 130 // Push the function to invoke on the stack.
122 131 __ push(rdi);
123 // Try to allocate the object without transitioning into C code. If any of the 132
124 // preconditions is not met, the code bails out to the runtime call. 133 // Try to allocate the object without transitioning into C code. If any of
125 Label rt_call, allocated; 134 // the preconditions is not met, the code bails out to the runtime call.
126 if (FLAG_inline_new) { 135 Label rt_call, allocated;
127 Label undo_allocation; 136 if (FLAG_inline_new) {
137 Label undo_allocation;
128 138
129 #ifdef ENABLE_DEBUGGER_SUPPORT 139 #ifdef ENABLE_DEBUGGER_SUPPORT
130 ExternalReference debug_step_in_fp = 140 ExternalReference debug_step_in_fp =
131 ExternalReference::debug_step_in_fp_address(masm->isolate()); 141 ExternalReference::debug_step_in_fp_address(masm->isolate());
132 __ movq(kScratchRegister, debug_step_in_fp); 142 __ movq(kScratchRegister, debug_step_in_fp);
133 __ cmpq(Operand(kScratchRegister, 0), Immediate(0)); 143 __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
134 __ j(not_equal, &rt_call); 144 __ j(not_equal, &rt_call);
135 #endif 145 #endif
136 146
137 // Verified that the constructor is a JSFunction. 147 // Verified that the constructor is a JSFunction.
138 // Load the initial map and verify that it is in fact a map. 148 // Load the initial map and verify that it is in fact a map.
139 // rdi: constructor 149 // rdi: constructor
140 __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 150 __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
141 // Will both indicate a NULL and a Smi 151 // Will both indicate a NULL and a Smi
142 STATIC_ASSERT(kSmiTag == 0); 152 ASSERT(kSmiTag == 0);
143 __ JumpIfSmi(rax, &rt_call); 153 __ JumpIfSmi(rax, &rt_call);
144 // rdi: constructor 154 // rdi: constructor
145 // rax: initial map (if proven valid below) 155 // rax: initial map (if proven valid below)
146 __ CmpObjectType(rax, MAP_TYPE, rbx); 156 __ CmpObjectType(rax, MAP_TYPE, rbx);
147 __ j(not_equal, &rt_call); 157 __ j(not_equal, &rt_call);
148 158
149 // Check that the constructor is not constructing a JSFunction (see comments 159 // Check that the constructor is not constructing a JSFunction (see
150 // in Runtime_NewObject in runtime.cc). In which case the initial map's 160 // comments in Runtime_NewObject in runtime.cc). In which case the
151 // instance type would be JS_FUNCTION_TYPE. 161 // initial map's instance type would be JS_FUNCTION_TYPE.
152 // rdi: constructor 162 // rdi: constructor
153 // rax: initial map 163 // rax: initial map
154 __ CmpInstanceType(rax, JS_FUNCTION_TYPE); 164 __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
155 __ j(equal, &rt_call); 165 __ j(equal, &rt_call);
156 166
157 if (count_constructions) { 167 if (count_constructions) {
158 Label allocate; 168 Label allocate;
159 // Decrease generous allocation count. 169 // Decrease generous allocation count.
160 __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 170 __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
161 __ decb(FieldOperand(rcx, SharedFunctionInfo::kConstructionCountOffset)); 171 __ decb(FieldOperand(rcx,
162 __ j(not_zero, &allocate); 172 SharedFunctionInfo::kConstructionCountOffset));
163 173 __ j(not_zero, &allocate);
164 __ push(rax); 174
165 __ push(rdi); 175 __ push(rax);
166 176 __ push(rdi);
167 __ push(rdi); // constructor 177
168 // The call will replace the stub, so the countdown is only done once. 178 __ push(rdi); // constructor
169 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); 179 // The call will replace the stub, so the countdown is only done once.
170 180 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
171 __ pop(rdi); 181
172 __ pop(rax); 182 __ pop(rdi);
173 183 __ pop(rax);
174 __ bind(&allocate); 184
185 __ bind(&allocate);
186 }
187
188 // Now allocate the JSObject on the heap.
189 __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
190 __ shl(rdi, Immediate(kPointerSizeLog2));
191 // rdi: size of new object
192 __ AllocateInNewSpace(rdi,
193 rbx,
194 rdi,
195 no_reg,
196 &rt_call,
197 NO_ALLOCATION_FLAGS);
198 // Allocated the JSObject, now initialize the fields.
199 // rax: initial map
200 // rbx: JSObject (not HeapObject tagged - the actual address).
201 // rdi: start of next object
202 __ movq(Operand(rbx, JSObject::kMapOffset), rax);
203 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
204 __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
205 __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
206 // Set extra fields in the newly allocated object.
207 // rax: initial map
208 // rbx: JSObject
209 // rdi: start of next object
210 __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
211 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
212 if (count_constructions) {
213 __ movzxbq(rsi,
214 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
215 __ lea(rsi,
216 Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
217 // rsi: offset of first field after pre-allocated fields
218 if (FLAG_debug_code) {
219 __ cmpq(rsi, rdi);
220 __ Assert(less_equal,
221 "Unexpected number of pre-allocated property fields.");
222 }
223 __ InitializeFieldsWithFiller(rcx, rsi, rdx);
224 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
225 }
226 __ InitializeFieldsWithFiller(rcx, rdi, rdx);
227
228 // Add the object tag to make the JSObject real, so that we can continue
229 // and jump into the continuation code at any time from now on. Any
230 // failures need to undo the allocation, so that the heap is in a
231 // consistent state and verifiable.
232 // rax: initial map
233 // rbx: JSObject
234 // rdi: start of next object
235 __ or_(rbx, Immediate(kHeapObjectTag));
236
237 // Check if a non-empty properties array is needed.
238 // Allocate and initialize a FixedArray if it is.
239 // rax: initial map
240 // rbx: JSObject
241 // rdi: start of next object
242 // Calculate total properties described map.
243 __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
244 __ movzxbq(rcx,
245 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
246 __ addq(rdx, rcx);
247 // Calculate unused properties past the end of the in-object properties.
248 __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
249 __ subq(rdx, rcx);
250 // Done if no extra properties are to be allocated.
251 __ j(zero, &allocated);
252 __ Assert(positive, "Property allocation count failed.");
253
254 // Scale the number of elements by pointer size and add the header for
255 // FixedArrays to the start of the next object calculation from above.
256 // rbx: JSObject
257 // rdi: start of next object (will be start of FixedArray)
258 // rdx: number of elements in properties array
259 __ AllocateInNewSpace(FixedArray::kHeaderSize,
260 times_pointer_size,
261 rdx,
262 rdi,
263 rax,
264 no_reg,
265 &undo_allocation,
266 RESULT_CONTAINS_TOP);
267
268 // Initialize the FixedArray.
269 // rbx: JSObject
270 // rdi: FixedArray
271 // rdx: number of elements
272 // rax: start of next object
273 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
274 __ movq(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
275 __ Integer32ToSmi(rdx, rdx);
276 __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
277
278 // Initialize the fields to undefined.
279 // rbx: JSObject
280 // rdi: FixedArray
281 // rax: start of next object
282 // rdx: number of elements
283 { Label loop, entry;
284 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
285 __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
286 __ jmp(&entry);
287 __ bind(&loop);
288 __ movq(Operand(rcx, 0), rdx);
289 __ addq(rcx, Immediate(kPointerSize));
290 __ bind(&entry);
291 __ cmpq(rcx, rax);
292 __ j(below, &loop);
293 }
294
295 // Store the initialized FixedArray into the properties field of
296 // the JSObject
297 // rbx: JSObject
298 // rdi: FixedArray
299 __ or_(rdi, Immediate(kHeapObjectTag)); // add the heap tag
300 __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);
301
302
303 // Continue with JSObject being successfully allocated
304 // rbx: JSObject
305 __ jmp(&allocated);
306
307 // Undo the setting of the new top so that the heap is verifiable. For
308 // example, the map's unused properties potentially do not match the
309 // allocated objects unused properties.
310 // rbx: JSObject (previous new top)
311 __ bind(&undo_allocation);
312 __ UndoAllocationInNewSpace(rbx);
175 } 313 }
176 314
177 // Now allocate the JSObject on the heap. 315 // Allocate the new receiver object using the runtime call.
178 __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset)); 316 // rdi: function (constructor)
179 __ shl(rdi, Immediate(kPointerSizeLog2)); 317 __ bind(&rt_call);
180 // rdi: size of new object 318 // Must restore rdi (constructor) before calling runtime.
181 __ AllocateInNewSpace(rdi, 319 __ movq(rdi, Operand(rsp, 0));
182 rbx, 320 __ push(rdi);
183 rdi, 321 __ CallRuntime(Runtime::kNewObject, 1);
184 no_reg, 322 __ movq(rbx, rax); // store result in rbx
185 &rt_call, 323
186 NO_ALLOCATION_FLAGS); 324 // New object allocated.
187 // Allocated the JSObject, now initialize the fields. 325 // rbx: newly allocated object
188 // rax: initial map 326 __ bind(&allocated);
189 // rbx: JSObject (not HeapObject tagged - the actual address). 327 // Retrieve the function from the stack.
190 // rdi: start of next object 328 __ pop(rdi);
191 __ movq(Operand(rbx, JSObject::kMapOffset), rax); 329
192 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); 330 // Retrieve smi-tagged arguments count from the stack.
193 __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx); 331 __ movq(rax, Operand(rsp, 0));
194 __ movq(Operand(rbx, JSObject::kElementsOffset), rcx); 332 __ SmiToInteger32(rax, rax);
195 // Set extra fields in the newly allocated object. 333
196 // rax: initial map 334 // Push the allocated receiver to the stack. We need two copies
197 // rbx: JSObject 335 // because we may have to return the original one and the calling
198 // rdi: start of next object 336 // conventions dictate that the called function pops the receiver.
199 { Label loop, entry; 337 __ push(rbx);
200 // To allow for truncation. 338 __ push(rbx);
201 if (count_constructions) { 339
202 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex); 340 // Setup pointer to last argument.
203 } else { 341 __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
204 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); 342
205 } 343 // Copy arguments and receiver to the expression stack.
206 __ lea(rcx, Operand(rbx, JSObject::kHeaderSize)); 344 Label loop, entry;
207 __ jmp(&entry); 345 __ movq(rcx, rax);
208 __ bind(&loop); 346 __ jmp(&entry);
209 __ movq(Operand(rcx, 0), rdx); 347 __ bind(&loop);
210 __ addq(rcx, Immediate(kPointerSize)); 348 __ push(Operand(rbx, rcx, times_pointer_size, 0));
211 __ bind(&entry); 349 __ bind(&entry);
212 __ cmpq(rcx, rdi); 350 __ decq(rcx);
213 __ j(less, &loop); 351 __ j(greater_equal, &loop);
352
353 // Call the function.
354 if (is_api_function) {
355 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
356 Handle<Code> code =
357 masm->isolate()->builtins()->HandleApiCallConstruct();
358 ParameterCount expected(0);
359 __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
360 CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
361 } else {
362 ParameterCount actual(rax);
363 __ InvokeFunction(rdi, actual, CALL_FUNCTION,
364 NullCallWrapper(), CALL_AS_METHOD);
214 } 365 }
215 366
216 // Add the object tag to make the JSObject real, so that we can continue and 367 // Restore context from the frame.
217 // jump into the continuation code at any time from now on. Any failures 368 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
218 // need to undo the allocation, so that the heap is in a consistent state 369
219 // and verifiable. 370 // If the result is an object (in the ECMA sense), we should get rid
220 // rax: initial map 371 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
221 // rbx: JSObject 372 // on page 74.
222 // rdi: start of next object 373 Label use_receiver, exit;
223 __ or_(rbx, Immediate(kHeapObjectTag)); 374 // If the result is a smi, it is *not* an object in the ECMA sense.
224 375 __ JumpIfSmi(rax, &use_receiver);
225 // Check if a non-empty properties array is needed. 376
226 // Allocate and initialize a FixedArray if it is. 377 // If the type of the result (stored in its map) is less than
227 // rax: initial map 378 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
228 // rbx: JSObject 379 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
229 // rdi: start of next object 380 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
230 // Calculate total properties described map. 381 __ j(above_equal, &exit);
231 __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset)); 382
232 __ movzxbq(rcx, FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset)); 383 // Throw away the result of the constructor invocation and use the
233 __ addq(rdx, rcx); 384 // on-stack receiver as the result.
234 // Calculate unused properties past the end of the in-object properties. 385 __ bind(&use_receiver);
235 __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset)); 386 __ movq(rax, Operand(rsp, 0));
236 __ subq(rdx, rcx); 387
237 // Done if no extra properties are to be allocated. 388 // Restore the arguments count and leave the construct frame.
238 __ j(zero, &allocated); 389 __ bind(&exit);
239 __ Assert(positive, "Property allocation count failed."); 390 __ movq(rbx, Operand(rsp, kPointerSize)); // Get arguments count.
240 391
241 // Scale the number of elements by pointer size and add the header for 392 // Leave construct frame.
242 // FixedArrays to the start of the next object calculation from above.
243 // rbx: JSObject
244 // rdi: start of next object (will be start of FixedArray)
245 // rdx: number of elements in properties array
246 __ AllocateInNewSpace(FixedArray::kHeaderSize,
247 times_pointer_size,
248 rdx,
249 rdi,
250 rax,
251 no_reg,
252 &undo_allocation,
253 RESULT_CONTAINS_TOP);
254
255 // Initialize the FixedArray.
256 // rbx: JSObject
257 // rdi: FixedArray
258 // rdx: number of elements
259 // rax: start of next object
260 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
261 __ movq(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
262 __ Integer32ToSmi(rdx, rdx);
263 __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
264
265 // Initialize the fields to undefined.
266 // rbx: JSObject
267 // rdi: FixedArray
268 // rax: start of next object
269 // rdx: number of elements
270 { Label loop, entry;
271 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
272 __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
273 __ jmp(&entry);
274 __ bind(&loop);
275 __ movq(Operand(rcx, 0), rdx);
276 __ addq(rcx, Immediate(kPointerSize));
277 __ bind(&entry);
278 __ cmpq(rcx, rax);
279 __ j(below, &loop);
280 }
281
282 // Store the initialized FixedArray into the properties field of
283 // the JSObject
284 // rbx: JSObject
285 // rdi: FixedArray
286 __ or_(rdi, Immediate(kHeapObjectTag)); // add the heap tag
287 __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);
288
289
290 // Continue with JSObject being successfully allocated
291 // rbx: JSObject
292 __ jmp(&allocated);
293
294 // Undo the setting of the new top so that the heap is verifiable. For
295 // example, the map's unused properties potentially do not match the
296 // allocated objects unused properties.
297 // rbx: JSObject (previous new top)
298 __ bind(&undo_allocation);
299 __ UndoAllocationInNewSpace(rbx);
300 } 393 }
301 394
302 // Allocate the new receiver object using the runtime call.
303 // rdi: function (constructor)
304 __ bind(&rt_call);
305 // Must restore rdi (constructor) before calling runtime.
306 __ movq(rdi, Operand(rsp, 0));
307 __ push(rdi);
308 __ CallRuntime(Runtime::kNewObject, 1);
309 __ movq(rbx, rax); // store result in rbx
310
311 // New object allocated.
312 // rbx: newly allocated object
313 __ bind(&allocated);
314 // Retrieve the function from the stack.
315 __ pop(rdi);
316
317 // Retrieve smi-tagged arguments count from the stack.
318 __ movq(rax, Operand(rsp, 0));
319 __ SmiToInteger32(rax, rax);
320
321 // Push the allocated receiver to the stack. We need two copies
322 // because we may have to return the original one and the calling
323 // conventions dictate that the called function pops the receiver.
324 __ push(rbx);
325 __ push(rbx);
326
327 // Setup pointer to last argument.
328 __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
329
330 // Copy arguments and receiver to the expression stack.
331 Label loop, entry;
332 __ movq(rcx, rax);
333 __ jmp(&entry);
334 __ bind(&loop);
335 __ push(Operand(rbx, rcx, times_pointer_size, 0));
336 __ bind(&entry);
337 __ decq(rcx);
338 __ j(greater_equal, &loop);
339
340 // Call the function.
341 if (is_api_function) {
342 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
343 Handle<Code> code =
344 masm->isolate()->builtins()->HandleApiCallConstruct();
345 ParameterCount expected(0);
346 __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
347 CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
348 } else {
349 ParameterCount actual(rax);
350 __ InvokeFunction(rdi, actual, CALL_FUNCTION,
351 NullCallWrapper(), CALL_AS_METHOD);
352 }
353
354 // Restore context from the frame.
355 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
356
357 // If the result is an object (in the ECMA sense), we should get rid
358 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
359 // on page 74.
360 Label use_receiver, exit;
361 // If the result is a smi, it is *not* an object in the ECMA sense.
362 __ JumpIfSmi(rax, &use_receiver);
363
364 // If the type of the result (stored in its map) is less than
365 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
366 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
367 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
368 __ j(above_equal, &exit);
369
370 // Throw away the result of the constructor invocation and use the
371 // on-stack receiver as the result.
372 __ bind(&use_receiver);
373 __ movq(rax, Operand(rsp, 0));
374
375 // Restore the arguments count and leave the construct frame.
376 __ bind(&exit);
377 __ movq(rbx, Operand(rsp, kPointerSize)); // get arguments count
378 __ LeaveConstructFrame();
379
380 // Remove caller arguments from the stack and return. 395 // Remove caller arguments from the stack and return.
381 __ pop(rcx); 396 __ pop(rcx);
382 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); 397 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
383 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); 398 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
384 __ push(rcx); 399 __ push(rcx);
385 Counters* counters = masm->isolate()->counters(); 400 Counters* counters = masm->isolate()->counters();
386 __ IncrementCounter(counters->constructed_objects(), 1); 401 __ IncrementCounter(counters->constructed_objects(), 1);
387 __ ret(0); 402 __ ret(0);
388 } 403 }
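
Note: the dominant change in this stub, and in the builtins further down, is replacing the paired EnterConstructFrame()/LeaveConstructFrame() and EnterInternalFrame()/LeaveInternalFrame() calls with a block-scoped FrameScope. A minimal, self-contained C++ sketch of that RAII idea follows; the Assembler and ScopedFrame types are stand-ins, not the real MacroAssembler/FrameScope API.

    #include <cstdio>

    struct Assembler {
      void EnterFrame(const char* type) { std::printf("enter %s frame\n", type); }
      void LeaveFrame(const char* type) { std::printf("leave %s frame\n", type); }
    };

    class ScopedFrame {
     public:
      ScopedFrame(Assembler* masm, const char* type) : masm_(masm), type_(type) {
        masm_->EnterFrame(type_);  // frame setup emitted on construction
      }
      ~ScopedFrame() { masm_->LeaveFrame(type_); }  // teardown emitted at scope exit
     private:
      Assembler* masm_;
      const char* type_;
    };

    int main() {
      Assembler masm;
      {
        ScopedFrame scope(&masm, "CONSTRUCT");
        // ... code generated while the construct frame is active ...
      }  // teardown emitted here, matching the old explicit LeaveConstructFrame()
      return 0;
    }

The practical benefit is that frame teardown is tied to C++ block scope, so a rewritten or early-exit path in the generator cannot silently skip it.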
389 404
(...skipping 16 matching lines...)
406 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, 421 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
407 bool is_construct) { 422 bool is_construct) {
408 // Expects five C++ function parameters. 423 // Expects five C++ function parameters.
409 // - Address entry (ignored) 424 // - Address entry (ignored)
410 // - JSFunction* function ( 425 // - JSFunction* function (
411 // - Object* receiver 426 // - Object* receiver
412 // - int argc 427 // - int argc
413 // - Object*** argv 428 // - Object*** argv
414 // (see Handle::Invoke in execution.cc). 429 // (see Handle::Invoke in execution.cc).
415 430
416 // Platform specific argument handling. After this, the stack contains 431 // Open a C++ scope for the FrameScope.
417 // an internal frame and the pushed function and receiver, and 432 {
418 // register rax and rbx holds the argument count and argument array, 433 // Platform specific argument handling. After this, the stack contains
419 // while rdi holds the function pointer and rsi the context. 434 // an internal frame and the pushed function and receiver, and
435 // register rax and rbx holds the argument count and argument array,
436 // while rdi holds the function pointer and rsi the context.
437
420 #ifdef _WIN64 438 #ifdef _WIN64
421 // MSVC parameters in: 439 // MSVC parameters in:
422 // rcx : entry (ignored) 440 // rcx : entry (ignored)
423 // rdx : function 441 // rdx : function
424 // r8 : receiver 442 // r8 : receiver
425 // r9 : argc 443 // r9 : argc
426 // [rsp+0x20] : argv 444 // [rsp+0x20] : argv
427 445
428 // Clear the context before we push it when entering the JS frame. 446 // Clear the context before we push it when entering the internal frame.
429 __ Set(rsi, 0); 447 __ Set(rsi, 0);
430 __ EnterInternalFrame(); 448 // Enter an internal frame.
449 FrameScope scope(masm, StackFrame::INTERNAL);
431 450
432 // Load the function context into rsi. 451 // Load the function context into rsi.
433 __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset)); 452 __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
434 453
435 // Push the function and the receiver onto the stack. 454 // Push the function and the receiver onto the stack.
436 __ push(rdx); 455 __ push(rdx);
437 __ push(r8); 456 __ push(r8);
438 457
439 // Load the number of arguments and setup pointer to the arguments. 458 // Load the number of arguments and setup pointer to the arguments.
440 __ movq(rax, r9); 459 __ movq(rax, r9);
441 // Load the previous frame pointer to access C argument on stack 460 // Load the previous frame pointer to access C argument on stack
442 __ movq(kScratchRegister, Operand(rbp, 0)); 461 __ movq(kScratchRegister, Operand(rbp, 0));
443 __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset)); 462 __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
444 // Load the function pointer into rdi. 463 // Load the function pointer into rdi.
445 __ movq(rdi, rdx); 464 __ movq(rdi, rdx);
446 #else // _WIN64 465 #else // _WIN64
447 // GCC parameters in: 466 // GCC parameters in:
448 // rdi : entry (ignored) 467 // rdi : entry (ignored)
449 // rsi : function 468 // rsi : function
450 // rdx : receiver 469 // rdx : receiver
451 // rcx : argc 470 // rcx : argc
452 // r8 : argv 471 // r8 : argv
453 472
454 __ movq(rdi, rsi); 473 __ movq(rdi, rsi);
455 // rdi : function 474 // rdi : function
456 475
457 // Clear the context before we push it when entering the JS frame. 476 // Clear the context before we push it when entering the internal frame.
458 __ Set(rsi, 0); 477 __ Set(rsi, 0);
459 // Enter an internal frame. 478 // Enter an internal frame.
460 __ EnterInternalFrame(); 479 FrameScope scope(masm, StackFrame::INTERNAL);
461 480
462 // Push the function and receiver and setup the context. 481 // Push the function and receiver and setup the context.
463 __ push(rdi); 482 __ push(rdi);
464 __ push(rdx); 483 __ push(rdx);
465 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 484 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
466 485
467 // Load the number of arguments and setup pointer to the arguments. 486 // Load the number of arguments and setup pointer to the arguments.
468 __ movq(rax, rcx); 487 __ movq(rax, rcx);
469 __ movq(rbx, r8); 488 __ movq(rbx, r8);
470 #endif // _WIN64 489 #endif // _WIN64
471 490
472 // Current stack contents: 491 // Current stack contents:
473 // [rsp + 2 * kPointerSize ... ]: Internal frame 492 // [rsp + 2 * kPointerSize ... ]: Internal frame
474 // [rsp + kPointerSize] : function 493 // [rsp + kPointerSize] : function
475 // [rsp] : receiver 494 // [rsp] : receiver
476 // Current register contents: 495 // Current register contents:
477 // rax : argc 496 // rax : argc
478 // rbx : argv 497 // rbx : argv
479 // rsi : context 498 // rsi : context
480 // rdi : function 499 // rdi : function
481 500
482 // Copy arguments to the stack in a loop. 501 // Copy arguments to the stack in a loop.
483 // Register rbx points to array of pointers to handle locations. 502 // Register rbx points to array of pointers to handle locations.
484 // Push the values of these handles. 503 // Push the values of these handles.
485 Label loop, entry; 504 Label loop, entry;
486 __ Set(rcx, 0); // Set loop variable to 0. 505 __ Set(rcx, 0); // Set loop variable to 0.
487 __ jmp(&entry); 506 __ jmp(&entry);
488 __ bind(&loop); 507 __ bind(&loop);
489 __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0)); 508 __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
490 __ push(Operand(kScratchRegister, 0)); // dereference handle 509 __ push(Operand(kScratchRegister, 0)); // dereference handle
491 __ addq(rcx, Immediate(1)); 510 __ addq(rcx, Immediate(1));
492 __ bind(&entry); 511 __ bind(&entry);
493 __ cmpq(rcx, rax); 512 __ cmpq(rcx, rax);
494 __ j(not_equal, &loop); 513 __ j(not_equal, &loop);
495 514
496 // Invoke the code. 515 // Invoke the code.
497 if (is_construct) { 516 if (is_construct) {
498 // Expects rdi to hold function pointer. 517 // Expects rdi to hold function pointer.
499 __ Call(masm->isolate()->builtins()->JSConstructCall(), 518 __ Call(masm->isolate()->builtins()->JSConstructCall(),
500 RelocInfo::CODE_TARGET); 519 RelocInfo::CODE_TARGET);
501 } else { 520 } else {
502 ParameterCount actual(rax); 521 ParameterCount actual(rax);
503 // Function must be in rdi. 522 // Function must be in rdi.
504 __ InvokeFunction(rdi, actual, CALL_FUNCTION, 523 __ InvokeFunction(rdi, actual, CALL_FUNCTION,
505 NullCallWrapper(), CALL_AS_METHOD); 524 NullCallWrapper(), CALL_AS_METHOD);
525 }
526 // Exit the internal frame. Notice that this also removes the empty
527 // context and the function left on the stack by the code
528 // invocation.
506 } 529 }
507 530
508 // Exit the JS frame. Notice that this also removes the empty
509 // context and the function left on the stack by the code
510 // invocation.
511 __ LeaveInternalFrame();
512 // TODO(X64): Is argument correct? Is there a receiver to remove? 531 // TODO(X64): Is argument correct? Is there a receiver to remove?
513 __ ret(1 * kPointerSize); // remove receiver 532 __ ret(1 * kPointerSize); // Remove receiver.
514 } 533 }
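
Note: the argument-copy loop in the trampoline pushes the *contents* of handles: argv is an array of pointers to object slots, and each slot is dereferenced once before being pushed. A small stand-alone C++ sketch of that double indirection (plain C++, not generated code, names illustrative):

    #include <cstdio>
    #include <vector>

    int main() {
      int a = 1, b = 2, c = 3;
      int* argv[] = {&a, &b, &c};      // handle locations (pointers to slots)
      const int argc = 3;
      std::vector<int> js_stack;       // stand-in for the expression stack
      for (int i = 0; i < argc; ++i) {
        js_stack.push_back(*argv[i]);  // dereference the handle, push the value
      }
      std::printf("pushed %zu arguments\n", js_stack.size());
      return 0;
    }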
515 534
516 535
517 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { 536 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
518 Generate_JSEntryTrampolineHelper(masm, false); 537 Generate_JSEntryTrampolineHelper(masm, false);
519 } 538 }
520 539
521 540
522 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { 541 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
523 Generate_JSEntryTrampolineHelper(masm, true); 542 Generate_JSEntryTrampolineHelper(masm, true);
524 } 543 }
525 544
526 545
527 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { 546 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
528 // Enter an internal frame. 547 // Enter an internal frame.
529 __ EnterInternalFrame(); 548 {
549 FrameScope scope(masm, StackFrame::INTERNAL);
530 550
531 // Push a copy of the function onto the stack. 551 // Push a copy of the function onto the stack.
532 __ push(rdi); 552 __ push(rdi);
533 // Push call kind information. 553 // Push call kind information.
534 __ push(rcx); 554 __ push(rcx);
535 555
536 __ push(rdi); // Function is also the parameter to the runtime call. 556 __ push(rdi); // Function is also the parameter to the runtime call.
537 __ CallRuntime(Runtime::kLazyCompile, 1); 557 __ CallRuntime(Runtime::kLazyCompile, 1);
538 558
539 // Restore call kind information. 559 // Restore call kind information.
540 __ pop(rcx); 560 __ pop(rcx);
541 // Restore receiver. 561 // Restore receiver.
542 __ pop(rdi); 562 __ pop(rdi);
543 563
544 // Tear down temporary frame. 564 // Tear down internal frame.
545 __ LeaveInternalFrame(); 565 }
546 566
547 // Do a tail-call of the compiled function. 567 // Do a tail-call of the compiled function.
548 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); 568 __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
549 __ jmp(rax); 569 __ jmp(rax);
550 } 570 }
551 571
552 572
553 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { 573 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
554 // Enter an internal frame. 574 // Enter an internal frame.
555 __ EnterInternalFrame(); 575 {
576 FrameScope scope(masm, StackFrame::INTERNAL);
556 577
557 // Push a copy of the function onto the stack. 578 // Push a copy of the function onto the stack.
558 __ push(rdi); 579 __ push(rdi);
559 // Push call kind information. 580 // Push call kind information.
560 __ push(rcx); 581 __ push(rcx);
561 582
562 __ push(rdi); // Function is also the parameter to the runtime call. 583 __ push(rdi); // Function is also the parameter to the runtime call.
563 __ CallRuntime(Runtime::kLazyRecompile, 1); 584 __ CallRuntime(Runtime::kLazyRecompile, 1);
564 585
565 // Restore call kind information. 586 // Restore call kind information.
566 __ pop(rcx); 587 __ pop(rcx);
567 // Restore function. 588 // Restore function.
568 __ pop(rdi); 589 __ pop(rdi);
569 590
570 // Tear down temporary frame. 591 // Tear down internal frame.
571 __ LeaveInternalFrame(); 592 }
572 593
573 // Do a tail-call of the compiled function. 594 // Do a tail-call of the compiled function.
574 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); 595 __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
575 __ jmp(rax); 596 __ jmp(rax);
576 } 597 }
577 598
578 599
579 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, 600 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
580 Deoptimizer::BailoutType type) { 601 Deoptimizer::BailoutType type) {
581 // Enter an internal frame. 602 // Enter an internal frame.
582 __ EnterInternalFrame(); 603 {
604 FrameScope scope(masm, StackFrame::INTERNAL);
583 605
584 // Pass the deoptimization type to the runtime system. 606 // Pass the deoptimization type to the runtime system.
585 __ Push(Smi::FromInt(static_cast<int>(type))); 607 __ Push(Smi::FromInt(static_cast<int>(type)));
586 608
587 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); 609 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
588 // Tear down temporary frame. 610 // Tear down internal frame.
589 __ LeaveInternalFrame(); 611 }
590 612
591 // Get the full codegen state from the stack and untag it. 613 // Get the full codegen state from the stack and untag it.
592 __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize)); 614 __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
593 615
594 // Switch on the state. 616 // Switch on the state.
595 Label not_no_registers, not_tos_rax; 617 Label not_no_registers, not_tos_rax;
596 __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS)); 618 __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS));
597 __ j(not_equal, &not_no_registers, Label::kNear); 619 __ j(not_equal, &not_no_registers, Label::kNear);
598 __ ret(1 * kPointerSize); // Remove state. 620 __ ret(1 * kPointerSize); // Remove state.
599 621
(...skipping 16 matching lines...)
616 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); 638 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
617 } 639 }
618 640
619 641
620 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { 642 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
621 // For now, we are relying on the fact that Runtime::NotifyOSR 643 // For now, we are relying on the fact that Runtime::NotifyOSR
622 // doesn't do any garbage collection which allows us to save/restore 644 // doesn't do any garbage collection which allows us to save/restore
623 // the registers without worrying about which of them contain 645 // the registers without worrying about which of them contain
624 // pointers. This seems a bit fragile. 646 // pointers. This seems a bit fragile.
625 __ Pushad(); 647 __ Pushad();
626 __ EnterInternalFrame(); 648 {
627 __ CallRuntime(Runtime::kNotifyOSR, 0); 649 FrameScope scope(masm, StackFrame::INTERNAL);
628 __ LeaveInternalFrame(); 650 __ CallRuntime(Runtime::kNotifyOSR, 0);
651 }
629 __ Popad(); 652 __ Popad();
630 __ ret(0); 653 __ ret(0);
631 } 654 }
632 655
633 656
634 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { 657 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
635 // Stack Layout: 658 // Stack Layout:
636 // rsp[0]: Return address 659 // rsp[0]: Return address
637 // rsp[1]: Argument n 660 // rsp[1]: Argument n
638 // rsp[2]: Argument n-1 661 // rsp[2]: Argument n-1
(...skipping 49 matching lines...)
688 __ CompareRoot(rbx, Heap::kNullValueRootIndex); 711 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
689 __ j(equal, &use_global_receiver); 712 __ j(equal, &use_global_receiver);
690 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); 713 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
691 __ j(equal, &use_global_receiver); 714 __ j(equal, &use_global_receiver);
692 715
693 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 716 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
694 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); 717 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
695 __ j(above_equal, &shift_arguments); 718 __ j(above_equal, &shift_arguments);
696 719
697 __ bind(&convert_to_object); 720 __ bind(&convert_to_object);
698 __ EnterInternalFrame(); // In order to preserve argument count. 721 {
699 __ Integer32ToSmi(rax, rax); 722 // Enter an internal frame in order to preserve argument count.
700 __ push(rax); 723 FrameScope scope(masm, StackFrame::INTERNAL);
724 __ Integer32ToSmi(rax, rax);
725 __ push(rax);
701 726
702 __ push(rbx); 727 __ push(rbx);
703 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 728 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
704 __ movq(rbx, rax); 729 __ movq(rbx, rax);
705 __ Set(rdx, 0); // indicate regular JS_FUNCTION 730 __ Set(rdx, 0); // indicate regular JS_FUNCTION
706 731
707 __ pop(rax); 732 __ pop(rax);
708 __ SmiToInteger32(rax, rax); 733 __ SmiToInteger32(rax, rax);
709 __ LeaveInternalFrame(); 734 }
735
710 // Restore the function to rdi. 736 // Restore the function to rdi.
711 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); 737 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
712 __ jmp(&patch_receiver, Label::kNear); 738 __ jmp(&patch_receiver, Label::kNear);
713 739
714 // Use the global receiver object from the called function as the 740 // Use the global receiver object from the called function as the
715 // receiver. 741 // receiver.
716 __ bind(&use_global_receiver); 742 __ bind(&use_global_receiver);
717 const int kGlobalIndex = 743 const int kGlobalIndex =
718 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; 744 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
719 __ movq(rbx, FieldOperand(rsi, kGlobalIndex)); 745 __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
(...skipping 80 matching lines...)
800 NullCallWrapper(), CALL_AS_METHOD); 826 NullCallWrapper(), CALL_AS_METHOD);
801 } 827 }
802 828
803 829
804 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { 830 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
805 // Stack at entry: 831 // Stack at entry:
806 // rsp: return address 832 // rsp: return address
807 // rsp+8: arguments 833 // rsp+8: arguments
808 // rsp+16: receiver ("this") 834 // rsp+16: receiver ("this")
809 // rsp+24: function 835 // rsp+24: function
810 __ EnterInternalFrame(); 836 {
811 // Stack frame: 837 FrameScope frame_scope(masm, StackFrame::INTERNAL);
812 // rbp: Old base pointer 838 // Stack frame:
813 // rbp[1]: return address 839 // rbp: Old base pointer
814 // rbp[2]: function arguments 840 // rbp[1]: return address
815 // rbp[3]: receiver 841 // rbp[2]: function arguments
816 // rbp[4]: function 842 // rbp[3]: receiver
817 static const int kArgumentsOffset = 2 * kPointerSize; 843 // rbp[4]: function
818 static const int kReceiverOffset = 3 * kPointerSize; 844 static const int kArgumentsOffset = 2 * kPointerSize;
819 static const int kFunctionOffset = 4 * kPointerSize; 845 static const int kReceiverOffset = 3 * kPointerSize;
846 static const int kFunctionOffset = 4 * kPointerSize;
820 847
821 __ push(Operand(rbp, kFunctionOffset)); 848 __ push(Operand(rbp, kFunctionOffset));
822 __ push(Operand(rbp, kArgumentsOffset)); 849 __ push(Operand(rbp, kArgumentsOffset));
823 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); 850 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
824 851
825 // Check the stack for overflow. We are not trying to catch 852 // Check the stack for overflow. We are not trying to catch
826 // interruptions (e.g. debug break and preemption) here, so the "real stack 853 // interruptions (e.g. debug break and preemption) here, so the "real stack
827 // limit" is checked. 854 // limit" is checked.
828 Label okay; 855 Label okay;
829 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); 856 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
830 __ movq(rcx, rsp); 857 __ movq(rcx, rsp);
831 // Make rcx the space we have left. The stack might already be overflowed 858 // Make rcx the space we have left. The stack might already be overflowed
832 // here which will cause rcx to become negative. 859 // here which will cause rcx to become negative.
833 __ subq(rcx, kScratchRegister); 860 __ subq(rcx, kScratchRegister);
834 // Make rdx the space we need for the array when it is unrolled onto the 861 // Make rdx the space we need for the array when it is unrolled onto the
835 // stack. 862 // stack.
836 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2); 863 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
837 // Check if the arguments will overflow the stack. 864 // Check if the arguments will overflow the stack.
838 __ cmpq(rcx, rdx); 865 __ cmpq(rcx, rdx);
839 __ j(greater, &okay); // Signed comparison. 866 __ j(greater, &okay); // Signed comparison.
840 867
841 // Out of stack space. 868 // Out of stack space.
842 __ push(Operand(rbp, kFunctionOffset)); 869 __ push(Operand(rbp, kFunctionOffset));
843 __ push(rax); 870 __ push(rax);
844 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); 871 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
845 __ bind(&okay); 872 __ bind(&okay);
846 // End of stack check. 873 // End of stack check.
847 874
848 // Push current index and limit. 875 // Push current index and limit.
849 const int kLimitOffset = 876 const int kLimitOffset =
850 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize; 877 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
851 const int kIndexOffset = kLimitOffset - 1 * kPointerSize; 878 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
852 __ push(rax); // limit 879 __ push(rax); // limit
853 __ push(Immediate(0)); // index 880 __ push(Immediate(0)); // index
854 881
855 // Get the receiver. 882 // Get the receiver.
856 __ movq(rbx, Operand(rbp, kReceiverOffset)); 883 __ movq(rbx, Operand(rbp, kReceiverOffset));
857 884
858 // Check that the function is a JS function (otherwise it must be a proxy). 885 // Check that the function is a JS function (otherwise it must be a proxy).
859 Label push_receiver; 886 Label push_receiver;
860 __ movq(rdi, Operand(rbp, kFunctionOffset)); 887 __ movq(rdi, Operand(rbp, kFunctionOffset));
861 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 888 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
862 __ j(not_equal, &push_receiver); 889 __ j(not_equal, &push_receiver);
863 890
864 // Change context eagerly to get the right global object if necessary. 891 // Change context eagerly to get the right global object if necessary.
865 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 892 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
866 893
867 // Do not transform the receiver for strict mode functions. 894 // Do not transform the receiver for strict mode functions.
868 Label call_to_object, use_global_receiver; 895 Label call_to_object, use_global_receiver;
869 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 896 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
870 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset), 897 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
871 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); 898 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
872 __ j(not_equal, &push_receiver); 899 __ j(not_equal, &push_receiver);
873 900
874 // Do not transform the receiver for natives. 901 // Do not transform the receiver for natives.
875 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset), 902 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
876 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); 903 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
877 __ j(not_equal, &push_receiver); 904 __ j(not_equal, &push_receiver);
878 905
879 // Compute the receiver in non-strict mode. 906 // Compute the receiver in non-strict mode.
880 __ JumpIfSmi(rbx, &call_to_object, Label::kNear); 907 __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
881 __ CompareRoot(rbx, Heap::kNullValueRootIndex); 908 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
882 __ j(equal, &use_global_receiver); 909 __ j(equal, &use_global_receiver);
883 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); 910 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
884 __ j(equal, &use_global_receiver); 911 __ j(equal, &use_global_receiver);
885 912
886 // If given receiver is already a JavaScript object then there's no 913 // If given receiver is already a JavaScript object then there's no
887 // reason for converting it. 914 // reason for converting it.
888 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 915 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
889 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); 916 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
890 __ j(above_equal, &push_receiver); 917 __ j(above_equal, &push_receiver);
891 918
892 // Convert the receiver to an object. 919 // Convert the receiver to an object.
893 __ bind(&call_to_object); 920 __ bind(&call_to_object);
894 __ push(rbx); 921 __ push(rbx);
895 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 922 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
896 __ movq(rbx, rax); 923 __ movq(rbx, rax);
897 __ jmp(&push_receiver, Label::kNear); 924 __ jmp(&push_receiver, Label::kNear);
898 925
899 // Use the current global receiver object as the receiver. 926 // Use the current global receiver object as the receiver.
900 __ bind(&use_global_receiver); 927 __ bind(&use_global_receiver);
901 const int kGlobalOffset = 928 const int kGlobalOffset =
902 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; 929 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
903 __ movq(rbx, FieldOperand(rsi, kGlobalOffset)); 930 __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
904 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset)); 931 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
905 __ movq(rbx, FieldOperand(rbx, kGlobalOffset)); 932 __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
906 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); 933 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
907 934
908 // Push the receiver. 935 // Push the receiver.
909 __ bind(&push_receiver); 936 __ bind(&push_receiver);
910 __ push(rbx); 937 __ push(rbx);
911 938
912 // Copy all arguments from the array to the stack. 939 // Copy all arguments from the array to the stack.
913 Label entry, loop; 940 Label entry, loop;
914 __ movq(rax, Operand(rbp, kIndexOffset)); 941 __ movq(rax, Operand(rbp, kIndexOffset));
915 __ jmp(&entry); 942 __ jmp(&entry);
916 __ bind(&loop); 943 __ bind(&loop);
917 __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments 944 __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
918 945
919 // Use inline caching to speed up access to arguments. 946 // Use inline caching to speed up access to arguments.
920 Handle<Code> ic = 947 Handle<Code> ic =
921 masm->isolate()->builtins()->KeyedLoadIC_Initialize(); 948 masm->isolate()->builtins()->KeyedLoadIC_Initialize();
922 __ Call(ic, RelocInfo::CODE_TARGET); 949 __ Call(ic, RelocInfo::CODE_TARGET);
923 // It is important that we do not have a test instruction after the 950 // It is important that we do not have a test instruction after the
924 // call. A test instruction after the call is used to indicate that 951 // call. A test instruction after the call is used to indicate that
925 // we have generated an inline version of the keyed load. In this 952 // we have generated an inline version of the keyed load. In this
926 // case, we know that we are not generating a test instruction next. 953 // case, we know that we are not generating a test instruction next.
927 954
928 // Push the nth argument. 955 // Push the nth argument.
929 __ push(rax); 956 __ push(rax);
930 957
931 // Update the index on the stack and in register rax. 958 // Update the index on the stack and in register rax.
932 __ movq(rax, Operand(rbp, kIndexOffset)); 959 __ movq(rax, Operand(rbp, kIndexOffset));
933 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); 960 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
934 __ movq(Operand(rbp, kIndexOffset), rax); 961 __ movq(Operand(rbp, kIndexOffset), rax);
935 962
936 __ bind(&entry); 963 __ bind(&entry);
937 __ cmpq(rax, Operand(rbp, kLimitOffset)); 964 __ cmpq(rax, Operand(rbp, kLimitOffset));
938 __ j(not_equal, &loop); 965 __ j(not_equal, &loop);
939 966
940 // Invoke the function. 967 // Invoke the function.
941 Label call_proxy; 968 Label call_proxy;
942 ParameterCount actual(rax); 969 ParameterCount actual(rax);
943 __ SmiToInteger32(rax, rax); 970 __ SmiToInteger32(rax, rax);
944 __ movq(rdi, Operand(rbp, kFunctionOffset)); 971 __ movq(rdi, Operand(rbp, kFunctionOffset));
945 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 972 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
946 __ j(not_equal, &call_proxy); 973 __ j(not_equal, &call_proxy);
947 __ InvokeFunction(rdi, actual, CALL_FUNCTION, 974 __ InvokeFunction(rdi, actual, CALL_FUNCTION,
948 NullCallWrapper(), CALL_AS_METHOD); 975 NullCallWrapper(), CALL_AS_METHOD);
949 976
950 __ LeaveInternalFrame(); 977 frame_scope.GenerateLeaveFrame();
951 __ ret(3 * kPointerSize); // remove this, receiver, and arguments 978 __ ret(3 * kPointerSize); // remove this, receiver, and arguments
952 979
953 // Invoke the function proxy. 980 // Invoke the function proxy.
954 __ bind(&call_proxy); 981 __ bind(&call_proxy);
955 __ push(rdi); // add function proxy as last argument 982 __ push(rdi); // add function proxy as last argument
956 __ incq(rax); 983 __ incq(rax);
957 __ Set(rbx, 0); 984 __ Set(rbx, 0);
958 __ SetCallKind(rcx, CALL_AS_METHOD); 985 __ SetCallKind(rcx, CALL_AS_METHOD);
959 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); 986 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
960 __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 987 __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
961 RelocInfo::CODE_TARGET); 988 RelocInfo::CODE_TARGET);
962 989
963 __ LeaveInternalFrame(); 990 // Leave internal frame.
991 }
964 __ ret(3 * kPointerSize); // remove this, receiver, and arguments 992 __ ret(3 * kPointerSize); // remove this, receiver, and arguments
965 } 993 }
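
Note: both Generate_FunctionCall and Generate_FunctionApply emit the same non-strict receiver preparation before invoking the callee. The sketch below (ordinary C++ with illustrative names only) restates that decision tree: strict-mode and native functions keep the receiver as-is, null and undefined are replaced by the global receiver object, other primitives go through the TO_OBJECT builtin, and spec objects pass through unchanged.

    #include <cstdio>

    enum class Receiver { kNullOrUndefined, kSmiOrPrimitive, kSpecObject };

    // Illustrative restatement of the receiver handling emitted above.
    const char* PrepareReceiver(bool strict_or_native, Receiver r) {
      if (strict_or_native) return "use receiver unchanged";
      switch (r) {
        case Receiver::kNullOrUndefined: return "use global receiver object";
        case Receiver::kSmiOrPrimitive:  return "call TO_OBJECT builtin";
        case Receiver::kSpecObject:      return "use receiver unchanged";
      }
      return "unreachable";
    }

    int main() {
      std::printf("%s\n", PrepareReceiver(false, Receiver::kSmiOrPrimitive));
      return 0;
    }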
966 994
967 995
968 // Number of empty elements to allocate for an empty array. 996 // Number of empty elements to allocate for an empty array.
969 static const int kPreallocatedArrayElements = 4; 997 static const int kPreallocatedArrayElements = 4;
970 998
971 999
972 // Allocate an empty JSArray. The allocated array is put into the result 1000 // Allocate an empty JSArray. The allocated array is put into the result
973 // register. If the parameter initial_capacity is larger than zero an elements 1001 // register. If the parameter initial_capacity is larger than zero an elements
(...skipping 539 matching lines...)
1513 // should perform a stack guard check so we can get interrupts while 1541 // should perform a stack guard check so we can get interrupts while
1514 // waiting for on-stack replacement. 1542 // waiting for on-stack replacement.
1515 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 1543 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1516 __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); 1544 __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
1517 __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset)); 1545 __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
1518 __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset)); 1546 __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset));
1519 __ j(greater, &stack_check); 1547 __ j(greater, &stack_check);
1520 1548
1521 // Pass the function to optimize as the argument to the on-stack 1549 // Pass the function to optimize as the argument to the on-stack
1522 // replacement runtime function. 1550 // replacement runtime function.
1523 __ EnterInternalFrame(); 1551 {
1524 __ push(rax); 1552 FrameScope scope(masm, StackFrame::INTERNAL);
1525 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); 1553 __ push(rax);
1526 __ LeaveInternalFrame(); 1554 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1555 }
1527 1556
1528 // If the result was -1 it means that we couldn't optimize the 1557 // If the result was -1 it means that we couldn't optimize the
1529 // function. Just return and continue in the unoptimized version. 1558 // function. Just return and continue in the unoptimized version.
1530 Label skip; 1559 Label skip;
1531 __ SmiCompare(rax, Smi::FromInt(-1)); 1560 __ SmiCompare(rax, Smi::FromInt(-1));
1532 __ j(not_equal, &skip, Label::kNear); 1561 __ j(not_equal, &skip, Label::kNear);
1533 __ ret(0); 1562 __ ret(0);
1534 1563
1535 // If we decide not to perform on-stack replacement we perform a 1564 // If we decide not to perform on-stack replacement we perform a
1536 // stack guard check to enable interrupts. 1565 // stack guard check to enable interrupts.
1537 __ bind(&stack_check); 1566 __ bind(&stack_check);
1538 Label ok; 1567 Label ok;
1539 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 1568 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
1540 __ j(above_equal, &ok, Label::kNear); 1569 __ j(above_equal, &ok, Label::kNear);
1541 1570
1542 StackCheckStub stub; 1571 StackCheckStub stub;
1543 __ TailCallStub(&stub); 1572 __ TailCallStub(&stub);
1544 __ Abort("Unreachable code: returned from tail call."); 1573 if (FLAG_debug_code) {
1574 __ Abort("Unreachable code: returned from tail call.");
1575 }
1545 __ bind(&ok); 1576 __ bind(&ok);
1546 __ ret(0); 1577 __ ret(0);
1547 1578
1548 __ bind(&skip); 1579 __ bind(&skip);
1549 // Untag the AST id and push it on the stack. 1580 // Untag the AST id and push it on the stack.
1550 __ SmiToInteger32(rax, rax); 1581 __ SmiToInteger32(rax, rax);
1551 __ push(rax); 1582 __ push(rax);
1552 1583
1553 // Generate the code for doing the frame-to-frame translation using 1584 // Generate the code for doing the frame-to-frame translation using
1554 // the deoptimizer infrastructure. 1585 // the deoptimizer infrastructure.
1555 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR); 1586 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
1556 generator.Generate(); 1587 generator.Generate();
1557 } 1588 }
1558 1589
1559 1590
1560 #undef __ 1591 #undef __
1561 1592
1562 } } // namespace v8::internal 1593 } } // namespace v8::internal
1563 1594
1564 #endif // V8_TARGET_ARCH_X64 1595 #endif // V8_TARGET_ARCH_X64