Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(316)

Side by Side Diff: src/x64/builtins-x64.cc

Issue 7050039: Revert 8122 (stub call asserts) while test failures are investigated. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 9 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/macro-assembler.h ('k') | src/x64/code-stubs-x64.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after
103 RelocInfo::CODE_TARGET); 103 RelocInfo::CODE_TARGET);
104 } 104 }
105 105
106 106
107 static void Generate_JSConstructStubHelper(MacroAssembler* masm, 107 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
108 bool is_api_function, 108 bool is_api_function,
109 bool count_constructions) { 109 bool count_constructions) {
110 // Should never count constructions for api objects. 110 // Should never count constructions for api objects.
111 ASSERT(!is_api_function || !count_constructions); 111 ASSERT(!is_api_function || !count_constructions);
112 112
113 // Enter a construct frame. 113 // Enter a construct frame.
114 { 114 __ EnterConstructFrame();
115 FrameScope scope(masm, StackFrame::CONSTRUCT); 115
116 116 // Store a smi-tagged arguments count on the stack.
117 // Store a smi-tagged arguments count on the stack. 117 __ Integer32ToSmi(rax, rax);
118 __ Integer32ToSmi(rax, rax); 118 __ push(rax);
119 __ push(rax); 119
120 120 // Push the function to invoke on the stack.
121 // Push the function to invoke on the stack. 121 __ push(rdi);
122 __ push(rdi); 122
123 123 // Try to allocate the object without transitioning into C code. If any of the
124 // Try to allocate the object without transitioning into C code. If any of 124 // preconditions is not met, the code bails out to the runtime call.
125 // the preconditions is not met, the code bails out to the runtime call. 125 Label rt_call, allocated;
126 Label rt_call, allocated; 126 if (FLAG_inline_new) {
127 if (FLAG_inline_new) { 127 Label undo_allocation;
128 Label undo_allocation;
129 128
130 #ifdef ENABLE_DEBUGGER_SUPPORT 129 #ifdef ENABLE_DEBUGGER_SUPPORT
131 ExternalReference debug_step_in_fp = 130 ExternalReference debug_step_in_fp =
132 ExternalReference::debug_step_in_fp_address(masm->isolate()); 131 ExternalReference::debug_step_in_fp_address(masm->isolate());
133 __ movq(kScratchRegister, debug_step_in_fp); 132 __ movq(kScratchRegister, debug_step_in_fp);
134 __ cmpq(Operand(kScratchRegister, 0), Immediate(0)); 133 __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
135 __ j(not_equal, &rt_call); 134 __ j(not_equal, &rt_call);
136 #endif 135 #endif
137 136
138 // Verified that the constructor is a JSFunction. 137 // Verified that the constructor is a JSFunction.
139 // Load the initial map and verify that it is in fact a map. 138 // Load the initial map and verify that it is in fact a map.
140 // rdi: constructor 139 // rdi: constructor
141 __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 140 __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
142 // Will both indicate a NULL and a Smi 141 // Will both indicate a NULL and a Smi
143 ASSERT(kSmiTag == 0); 142 ASSERT(kSmiTag == 0);
144 __ JumpIfSmi(rax, &rt_call); 143 __ JumpIfSmi(rax, &rt_call);
145 // rdi: constructor 144 // rdi: constructor
146 // rax: initial map (if proven valid below) 145 // rax: initial map (if proven valid below)
147 __ CmpObjectType(rax, MAP_TYPE, rbx); 146 __ CmpObjectType(rax, MAP_TYPE, rbx);
148 __ j(not_equal, &rt_call); 147 __ j(not_equal, &rt_call);
149 148
150 // Check that the constructor is not constructing a JSFunction (see 149 // Check that the constructor is not constructing a JSFunction (see comments
151 // comments in Runtime_NewObject in runtime.cc). In which case the initial 150 // in Runtime_NewObject in runtime.cc). In which case the initial map's
152 // map's instance type would be JS_FUNCTION_TYPE. 151 // instance type would be JS_FUNCTION_TYPE.
153 // rdi: constructor 152 // rdi: constructor
154 // rax: initial map 153 // rax: initial map
155 __ CmpInstanceType(rax, JS_FUNCTION_TYPE); 154 __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
156 __ j(equal, &rt_call); 155 __ j(equal, &rt_call);
157 156
157 if (count_constructions) {
158 Label allocate;
159 // Decrease generous allocation count.
160 __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
161 __ decb(FieldOperand(rcx, SharedFunctionInfo::kConstructionCountOffset));
162 __ j(not_zero, &allocate);
163
164 __ push(rax);
165 __ push(rdi);
166
167 __ push(rdi); // constructor
168 // The call will replace the stub, so the countdown is only done once.
169 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
170
171 __ pop(rdi);
172 __ pop(rax);
173
174 __ bind(&allocate);
175 }
176
177 // Now allocate the JSObject on the heap.
178 __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
179 __ shl(rdi, Immediate(kPointerSizeLog2));
180 // rdi: size of new object
181 __ AllocateInNewSpace(rdi,
182 rbx,
183 rdi,
184 no_reg,
185 &rt_call,
186 NO_ALLOCATION_FLAGS);
187 // Allocated the JSObject, now initialize the fields.
188 // rax: initial map
189 // rbx: JSObject (not HeapObject tagged - the actual address).
190 // rdi: start of next object
191 __ movq(Operand(rbx, JSObject::kMapOffset), rax);
192 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
193 __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
194 __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
195 // Set extra fields in the newly allocated object.
196 // rax: initial map
197 // rbx: JSObject
198 // rdi: start of next object
199 { Label loop, entry;
200 // To allow for truncation.
158 if (count_constructions) { 201 if (count_constructions) {
159 Label allocate; 202 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
160 // Decrease generous allocation count. 203 } else {
161 __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 204 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
162 __ decb(FieldOperand(rcx,
163 SharedFunctionInfo::kConstructionCountOffset));
164 __ j(not_zero, &allocate);
165
166 __ push(rax);
167 __ push(rdi);
168
169 __ push(rdi); // constructor
170 // The call will replace the stub, so the countdown is only done once.
171 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
172
173 __ pop(rdi);
174 __ pop(rax);
175
176 __ bind(&allocate);
177 } 205 }
178 206 __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
179 // Now allocate the JSObject on the heap. 207 __ jmp(&entry);
180 __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset)); 208 __ bind(&loop);
181 __ shl(rdi, Immediate(kPointerSizeLog2)); 209 __ movq(Operand(rcx, 0), rdx);
182 // rdi: size of new object 210 __ addq(rcx, Immediate(kPointerSize));
183 __ AllocateInNewSpace(rdi, 211 __ bind(&entry);
184 rbx, 212 __ cmpq(rcx, rdi);
185 rdi, 213 __ j(less, &loop);
186 no_reg,
187 &rt_call,
188 NO_ALLOCATION_FLAGS);
189 // Allocated the JSObject, now initialize the fields.
190 // rax: initial map
191 // rbx: JSObject (not HeapObject tagged - the actual address).
192 // rdi: start of next object
193 __ movq(Operand(rbx, JSObject::kMapOffset), rax);
194 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
195 __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
196 __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
197 // Set extra fields in the newly allocated object.
198 // rax: initial map
199 // rbx: JSObject
200 // rdi: start of next object
201 { Label loop, entry;
202 // To allow for truncation.
203 if (count_constructions) {
204 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
205 } else {
206 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
207 }
208 __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
209 __ jmp(&entry);
210 __ bind(&loop);
211 __ movq(Operand(rcx, 0), rdx);
212 __ addq(rcx, Immediate(kPointerSize));
213 __ bind(&entry);
214 __ cmpq(rcx, rdi);
215 __ j(less, &loop);
216 }
217
218 // Add the object tag to make the JSObject real, so that we can continue
219 // and jump into the continuation code at any time from now on. Any
220 // failures need to undo the allocation, so that the heap is in a
221 // consistent state and verifiable.
222 // rax: initial map
223 // rbx: JSObject
224 // rdi: start of next object
225 __ or_(rbx, Immediate(kHeapObjectTag));
226
227 // Check if a non-empty properties array is needed.
228 // Allocate and initialize a FixedArray if it is.
229 // rax: initial map
230 // rbx: JSObject
231 // rdi: start of next object
232 // Calculate total properties described map.
233 __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
234 __ movzxbq(rcx,
235 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
236 __ addq(rdx, rcx);
237 // Calculate unused properties past the end of the in-object properties.
238 __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
239 __ subq(rdx, rcx);
240 // Done if no extra properties are to be allocated.
241 __ j(zero, &allocated);
242 __ Assert(positive, "Property allocation count failed.");
243
244 // Scale the number of elements by pointer size and add the header for
245 // FixedArrays to the start of the next object calculation from above.
246 // rbx: JSObject
247 // rdi: start of next object (will be start of FixedArray)
248 // rdx: number of elements in properties array
249 __ AllocateInNewSpace(FixedArray::kHeaderSize,
250 times_pointer_size,
251 rdx,
252 rdi,
253 rax,
254 no_reg,
255 &undo_allocation,
256 RESULT_CONTAINS_TOP);
257
258 // Initialize the FixedArray.
259 // rbx: JSObject
260 // rdi: FixedArray
261 // rdx: number of elements
262 // rax: start of next object
263 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
264 __ movq(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
265 __ Integer32ToSmi(rdx, rdx);
266 __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
267
268 // Initialize the fields to undefined.
269 // rbx: JSObject
270 // rdi: FixedArray
271 // rax: start of next object
272 // rdx: number of elements
273 { Label loop, entry;
274 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
275 __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
276 __ jmp(&entry);
277 __ bind(&loop);
278 __ movq(Operand(rcx, 0), rdx);
279 __ addq(rcx, Immediate(kPointerSize));
280 __ bind(&entry);
281 __ cmpq(rcx, rax);
282 __ j(below, &loop);
283 }
284
285 // Store the initialized FixedArray into the properties field of
286 // the JSObject
287 // rbx: JSObject
288 // rdi: FixedArray
289 __ or_(rdi, Immediate(kHeapObjectTag)); // add the heap tag
290 __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);
291
292
293 // Continue with JSObject being successfully allocated
294 // rbx: JSObject
295 __ jmp(&allocated);
296
297 // Undo the setting of the new top so that the heap is verifiable. For
298 // example, the map's unused properties potentially do not match the
299 // allocated objects unused properties.
300 // rbx: JSObject (previous new top)
301 __ bind(&undo_allocation);
302 __ UndoAllocationInNewSpace(rbx);
303 } 214 }
304 215
305 // Allocate the new receiver object using the runtime call. 216 // Add the object tag to make the JSObject real, so that we can continue and
306 // rdi: function (constructor) 217 // jump into the continuation code at any time from now on. Any failures
307 __ bind(&rt_call); 218 // need to undo the allocation, so that the heap is in a consistent state
308 // Must restore rdi (constructor) before calling runtime. 219 // and verifiable.
309 __ movq(rdi, Operand(rsp, 0)); 220 // rax: initial map
310 __ push(rdi); 221 // rbx: JSObject
311 __ CallRuntime(Runtime::kNewObject, 1); 222 // rdi: start of next object
312 __ movq(rbx, rax); // store result in rbx 223 __ or_(rbx, Immediate(kHeapObjectTag));
313 224
314 // New object allocated. 225 // Check if a non-empty properties array is needed.
315 // rbx: newly allocated object 226 // Allocate and initialize a FixedArray if it is.
316 __ bind(&allocated); 227 // rax: initial map
317 // Retrieve the function from the stack. 228 // rbx: JSObject
318 __ pop(rdi); 229 // rdi: start of next object
319 230 // Calculate total properties described map.
320 // Retrieve smi-tagged arguments count from the stack. 231 __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
321 __ movq(rax, Operand(rsp, 0)); 232 __ movzxbq(rcx, FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
322 __ SmiToInteger32(rax, rax); 233 __ addq(rdx, rcx);
323 234 // Calculate unused properties past the end of the in-object properties.
324 // Push the allocated receiver to the stack. We need two copies 235 __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
325 // because we may have to return the original one and the calling 236 __ subq(rdx, rcx);
326 // conventions dictate that the called function pops the receiver. 237 // Done if no extra properties are to be allocated.
327 __ push(rbx); 238 __ j(zero, &allocated);
328 __ push(rbx); 239 __ Assert(positive, "Property allocation count failed.");
329 240
330 // Setup pointer to last argument. 241 // Scale the number of elements by pointer size and add the header for
331 __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset)); 242 // FixedArrays to the start of the next object calculation from above.
332 243 // rbx: JSObject
333 // Copy arguments and receiver to the expression stack. 244 // rdi: start of next object (will be start of FixedArray)
334 Label loop, entry; 245 // rdx: number of elements in properties array
335 __ movq(rcx, rax); 246 __ AllocateInNewSpace(FixedArray::kHeaderSize,
336 __ jmp(&entry); 247 times_pointer_size,
337 __ bind(&loop); 248 rdx,
338 __ push(Operand(rbx, rcx, times_pointer_size, 0)); 249 rdi,
339 __ bind(&entry); 250 rax,
340 __ decq(rcx); 251 no_reg,
341 __ j(greater_equal, &loop); 252 &undo_allocation,
342 253 RESULT_CONTAINS_TOP);
343 // Call the function. 254
344 if (is_api_function) { 255 // Initialize the FixedArray.
345 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 256 // rbx: JSObject
346 Handle<Code> code = 257 // rdi: FixedArray
347 masm->isolate()->builtins()->HandleApiCallConstruct(); 258 // rdx: number of elements
348 ParameterCount expected(0); 259 // rax: start of next object
349 __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET, 260 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
350 CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); 261 __ movq(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
351 } else { 262 __ Integer32ToSmi(rdx, rdx);
352 ParameterCount actual(rax); 263 __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
353 __ InvokeFunction(rdi, actual, CALL_FUNCTION, 264
354 NullCallWrapper(), CALL_AS_METHOD); 265 // Initialize the fields to undefined.
266 // rbx: JSObject
267 // rdi: FixedArray
268 // rax: start of next object
269 // rdx: number of elements
270 { Label loop, entry;
271 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
272 __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
273 __ jmp(&entry);
274 __ bind(&loop);
275 __ movq(Operand(rcx, 0), rdx);
276 __ addq(rcx, Immediate(kPointerSize));
277 __ bind(&entry);
278 __ cmpq(rcx, rax);
279 __ j(below, &loop);
355 } 280 }
356 281
357 // Restore context from the frame. 282 // Store the initialized FixedArray into the properties field of
358 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 283 // the JSObject
359 284 // rbx: JSObject
360 // If the result is an object (in the ECMA sense), we should get rid 285 // rdi: FixedArray
361 // of the receiver and use the result; see ECMA-262 section 13.2.2-7 286 __ or_(rdi, Immediate(kHeapObjectTag)); // add the heap tag
362 // on page 74. 287 __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);
363 Label use_receiver, exit; 288
364 // If the result is a smi, it is *not* an object in the ECMA sense. 289
365 __ JumpIfSmi(rax, &use_receiver); 290 // Continue with JSObject being successfully allocated
366 291 // rbx: JSObject
367 // If the type of the result (stored in its map) is less than 292 __ jmp(&allocated);
368 // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense. 293
369 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); 294 // Undo the setting of the new top so that the heap is verifiable. For
370 __ j(above_equal, &exit); 295 // example, the map's unused properties potentially do not match the
371 296 // allocated objects unused properties.
372 // Throw away the result of the constructor invocation and use the 297 // rbx: JSObject (previous new top)
373 // on-stack receiver as the result. 298 __ bind(&undo_allocation);
374 __ bind(&use_receiver); 299 __ UndoAllocationInNewSpace(rbx);
375 __ movq(rax, Operand(rsp, 0));
376
377 // Restore the arguments count and leave the construct frame.
378 __ bind(&exit);
379 __ movq(rbx, Operand(rsp, kPointerSize)); // get arguments count
380
381 // Leave the construct frame.
382 } 300 }
383 301
302 // Allocate the new receiver object using the runtime call.
303 // rdi: function (constructor)
304 __ bind(&rt_call);
305 // Must restore rdi (constructor) before calling runtime.
306 __ movq(rdi, Operand(rsp, 0));
307 __ push(rdi);
308 __ CallRuntime(Runtime::kNewObject, 1);
309 __ movq(rbx, rax); // store result in rbx
310
311 // New object allocated.
312 // rbx: newly allocated object
313 __ bind(&allocated);
314 // Retrieve the function from the stack.
315 __ pop(rdi);
316
317 // Retrieve smi-tagged arguments count from the stack.
318 __ movq(rax, Operand(rsp, 0));
319 __ SmiToInteger32(rax, rax);
320
321 // Push the allocated receiver to the stack. We need two copies
322 // because we may have to return the original one and the calling
323 // conventions dictate that the called function pops the receiver.
324 __ push(rbx);
325 __ push(rbx);
326
327 // Setup pointer to last argument.
328 __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
329
330 // Copy arguments and receiver to the expression stack.
331 Label loop, entry;
332 __ movq(rcx, rax);
333 __ jmp(&entry);
334 __ bind(&loop);
335 __ push(Operand(rbx, rcx, times_pointer_size, 0));
336 __ bind(&entry);
337 __ decq(rcx);
338 __ j(greater_equal, &loop);
339
340 // Call the function.
341 if (is_api_function) {
342 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
343 Handle<Code> code =
344 masm->isolate()->builtins()->HandleApiCallConstruct();
345 ParameterCount expected(0);
346 __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
347 CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
348 } else {
349 ParameterCount actual(rax);
350 __ InvokeFunction(rdi, actual, CALL_FUNCTION,
351 NullCallWrapper(), CALL_AS_METHOD);
352 }
353
354 // Restore context from the frame.
355 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
356
357 // If the result is an object (in the ECMA sense), we should get rid
358 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
359 // on page 74.
360 Label use_receiver, exit;
361 // If the result is a smi, it is *not* an object in the ECMA sense.
362 __ JumpIfSmi(rax, &use_receiver);
363
364 // If the type of the result (stored in its map) is less than
365 // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
366 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
367 __ j(above_equal, &exit);
368
369 // Throw away the result of the constructor invocation and use the
370 // on-stack receiver as the result.
371 __ bind(&use_receiver);
372 __ movq(rax, Operand(rsp, 0));
373
374 // Restore the arguments count and leave the construct frame.
375 __ bind(&exit);
376 __ movq(rbx, Operand(rsp, kPointerSize)); // get arguments count
377 __ LeaveConstructFrame();
378
384 // Remove caller arguments from the stack and return. 379 // Remove caller arguments from the stack and return.
385 __ pop(rcx); 380 __ pop(rcx);
386 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); 381 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
387 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); 382 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
388 __ push(rcx); 383 __ push(rcx);
389 Counters* counters = masm->isolate()->counters(); 384 Counters* counters = masm->isolate()->counters();
390 __ IncrementCounter(counters->constructed_objects(), 1); 385 __ IncrementCounter(counters->constructed_objects(), 1);
391 __ ret(0); 386 __ ret(0);
392 } 387 }
393 388
(...skipping 16 matching lines...) Expand all
410 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, 405 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
411 bool is_construct) { 406 bool is_construct) {
412 // Expects five C++ function parameters. 407 // Expects five C++ function parameters.
413 // - Address entry (ignored) 408 // - Address entry (ignored)
414 // - JSFunction* function ( 409 // - JSFunction* function (
415 // - Object* receiver 410 // - Object* receiver
416 // - int argc 411 // - int argc
417 // - Object*** argv 412 // - Object*** argv
418 // (see Handle::Invoke in execution.cc). 413 // (see Handle::Invoke in execution.cc).
419 414
420 // Open a C++ scope for the FrameScope. 415 // Platform specific argument handling. After this, the stack contains
421 { 416 // an internal frame and the pushed function and receiver, and
422 // Platform specific argument handling. After this, the stack contains 417 // register rax and rbx holds the argument count and argument array,
423 // an internal frame and the pushed function and receiver, and 418 // while rdi holds the function pointer and rsi the context.
424 // register rax and rbx holds the argument count and argument array, 419 #ifdef _WIN64
425 // while rdi holds the function pointer and rsi the context. 420 // MSVC parameters in:
421 // rcx : entry (ignored)
422 // rdx : function
423 // r8 : receiver
424 // r9 : argc
425 // [rsp+0x20] : argv
426 426
427 #ifdef _WIN64 427 // Clear the context before we push it when entering the JS frame.
428 // MSVC parameters in: 428 __ Set(rsi, 0);
429 // rcx : entry (ignored) 429 __ EnterInternalFrame();
430 // rdx : function
431 // r8 : receiver
432 // r9 : argc
433 // [rsp+0x20] : argv
434 430
435 // Clear the context before we push it when entering the internal frame. 431 // Load the function context into rsi.
436 __ Set(rsi, 0); 432 __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
437 // Enter an internal frame.
438 FrameScope scope(masm, StackFrame::INTERNAL);
439 433
440 // Load the function context into rsi. 434 // Push the function and the receiver onto the stack.
441 __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset)); 435 __ push(rdx);
436 __ push(r8);
442 437
443 // Push the function and the receiver onto the stack. 438 // Load the number of arguments and setup pointer to the arguments.
444 __ push(rdx); 439 __ movq(rax, r9);
445 __ push(r8); 440 // Load the previous frame pointer to access C argument on stack
441 __ movq(kScratchRegister, Operand(rbp, 0));
442 __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
443 // Load the function pointer into rdi.
444 __ movq(rdi, rdx);
445 #else // _WIN64
446 // GCC parameters in:
447 // rdi : entry (ignored)
448 // rsi : function
449 // rdx : receiver
450 // rcx : argc
451 // r8 : argv
446 452
447 // Load the number of arguments and setup pointer to the arguments. 453 __ movq(rdi, rsi);
448 __ movq(rax, r9); 454 // rdi : function
449 // Load the previous frame pointer to access C argument on stack
450 __ movq(kScratchRegister, Operand(rbp, 0));
451 __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
452 // Load the function pointer into rdi.
453 __ movq(rdi, rdx);
454 #else // _WIN64
455 // GCC parameters in:
456 // rdi : entry (ignored)
457 // rsi : function
458 // rdx : receiver
459 // rcx : argc
460 // r8 : argv
461 455
462 __ movq(rdi, rsi); 456 // Clear the context before we push it when entering the JS frame.
463 // rdi : function 457 __ Set(rsi, 0);
458 // Enter an internal frame.
459 __ EnterInternalFrame();
464 460
465 // Clear the context before we push it when entering the internal frame. 461 // Push the function and receiver and setup the context.
466 __ Set(rsi, 0); 462 __ push(rdi);
467 // Enter an internal frame. 463 __ push(rdx);
468 FrameScope scope(masm, StackFrame::INTERNAL); 464 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
469 465
470 // Push the function and receiver and setup the context. 466 // Load the number of arguments and setup pointer to the arguments.
471 __ push(rdi); 467 __ movq(rax, rcx);
472 __ push(rdx); 468 __ movq(rbx, r8);
473 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
474
475 // Load the number of arguments and setup pointer to the arguments.
476 __ movq(rax, rcx);
477 __ movq(rbx, r8);
478 #endif // _WIN64 469 #endif // _WIN64
479 470
480 // Current stack contents: 471 // Current stack contents:
481 // [rsp + 2 * kPointerSize ... ]: Internal frame 472 // [rsp + 2 * kPointerSize ... ]: Internal frame
482 // [rsp + kPointerSize] : function 473 // [rsp + kPointerSize] : function
483 // [rsp] : receiver 474 // [rsp] : receiver
484 // Current register contents: 475 // Current register contents:
485 // rax : argc 476 // rax : argc
486 // rbx : argv 477 // rbx : argv
487 // rsi : context 478 // rsi : context
488 // rdi : function 479 // rdi : function
489 480
490 // Copy arguments to the stack in a loop. 481 // Copy arguments to the stack in a loop.
491 // Register rbx points to array of pointers to handle locations. 482 // Register rbx points to array of pointers to handle locations.
492 // Push the values of these handles. 483 // Push the values of these handles.
493 Label loop, entry; 484 Label loop, entry;
494 __ Set(rcx, 0); // Set loop variable to 0. 485 __ Set(rcx, 0); // Set loop variable to 0.
495 __ jmp(&entry); 486 __ jmp(&entry);
496 __ bind(&loop); 487 __ bind(&loop);
497 __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0)); 488 __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
498 __ push(Operand(kScratchRegister, 0)); // dereference handle 489 __ push(Operand(kScratchRegister, 0)); // dereference handle
499 __ addq(rcx, Immediate(1)); 490 __ addq(rcx, Immediate(1));
500 __ bind(&entry); 491 __ bind(&entry);
501 __ cmpq(rcx, rax); 492 __ cmpq(rcx, rax);
502 __ j(not_equal, &loop); 493 __ j(not_equal, &loop);
503 494
504 // Invoke the code. 495 // Invoke the code.
505 if (is_construct) { 496 if (is_construct) {
506 // Expects rdi to hold function pointer. 497 // Expects rdi to hold function pointer.
507 __ Call(masm->isolate()->builtins()->JSConstructCall(), 498 __ Call(masm->isolate()->builtins()->JSConstructCall(),
508 RelocInfo::CODE_TARGET); 499 RelocInfo::CODE_TARGET);
509 } else { 500 } else {
510 ParameterCount actual(rax); 501 ParameterCount actual(rax);
511 // Function must be in rdi. 502 // Function must be in rdi.
512 __ InvokeFunction(rdi, actual, CALL_FUNCTION, 503 __ InvokeFunction(rdi, actual, CALL_FUNCTION,
513 NullCallWrapper(), CALL_AS_METHOD); 504 NullCallWrapper(), CALL_AS_METHOD);
514 }
515 // Exit the internal frame. Notice that this also removes the empty
516 // context and the function left on the stack by the code
517 // invocation.
518 } 505 }
519 506
507 // Exit the JS frame. Notice that this also removes the empty
508 // context and the function left on the stack by the code
509 // invocation.
510 __ LeaveInternalFrame();
520 // TODO(X64): Is argument correct? Is there a receiver to remove? 511 // TODO(X64): Is argument correct? Is there a receiver to remove?
521 __ ret(1 * kPointerSize); // Remove receiver. 512 __ ret(1 * kPointerSize); // remove receiver
522 } 513 }
523 514
524 515
525 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { 516 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
526 Generate_JSEntryTrampolineHelper(masm, false); 517 Generate_JSEntryTrampolineHelper(masm, false);
527 } 518 }
528 519
529 520
530 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { 521 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
531 Generate_JSEntryTrampolineHelper(masm, true); 522 Generate_JSEntryTrampolineHelper(masm, true);
532 } 523 }
533 524
534 525
535 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { 526 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
536 // Enter an internal frame. 527 // Enter an internal frame.
537 { 528 __ EnterInternalFrame();
538 FrameScope scope(masm, StackFrame::INTERNAL);
539 529
540 // Push a copy of the function onto the stack. 530 // Push a copy of the function onto the stack.
541 __ push(rdi); 531 __ push(rdi);
542 // Push call kind information. 532 // Push call kind information.
543 __ push(rcx); 533 __ push(rcx);
544 534
545 __ push(rdi); // Function is also the parameter to the runtime call. 535 __ push(rdi); // Function is also the parameter to the runtime call.
546 __ CallRuntime(Runtime::kLazyCompile, 1); 536 __ CallRuntime(Runtime::kLazyCompile, 1);
547 537
548 // Restore call kind information. 538 // Restore call kind information.
549 __ pop(rcx); 539 __ pop(rcx);
550 // Restore receiver. 540 // Restore receiver.
551 __ pop(rdi); 541 __ pop(rdi);
552 542
553 // Tear down internal frame. 543 // Tear down temporary frame.
554 } 544 __ LeaveInternalFrame();
555 545
556 // Do a tail-call of the compiled function. 546 // Do a tail-call of the compiled function.
557 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); 547 __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
558 __ jmp(rax); 548 __ jmp(rax);
559 } 549 }
560 550
561 551
562 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { 552 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
563 // Enter an internal frame. 553 // Enter an internal frame.
564 { 554 __ EnterInternalFrame();
565 FrameScope scope(masm, StackFrame::INTERNAL);
566 555
567 // Push a copy of the function onto the stack. 556 // Push a copy of the function onto the stack.
568 __ push(rdi); 557 __ push(rdi);
569 // Push call kind information. 558 // Push call kind information.
570 __ push(rcx); 559 __ push(rcx);
571 560
572 __ push(rdi); // Function is also the parameter to the runtime call. 561 __ push(rdi); // Function is also the parameter to the runtime call.
573 __ CallRuntime(Runtime::kLazyRecompile, 1); 562 __ CallRuntime(Runtime::kLazyRecompile, 1);
574 563
575 // Restore call kind information. 564 // Restore call kind information.
576 __ pop(rcx); 565 __ pop(rcx);
577 // Restore function. 566 // Restore function.
578 __ pop(rdi); 567 __ pop(rdi);
579 568
580 // Tear down internal frame. 569 // Tear down temporary frame.
581 } 570 __ LeaveInternalFrame();
582 571
583 // Do a tail-call of the compiled function. 572 // Do a tail-call of the compiled function.
584 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); 573 __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
585 __ jmp(rax); 574 __ jmp(rax);
586 } 575 }
587 576
588 577
589 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, 578 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
590 Deoptimizer::BailoutType type) { 579 Deoptimizer::BailoutType type) {
591 // Enter an internal frame. 580 // Enter an internal frame.
592 { 581 __ EnterInternalFrame();
593 FrameScope scope(masm, StackFrame::INTERNAL);
594 582
595 // Pass the deoptimization type to the runtime system. 583 // Pass the deoptimization type to the runtime system.
596 __ Push(Smi::FromInt(static_cast<int>(type))); 584 __ Push(Smi::FromInt(static_cast<int>(type)));
597 585
598 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); 586 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
599 // Tear down internal frame. 587 // Tear down temporary frame.
600 } 588 __ LeaveInternalFrame();
601 589
602 // Get the full codegen state from the stack and untag it. 590 // Get the full codegen state from the stack and untag it.
603 __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize)); 591 __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
604 592
605 // Switch on the state. 593 // Switch on the state.
606 Label not_no_registers, not_tos_rax; 594 Label not_no_registers, not_tos_rax;
607 __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS)); 595 __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS));
608 __ j(not_equal, &not_no_registers, Label::kNear); 596 __ j(not_equal, &not_no_registers, Label::kNear);
609 __ ret(1 * kPointerSize); // Remove state. 597 __ ret(1 * kPointerSize); // Remove state.
610 598
(...skipping 16 matching lines...) Expand all
627 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); 615 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
628 } 616 }
629 617
630 618
631 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { 619 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
632 // For now, we are relying on the fact that Runtime::NotifyOSR 620 // For now, we are relying on the fact that Runtime::NotifyOSR
633 // doesn't do any garbage collection which allows us to save/restore 621 // doesn't do any garbage collection which allows us to save/restore
634 // the registers without worrying about which of them contain 622 // the registers without worrying about which of them contain
635 // pointers. This seems a bit fragile. 623 // pointers. This seems a bit fragile.
636 __ Pushad(); 624 __ Pushad();
637 { 625 __ EnterInternalFrame();
638 FrameScope scope(masm, StackFrame::INTERNAL); 626 __ CallRuntime(Runtime::kNotifyOSR, 0);
639 __ CallRuntime(Runtime::kNotifyOSR, 0); 627 __ LeaveInternalFrame();
640 }
641 __ Popad(); 628 __ Popad();
642 __ ret(0); 629 __ ret(0);
643 } 630 }
644 631
645 632
646 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { 633 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
647 // Stack Layout: 634 // Stack Layout:
648 // rsp[0]: Return address 635 // rsp[0]: Return address
649 // rsp[1]: Argument n 636 // rsp[1]: Argument n
650 // rsp[2]: Argument n-1 637 // rsp[2]: Argument n-1
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after
700 __ j(equal, &use_global_receiver); 687 __ j(equal, &use_global_receiver);
701 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); 688 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
702 __ j(equal, &use_global_receiver); 689 __ j(equal, &use_global_receiver);
703 690
704 STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE); 691 STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
705 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 692 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
706 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); 693 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
707 __ j(above_equal, &shift_arguments); 694 __ j(above_equal, &shift_arguments);
708 695
709 __ bind(&convert_to_object); 696 __ bind(&convert_to_object);
710 { 697 __ EnterInternalFrame(); // In order to preserve argument count.
711 // Enter an internal frame in order to preserve argument count. 698 __ Integer32ToSmi(rax, rax);
712 FrameScope scope(masm, StackFrame::INTERNAL); 699 __ push(rax);
713 __ Integer32ToSmi(rax, rax);
714 __ push(rax);
715 700
716 __ push(rbx); 701 __ push(rbx);
717 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 702 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
718 __ movq(rbx, rax); 703 __ movq(rbx, rax);
719 704
720 __ pop(rax); 705 __ pop(rax);
721 __ SmiToInteger32(rax, rax); 706 __ SmiToInteger32(rax, rax);
722 } 707 __ LeaveInternalFrame();
723
724 // Restore the function to rdi. 708 // Restore the function to rdi.
725 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); 709 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
726 __ jmp(&patch_receiver, Label::kNear); 710 __ jmp(&patch_receiver, Label::kNear);
727 711
728 // Use the global receiver object from the called function as the 712 // Use the global receiver object from the called function as the
729 // receiver. 713 // receiver.
730 __ bind(&use_global_receiver); 714 __ bind(&use_global_receiver);
731 const int kGlobalIndex = 715 const int kGlobalIndex =
732 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; 716 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
733 __ movq(rbx, FieldOperand(rsi, kGlobalIndex)); 717 __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
796 NullCallWrapper(), CALL_AS_METHOD); 780 NullCallWrapper(), CALL_AS_METHOD);
797 } 781 }
798 782
799 783
800 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { 784 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
801 // Stack at entry: 785 // Stack at entry:
802 // rsp: return address 786 // rsp: return address
803 // rsp+8: arguments 787 // rsp+8: arguments
804 // rsp+16: receiver ("this") 788 // rsp+16: receiver ("this")
805 // rsp+24: function 789 // rsp+24: function
806 { 790 __ EnterInternalFrame();
807 FrameScope scope(masm, StackFrame::INTERNAL); 791 // Stack frame:
808 // Stack frame: 792 // rbp: Old base pointer
809 // rbp: Old base pointer 793 // rbp[1]: return address
810 // rbp[1]: return address 794 // rbp[2]: function arguments
811 // rbp[2]: function arguments 795 // rbp[3]: receiver
812 // rbp[3]: receiver 796 // rbp[4]: function
813 // rbp[4]: function 797 static const int kArgumentsOffset = 2 * kPointerSize;
814 static const int kArgumentsOffset = 2 * kPointerSize; 798 static const int kReceiverOffset = 3 * kPointerSize;
815 static const int kReceiverOffset = 3 * kPointerSize; 799 static const int kFunctionOffset = 4 * kPointerSize;
816 static const int kFunctionOffset = 4 * kPointerSize; 800 __ push(Operand(rbp, kFunctionOffset));
817 __ push(Operand(rbp, kFunctionOffset)); 801 __ push(Operand(rbp, kArgumentsOffset));
818 __ push(Operand(rbp, kArgumentsOffset)); 802 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
819 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
820 803
821 // Check the stack for overflow. We are not trying to catch 804 // Check the stack for overflow. We are not trying to catch
822 // interruptions (e.g. debug break and preemption) here, so the "real stack 805 // interruptions (e.g. debug break and preemption) here, so the "real stack
823 // limit" is checked. 806 // limit" is checked.
824 Label okay; 807 Label okay;
825 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); 808 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
826 __ movq(rcx, rsp); 809 __ movq(rcx, rsp);
827 // Make rcx the space we have left. The stack might already be overflowed 810 // Make rcx the space we have left. The stack might already be overflowed
828 // here which will cause rcx to become negative. 811 // here which will cause rcx to become negative.
829 __ subq(rcx, kScratchRegister); 812 __ subq(rcx, kScratchRegister);
830 // Make rdx the space we need for the array when it is unrolled onto the 813 // Make rdx the space we need for the array when it is unrolled onto the
831 // stack. 814 // stack.
832 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2); 815 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
833 // Check if the arguments will overflow the stack. 816 // Check if the arguments will overflow the stack.
834 __ cmpq(rcx, rdx); 817 __ cmpq(rcx, rdx);
835 __ j(greater, &okay); // Signed comparison. 818 __ j(greater, &okay); // Signed comparison.
836 819
837 // Out of stack space. 820 // Out of stack space.
838 __ push(Operand(rbp, kFunctionOffset)); 821 __ push(Operand(rbp, kFunctionOffset));
839 __ push(rax); 822 __ push(rax);
840 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); 823 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
841 __ bind(&okay); 824 __ bind(&okay);
842 // End of stack check. 825 // End of stack check.
843 826
844 // Push current index and limit. 827 // Push current index and limit.
845 const int kLimitOffset = 828 const int kLimitOffset =
846 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize; 829 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
847 const int kIndexOffset = kLimitOffset - 1 * kPointerSize; 830 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
848 __ push(rax); // limit 831 __ push(rax); // limit
849 __ push(Immediate(0)); // index 832 __ push(Immediate(0)); // index
850 833
851 // Change context eagerly to get the right global object if 834 // Change context eagerly to get the right global object if
852 // necessary. 835 // necessary.
853 __ movq(rdi, Operand(rbp, kFunctionOffset)); 836 __ movq(rdi, Operand(rbp, kFunctionOffset));
854 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 837 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
855 838
856 // Compute the receiver. 839 // Compute the receiver.
857 Label call_to_object, use_global_receiver, push_receiver; 840 Label call_to_object, use_global_receiver, push_receiver;
858 __ movq(rbx, Operand(rbp, kReceiverOffset)); 841 __ movq(rbx, Operand(rbp, kReceiverOffset));
859 842
860 // Do not transform the receiver for strict mode functions. 843 // Do not transform the receiver for strict mode functions.
861 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 844 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
862 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset), 845 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
863 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); 846 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
864 __ j(not_equal, &push_receiver); 847 __ j(not_equal, &push_receiver);
865 848
866 // Do not transform the receiver for natives. 849 // Do not transform the receiver for natives.
867 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset), 850 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
868 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); 851 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
869 __ j(not_equal, &push_receiver); 852 __ j(not_equal, &push_receiver);
870 853
871 // Compute the receiver in non-strict mode. 854 // Compute the receiver in non-strict mode.
872 __ JumpIfSmi(rbx, &call_to_object, Label::kNear); 855 __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
873 __ CompareRoot(rbx, Heap::kNullValueRootIndex); 856 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
874 __ j(equal, &use_global_receiver); 857 __ j(equal, &use_global_receiver);
875 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); 858 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
876 __ j(equal, &use_global_receiver); 859 __ j(equal, &use_global_receiver);
877 860
878 // If given receiver is already a JavaScript object then there's no 861 // If given receiver is already a JavaScript object then there's no
879 // reason for converting it. 862 // reason for converting it.
880 STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE); 863 STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
881 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 864 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
882 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); 865 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
883 __ j(above_equal, &push_receiver); 866 __ j(above_equal, &push_receiver);
884 867
885 // Convert the receiver to an object. 868 // Convert the receiver to an object.
886 __ bind(&call_to_object); 869 __ bind(&call_to_object);
887 __ push(rbx); 870 __ push(rbx);
888 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 871 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
889 __ movq(rbx, rax); 872 __ movq(rbx, rax);
890 __ jmp(&push_receiver, Label::kNear); 873 __ jmp(&push_receiver, Label::kNear);
891 874
892 // Use the current global receiver object as the receiver. 875 // Use the current global receiver object as the receiver.
893 __ bind(&use_global_receiver); 876 __ bind(&use_global_receiver);
894 const int kGlobalOffset = 877 const int kGlobalOffset =
895 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; 878 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
896 __ movq(rbx, FieldOperand(rsi, kGlobalOffset)); 879 __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
897 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset)); 880 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
898 __ movq(rbx, FieldOperand(rbx, kGlobalOffset)); 881 __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
899 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); 882 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
900 883
901 // Push the receiver. 884 // Push the receiver.
902 __ bind(&push_receiver); 885 __ bind(&push_receiver);
903 __ push(rbx); 886 __ push(rbx);
904 887
905 // Copy all arguments from the array to the stack. 888 // Copy all arguments from the array to the stack.
906 Label entry, loop; 889 Label entry, loop;
907 __ movq(rax, Operand(rbp, kIndexOffset)); 890 __ movq(rax, Operand(rbp, kIndexOffset));
908 __ jmp(&entry); 891 __ jmp(&entry);
909 __ bind(&loop); 892 __ bind(&loop);
910 __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments 893 __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
911 894
912 // Use inline caching to speed up access to arguments. 895 // Use inline caching to speed up access to arguments.
913 Handle<Code> ic = 896 Handle<Code> ic =
914 masm->isolate()->builtins()->KeyedLoadIC_Initialize(); 897 masm->isolate()->builtins()->KeyedLoadIC_Initialize();
915 __ Call(ic, RelocInfo::CODE_TARGET); 898 __ Call(ic, RelocInfo::CODE_TARGET);
916 // It is important that we do not have a test instruction after the 899 // It is important that we do not have a test instruction after the
917 // call. A test instruction after the call is used to indicate that 900 // call. A test instruction after the call is used to indicate that
918 // we have generated an inline version of the keyed load. In this 901 // we have generated an inline version of the keyed load. In this
919 // case, we know that we are not generating a test instruction next. 902 // case, we know that we are not generating a test instruction next.
920 903
921 // Push the nth argument. 904 // Push the nth argument.
922 __ push(rax); 905 __ push(rax);
923 906
924 // Update the index on the stack and in register rax. 907 // Update the index on the stack and in register rax.
925 __ movq(rax, Operand(rbp, kIndexOffset)); 908 __ movq(rax, Operand(rbp, kIndexOffset));
926 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); 909 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
927 __ movq(Operand(rbp, kIndexOffset), rax); 910 __ movq(Operand(rbp, kIndexOffset), rax);
928 911
929 __ bind(&entry); 912 __ bind(&entry);
930 __ cmpq(rax, Operand(rbp, kLimitOffset)); 913 __ cmpq(rax, Operand(rbp, kLimitOffset));
931 __ j(not_equal, &loop); 914 __ j(not_equal, &loop);
932 915
933 // Invoke the function. 916 // Invoke the function.
934 ParameterCount actual(rax); 917 ParameterCount actual(rax);
935 __ SmiToInteger32(rax, rax); 918 __ SmiToInteger32(rax, rax);
936 __ movq(rdi, Operand(rbp, kFunctionOffset)); 919 __ movq(rdi, Operand(rbp, kFunctionOffset));
937 __ InvokeFunction(rdi, actual, CALL_FUNCTION, 920 __ InvokeFunction(rdi, actual, CALL_FUNCTION,
938 NullCallWrapper(), CALL_AS_METHOD); 921 NullCallWrapper(), CALL_AS_METHOD);
939 922
940 // Leave internal frame. 923 __ LeaveInternalFrame();
941 }
942 __ ret(3 * kPointerSize); // remove function, receiver, and arguments 924 __ ret(3 * kPointerSize); // remove function, receiver, and arguments
943 } 925 }
944 926
945 927
946 // Number of empty elements to allocate for an empty array. 928 // Number of empty elements to allocate for an empty array.
947 static const int kPreallocatedArrayElements = 4; 929 static const int kPreallocatedArrayElements = 4;
948 930
949 931
950 // Allocate an empty JSArray. The allocated array is put into the result 932 // Allocate an empty JSArray. The allocated array is put into the result
951 // register. If the parameter initial_capacity is larger than zero an elements 933 // register. If the parameter initial_capacity is larger than zero an elements
(...skipping 539 matching lines...) Expand 10 before | Expand all | Expand 10 after
1491 // should perform a stack guard check so we can get interrupts while 1473 // should perform a stack guard check so we can get interrupts while
1492 // waiting for on-stack replacement. 1474 // waiting for on-stack replacement.
1493 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 1475 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1494 __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); 1476 __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
1495 __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset)); 1477 __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
1496 __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset)); 1478 __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset));
1497 __ j(greater, &stack_check); 1479 __ j(greater, &stack_check);
1498 1480
1499 // Pass the function to optimize as the argument to the on-stack 1481 // Pass the function to optimize as the argument to the on-stack
1500 // replacement runtime function. 1482 // replacement runtime function.
1501 { 1483 __ EnterInternalFrame();
1502 FrameScope scope(masm, StackFrame::INTERNAL); 1484 __ push(rax);
1503 __ push(rax); 1485 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1504 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); 1486 __ LeaveInternalFrame();
1505 }
1506 1487
1507 // If the result was -1 it means that we couldn't optimize the 1488 // If the result was -1 it means that we couldn't optimize the
1508 // function. Just return and continue in the unoptimized version. 1489 // function. Just return and continue in the unoptimized version.
1509 Label skip; 1490 Label skip;
1510 __ SmiCompare(rax, Smi::FromInt(-1)); 1491 __ SmiCompare(rax, Smi::FromInt(-1));
1511 __ j(not_equal, &skip, Label::kNear); 1492 __ j(not_equal, &skip, Label::kNear);
1512 __ ret(0); 1493 __ ret(0);
1513 1494
1514 // If we decide not to perform on-stack replacement we perform a 1495 // If we decide not to perform on-stack replacement we perform a
1515 // stack guard check to enable interrupts. 1496 // stack guard check to enable interrupts.
1516 __ bind(&stack_check); 1497 __ bind(&stack_check);
1517 Label ok; 1498 Label ok;
1518 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 1499 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
1519 __ j(above_equal, &ok, Label::kNear); 1500 __ j(above_equal, &ok, Label::kNear);
1520 1501
1521 StackCheckStub stub; 1502 StackCheckStub stub;
1522 __ TailCallStub(&stub); 1503 __ TailCallStub(&stub);
1523 if (FLAG_debug_code) { 1504 __ Abort("Unreachable code: returned from tail call.");
1524 __ Abort("Unreachable code: returned from tail call.");
1525 }
1526 __ bind(&ok); 1505 __ bind(&ok);
1527 __ ret(0); 1506 __ ret(0);
1528 1507
1529 __ bind(&skip); 1508 __ bind(&skip);
1530 // Untag the AST id and push it on the stack. 1509 // Untag the AST id and push it on the stack.
1531 __ SmiToInteger32(rax, rax); 1510 __ SmiToInteger32(rax, rax);
1532 __ push(rax); 1511 __ push(rax);
1533 1512
1534 // Generate the code for doing the frame-to-frame translation using 1513 // Generate the code for doing the frame-to-frame translation using
1535 // the deoptimizer infrastructure. 1514 // the deoptimizer infrastructure.
1536 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR); 1515 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
1537 generator.Generate(); 1516 generator.Generate();
1538 } 1517 }
1539 1518
1540 1519
1541 #undef __ 1520 #undef __
1542 1521
1543 } } // namespace v8::internal 1522 } } // namespace v8::internal
1544 1523
1545 #endif // V8_TARGET_ARCH_X64 1524 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/macro-assembler.h ('k') | src/x64/code-stubs-x64.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698