Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(568)

Side by Side Diff: src/x64/builtins-x64.cc

Issue 105503006: Replace movq with movp for X64 when the operand size is kPointerSize (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebased with bleeding_edge Created 6 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « no previous file | src/x64/code-stubs-x64.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 71 matching lines...) Expand 10 before | Expand all | Expand 10 after
82 // Function is also the parameter to the runtime call. 82 // Function is also the parameter to the runtime call.
83 __ push(rdi); 83 __ push(rdi);
84 84
85 __ CallRuntime(function_id, 1); 85 __ CallRuntime(function_id, 1);
86 // Restore receiver. 86 // Restore receiver.
87 __ pop(rdi); 87 __ pop(rdi);
88 } 88 }
89 89
90 90
91 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { 91 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
92 __ movq(kScratchRegister, 92 __ movp(kScratchRegister,
93 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 93 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
94 __ movq(kScratchRegister, 94 __ movp(kScratchRegister,
95 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset)); 95 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
96 __ lea(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize)); 96 __ lea(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
97 __ jmp(kScratchRegister); 97 __ jmp(kScratchRegister);
98 } 98 }
99 99
100 100
101 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) { 101 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
102 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); 102 __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
103 __ jmp(rax); 103 __ jmp(rax);
104 } 104 }
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after
154 ExternalReference debug_step_in_fp = 154 ExternalReference debug_step_in_fp =
155 ExternalReference::debug_step_in_fp_address(masm->isolate()); 155 ExternalReference::debug_step_in_fp_address(masm->isolate());
156 __ Move(kScratchRegister, debug_step_in_fp); 156 __ Move(kScratchRegister, debug_step_in_fp);
157 __ cmpq(Operand(kScratchRegister, 0), Immediate(0)); 157 __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
158 __ j(not_equal, &rt_call); 158 __ j(not_equal, &rt_call);
159 #endif 159 #endif
160 160
161 // Verified that the constructor is a JSFunction. 161 // Verified that the constructor is a JSFunction.
162 // Load the initial map and verify that it is in fact a map. 162 // Load the initial map and verify that it is in fact a map.
163 // rdi: constructor 163 // rdi: constructor
164 __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 164 __ movp(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
165 // Will both indicate a NULL and a Smi 165 // Will both indicate a NULL and a Smi
166 ASSERT(kSmiTag == 0); 166 ASSERT(kSmiTag == 0);
167 __ JumpIfSmi(rax, &rt_call); 167 __ JumpIfSmi(rax, &rt_call);
168 // rdi: constructor 168 // rdi: constructor
169 // rax: initial map (if proven valid below) 169 // rax: initial map (if proven valid below)
170 __ CmpObjectType(rax, MAP_TYPE, rbx); 170 __ CmpObjectType(rax, MAP_TYPE, rbx);
171 __ j(not_equal, &rt_call); 171 __ j(not_equal, &rt_call);
172 172
173 // Check that the constructor is not constructing a JSFunction (see 173 // Check that the constructor is not constructing a JSFunction (see
174 // comments in Runtime_NewObject in runtime.cc). In which case the 174 // comments in Runtime_NewObject in runtime.cc). In which case the
175 // initial map's instance type would be JS_FUNCTION_TYPE. 175 // initial map's instance type would be JS_FUNCTION_TYPE.
176 // rdi: constructor 176 // rdi: constructor
177 // rax: initial map 177 // rax: initial map
178 __ CmpInstanceType(rax, JS_FUNCTION_TYPE); 178 __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
179 __ j(equal, &rt_call); 179 __ j(equal, &rt_call);
180 180
181 if (count_constructions) { 181 if (count_constructions) {
182 Label allocate; 182 Label allocate;
183 // Decrease generous allocation count. 183 // Decrease generous allocation count.
184 __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 184 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
185 __ decb(FieldOperand(rcx, 185 __ decb(FieldOperand(rcx,
186 SharedFunctionInfo::kConstructionCountOffset)); 186 SharedFunctionInfo::kConstructionCountOffset));
187 __ j(not_zero, &allocate); 187 __ j(not_zero, &allocate);
188 188
189 __ push(rax); 189 __ push(rax);
190 __ push(rdi); 190 __ push(rdi);
191 191
192 __ push(rdi); // constructor 192 __ push(rdi); // constructor
193 // The call will replace the stub, so the countdown is only done once. 193 // The call will replace the stub, so the countdown is only done once.
194 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); 194 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
(...skipping 11 matching lines...) Expand all
206 __ Allocate(rdi, 206 __ Allocate(rdi,
207 rbx, 207 rbx,
208 rdi, 208 rdi,
209 no_reg, 209 no_reg,
210 &rt_call, 210 &rt_call,
211 NO_ALLOCATION_FLAGS); 211 NO_ALLOCATION_FLAGS);
212 // Allocated the JSObject, now initialize the fields. 212 // Allocated the JSObject, now initialize the fields.
213 // rax: initial map 213 // rax: initial map
214 // rbx: JSObject (not HeapObject tagged - the actual address). 214 // rbx: JSObject (not HeapObject tagged - the actual address).
215 // rdi: start of next object 215 // rdi: start of next object
216 __ movq(Operand(rbx, JSObject::kMapOffset), rax); 216 __ movp(Operand(rbx, JSObject::kMapOffset), rax);
217 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); 217 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
218 __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx); 218 __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
219 __ movq(Operand(rbx, JSObject::kElementsOffset), rcx); 219 __ movp(Operand(rbx, JSObject::kElementsOffset), rcx);
220 // Set extra fields in the newly allocated object. 220 // Set extra fields in the newly allocated object.
221 // rax: initial map 221 // rax: initial map
222 // rbx: JSObject 222 // rbx: JSObject
223 // rdi: start of next object 223 // rdi: start of next object
224 __ lea(rcx, Operand(rbx, JSObject::kHeaderSize)); 224 __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
225 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); 225 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
226 if (count_constructions) { 226 if (count_constructions) {
227 __ movzxbq(rsi, 227 __ movzxbq(rsi,
228 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset)); 228 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
229 __ lea(rsi, 229 __ lea(rsi,
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
278 no_reg, 278 no_reg,
279 &undo_allocation, 279 &undo_allocation,
280 RESULT_CONTAINS_TOP); 280 RESULT_CONTAINS_TOP);
281 281
282 // Initialize the FixedArray. 282 // Initialize the FixedArray.
283 // rbx: JSObject 283 // rbx: JSObject
284 // rdi: FixedArray 284 // rdi: FixedArray
285 // rdx: number of elements 285 // rdx: number of elements
286 // rax: start of next object 286 // rax: start of next object
287 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex); 287 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
288 __ movq(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map 288 __ movp(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
289 __ Integer32ToSmi(rdx, rdx); 289 __ Integer32ToSmi(rdx, rdx);
290 __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length 290 __ movp(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
291 291
292 // Initialize the fields to undefined. 292 // Initialize the fields to undefined.
293 // rbx: JSObject 293 // rbx: JSObject
294 // rdi: FixedArray 294 // rdi: FixedArray
295 // rax: start of next object 295 // rax: start of next object
296 // rdx: number of elements 296 // rdx: number of elements
297 { Label loop, entry; 297 { Label loop, entry;
298 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); 298 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
299 __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize)); 299 __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
300 __ jmp(&entry); 300 __ jmp(&entry);
301 __ bind(&loop); 301 __ bind(&loop);
302 __ movq(Operand(rcx, 0), rdx); 302 __ movp(Operand(rcx, 0), rdx);
303 __ addq(rcx, Immediate(kPointerSize)); 303 __ addq(rcx, Immediate(kPointerSize));
304 __ bind(&entry); 304 __ bind(&entry);
305 __ cmpq(rcx, rax); 305 __ cmpq(rcx, rax);
306 __ j(below, &loop); 306 __ j(below, &loop);
307 } 307 }
308 308
309 // Store the initialized FixedArray into the properties field of 309 // Store the initialized FixedArray into the properties field of
310 // the JSObject 310 // the JSObject
311 // rbx: JSObject 311 // rbx: JSObject
312 // rdi: FixedArray 312 // rdi: FixedArray
313 __ or_(rdi, Immediate(kHeapObjectTag)); // add the heap tag 313 __ or_(rdi, Immediate(kHeapObjectTag)); // add the heap tag
314 __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi); 314 __ movp(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);
315 315
316 316
317 // Continue with JSObject being successfully allocated 317 // Continue with JSObject being successfully allocated
318 // rbx: JSObject 318 // rbx: JSObject
319 __ jmp(&allocated); 319 __ jmp(&allocated);
320 320
321 // Undo the setting of the new top so that the heap is verifiable. For 321 // Undo the setting of the new top so that the heap is verifiable. For
322 // example, the map's unused properties potentially do not match the 322 // example, the map's unused properties potentially do not match the
323 // allocated objects unused properties. 323 // allocated objects unused properties.
324 // rbx: JSObject (previous new top) 324 // rbx: JSObject (previous new top)
325 __ bind(&undo_allocation); 325 __ bind(&undo_allocation);
326 __ UndoAllocationInNewSpace(rbx); 326 __ UndoAllocationInNewSpace(rbx);
327 } 327 }
328 328
329 // Allocate the new receiver object using the runtime call. 329 // Allocate the new receiver object using the runtime call.
330 // rdi: function (constructor) 330 // rdi: function (constructor)
331 __ bind(&rt_call); 331 __ bind(&rt_call);
332 // Must restore rdi (constructor) before calling runtime. 332 // Must restore rdi (constructor) before calling runtime.
333 __ movq(rdi, Operand(rsp, 0)); 333 __ movp(rdi, Operand(rsp, 0));
334 __ push(rdi); 334 __ push(rdi);
335 __ CallRuntime(Runtime::kNewObject, 1); 335 __ CallRuntime(Runtime::kNewObject, 1);
336 __ movq(rbx, rax); // store result in rbx 336 __ movp(rbx, rax); // store result in rbx
337 337
338 // New object allocated. 338 // New object allocated.
339 // rbx: newly allocated object 339 // rbx: newly allocated object
340 __ bind(&allocated); 340 __ bind(&allocated);
341 // Retrieve the function from the stack. 341 // Retrieve the function from the stack.
342 __ pop(rdi); 342 __ pop(rdi);
343 343
344 // Retrieve smi-tagged arguments count from the stack. 344 // Retrieve smi-tagged arguments count from the stack.
345 __ movq(rax, Operand(rsp, 0)); 345 __ movp(rax, Operand(rsp, 0));
346 __ SmiToInteger32(rax, rax); 346 __ SmiToInteger32(rax, rax);
347 347
348 // Push the allocated receiver to the stack. We need two copies 348 // Push the allocated receiver to the stack. We need two copies
349 // because we may have to return the original one and the calling 349 // because we may have to return the original one and the calling
350 // conventions dictate that the called function pops the receiver. 350 // conventions dictate that the called function pops the receiver.
351 __ push(rbx); 351 __ push(rbx);
352 __ push(rbx); 352 __ push(rbx);
353 353
354 // Set up pointer to last argument. 354 // Set up pointer to last argument.
355 __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset)); 355 __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
356 356
357 // Copy arguments and receiver to the expression stack. 357 // Copy arguments and receiver to the expression stack.
358 Label loop, entry; 358 Label loop, entry;
359 __ movq(rcx, rax); 359 __ movp(rcx, rax);
360 __ jmp(&entry); 360 __ jmp(&entry);
361 __ bind(&loop); 361 __ bind(&loop);
362 __ push(Operand(rbx, rcx, times_pointer_size, 0)); 362 __ push(Operand(rbx, rcx, times_pointer_size, 0));
363 __ bind(&entry); 363 __ bind(&entry);
364 __ decq(rcx); 364 __ decq(rcx);
365 __ j(greater_equal, &loop); 365 __ j(greater_equal, &loop);
366 366
367 // Call the function. 367 // Call the function.
368 if (is_api_function) { 368 if (is_api_function) {
369 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 369 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
370 Handle<Code> code = 370 Handle<Code> code =
371 masm->isolate()->builtins()->HandleApiCallConstruct(); 371 masm->isolate()->builtins()->HandleApiCallConstruct();
372 __ Call(code, RelocInfo::CODE_TARGET); 372 __ Call(code, RelocInfo::CODE_TARGET);
373 } else { 373 } else {
374 ParameterCount actual(rax); 374 ParameterCount actual(rax);
375 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper()); 375 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
376 } 376 }
377 377
378 // Store offset of return address for deoptimizer. 378 // Store offset of return address for deoptimizer.
379 if (!is_api_function && !count_constructions) { 379 if (!is_api_function && !count_constructions) {
380 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset()); 380 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
381 } 381 }
382 382
383 // Restore context from the frame. 383 // Restore context from the frame.
384 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 384 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
385 385
386 // If the result is an object (in the ECMA sense), we should get rid 386 // If the result is an object (in the ECMA sense), we should get rid
387 // of the receiver and use the result; see ECMA-262 section 13.2.2-7 387 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
388 // on page 74. 388 // on page 74.
389 Label use_receiver, exit; 389 Label use_receiver, exit;
390 // If the result is a smi, it is *not* an object in the ECMA sense. 390 // If the result is a smi, it is *not* an object in the ECMA sense.
391 __ JumpIfSmi(rax, &use_receiver); 391 __ JumpIfSmi(rax, &use_receiver);
392 392
393 // If the type of the result (stored in its map) is less than 393 // If the type of the result (stored in its map) is less than
394 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense. 394 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
395 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 395 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
396 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); 396 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
397 __ j(above_equal, &exit); 397 __ j(above_equal, &exit);
398 398
399 // Throw away the result of the constructor invocation and use the 399 // Throw away the result of the constructor invocation and use the
400 // on-stack receiver as the result. 400 // on-stack receiver as the result.
401 __ bind(&use_receiver); 401 __ bind(&use_receiver);
402 __ movq(rax, Operand(rsp, 0)); 402 __ movp(rax, Operand(rsp, 0));
403 403
404 // Restore the arguments count and leave the construct frame. 404 // Restore the arguments count and leave the construct frame.
405 __ bind(&exit); 405 __ bind(&exit);
406 __ movq(rbx, Operand(rsp, kPointerSize)); // Get arguments count. 406 __ movp(rbx, Operand(rsp, kPointerSize)); // Get arguments count.
407 407
408 // Leave construct frame. 408 // Leave construct frame.
409 } 409 }
410 410
411 // Remove caller arguments from the stack and return. 411 // Remove caller arguments from the stack and return.
412 __ PopReturnAddressTo(rcx); 412 __ PopReturnAddressTo(rcx);
413 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); 413 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
414 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); 414 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
415 __ PushReturnAddressFrom(rcx); 415 __ PushReturnAddressFrom(rcx);
416 Counters* counters = masm->isolate()->counters(); 416 Counters* counters = masm->isolate()->counters();
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
460 // r8 : receiver 460 // r8 : receiver
461 // r9 : argc 461 // r9 : argc
462 // [rsp+0x20] : argv 462 // [rsp+0x20] : argv
463 463
464 // Clear the context before we push it when entering the internal frame. 464 // Clear the context before we push it when entering the internal frame.
465 __ Set(rsi, 0); 465 __ Set(rsi, 0);
466 // Enter an internal frame. 466 // Enter an internal frame.
467 FrameScope scope(masm, StackFrame::INTERNAL); 467 FrameScope scope(masm, StackFrame::INTERNAL);
468 468
469 // Load the function context into rsi. 469 // Load the function context into rsi.
470 __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset)); 470 __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
471 471
472 // Push the function and the receiver onto the stack. 472 // Push the function and the receiver onto the stack.
473 __ push(rdx); 473 __ push(rdx);
474 __ push(r8); 474 __ push(r8);
475 475
476 // Load the number of arguments and setup pointer to the arguments. 476 // Load the number of arguments and setup pointer to the arguments.
477 __ movq(rax, r9); 477 __ movp(rax, r9);
478 // Load the previous frame pointer to access C argument on stack 478 // Load the previous frame pointer to access C argument on stack
479 __ movq(kScratchRegister, Operand(rbp, 0)); 479 __ movp(kScratchRegister, Operand(rbp, 0));
480 __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset)); 480 __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
481 // Load the function pointer into rdi. 481 // Load the function pointer into rdi.
482 __ movq(rdi, rdx); 482 __ movp(rdi, rdx);
483 #else // _WIN64 483 #else // _WIN64
484 // GCC parameters in: 484 // GCC parameters in:
485 // rdi : entry (ignored) 485 // rdi : entry (ignored)
486 // rsi : function 486 // rsi : function
487 // rdx : receiver 487 // rdx : receiver
488 // rcx : argc 488 // rcx : argc
489 // r8 : argv 489 // r8 : argv
490 490
491 __ movq(rdi, rsi); 491 __ movp(rdi, rsi);
492 // rdi : function 492 // rdi : function
493 493
494 // Clear the context before we push it when entering the internal frame. 494 // Clear the context before we push it when entering the internal frame.
495 __ Set(rsi, 0); 495 __ Set(rsi, 0);
496 // Enter an internal frame. 496 // Enter an internal frame.
497 FrameScope scope(masm, StackFrame::INTERNAL); 497 FrameScope scope(masm, StackFrame::INTERNAL);
498 498
499 // Push the function and receiver and setup the context. 499 // Push the function and receiver and setup the context.
500 __ push(rdi); 500 __ push(rdi);
501 __ push(rdx); 501 __ push(rdx);
502 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 502 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
503 503
504 // Load the number of arguments and setup pointer to the arguments. 504 // Load the number of arguments and setup pointer to the arguments.
505 __ movq(rax, rcx); 505 __ movp(rax, rcx);
506 __ movq(rbx, r8); 506 __ movp(rbx, r8);
507 #endif // _WIN64 507 #endif // _WIN64
508 508
509 // Current stack contents: 509 // Current stack contents:
510 // [rsp + 2 * kPointerSize ... ] : Internal frame 510 // [rsp + 2 * kPointerSize ... ] : Internal frame
511 // [rsp + kPointerSize] : function 511 // [rsp + kPointerSize] : function
512 // [rsp] : receiver 512 // [rsp] : receiver
513 // Current register contents: 513 // Current register contents:
514 // rax : argc 514 // rax : argc
515 // rbx : argv 515 // rbx : argv
516 // rsi : context 516 // rsi : context
517 // rdi : function 517 // rdi : function
518 518
519 // Copy arguments to the stack in a loop. 519 // Copy arguments to the stack in a loop.
520 // Register rbx points to array of pointers to handle locations. 520 // Register rbx points to array of pointers to handle locations.
521 // Push the values of these handles. 521 // Push the values of these handles.
522 Label loop, entry; 522 Label loop, entry;
523 __ Set(rcx, 0); // Set loop variable to 0. 523 __ Set(rcx, 0); // Set loop variable to 0.
524 __ jmp(&entry); 524 __ jmp(&entry);
525 __ bind(&loop); 525 __ bind(&loop);
526 __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0)); 526 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
527 __ push(Operand(kScratchRegister, 0)); // dereference handle 527 __ push(Operand(kScratchRegister, 0)); // dereference handle
528 __ addq(rcx, Immediate(1)); 528 __ addq(rcx, Immediate(1));
529 __ bind(&entry); 529 __ bind(&entry);
530 __ cmpq(rcx, rax); 530 __ cmpq(rcx, rax);
531 __ j(not_equal, &loop); 531 __ j(not_equal, &loop);
532 532
533 // Invoke the code. 533 // Invoke the code.
534 if (is_construct) { 534 if (is_construct) {
535 // No type feedback cell is available 535 // No type feedback cell is available
536 Handle<Object> undefined_sentinel( 536 Handle<Object> undefined_sentinel(
(...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after
603 // garbage collection which allows us to save/restore the registers without 603 // garbage collection which allows us to save/restore the registers without
604 // worrying about which of them contain pointers. We also don't build an 604 // worrying about which of them contain pointers. We also don't build an
605 // internal frame to make the code faster, since we shouldn't have to do stack 605 // internal frame to make the code faster, since we shouldn't have to do stack
606 // crawls in MakeCodeYoung. This seems a bit fragile. 606 // crawls in MakeCodeYoung. This seems a bit fragile.
607 607
608 // Re-execute the code that was patched back to the young age when 608 // Re-execute the code that was patched back to the young age when
609 // the stub returns. 609 // the stub returns.
610 __ subq(Operand(rsp, 0), Immediate(5)); 610 __ subq(Operand(rsp, 0), Immediate(5));
611 __ Pushad(); 611 __ Pushad();
612 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate())); 612 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
613 __ movq(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize)); 613 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
614 { // NOLINT 614 { // NOLINT
615 FrameScope scope(masm, StackFrame::MANUAL); 615 FrameScope scope(masm, StackFrame::MANUAL);
616 __ PrepareCallCFunction(2); 616 __ PrepareCallCFunction(2);
617 __ CallCFunction( 617 __ CallCFunction(
618 ExternalReference::get_make_code_young_function(masm->isolate()), 2); 618 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
619 } 619 }
620 __ Popad(); 620 __ Popad();
621 __ ret(0); 621 __ ret(0);
622 } 622 }
623 623
(...skipping 11 matching lines...) Expand all
635 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR 635 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
636 636
637 637
638 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { 638 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
639 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact 639 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
640 // that make_code_young doesn't do any garbage collection which allows us to 640 // that make_code_young doesn't do any garbage collection which allows us to
641 // save/restore the registers without worrying about which of them contain 641 // save/restore the registers without worrying about which of them contain
642 // pointers. 642 // pointers.
643 __ Pushad(); 643 __ Pushad();
644 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate())); 644 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
645 __ movq(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize)); 645 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
646 __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)); 646 __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
647 { // NOLINT 647 { // NOLINT
648 FrameScope scope(masm, StackFrame::MANUAL); 648 FrameScope scope(masm, StackFrame::MANUAL);
649 __ PrepareCallCFunction(2); 649 __ PrepareCallCFunction(2);
650 __ CallCFunction( 650 __ CallCFunction(
651 ExternalReference::get_mark_code_as_executed_function(masm->isolate()), 651 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
652 2); 652 2);
653 } 653 }
654 __ Popad(); 654 __ Popad();
655 655
656 // Perform prologue operations usually performed by the young code stub. 656 // Perform prologue operations usually performed by the young code stub.
657 __ PopReturnAddressTo(kScratchRegister); 657 __ PopReturnAddressTo(kScratchRegister);
658 __ push(rbp); // Caller's frame pointer. 658 __ push(rbp); // Caller's frame pointer.
659 __ movq(rbp, rsp); 659 __ movp(rbp, rsp);
660 __ push(rsi); // Callee's context. 660 __ push(rsi); // Callee's context.
661 __ push(rdi); // Callee's JS Function. 661 __ push(rdi); // Callee's JS Function.
662 __ PushReturnAddressFrom(kScratchRegister); 662 __ PushReturnAddressFrom(kScratchRegister);
663 663
664 // Jump to point after the code-age stub. 664 // Jump to point after the code-age stub.
665 __ ret(0); 665 __ ret(0);
666 } 666 }
667 667
668 668
669 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { 669 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
717 // Get the full codegen state from the stack and untag it. 717 // Get the full codegen state from the stack and untag it.
718 __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize)); 718 __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));
719 719
720 // Switch on the state. 720 // Switch on the state.
721 Label not_no_registers, not_tos_rax; 721 Label not_no_registers, not_tos_rax;
722 __ cmpq(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS)); 722 __ cmpq(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
723 __ j(not_equal, &not_no_registers, Label::kNear); 723 __ j(not_equal, &not_no_registers, Label::kNear);
724 __ ret(1 * kPointerSize); // Remove state. 724 __ ret(1 * kPointerSize); // Remove state.
725 725
726 __ bind(&not_no_registers); 726 __ bind(&not_no_registers);
727 __ movq(rax, Operand(rsp, kPCOnStackSize + kPointerSize)); 727 __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
728 __ cmpq(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG)); 728 __ cmpq(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
729 __ j(not_equal, &not_tos_rax, Label::kNear); 729 __ j(not_equal, &not_tos_rax, Label::kNear);
730 __ ret(2 * kPointerSize); // Remove state, rax. 730 __ ret(2 * kPointerSize); // Remove state, rax.
731 731
732 __ bind(&not_tos_rax); 732 __ bind(&not_tos_rax);
733 __ Abort(kNoCasesLeft); 733 __ Abort(kNoCasesLeft);
734 } 734 }
735 735
736 736
737 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { 737 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
(...skipping 30 matching lines...) Expand all
768 __ Push(masm->isolate()->factory()->undefined_value()); 768 __ Push(masm->isolate()->factory()->undefined_value());
769 __ PushReturnAddressFrom(rbx); 769 __ PushReturnAddressFrom(rbx);
770 __ incq(rax); 770 __ incq(rax);
771 __ bind(&done); 771 __ bind(&done);
772 } 772 }
773 773
774 // 2. Get the function to call (passed as receiver) from the stack, check 774 // 2. Get the function to call (passed as receiver) from the stack, check
775 // if it is a function. 775 // if it is a function.
776 Label slow, non_function; 776 Label slow, non_function;
777 StackArgumentsAccessor args(rsp, rax); 777 StackArgumentsAccessor args(rsp, rax);
778 __ movq(rdi, args.GetReceiverOperand()); 778 __ movp(rdi, args.GetReceiverOperand());
779 __ JumpIfSmi(rdi, &non_function); 779 __ JumpIfSmi(rdi, &non_function);
780 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 780 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
781 __ j(not_equal, &slow); 781 __ j(not_equal, &slow);
782 782
783 // 3a. Patch the first argument if necessary when calling a function. 783 // 3a. Patch the first argument if necessary when calling a function.
784 Label shift_arguments; 784 Label shift_arguments;
785 __ Set(rdx, 0); // indicate regular JS_FUNCTION 785 __ Set(rdx, 0); // indicate regular JS_FUNCTION
786 { Label convert_to_object, use_global_receiver, patch_receiver; 786 { Label convert_to_object, use_global_receiver, patch_receiver;
787 // Change context eagerly in case we need the global receiver. 787 // Change context eagerly in case we need the global receiver.
788 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 788 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
789 789
790 // Do not transform the receiver for strict mode functions. 790 // Do not transform the receiver for strict mode functions.
791 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 791 __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
792 __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset), 792 __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
793 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); 793 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
794 __ j(not_equal, &shift_arguments); 794 __ j(not_equal, &shift_arguments);
795 795
796 // Do not transform the receiver for natives. 796 // Do not transform the receiver for natives.
797 // SharedFunctionInfo is already loaded into rbx. 797 // SharedFunctionInfo is already loaded into rbx.
798 __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset), 798 __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
799 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); 799 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
800 __ j(not_zero, &shift_arguments); 800 __ j(not_zero, &shift_arguments);
801 801
802 // Compute the receiver in non-strict mode. 802 // Compute the receiver in non-strict mode.
803 __ movq(rbx, args.GetArgumentOperand(1)); 803 __ movp(rbx, args.GetArgumentOperand(1));
804 __ JumpIfSmi(rbx, &convert_to_object, Label::kNear); 804 __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);
805 805
806 __ CompareRoot(rbx, Heap::kNullValueRootIndex); 806 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
807 __ j(equal, &use_global_receiver); 807 __ j(equal, &use_global_receiver);
808 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); 808 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
809 __ j(equal, &use_global_receiver); 809 __ j(equal, &use_global_receiver);
810 810
811 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 811 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
812 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); 812 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
813 __ j(above_equal, &shift_arguments); 813 __ j(above_equal, &shift_arguments);
814 814
815 __ bind(&convert_to_object); 815 __ bind(&convert_to_object);
816 { 816 {
817 // Enter an internal frame in order to preserve argument count. 817 // Enter an internal frame in order to preserve argument count.
818 FrameScope scope(masm, StackFrame::INTERNAL); 818 FrameScope scope(masm, StackFrame::INTERNAL);
819 __ Integer32ToSmi(rax, rax); 819 __ Integer32ToSmi(rax, rax);
820 __ push(rax); 820 __ push(rax);
821 821
822 __ push(rbx); 822 __ push(rbx);
823 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 823 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
824 __ movq(rbx, rax); 824 __ movp(rbx, rax);
825 __ Set(rdx, 0); // indicate regular JS_FUNCTION 825 __ Set(rdx, 0); // indicate regular JS_FUNCTION
826 826
827 __ pop(rax); 827 __ pop(rax);
828 __ SmiToInteger32(rax, rax); 828 __ SmiToInteger32(rax, rax);
829 } 829 }
830 830
831 // Restore the function to rdi. 831 // Restore the function to rdi.
832 __ movq(rdi, args.GetReceiverOperand()); 832 __ movp(rdi, args.GetReceiverOperand());
833 __ jmp(&patch_receiver, Label::kNear); 833 __ jmp(&patch_receiver, Label::kNear);
834 834
835 __ bind(&use_global_receiver); 835 __ bind(&use_global_receiver);
836 __ movq(rbx, 836 __ movp(rbx,
837 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 837 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
838 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); 838 __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
839 839
840 __ bind(&patch_receiver); 840 __ bind(&patch_receiver);
841 __ movq(args.GetArgumentOperand(1), rbx); 841 __ movp(args.GetArgumentOperand(1), rbx);
842 842
843 __ jmp(&shift_arguments); 843 __ jmp(&shift_arguments);
844 } 844 }
845 845
846 // 3b. Check for function proxy. 846 // 3b. Check for function proxy.
847 __ bind(&slow); 847 __ bind(&slow);
848 __ Set(rdx, 1); // indicate function proxy 848 __ Set(rdx, 1); // indicate function proxy
849 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); 849 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
850 __ j(equal, &shift_arguments); 850 __ j(equal, &shift_arguments);
851 __ bind(&non_function); 851 __ bind(&non_function);
852 __ Set(rdx, 2); // indicate non-function 852 __ Set(rdx, 2); // indicate non-function
853 853
854 // 3c. Patch the first argument when calling a non-function. The 854 // 3c. Patch the first argument when calling a non-function. The
855 // CALL_NON_FUNCTION builtin expects the non-function callee as 855 // CALL_NON_FUNCTION builtin expects the non-function callee as
856 // receiver, so overwrite the first argument which will ultimately 856 // receiver, so overwrite the first argument which will ultimately
857 // become the receiver. 857 // become the receiver.
858 __ movq(args.GetArgumentOperand(1), rdi); 858 __ movp(args.GetArgumentOperand(1), rdi);
859 859
860 // 4. Shift arguments and return address one slot down on the stack 860 // 4. Shift arguments and return address one slot down on the stack
861 // (overwriting the original receiver). Adjust argument count to make 861 // (overwriting the original receiver). Adjust argument count to make
862 // the original first argument the new receiver. 862 // the original first argument the new receiver.
863 __ bind(&shift_arguments); 863 __ bind(&shift_arguments);
864 { Label loop; 864 { Label loop;
865 __ movq(rcx, rax); 865 __ movp(rcx, rax);
866 __ bind(&loop); 866 __ bind(&loop);
867 __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0)); 867 __ movp(rbx, Operand(rsp, rcx, times_pointer_size, 0));
868 __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx); 868 __ movp(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
869 __ decq(rcx); 869 __ decq(rcx);
870 __ j(not_sign, &loop); // While non-negative (to copy return address). 870 __ j(not_sign, &loop); // While non-negative (to copy return address).
871 __ pop(rbx); // Discard copy of return address. 871 __ pop(rbx); // Discard copy of return address.
872 __ decq(rax); // One fewer argument (first argument is new receiver). 872 __ decq(rax); // One fewer argument (first argument is new receiver).
873 } 873 }
874 874
875 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, 875 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
876 // or a function proxy via CALL_FUNCTION_PROXY. 876 // or a function proxy via CALL_FUNCTION_PROXY.
877 { Label function, non_proxy; 877 { Label function, non_proxy;
878 __ testq(rdx, rdx); 878 __ testq(rdx, rdx);
(...skipping 13 matching lines...) Expand all
892 __ bind(&non_proxy); 892 __ bind(&non_proxy);
893 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); 893 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
894 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 894 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
895 RelocInfo::CODE_TARGET); 895 RelocInfo::CODE_TARGET);
896 __ bind(&function); 896 __ bind(&function);
897 } 897 }
898 898
899 // 5b. Get the code to call from the function and check that the number of 899 // 5b. Get the code to call from the function and check that the number of
900 // expected arguments matches what we're providing. If so, jump 900 // expected arguments matches what we're providing. If so, jump
901 // (tail-call) to the code in register edx without checking arguments. 901 // (tail-call) to the code in register edx without checking arguments.
902 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 902 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
903 __ movsxlq(rbx, 903 __ movsxlq(rbx,
904 FieldOperand(rdx, 904 FieldOperand(rdx,
905 SharedFunctionInfo::kFormalParameterCountOffset)); 905 SharedFunctionInfo::kFormalParameterCountOffset));
906 __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 906 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
907 __ cmpq(rax, rbx); 907 __ cmpq(rax, rbx);
908 __ j(not_equal, 908 __ j(not_equal,
909 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 909 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
910 RelocInfo::CODE_TARGET); 910 RelocInfo::CODE_TARGET);
911 911
912 ParameterCount expected(0); 912 ParameterCount expected(0);
913 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper()); 913 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper());
914 } 914 }
915 915
916 916
(...skipping 17 matching lines...) Expand all
934 934
935 __ push(Operand(rbp, kFunctionOffset)); 935 __ push(Operand(rbp, kFunctionOffset));
936 __ push(Operand(rbp, kArgumentsOffset)); 936 __ push(Operand(rbp, kArgumentsOffset));
937 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); 937 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
938 938
939 // Check the stack for overflow. We are not trying to catch 939 // Check the stack for overflow. We are not trying to catch
940 // interruptions (e.g. debug break and preemption) here, so the "real stack 940 // interruptions (e.g. debug break and preemption) here, so the "real stack
941 // limit" is checked. 941 // limit" is checked.
942 Label okay; 942 Label okay;
943 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); 943 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
944 __ movq(rcx, rsp); 944 __ movp(rcx, rsp);
945 // Make rcx the space we have left. The stack might already be overflowed 945 // Make rcx the space we have left. The stack might already be overflowed
946 // here which will cause rcx to become negative. 946 // here which will cause rcx to become negative.
947 __ subq(rcx, kScratchRegister); 947 __ subq(rcx, kScratchRegister);
948 // Make rdx the space we need for the array when it is unrolled onto the 948 // Make rdx the space we need for the array when it is unrolled onto the
949 // stack. 949 // stack.
950 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2); 950 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
951 // Check if the arguments will overflow the stack. 951 // Check if the arguments will overflow the stack.
952 __ cmpq(rcx, rdx); 952 __ cmpq(rcx, rdx);
953 __ j(greater, &okay); // Signed comparison. 953 __ j(greater, &okay); // Signed comparison.
954 954
955 // Out of stack space. 955 // Out of stack space.
956 __ push(Operand(rbp, kFunctionOffset)); 956 __ push(Operand(rbp, kFunctionOffset));
957 __ push(rax); 957 __ push(rax);
958 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); 958 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
959 __ bind(&okay); 959 __ bind(&okay);
960 // End of stack check. 960 // End of stack check.
961 961
962 // Push current index and limit. 962 // Push current index and limit.
963 const int kLimitOffset = 963 const int kLimitOffset =
964 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize; 964 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
965 const int kIndexOffset = kLimitOffset - 1 * kPointerSize; 965 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
966 __ push(rax); // limit 966 __ push(rax); // limit
967 __ push(Immediate(0)); // index 967 __ push(Immediate(0)); // index
968 968
969 // Get the receiver. 969 // Get the receiver.
970 __ movq(rbx, Operand(rbp, kReceiverOffset)); 970 __ movp(rbx, Operand(rbp, kReceiverOffset));
971 971
972 // Check that the function is a JS function (otherwise it must be a proxy). 972 // Check that the function is a JS function (otherwise it must be a proxy).
973 Label push_receiver; 973 Label push_receiver;
974 __ movq(rdi, Operand(rbp, kFunctionOffset)); 974 __ movp(rdi, Operand(rbp, kFunctionOffset));
975 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 975 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
976 __ j(not_equal, &push_receiver); 976 __ j(not_equal, &push_receiver);
977 977
978 // Change context eagerly to get the right global object if necessary. 978 // Change context eagerly to get the right global object if necessary.
979 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 979 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
980 980
981 // Do not transform the receiver for strict mode functions. 981 // Do not transform the receiver for strict mode functions.
982 Label call_to_object, use_global_receiver; 982 Label call_to_object, use_global_receiver;
983 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 983 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
984 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset), 984 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
985 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); 985 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
986 __ j(not_equal, &push_receiver); 986 __ j(not_equal, &push_receiver);
987 987
988 // Do not transform the receiver for natives. 988 // Do not transform the receiver for natives.
989 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset), 989 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
990 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); 990 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
991 __ j(not_equal, &push_receiver); 991 __ j(not_equal, &push_receiver);
992 992
993 // Compute the receiver in non-strict mode. 993 // Compute the receiver in non-strict mode.
994 __ JumpIfSmi(rbx, &call_to_object, Label::kNear); 994 __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
995 __ CompareRoot(rbx, Heap::kNullValueRootIndex); 995 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
996 __ j(equal, &use_global_receiver); 996 __ j(equal, &use_global_receiver);
997 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); 997 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
998 __ j(equal, &use_global_receiver); 998 __ j(equal, &use_global_receiver);
999 999
1000 // If given receiver is already a JavaScript object then there's no 1000 // If given receiver is already a JavaScript object then there's no
1001 // reason for converting it. 1001 // reason for converting it.
1002 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 1002 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1003 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); 1003 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
1004 __ j(above_equal, &push_receiver); 1004 __ j(above_equal, &push_receiver);
1005 1005
1006 // Convert the receiver to an object. 1006 // Convert the receiver to an object.
1007 __ bind(&call_to_object); 1007 __ bind(&call_to_object);
1008 __ push(rbx); 1008 __ push(rbx);
1009 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1009 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1010 __ movq(rbx, rax); 1010 __ movp(rbx, rax);
1011 __ jmp(&push_receiver, Label::kNear); 1011 __ jmp(&push_receiver, Label::kNear);
1012 1012
1013 __ bind(&use_global_receiver); 1013 __ bind(&use_global_receiver);
1014 __ movq(rbx, 1014 __ movp(rbx,
1015 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 1015 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
1016 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); 1016 __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
1017 1017
1018 // Push the receiver. 1018 // Push the receiver.
1019 __ bind(&push_receiver); 1019 __ bind(&push_receiver);
1020 __ push(rbx); 1020 __ push(rbx);
1021 1021
1022 // Copy all arguments from the array to the stack. 1022 // Copy all arguments from the array to the stack.
1023 Label entry, loop; 1023 Label entry, loop;
1024 __ movq(rax, Operand(rbp, kIndexOffset)); 1024 __ movp(rax, Operand(rbp, kIndexOffset));
1025 __ jmp(&entry); 1025 __ jmp(&entry);
1026 __ bind(&loop); 1026 __ bind(&loop);
1027 __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments 1027 __ movp(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
1028 1028
1029 // Use inline caching to speed up access to arguments. 1029 // Use inline caching to speed up access to arguments.
1030 Handle<Code> ic = 1030 Handle<Code> ic =
1031 masm->isolate()->builtins()->KeyedLoadIC_Initialize(); 1031 masm->isolate()->builtins()->KeyedLoadIC_Initialize();
1032 __ Call(ic, RelocInfo::CODE_TARGET); 1032 __ Call(ic, RelocInfo::CODE_TARGET);
1033 // It is important that we do not have a test instruction after the 1033 // It is important that we do not have a test instruction after the
1034 // call. A test instruction after the call is used to indicate that 1034 // call. A test instruction after the call is used to indicate that
1035 // we have generated an inline version of the keyed load. In this 1035 // we have generated an inline version of the keyed load. In this
1036 // case, we know that we are not generating a test instruction next. 1036 // case, we know that we are not generating a test instruction next.
1037 1037
1038 // Push the nth argument. 1038 // Push the nth argument.
1039 __ push(rax); 1039 __ push(rax);
1040 1040
1041 // Update the index on the stack and in register rax. 1041 // Update the index on the stack and in register rax.
1042 __ movq(rax, Operand(rbp, kIndexOffset)); 1042 __ movp(rax, Operand(rbp, kIndexOffset));
1043 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); 1043 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
1044 __ movq(Operand(rbp, kIndexOffset), rax); 1044 __ movp(Operand(rbp, kIndexOffset), rax);
1045 1045
1046 __ bind(&entry); 1046 __ bind(&entry);
1047 __ cmpq(rax, Operand(rbp, kLimitOffset)); 1047 __ cmpq(rax, Operand(rbp, kLimitOffset));
1048 __ j(not_equal, &loop); 1048 __ j(not_equal, &loop);
1049 1049
1050 // Call the function. 1050 // Call the function.
1051 Label call_proxy; 1051 Label call_proxy;
1052 ParameterCount actual(rax); 1052 ParameterCount actual(rax);
1053 __ SmiToInteger32(rax, rax); 1053 __ SmiToInteger32(rax, rax);
1054 __ movq(rdi, Operand(rbp, kFunctionOffset)); 1054 __ movp(rdi, Operand(rbp, kFunctionOffset));
1055 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 1055 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1056 __ j(not_equal, &call_proxy); 1056 __ j(not_equal, &call_proxy);
1057 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper()); 1057 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
1058 1058
1059 frame_scope.GenerateLeaveFrame(); 1059 frame_scope.GenerateLeaveFrame();
1060 __ ret(3 * kPointerSize); // remove this, receiver, and arguments 1060 __ ret(3 * kPointerSize); // remove this, receiver, and arguments
1061 1061
1062 // Call the function proxy. 1062 // Call the function proxy.
1063 __ bind(&call_proxy); 1063 __ bind(&call_proxy);
1064 __ push(rdi); // add function proxy as last argument 1064 __ push(rdi); // add function proxy as last argument
(...skipping 15 matching lines...) Expand all
1080 // -- rsp[0] : return address 1080 // -- rsp[0] : return address
1081 // -- rsp[8] : last argument 1081 // -- rsp[8] : last argument
1082 // ----------------------------------- 1082 // -----------------------------------
1083 Label generic_array_code; 1083 Label generic_array_code;
1084 1084
1085 // Get the InternalArray function. 1085 // Get the InternalArray function.
1086 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi); 1086 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
1087 1087
1088 if (FLAG_debug_code) { 1088 if (FLAG_debug_code) {
1089 // Initial map for the builtin InternalArray functions should be maps. 1089 // Initial map for the builtin InternalArray functions should be maps.
1090 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 1090 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1091 // Will both indicate a NULL and a Smi. 1091 // Will both indicate a NULL and a Smi.
1092 STATIC_ASSERT(kSmiTag == 0); 1092 STATIC_ASSERT(kSmiTag == 0);
1093 Condition not_smi = NegateCondition(masm->CheckSmi(rbx)); 1093 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1094 __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction); 1094 __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
1095 __ CmpObjectType(rbx, MAP_TYPE, rcx); 1095 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1096 __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction); 1096 __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
1097 } 1097 }
1098 1098
1099 // Run the native code for the InternalArray function called as a normal 1099 // Run the native code for the InternalArray function called as a normal
1100 // function. 1100 // function.
1101 // tail call a stub 1101 // tail call a stub
1102 InternalArrayConstructorStub stub(masm->isolate()); 1102 InternalArrayConstructorStub stub(masm->isolate());
1103 __ TailCallStub(&stub); 1103 __ TailCallStub(&stub);
1104 } 1104 }
1105 1105
1106 1106
1107 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { 1107 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1108 // ----------- S t a t e ------------- 1108 // ----------- S t a t e -------------
1109 // -- rax : argc 1109 // -- rax : argc
1110 // -- rsp[0] : return address 1110 // -- rsp[0] : return address
1111 // -- rsp[8] : last argument 1111 // -- rsp[8] : last argument
1112 // ----------------------------------- 1112 // -----------------------------------
1113 Label generic_array_code; 1113 Label generic_array_code;
1114 1114
1115 // Get the Array function. 1115 // Get the Array function.
1116 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi); 1116 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);
1117 1117
1118 if (FLAG_debug_code) { 1118 if (FLAG_debug_code) {
1119 // Initial map for the builtin Array functions should be maps. 1119 // Initial map for the builtin Array functions should be maps.
1120 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 1120 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1121 // Will both indicate a NULL and a Smi. 1121 // Will both indicate a NULL and a Smi.
1122 STATIC_ASSERT(kSmiTag == 0); 1122 STATIC_ASSERT(kSmiTag == 0);
1123 Condition not_smi = NegateCondition(masm->CheckSmi(rbx)); 1123 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1124 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); 1124 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
1125 __ CmpObjectType(rbx, MAP_TYPE, rcx); 1125 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1126 __ Check(equal, kUnexpectedInitialMapForArrayFunction); 1126 __ Check(equal, kUnexpectedInitialMapForArrayFunction);
1127 } 1127 }
1128 1128
1129 // Run the native code for the Array function called as a normal function. 1129 // Run the native code for the Array function called as a normal function.
1130 // tail call a stub 1130 // tail call a stub
(...skipping 22 matching lines...) Expand all
1153 __ cmpq(rdi, rcx); 1153 __ cmpq(rdi, rcx);
1154 __ Assert(equal, kUnexpectedStringFunction); 1154 __ Assert(equal, kUnexpectedStringFunction);
1155 } 1155 }
1156 1156
1157 // Load the first argument into rax and get rid of the rest 1157 // Load the first argument into rax and get rid of the rest
1158 // (including the receiver). 1158 // (including the receiver).
1159 StackArgumentsAccessor args(rsp, rax); 1159 StackArgumentsAccessor args(rsp, rax);
1160 Label no_arguments; 1160 Label no_arguments;
1161 __ testq(rax, rax); 1161 __ testq(rax, rax);
1162 __ j(zero, &no_arguments); 1162 __ j(zero, &no_arguments);
1163 __ movq(rbx, args.GetArgumentOperand(1)); 1163 __ movp(rbx, args.GetArgumentOperand(1));
1164 __ PopReturnAddressTo(rcx); 1164 __ PopReturnAddressTo(rcx);
1165 __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize)); 1165 __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1166 __ PushReturnAddressFrom(rcx); 1166 __ PushReturnAddressFrom(rcx);
1167 __ movq(rax, rbx); 1167 __ movp(rax, rbx);
1168 1168
1169 // Lookup the argument in the number to string cache. 1169 // Lookup the argument in the number to string cache.
1170 Label not_cached, argument_is_string; 1170 Label not_cached, argument_is_string;
1171 __ LookupNumberStringCache(rax, // Input. 1171 __ LookupNumberStringCache(rax, // Input.
1172 rbx, // Result. 1172 rbx, // Result.
1173 rcx, // Scratch 1. 1173 rcx, // Scratch 1.
1174 rdx, // Scratch 2. 1174 rdx, // Scratch 2.
1175 &not_cached); 1175 &not_cached);
1176 __ IncrementCounter(counters->string_ctor_cached_number(), 1); 1176 __ IncrementCounter(counters->string_ctor_cached_number(), 1);
1177 __ bind(&argument_is_string); 1177 __ bind(&argument_is_string);
(...skipping 15 matching lines...) Expand all
1193 1193
1194 // Set the map. 1194 // Set the map.
1195 __ LoadGlobalFunctionInitialMap(rdi, rcx); 1195 __ LoadGlobalFunctionInitialMap(rdi, rcx);
1196 if (FLAG_debug_code) { 1196 if (FLAG_debug_code) {
1197 __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset), 1197 __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
1198 Immediate(JSValue::kSize >> kPointerSizeLog2)); 1198 Immediate(JSValue::kSize >> kPointerSizeLog2));
1199 __ Assert(equal, kUnexpectedStringWrapperInstanceSize); 1199 __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
1200 __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0)); 1200 __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
1201 __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper); 1201 __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
1202 } 1202 }
1203 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rcx); 1203 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
1204 1204
1205 // Set properties and elements. 1205 // Set properties and elements.
1206 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); 1206 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
1207 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rcx); 1207 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
1208 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx); 1208 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx);
1209 1209
1210 // Set the value. 1210 // Set the value.
1211 __ movq(FieldOperand(rax, JSValue::kValueOffset), rbx); 1211 __ movp(FieldOperand(rax, JSValue::kValueOffset), rbx);
1212 1212
1213 // Ensure the object is fully initialized. 1213 // Ensure the object is fully initialized.
1214 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize); 1214 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
1215 1215
1216 // We're done. Return. 1216 // We're done. Return.
1217 __ ret(0); 1217 __ ret(0);
1218 1218
1219 // The argument was not found in the number to string cache. Check 1219 // The argument was not found in the number to string cache. Check
1220 // if it's a string already before calling the conversion builtin. 1220 // if it's a string already before calling the conversion builtin.
1221 Label convert_argument; 1221 Label convert_argument;
1222 __ bind(&not_cached); 1222 __ bind(&not_cached);
1223 STATIC_ASSERT(kSmiTag == 0); 1223 STATIC_ASSERT(kSmiTag == 0);
1224 __ JumpIfSmi(rax, &convert_argument); 1224 __ JumpIfSmi(rax, &convert_argument);
1225 Condition is_string = masm->IsObjectStringType(rax, rbx, rcx); 1225 Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
1226 __ j(NegateCondition(is_string), &convert_argument); 1226 __ j(NegateCondition(is_string), &convert_argument);
1227 __ movq(rbx, rax); 1227 __ movp(rbx, rax);
1228 __ IncrementCounter(counters->string_ctor_string_value(), 1); 1228 __ IncrementCounter(counters->string_ctor_string_value(), 1);
1229 __ jmp(&argument_is_string); 1229 __ jmp(&argument_is_string);
1230 1230
1231 // Invoke the conversion builtin and put the result into rbx. 1231 // Invoke the conversion builtin and put the result into rbx.
1232 __ bind(&convert_argument); 1232 __ bind(&convert_argument);
1233 __ IncrementCounter(counters->string_ctor_conversions(), 1); 1233 __ IncrementCounter(counters->string_ctor_conversions(), 1);
1234 { 1234 {
1235 FrameScope scope(masm, StackFrame::INTERNAL); 1235 FrameScope scope(masm, StackFrame::INTERNAL);
1236 __ push(rdi); // Preserve the function. 1236 __ push(rdi); // Preserve the function.
1237 __ push(rax); 1237 __ push(rax);
1238 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); 1238 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1239 __ pop(rdi); 1239 __ pop(rdi);
1240 } 1240 }
1241 __ movq(rbx, rax); 1241 __ movp(rbx, rax);
1242 __ jmp(&argument_is_string); 1242 __ jmp(&argument_is_string);
1243 1243
1244 // Load the empty string into rbx, remove the receiver from the 1244 // Load the empty string into rbx, remove the receiver from the
1245 // stack, and jump back to the case where the argument is a string. 1245 // stack, and jump back to the case where the argument is a string.
1246 __ bind(&no_arguments); 1246 __ bind(&no_arguments);
1247 __ LoadRoot(rbx, Heap::kempty_stringRootIndex); 1247 __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
1248 __ PopReturnAddressTo(rcx); 1248 __ PopReturnAddressTo(rcx);
1249 __ lea(rsp, Operand(rsp, kPointerSize)); 1249 __ lea(rsp, Operand(rsp, kPointerSize));
1250 __ PushReturnAddressFrom(rcx); 1250 __ PushReturnAddressFrom(rcx);
1251 __ jmp(&argument_is_string); 1251 __ jmp(&argument_is_string);
1252 1252
1253 // At this point the argument is already a string. Call runtime to 1253 // At this point the argument is already a string. Call runtime to
1254 // create a string wrapper. 1254 // create a string wrapper.
1255 __ bind(&gc_required); 1255 __ bind(&gc_required);
1256 __ IncrementCounter(counters->string_ctor_gc_required(), 1); 1256 __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1257 { 1257 {
1258 FrameScope scope(masm, StackFrame::INTERNAL); 1258 FrameScope scope(masm, StackFrame::INTERNAL);
1259 __ push(rbx); 1259 __ push(rbx);
1260 __ CallRuntime(Runtime::kNewStringWrapper, 1); 1260 __ CallRuntime(Runtime::kNewStringWrapper, 1);
1261 } 1261 }
1262 __ ret(0); 1262 __ ret(0);
1263 } 1263 }
1264 1264
1265 1265
// Emits x64 code that builds an arguments-adaptor frame on entry to the
// adaptor trampoline. On exit the frame contains, from rbp downward:
// saved rbp, the ARGUMENTS_ADAPTOR sentinel Smi (marks this frame type for
// the stack walker), the function (rdi), and the smi-tagged actual argument
// count. Expects: rax = actual argument count (untagged int32),
// rdi = function being called.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver. The count is smi-tagged into r8 so that
  // rax itself stays untouched for the argument-copy loops that follow.
  __ Integer32ToSmi(r8, rax);
  __ push(r8);
}
1282 1282
1283 1283
// Emits x64 code that tears down an arguments-adaptor frame (the inverse of
// EnterArgumentsAdaptorFrame) and removes the caller-pushed arguments from
// the stack, leaving the return address back on top so a subsequent ret
// returns past the dropped arguments. Clobbers rbx and rcx.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi
  // (it was smi-tagged and pushed by EnterArgumentsAdaptorFrame).
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame: restore the stack pointer to the frame base and
  // reinstate the caller's rbp.
  __ movp(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack. The Smi count is converted
  // into a scaled index (count << kPointerSizeLog2 bytes); the extra
  // 1 * kPointerSize presumably accounts for the receiver slot — confirm
  // against the adaptor's calling convention.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}
1298 1298
1299 1299
1300 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { 1300 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1301 // ----------- S t a t e ------------- 1301 // ----------- S t a t e -------------
1302 // -- rax : actual number of arguments 1302 // -- rax : actual number of arguments
1303 // -- rbx : expected number of arguments 1303 // -- rbx : expected number of arguments
1304 // -- rdi: function (passed through to callee) 1304 // -- rdi: function (passed through to callee)
1305 // ----------------------------------- 1305 // -----------------------------------
1306 1306
1307 Label invoke, dont_adapt_arguments; 1307 Label invoke, dont_adapt_arguments;
1308 Counters* counters = masm->isolate()->counters(); 1308 Counters* counters = masm->isolate()->counters();
1309 __ IncrementCounter(counters->arguments_adaptors(), 1); 1309 __ IncrementCounter(counters->arguments_adaptors(), 1);
1310 1310
1311 Label enough, too_few; 1311 Label enough, too_few;
1312 __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 1312 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
1313 __ cmpq(rax, rbx); 1313 __ cmpq(rax, rbx);
1314 __ j(less, &too_few); 1314 __ j(less, &too_few);
1315 __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); 1315 __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1316 __ j(equal, &dont_adapt_arguments); 1316 __ j(equal, &dont_adapt_arguments);
1317 1317
1318 { // Enough parameters: Actual >= expected. 1318 { // Enough parameters: Actual >= expected.
1319 __ bind(&enough); 1319 __ bind(&enough);
1320 EnterArgumentsAdaptorFrame(masm); 1320 EnterArgumentsAdaptorFrame(masm);
1321 1321
1322 // Copy receiver and all expected arguments. 1322 // Copy receiver and all expected arguments.
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
1354 // Fill remaining expected arguments with undefined values. 1354 // Fill remaining expected arguments with undefined values.
1355 Label fill; 1355 Label fill;
1356 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); 1356 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
1357 __ bind(&fill); 1357 __ bind(&fill);
1358 __ incq(r8); 1358 __ incq(r8);
1359 __ push(kScratchRegister); 1359 __ push(kScratchRegister);
1360 __ cmpq(r8, rbx); 1360 __ cmpq(r8, rbx);
1361 __ j(less, &fill); 1361 __ j(less, &fill);
1362 1362
1363 // Restore function pointer. 1363 // Restore function pointer.
1364 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 1364 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1365 } 1365 }
1366 1366
1367 // Call the entry point. 1367 // Call the entry point.
1368 __ bind(&invoke); 1368 __ bind(&invoke);
1369 __ call(rdx); 1369 __ call(rdx);
1370 1370
1371 // Store offset of return address for deoptimizer. 1371 // Store offset of return address for deoptimizer.
1372 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); 1372 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1373 1373
1374 // Leave frame and return. 1374 // Leave frame and return.
1375 LeaveArgumentsAdaptorFrame(masm); 1375 LeaveArgumentsAdaptorFrame(masm);
1376 __ ret(0); 1376 __ ret(0);
1377 1377
1378 // ------------------------------------------- 1378 // -------------------------------------------
 1379 // Don't adapt arguments. 1379 // Don't adapt arguments.
1380 // ------------------------------------------- 1380 // -------------------------------------------
1381 __ bind(&dont_adapt_arguments); 1381 __ bind(&dont_adapt_arguments);
1382 __ jmp(rdx); 1382 __ jmp(rdx);
1383 } 1383 }
1384 1384
1385 1385
1386 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { 1386 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1387 // Lookup the function in the JavaScript frame. 1387 // Lookup the function in the JavaScript frame.
1388 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 1388 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1389 { 1389 {
1390 FrameScope scope(masm, StackFrame::INTERNAL); 1390 FrameScope scope(masm, StackFrame::INTERNAL);
1391 // Pass function as argument. 1391 // Pass function as argument.
1392 __ push(rax); 1392 __ push(rax);
1393 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); 1393 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1394 } 1394 }
1395 1395
1396 Label skip; 1396 Label skip;
1397 // If the code object is null, just return to the unoptimized code. 1397 // If the code object is null, just return to the unoptimized code.
1398 __ cmpq(rax, Immediate(0)); 1398 __ cmpq(rax, Immediate(0));
1399 __ j(not_equal, &skip, Label::kNear); 1399 __ j(not_equal, &skip, Label::kNear);
1400 __ ret(0); 1400 __ ret(0);
1401 1401
1402 __ bind(&skip); 1402 __ bind(&skip);
1403 1403
1404 // Load deoptimization data from the code object. 1404 // Load deoptimization data from the code object.
1405 __ movq(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag)); 1405 __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1406 1406
1407 // Load the OSR entrypoint offset from the deoptimization data. 1407 // Load the OSR entrypoint offset from the deoptimization data.
1408 __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt( 1408 __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
1409 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag)); 1409 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
1410 1410
1411 // Compute the target address = code_obj + header_size + osr_offset 1411 // Compute the target address = code_obj + header_size + osr_offset
1412 __ lea(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag)); 1412 __ lea(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));
1413 1413
1414 // Overwrite the return address on the stack. 1414 // Overwrite the return address on the stack.
1415 __ movq(Operand(rsp, 0), rax); 1415 __ movq(Operand(rsp, 0), rax);
(...skipping 18 matching lines...) Expand all
1434 __ bind(&ok); 1434 __ bind(&ok);
1435 __ ret(0); 1435 __ ret(0);
1436 } 1436 }
1437 1437
1438 1438
1439 #undef __ 1439 #undef __
1440 1440
1441 } } // namespace v8::internal 1441 } } // namespace v8::internal
1442 1442
1443 #endif // V8_TARGET_ARCH_X64 1443 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « no previous file | src/x64/code-stubs-x64.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698