Chromium Code Reviews

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 19857006: Introduce StackOperandForArgument for X64 to access stack argument (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: One more place | Created 7 years, 4 months ago
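The recurring change in this patch: on x64 a stub's stack arguments sit above the return address that the call instruction pushed, so hard-coded offsets like Operand(rsp, n * kPointerSize) are rewritten as Operand(rsp, kPCOnStackSize + (n - 1) * kPointerSize), making the size of the saved PC explicit. Below is a minimal sketch of a helper in the spirit of the StackOperandForArgument named in the issue title, assuming it only wraps this offset computation; the helper's actual definition is not part of this diff.

// Sketch only, not the code under review: argument 0 is the first stack
// argument above the return address (the saved PC).
static inline Operand StackOperandForArgument(int index) {
  return Operand(rsp, kPCOnStackSize + index * kPointerSize);
}

// Roughly equivalent to the rewritten accesses below, e.g.
//   __ movq(rdx, Operand(rsp, kPCOnStackSize));   // argument 0
//   __ movq(rbx, StackOperandForArgument(1));     // argument 1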
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 299 matching lines...)
310 // Create a new closure from the given function info in new 310 // Create a new closure from the given function info in new
311 // space. Set the context to the current context in rsi. 311 // space. Set the context to the current context in rsi.
312 Counters* counters = masm->isolate()->counters(); 312 Counters* counters = masm->isolate()->counters();
313 313
314 Label gc; 314 Label gc;
315 __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT); 315 __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
316 316
317 __ IncrementCounter(counters->fast_new_closure_total(), 1); 317 __ IncrementCounter(counters->fast_new_closure_total(), 1);
318 318
319 // Get the function info from the stack. 319 // Get the function info from the stack.
320 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); 320 __ movq(rdx, Operand(rsp, kPCOnStackSize));
321 321
322 int map_index = Context::FunctionMapIndex(language_mode_, is_generator_); 322 int map_index = Context::FunctionMapIndex(language_mode_, is_generator_);
323 323
324 // Compute the function map in the current native context and set that 324 // Compute the function map in the current native context and set that
325 // as the map of the allocated object. 325 // as the map of the allocated object.
326 __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 326 __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
327 __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset)); 327 __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
328 __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index))); 328 __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index)));
329 __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx); 329 __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx);
330 330
(...skipping 87 matching lines...)
418 rcx, 418 rcx,
419 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST), 419 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST),
420 rdx, 420 rdx,
421 rbx, 421 rbx,
422 kDontSaveFPRegs); 422 kDontSaveFPRegs);
423 423
424 // Return and remove the on-stack parameter. 424 // Return and remove the on-stack parameter.
425 __ ret(1 * kPointerSize); 425 __ ret(1 * kPointerSize);
426 426
427 __ bind(&restore); 427 __ bind(&restore);
428 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); 428 __ movq(rdx, Operand(rsp, kPCOnStackSize));
429 __ jmp(&install_unoptimized); 429 __ jmp(&install_unoptimized);
430 430
431 // Create a new closure through the slower runtime call. 431 // Create a new closure through the slower runtime call.
432 __ bind(&gc); 432 __ bind(&gc);
433 __ pop(rcx); // Temporarily remove return address. 433 __ pop(rcx); // Temporarily remove return address.
434 __ pop(rdx); 434 __ pop(rdx);
435 __ push(rsi); 435 __ push(rsi);
436 __ push(rdx); 436 __ push(rdx);
437 __ PushRoot(Heap::kFalseValueRootIndex); 437 __ PushRoot(Heap::kFalseValueRootIndex);
438 __ push(rcx); // Restore return address. 438 __ push(rcx); // Restore return address.
439 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); 439 __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
440 } 440 }
441 441
442 442
443 void FastNewContextStub::Generate(MacroAssembler* masm) { 443 void FastNewContextStub::Generate(MacroAssembler* masm) {
444 // Try to allocate the context in new space. 444 // Try to allocate the context in new space.
445 Label gc; 445 Label gc;
446 int length = slots_ + Context::MIN_CONTEXT_SLOTS; 446 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
447 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, 447 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize,
448 rax, rbx, rcx, &gc, TAG_OBJECT); 448 rax, rbx, rcx, &gc, TAG_OBJECT);
449 449
450 // Get the function from the stack. 450 // Get the function from the stack.
451 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); 451 __ movq(rcx, Operand(rsp, kPCOnStackSize));
452 452
453 // Set up the object header. 453 // Set up the object header.
454 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); 454 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex);
455 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); 455 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
456 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); 456 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
457 457
458 // Set up the fixed slots. 458 // Set up the fixed slots.
459 __ Set(rbx, 0); // Set to NULL. 459 __ Set(rbx, 0); // Set to NULL.
460 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); 460 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
461 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi); 461 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi);
(...skipping 15 matching lines...)
477 477
478 // Need to collect. Call into runtime system. 478 // Need to collect. Call into runtime system.
479 __ bind(&gc); 479 __ bind(&gc);
480 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); 480 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
481 } 481 }
482 482
483 483
484 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { 484 void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
485 // Stack layout on entry: 485 // Stack layout on entry:
486 // 486 //
487 // [rsp + (1 * kPointerSize)] : function 487 // [rsp + kPCOnStackSize] : function
488 // [rsp + (2 * kPointerSize)] : serialized scope info 488 // [rsp + kPCOnStackSize + 1 * kPointerSize] : serialized scope info
489 489
490 // Try to allocate the context in new space. 490 // Try to allocate the context in new space.
491 Label gc; 491 Label gc;
492 int length = slots_ + Context::MIN_CONTEXT_SLOTS; 492 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
493 __ Allocate(FixedArray::SizeFor(length), 493 __ Allocate(FixedArray::SizeFor(length),
494 rax, rbx, rcx, &gc, TAG_OBJECT); 494 rax, rbx, rcx, &gc, TAG_OBJECT);
495 495
496 // Get the function from the stack. 496 // Get the function from the stack.
497 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); 497 __ movq(rcx, Operand(rsp, kPCOnStackSize));
498 498
499 // Get the serialized scope info from the stack. 499 // Get the serialized scope info from the stack.
500 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); 500 __ movq(rbx, Operand(rsp, kPCOnStackSize + 1 * kPointerSize));
501 501
502 // Set up the object header. 502 // Set up the object header.
503 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); 503 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
504 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); 504 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
505 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); 505 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
506 506
507 // If this block context is nested in the native context we get a smi 507 // If this block context is nested in the native context we get a smi
508 // sentinel instead of a function. The block context should get the 508 // sentinel instead of a function. The block context should get the
509 // canonical empty function of the native context as its closure which 509 // canonical empty function of the native context as its closure which
510 // we still have to look up. 510 // we still have to look up.
(...skipping 758 matching lines...)
1269 // Output: 1269 // Output:
1270 // xmm1 : untagged double result. 1270 // xmm1 : untagged double result.
1271 1271
1272 Label runtime_call; 1272 Label runtime_call;
1273 Label runtime_call_clear_stack; 1273 Label runtime_call_clear_stack;
1274 Label skip_cache; 1274 Label skip_cache;
1275 const bool tagged = (argument_type_ == TAGGED); 1275 const bool tagged = (argument_type_ == TAGGED);
1276 if (tagged) { 1276 if (tagged) {
1277 Label input_not_smi, loaded; 1277 Label input_not_smi, loaded;
1278 // Test that rax is a number. 1278 // Test that rax is a number.
1279 __ movq(rax, Operand(rsp, kPointerSize)); 1279 __ movq(rax, Operand(rsp, kPCOnStackSize));
1280 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); 1280 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear);
1281 // Input is a smi. Untag and load it onto the FPU stack. 1281 // Input is a smi. Untag and load it onto the FPU stack.
1282 // Then load the bits of the double into rbx. 1282 // Then load the bits of the double into rbx.
1283 __ SmiToInteger32(rax, rax); 1283 __ SmiToInteger32(rax, rax);
1284 __ subq(rsp, Immediate(kDoubleSize)); 1284 __ subq(rsp, Immediate(kDoubleSize));
1285 __ cvtlsi2sd(xmm1, rax); 1285 __ cvtlsi2sd(xmm1, rax);
1286 __ movsd(Operand(rsp, 0), xmm1); 1286 __ movsd(Operand(rsp, 0), xmm1);
1287 __ movq(rbx, xmm1); 1287 __ movq(rbx, xmm1);
1288 __ movq(rdx, xmm1); 1288 __ movq(rdx, xmm1);
1289 __ fld_d(Operand(rsp, 0)); 1289 __ fld_d(Operand(rsp, 0));
(...skipping 512 matching lines...)
1802 1802
1803 // Save 1 in double_result - we need this several times later on. 1803 // Save 1 in double_result - we need this several times later on.
1804 __ movq(scratch, Immediate(1)); 1804 __ movq(scratch, Immediate(1));
1805 __ cvtlsi2sd(double_result, scratch); 1805 __ cvtlsi2sd(double_result, scratch);
1806 1806
1807 if (exponent_type_ == ON_STACK) { 1807 if (exponent_type_ == ON_STACK) {
1808 Label base_is_smi, unpack_exponent; 1808 Label base_is_smi, unpack_exponent;
1809 // The exponent and base are supplied as arguments on the stack. 1809 // The exponent and base are supplied as arguments on the stack.
1810 // This can only happen if the stub is called from non-optimized code. 1810 // This can only happen if the stub is called from non-optimized code.
1811 // Load input parameters from stack. 1811 // Load input parameters from stack.
1812 __ movq(base, Operand(rsp, 2 * kPointerSize)); 1812 __ movq(base, Operand(rsp, kPCOnStackSize + 1 * kPointerSize));
1813 __ movq(exponent, Operand(rsp, 1 * kPointerSize)); 1813 __ movq(exponent, Operand(rsp, kPCOnStackSize));
1814 __ JumpIfSmi(base, &base_is_smi, Label::kNear); 1814 __ JumpIfSmi(base, &base_is_smi, Label::kNear);
1815 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset), 1815 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
1816 Heap::kHeapNumberMapRootIndex); 1816 Heap::kHeapNumberMapRootIndex);
1817 __ j(not_equal, &call_runtime); 1817 __ j(not_equal, &call_runtime);
1818 1818
1819 __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset)); 1819 __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
1820 __ jmp(&unpack_exponent, Label::kNear); 1820 __ jmp(&unpack_exponent, Label::kNear);
1821 1821
1822 __ bind(&base_is_smi); 1822 __ bind(&base_is_smi);
1823 __ SmiToInteger32(base, base); 1823 __ SmiToInteger32(base, base);
(...skipping 412 matching lines...)
2236 // rsp[0] : return address 2236 // rsp[0] : return address
2237 // rsp[8] : number of parameters (tagged) 2237 // rsp[8] : number of parameters (tagged)
2238 // rsp[16] : receiver displacement 2238 // rsp[16] : receiver displacement
2239 // rsp[24] : function 2239 // rsp[24] : function
2240 // Registers used over the whole function: 2240 // Registers used over the whole function:
2241 // rbx: the mapped parameter count (untagged) 2241 // rbx: the mapped parameter count (untagged)
2242 // rax: the allocated object (tagged). 2242 // rax: the allocated object (tagged).
2243 2243
2244 Factory* factory = masm->isolate()->factory(); 2244 Factory* factory = masm->isolate()->factory();
2245 2245
2246 __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize)); 2246 __ SmiToInteger64(rbx, Operand(rsp, kPCOnStackSize));
2247 // rbx = parameter count (untagged) 2247 // rbx = parameter count (untagged)
2248 2248
2249 // Check if the calling frame is an arguments adaptor frame. 2249 // Check if the calling frame is an arguments adaptor frame.
2250 Label runtime; 2250 Label runtime;
2251 Label adaptor_frame, try_allocate; 2251 Label adaptor_frame, try_allocate;
2252 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2252 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2253 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); 2253 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
2254 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 2254 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2255 __ j(equal, &adaptor_frame); 2255 __ j(equal, &adaptor_frame);
2256 2256
2257 // No adaptor, parameter count = argument count. 2257 // No adaptor, parameter count = argument count.
2258 __ movq(rcx, rbx); 2258 __ movq(rcx, rbx);
2259 __ jmp(&try_allocate, Label::kNear); 2259 __ jmp(&try_allocate, Label::kNear);
2260 2260
2261 // We have an adaptor frame. Patch the parameters pointer. 2261 // We have an adaptor frame. Patch the parameters pointer.
2262 __ bind(&adaptor_frame); 2262 __ bind(&adaptor_frame);
2263 __ SmiToInteger64(rcx, 2263 __ SmiToInteger64(rcx,
2264 Operand(rdx, 2264 Operand(rdx,
2265 ArgumentsAdaptorFrameConstants::kLengthOffset)); 2265 ArgumentsAdaptorFrameConstants::kLengthOffset));
2266 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, 2266 __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
2267 StandardFrameConstants::kCallerSPOffset)); 2267 StandardFrameConstants::kCallerSPOffset));
2268 __ movq(Operand(rsp, 2 * kPointerSize), rdx); 2268 __ movq(Operand(rsp, kPCOnStackSize + 1 * kPointerSize), rdx);
2269 2269
2270 // rbx = parameter count (untagged) 2270 // rbx = parameter count (untagged)
2271 // rcx = argument count (untagged) 2271 // rcx = argument count (untagged)
2272 // Compute the mapped parameter count = min(rbx, rcx) in rbx. 2272 // Compute the mapped parameter count = min(rbx, rcx) in rbx.
2273 __ cmpq(rbx, rcx); 2273 __ cmpq(rbx, rcx);
2274 __ j(less_equal, &try_allocate, Label::kNear); 2274 __ j(less_equal, &try_allocate, Label::kNear);
2275 __ movq(rbx, rcx); 2275 __ movq(rbx, rcx);
2276 2276
2277 __ bind(&try_allocate); 2277 __ bind(&try_allocate);
2278 2278
(...skipping 40 matching lines...)
2319 // rcx = argument count (untagged) 2319 // rcx = argument count (untagged)
2320 // rdi = address of boilerplate object (tagged) 2320 // rdi = address of boilerplate object (tagged)
2321 // Copy the JS object part. 2321 // Copy the JS object part.
2322 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { 2322 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
2323 __ movq(rdx, FieldOperand(rdi, i)); 2323 __ movq(rdx, FieldOperand(rdi, i));
2324 __ movq(FieldOperand(rax, i), rdx); 2324 __ movq(FieldOperand(rax, i), rdx);
2325 } 2325 }
2326 2326
2327 // Set up the callee in-object property. 2327 // Set up the callee in-object property.
2328 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); 2328 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
2329 __ movq(rdx, Operand(rsp, 3 * kPointerSize)); 2329 __ movq(rdx, Operand(rsp, kPCOnStackSize + 2 * kPointerSize));
2330 __ movq(FieldOperand(rax, JSObject::kHeaderSize + 2330 __ movq(FieldOperand(rax, JSObject::kHeaderSize +
2331 Heap::kArgumentsCalleeIndex * kPointerSize), 2331 Heap::kArgumentsCalleeIndex * kPointerSize),
2332 rdx); 2332 rdx);
2333 2333
2334 // Use the length (smi tagged) and set that as an in-object property too. 2334 // Use the length (smi tagged) and set that as an in-object property too.
2335 // Note: rcx is tagged from here on. 2335 // Note: rcx is tagged from here on.
2336 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); 2336 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
2337 __ Integer32ToSmi(rcx, rcx); 2337 __ Integer32ToSmi(rcx, rcx);
2338 __ movq(FieldOperand(rax, JSObject::kHeaderSize + 2338 __ movq(FieldOperand(rax, JSObject::kHeaderSize +
2339 Heap::kArgumentsLengthIndex * kPointerSize), 2339 Heap::kArgumentsLengthIndex * kPointerSize),
(...skipping 30 matching lines...)
2370 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 2370 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
2371 // The mapped parameter thus need to get indices 2371 // The mapped parameter thus need to get indices
2372 // MIN_CONTEXT_SLOTS+parameter_count-1 .. 2372 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
2373 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count 2373 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
2374 // We loop from right to left. 2374 // We loop from right to left.
2375 Label parameters_loop, parameters_test; 2375 Label parameters_loop, parameters_test;
2376 2376
2377 // Load tagged parameter count into r9. 2377 // Load tagged parameter count into r9.
2378 __ Integer32ToSmi(r9, rbx); 2378 __ Integer32ToSmi(r9, rbx);
2379 __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS)); 2379 __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
2380 __ addq(r8, Operand(rsp, 1 * kPointerSize)); 2380 __ addq(r8, Operand(rsp, kPCOnStackSize));
2381 __ subq(r8, r9); 2381 __ subq(r8, r9);
2382 __ Move(r11, factory->the_hole_value()); 2382 __ Move(r11, factory->the_hole_value());
2383 __ movq(rdx, rdi); 2383 __ movq(rdx, rdi);
2384 __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); 2384 __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
2385 // r9 = loop variable (tagged) 2385 // r9 = loop variable (tagged)
2386 // r8 = mapping index (tagged) 2386 // r8 = mapping index (tagged)
2387 // r11 = the hole value 2387 // r11 = the hole value
2388 // rdx = address of parameter map (tagged) 2388 // rdx = address of parameter map (tagged)
2389 // rdi = address of backing store (tagged) 2389 // rdi = address of backing store (tagged)
2390 __ jmp(&parameters_test, Label::kNear); 2390 __ jmp(&parameters_test, Label::kNear);
(...skipping 18 matching lines...)
2409 2409
2410 // rcx = argument count (tagged) 2410 // rcx = argument count (tagged)
2411 // rdi = address of backing store (tagged) 2411 // rdi = address of backing store (tagged)
2412 // Copy arguments header and remaining slots (if there are any). 2412 // Copy arguments header and remaining slots (if there are any).
2413 __ Move(FieldOperand(rdi, FixedArray::kMapOffset), 2413 __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
2414 factory->fixed_array_map()); 2414 factory->fixed_array_map());
2415 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); 2415 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
2416 2416
2417 Label arguments_loop, arguments_test; 2417 Label arguments_loop, arguments_test;
2418 __ movq(r8, rbx); 2418 __ movq(r8, rbx);
2419 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); 2419 __ movq(rdx, Operand(rsp, kPCOnStackSize + 1 * kPointerSize));
2420 // Untag rcx for the loop below. 2420 // Untag rcx for the loop below.
2421 __ SmiToInteger64(rcx, rcx); 2421 __ SmiToInteger64(rcx, rcx);
2422 __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0)); 2422 __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0));
2423 __ subq(rdx, kScratchRegister); 2423 __ subq(rdx, kScratchRegister);
2424 __ jmp(&arguments_test, Label::kNear); 2424 __ jmp(&arguments_test, Label::kNear);
2425 2425
2426 __ bind(&arguments_loop); 2426 __ bind(&arguments_loop);
2427 __ subq(rdx, Immediate(kPointerSize)); 2427 __ subq(rdx, Immediate(kPointerSize));
2428 __ movq(r9, Operand(rdx, 0)); 2428 __ movq(r9, Operand(rdx, 0));
2429 __ movq(FieldOperand(rdi, r8, 2429 __ movq(FieldOperand(rdi, r8,
2430 times_pointer_size, 2430 times_pointer_size,
2431 FixedArray::kHeaderSize), 2431 FixedArray::kHeaderSize),
2432 r9); 2432 r9);
2433 __ addq(r8, Immediate(1)); 2433 __ addq(r8, Immediate(1));
2434 2434
2435 __ bind(&arguments_test); 2435 __ bind(&arguments_test);
2436 __ cmpq(r8, rcx); 2436 __ cmpq(r8, rcx);
2437 __ j(less, &arguments_loop, Label::kNear); 2437 __ j(less, &arguments_loop, Label::kNear);
2438 2438
2439 // Return and remove the on-stack parameters. 2439 // Return and remove the on-stack parameters.
2440 __ ret(3 * kPointerSize); 2440 __ ret(3 * kPointerSize);
2441 2441
2442 // Do the runtime call to allocate the arguments object. 2442 // Do the runtime call to allocate the arguments object.
2443 // rcx = argument count (untagged) 2443 // rcx = argument count (untagged)
2444 __ bind(&runtime); 2444 __ bind(&runtime);
2445 __ Integer32ToSmi(rcx, rcx); 2445 __ Integer32ToSmi(rcx, rcx);
2446 __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count. 2446 __ movq(Operand(rsp, kPCOnStackSize), rcx); // Patch argument count.
2447 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); 2447 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
2448 } 2448 }
2449 2449
2450 2450
2451 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { 2451 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
2452 // rsp[0] : return address 2452 // rsp[0] : return address
2453 // rsp[8] : number of parameters 2453 // rsp[8] : number of parameters
2454 // rsp[16] : receiver displacement 2454 // rsp[16] : receiver displacement
2455 // rsp[24] : function 2455 // rsp[24] : function
2456 2456
2457 // Check if the calling frame is an arguments adaptor frame. 2457 // Check if the calling frame is an arguments adaptor frame.
2458 Label runtime; 2458 Label runtime;
2459 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2459 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2460 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); 2460 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
2461 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 2461 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2462 __ j(not_equal, &runtime); 2462 __ j(not_equal, &runtime);
2463 2463
2464 // Patch the arguments.length and the parameters pointer. 2464 // Patch the arguments.length and the parameters pointer.
2465 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2465 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2466 __ movq(Operand(rsp, 1 * kPointerSize), rcx); 2466 __ movq(Operand(rsp, kPCOnStackSize), rcx);
2467 __ SmiToInteger64(rcx, rcx); 2467 __ SmiToInteger64(rcx, rcx);
2468 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, 2468 __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
2469 StandardFrameConstants::kCallerSPOffset)); 2469 StandardFrameConstants::kCallerSPOffset));
2470 __ movq(Operand(rsp, 2 * kPointerSize), rdx); 2470 __ movq(Operand(rsp, kPCOnStackSize + 1 * kPointerSize), rdx);
2471 2471
2472 __ bind(&runtime); 2472 __ bind(&runtime);
2473 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); 2473 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
2474 } 2474 }
2475 2475
2476 2476
2477 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { 2477 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
2478 // rsp[0] : return address 2478 // rsp[0] : return address
2479 // rsp[8] : number of parameters 2479 // rsp[8] : number of parameters
2480 // rsp[16] : receiver displacement 2480 // rsp[16] : receiver displacement
2481 // rsp[24] : function 2481 // rsp[24] : function
2482 2482
2483 // Check if the calling frame is an arguments adaptor frame. 2483 // Check if the calling frame is an arguments adaptor frame.
2484 Label adaptor_frame, try_allocate, runtime; 2484 Label adaptor_frame, try_allocate, runtime;
2485 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2485 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2486 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); 2486 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
2487 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 2487 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2488 __ j(equal, &adaptor_frame); 2488 __ j(equal, &adaptor_frame);
2489 2489
2490 // Get the length from the frame. 2490 // Get the length from the frame.
2491 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); 2491 __ movq(rcx, Operand(rsp, kPCOnStackSize));
2492 __ SmiToInteger64(rcx, rcx); 2492 __ SmiToInteger64(rcx, rcx);
2493 __ jmp(&try_allocate); 2493 __ jmp(&try_allocate);
2494 2494
2495 // Patch the arguments.length and the parameters pointer. 2495 // Patch the arguments.length and the parameters pointer.
2496 __ bind(&adaptor_frame); 2496 __ bind(&adaptor_frame);
2497 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2497 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2498 __ movq(Operand(rsp, 1 * kPointerSize), rcx); 2498 __ movq(Operand(rsp, kPCOnStackSize), rcx);
2499 __ SmiToInteger64(rcx, rcx); 2499 __ SmiToInteger64(rcx, rcx);
2500 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, 2500 __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
2501 StandardFrameConstants::kCallerSPOffset)); 2501 StandardFrameConstants::kCallerSPOffset));
2502 __ movq(Operand(rsp, 2 * kPointerSize), rdx); 2502 __ movq(Operand(rsp, kPCOnStackSize + 1 * kPointerSize), rdx);
2503 2503
2504 // Try the new space allocation. Start out with computing the size of 2504 // Try the new space allocation. Start out with computing the size of
2505 // the arguments object and the elements array. 2505 // the arguments object and the elements array.
2506 Label add_arguments_object; 2506 Label add_arguments_object;
2507 __ bind(&try_allocate); 2507 __ bind(&try_allocate);
2508 __ testq(rcx, rcx); 2508 __ testq(rcx, rcx);
2509 __ j(zero, &add_arguments_object, Label::kNear); 2509 __ j(zero, &add_arguments_object, Label::kNear);
2510 __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize)); 2510 __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
2511 __ bind(&add_arguments_object); 2511 __ bind(&add_arguments_object);
2512 __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict)); 2512 __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict));
2513 2513
2514 // Do the allocation of both objects in one go. 2514 // Do the allocation of both objects in one go.
2515 __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT); 2515 __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
2516 2516
2517 // Get the arguments boilerplate from the current native context. 2517 // Get the arguments boilerplate from the current native context.
2518 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 2518 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2519 __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset)); 2519 __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
2520 const int offset = 2520 const int offset =
2521 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX); 2521 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
2522 __ movq(rdi, Operand(rdi, offset)); 2522 __ movq(rdi, Operand(rdi, offset));
2523 2523
2524 // Copy the JS object part. 2524 // Copy the JS object part.
2525 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { 2525 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
2526 __ movq(rbx, FieldOperand(rdi, i)); 2526 __ movq(rbx, FieldOperand(rdi, i));
2527 __ movq(FieldOperand(rax, i), rbx); 2527 __ movq(FieldOperand(rax, i), rbx);
2528 } 2528 }
2529 2529
2530 // Get the length (smi tagged) and set that as an in-object property too. 2530 // Get the length (smi tagged) and set that as an in-object property too.
2531 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); 2531 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
2532 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); 2532 __ movq(rcx, Operand(rsp, kPCOnStackSize));
2533 __ movq(FieldOperand(rax, JSObject::kHeaderSize + 2533 __ movq(FieldOperand(rax, JSObject::kHeaderSize +
2534 Heap::kArgumentsLengthIndex * kPointerSize), 2534 Heap::kArgumentsLengthIndex * kPointerSize),
2535 rcx); 2535 rcx);
2536 2536
2537 // If there are no actual arguments, we're done. 2537 // If there are no actual arguments, we're done.
2538 Label done; 2538 Label done;
2539 __ testq(rcx, rcx); 2539 __ testq(rcx, rcx);
2540 __ j(zero, &done); 2540 __ j(zero, &done);
2541 2541
2542 // Get the parameters pointer from the stack. 2542 // Get the parameters pointer from the stack.
2543 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); 2543 __ movq(rdx, Operand(rsp, kPCOnStackSize + 1 * kPointerSize));
2544 2544
2545 // Set up the elements pointer in the allocated arguments object and 2545 // Set up the elements pointer in the allocated arguments object and
2546 // initialize the header in the elements fixed array. 2546 // initialize the header in the elements fixed array.
2547 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict)); 2547 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict));
2548 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); 2548 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
2549 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); 2549 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
2550 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); 2550 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
2551 2551
2552 2552
2553 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); 2553 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
(...skipping 462 matching lines...)
3016 __ movq(rdi, FieldOperand(rdi, SlicedString::kParentOffset)); 3016 __ movq(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
3017 __ jmp(&check_underlying); 3017 __ jmp(&check_underlying);
3018 #endif // V8_INTERPRETED_REGEXP 3018 #endif // V8_INTERPRETED_REGEXP
3019 } 3019 }
3020 3020
3021 3021
3022 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { 3022 void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
3023 const int kMaxInlineLength = 100; 3023 const int kMaxInlineLength = 100;
3024 Label slowcase; 3024 Label slowcase;
3025 Label done; 3025 Label done;
3026 __ movq(r8, Operand(rsp, kPointerSize * 3)); 3026 __ movq(r8, Operand(rsp, kPCOnStackSize + 2 * kPointerSize));
3027 __ JumpIfNotSmi(r8, &slowcase); 3027 __ JumpIfNotSmi(r8, &slowcase);
3028 __ SmiToInteger32(rbx, r8); 3028 __ SmiToInteger32(rbx, r8);
3029 __ cmpl(rbx, Immediate(kMaxInlineLength)); 3029 __ cmpl(rbx, Immediate(kMaxInlineLength));
3030 __ j(above, &slowcase); 3030 __ j(above, &slowcase);
3031 // Smi-tagging is equivalent to multiplying by 2. 3031 // Smi-tagging is equivalent to multiplying by 2.
3032 STATIC_ASSERT(kSmiTag == 0); 3032 STATIC_ASSERT(kSmiTag == 0);
3033 STATIC_ASSERT(kSmiTagSize == 1); 3033 STATIC_ASSERT(kSmiTagSize == 1);
3034 // Allocate RegExpResult followed by FixedArray with size in rbx. 3034 // Allocate RegExpResult followed by FixedArray with size in rbx.
3035 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] 3035 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
3036 // Elements: [Map][Length][..elements..] 3036 // Elements: [Map][Length][..elements..]
(...skipping 17 matching lines...)
3054 3054
3055 // Set empty properties FixedArray. 3055 // Set empty properties FixedArray.
3056 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); 3056 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
3057 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); 3057 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
3058 3058
3059 // Set elements to point to FixedArray allocated right after the JSArray. 3059 // Set elements to point to FixedArray allocated right after the JSArray.
3060 __ lea(rcx, Operand(rax, JSRegExpResult::kSize)); 3060 __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
3061 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx); 3061 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
3062 3062
3063 // Set input, index and length fields from arguments. 3063 // Set input, index and length fields from arguments.
3064 __ movq(r8, Operand(rsp, kPointerSize * 1)); 3064 __ movq(r8, Operand(rsp, kPCOnStackSize));
3065 __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8); 3065 __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
3066 __ movq(r8, Operand(rsp, kPointerSize * 2)); 3066 __ movq(r8, Operand(rsp, kPCOnStackSize + 1 * kPointerSize));
3067 __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8); 3067 __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
3068 __ movq(r8, Operand(rsp, kPointerSize * 3)); 3068 __ movq(r8, Operand(rsp, kPCOnStackSize + 2 * kPointerSize));
3069 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8); 3069 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);
3070 3070
3071 // Fill out the elements FixedArray. 3071 // Fill out the elements FixedArray.
3072 // rax: JSArray. 3072 // rax: JSArray.
3073 // rcx: FixedArray. 3073 // rcx: FixedArray.
3074 // rbx: Number of elements in array as int32. 3074 // rbx: Number of elements in array as int32.
3075 3075
3076 // Set map. 3076 // Set map.
3077 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); 3077 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
3078 __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister); 3078 __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister);
(...skipping 110 matching lines...)
3189 // but times_twice_pointer_size (multiplication by 16) scale factor 3189 // but times_twice_pointer_size (multiplication by 16) scale factor
3190 // is not supported by addrmode on x64 platform. 3190 // is not supported by addrmode on x64 platform.
3191 // So we have to premultiply entry index before lookup. 3191 // So we have to premultiply entry index before lookup.
3192 __ shl(hash, Immediate(kPointerSizeLog2 + 1)); 3192 __ shl(hash, Immediate(kPointerSizeLog2 + 1));
3193 } 3193 }
3194 3194
3195 3195
3196 void NumberToStringStub::Generate(MacroAssembler* masm) { 3196 void NumberToStringStub::Generate(MacroAssembler* masm) {
3197 Label runtime; 3197 Label runtime;
3198 3198
3199 __ movq(rbx, Operand(rsp, kPointerSize)); 3199 __ movq(rbx, Operand(rsp, kPCOnStackSize));
3200 3200
3201 // Generate code to lookup number in the number string cache. 3201 // Generate code to lookup number in the number string cache.
3202 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, &runtime); 3202 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, &runtime);
3203 __ ret(1 * kPointerSize); 3203 __ ret(1 * kPointerSize);
3204 3204
3205 __ bind(&runtime); 3205 __ bind(&runtime);
3206 // Handle number to string in the runtime system if not found in the cache. 3206 // Handle number to string in the runtime system if not found in the cache.
3207 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); 3207 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
3208 } 3208 }
3209 3209
(...skipping 399 matching lines...)
3609 Isolate* isolate = masm->isolate(); 3609 Isolate* isolate = masm->isolate();
3610 Label slow, non_function; 3610 Label slow, non_function;
3611 3611
3612 // The receiver might implicitly be the global object. This is 3612 // The receiver might implicitly be the global object. This is
3613 // indicated by passing the hole as the receiver to the call 3613 // indicated by passing the hole as the receiver to the call
3614 // function stub. 3614 // function stub.
3615 if (ReceiverMightBeImplicit()) { 3615 if (ReceiverMightBeImplicit()) {
3616 Label call; 3616 Label call;
3617 // Get the receiver from the stack. 3617 // Get the receiver from the stack.
3618 // +1 ~ return address 3618 // +1 ~ return address
3619 __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize)); 3619 __ movq(rax, Operand(rsp, kPCOnStackSize + argc_ * kPointerSize));
3620 // Call as function is indicated with the hole. 3620 // Call as function is indicated with the hole.
3621 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); 3621 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
3622 __ j(not_equal, &call, Label::kNear); 3622 __ j(not_equal, &call, Label::kNear);
3623 // Patch the receiver on the stack with the global receiver object. 3623 // Patch the receiver on the stack with the global receiver object.
3624 __ movq(rcx, GlobalObjectOperand()); 3624 __ movq(rcx, GlobalObjectOperand());
3625 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset)); 3625 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
3626 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rcx); 3626 __ movq(Operand(rsp, kPCOnStackSize + argc_ * kPointerSize), rcx);
3627 __ bind(&call); 3627 __ bind(&call);
3628 } 3628 }
3629 3629
3630 // Check that the function really is a JavaScript function. 3630 // Check that the function really is a JavaScript function.
3631 __ JumpIfSmi(rdi, &non_function); 3631 __ JumpIfSmi(rdi, &non_function);
3632 // Goto slow case if we do not have a function. 3632 // Goto slow case if we do not have a function.
3633 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 3633 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
3634 __ j(not_equal, &slow); 3634 __ j(not_equal, &slow);
3635 3635
3636 if (RecordCallTarget()) { 3636 if (RecordCallTarget()) {
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after
3678 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); 3678 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
3679 { 3679 {
3680 Handle<Code> adaptor = 3680 Handle<Code> adaptor =
3681 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); 3681 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
3682 __ jmp(adaptor, RelocInfo::CODE_TARGET); 3682 __ jmp(adaptor, RelocInfo::CODE_TARGET);
3683 } 3683 }
3684 3684
3685 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 3685 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
3686 // of the original receiver from the call site). 3686 // of the original receiver from the call site).
3687 __ bind(&non_function); 3687 __ bind(&non_function);
3688 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi); 3688 __ movq(Operand(rsp, kPCOnStackSize + argc_ * kPointerSize), rdi);
3689 __ Set(rax, argc_); 3689 __ Set(rax, argc_);
3690 __ Set(rbx, 0); 3690 __ Set(rbx, 0);
3691 __ SetCallKind(rcx, CALL_AS_METHOD); 3691 __ SetCallKind(rcx, CALL_AS_METHOD);
3692 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); 3692 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
3693 Handle<Code> adaptor = 3693 Handle<Code> adaptor =
3694 Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline(); 3694 Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline();
3695 __ Jump(adaptor, RelocInfo::CODE_TARGET); 3695 __ Jump(adaptor, RelocInfo::CODE_TARGET);
3696 } 3696 }
3697 3697
3698 3698
(...skipping 529 matching lines...)
4228 __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space)); 4228 __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space));
4229 __ JumpIfSmi(rax, &slow); 4229 __ JumpIfSmi(rax, &slow);
4230 4230
4231 // Check that the left hand is a JS object. Leave its map in rax. 4231 // Check that the left hand is a JS object. Leave its map in rax.
4232 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax); 4232 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
4233 __ j(below, &slow); 4233 __ j(below, &slow);
4234 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE); 4234 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
4235 __ j(above, &slow); 4235 __ j(above, &slow);
4236 4236
4237 // Get the prototype of the function. 4237 // Get the prototype of the function.
4238 __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space)); 4238 __ movq(rdx, Operand(rsp, kPCOnStackSize + extra_stack_space));
4239 // rdx is function, rax is map. 4239 // rdx is function, rax is map.
4240 4240
4241 // If there is a call site cache don't look in the global cache, but do the 4241 // If there is a call site cache don't look in the global cache, but do the
4242 // real lookup and update the call site cache. 4242 // real lookup and update the call site cache.
4243 if (!HasCallSiteInlineCheck()) { 4243 if (!HasCallSiteInlineCheck()) {
4244 // Look up the function and the map in the instanceof cache. 4244 // Look up the function and the map in the instanceof cache.
4245 Label miss; 4245 Label miss;
4246 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); 4246 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
4247 __ j(not_equal, &miss, Label::kNear); 4247 __ j(not_equal, &miss, Label::kNear);
4248 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); 4248 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
(...skipping 15 matching lines...)
4264 // Register mapping: 4264 // Register mapping:
4265 // rax is object map. 4265 // rax is object map.
4266 // rdx is function. 4266 // rdx is function.
4267 // rbx is function prototype. 4267 // rbx is function prototype.
4268 if (!HasCallSiteInlineCheck()) { 4268 if (!HasCallSiteInlineCheck()) {
4269 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); 4269 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
4270 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); 4270 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
4271 } else { 4271 } else {
4272 // Get return address and delta to inlined map check. 4272 // Get return address and delta to inlined map check.
4273 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); 4273 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
4274 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); 4274 __ subq(kScratchRegister, Operand(rsp, kPCOnStackSize));
4275 if (FLAG_debug_code) { 4275 if (FLAG_debug_code) {
4276 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); 4276 __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
4277 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); 4277 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
4278 __ Assert(equal, "InstanceofStub unexpected call site cache (check)."); 4278 __ Assert(equal, "InstanceofStub unexpected call site cache (check).");
4279 } 4279 }
4280 __ movq(kScratchRegister, 4280 __ movq(kScratchRegister,
4281 Operand(kScratchRegister, kOffsetToMapCheckValue)); 4281 Operand(kScratchRegister, kOffsetToMapCheckValue));
4282 __ movq(Operand(kScratchRegister, 0), rax); 4282 __ movq(Operand(kScratchRegister, 0), rax);
4283 } 4283 }
4284 4284
(...skipping 20 matching lines...)
4305 STATIC_ASSERT(kSmiTag == 0); 4305 STATIC_ASSERT(kSmiTag == 0);
4306 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); 4306 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
4307 } else { 4307 } else {
4308 // Store offset of true in the root array at the inline check site. 4308 // Store offset of true in the root array at the inline check site.
4309 int true_offset = 0x100 + 4309 int true_offset = 0x100 +
4310 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; 4310 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
4311 // Assert it is a 1-byte signed value. 4311 // Assert it is a 1-byte signed value.
4312 ASSERT(true_offset >= 0 && true_offset < 0x100); 4312 ASSERT(true_offset >= 0 && true_offset < 0x100);
4313 __ movl(rax, Immediate(true_offset)); 4313 __ movl(rax, Immediate(true_offset));
4314 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); 4314 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
4315 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); 4315 __ subq(kScratchRegister, Operand(rsp, kPCOnStackSize));
4316 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); 4316 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
4317 if (FLAG_debug_code) { 4317 if (FLAG_debug_code) {
4318 __ movl(rax, Immediate(kWordBeforeResultValue)); 4318 __ movl(rax, Immediate(kWordBeforeResultValue));
4319 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); 4319 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
4320 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)."); 4320 __ Assert(equal, "InstanceofStub unexpected call site cache (mov).");
4321 } 4321 }
4322 __ Set(rax, 0); 4322 __ Set(rax, 0);
4323 } 4323 }
4324 __ ret(2 * kPointerSize + extra_stack_space); 4324 __ ret(2 * kPointerSize + extra_stack_space);
4325 4325
4326 __ bind(&is_not_instance); 4326 __ bind(&is_not_instance);
4327 if (!HasCallSiteInlineCheck()) { 4327 if (!HasCallSiteInlineCheck()) {
4328 // We have to store a non-zero value in the cache. 4328 // We have to store a non-zero value in the cache.
4329 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); 4329 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
4330 } else { 4330 } else {
4331 // Store offset of false in the root array at the inline check site. 4331 // Store offset of false in the root array at the inline check site.
4332 int false_offset = 0x100 + 4332 int false_offset = 0x100 +
4333 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; 4333 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
4334 // Assert it is a 1-byte signed value. 4334 // Assert it is a 1-byte signed value.
4335 ASSERT(false_offset >= 0 && false_offset < 0x100); 4335 ASSERT(false_offset >= 0 && false_offset < 0x100);
4336 __ movl(rax, Immediate(false_offset)); 4336 __ movl(rax, Immediate(false_offset));
4337 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); 4337 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
4338 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); 4338 __ subq(kScratchRegister, Operand(rsp, kPCOnStackSize));
4339 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); 4339 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
4340 if (FLAG_debug_code) { 4340 if (FLAG_debug_code) {
4341 __ movl(rax, Immediate(kWordBeforeResultValue)); 4341 __ movl(rax, Immediate(kWordBeforeResultValue));
4342 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); 4342 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
4343 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); 4343 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
4344 } 4344 }
4345 } 4345 }
4346 __ ret(2 * kPointerSize + extra_stack_space); 4346 __ ret(2 * kPointerSize + extra_stack_space);
4347 4347
4348 // Slow-case: Go through the JavaScript implementation. 4348 // Slow-case: Go through the JavaScript implementation.
(...skipping 145 matching lines...)
4494 4494
4495 __ Abort("Unexpected fallthrough from CharFromCode slow case"); 4495 __ Abort("Unexpected fallthrough from CharFromCode slow case");
4496 } 4496 }
4497 4497
4498 4498
4499 void StringAddStub::Generate(MacroAssembler* masm) { 4499 void StringAddStub::Generate(MacroAssembler* masm) {
4500 Label call_runtime, call_builtin; 4500 Label call_runtime, call_builtin;
4501 Builtins::JavaScript builtin_id = Builtins::ADD; 4501 Builtins::JavaScript builtin_id = Builtins::ADD;
4502 4502
4503 // Load the two arguments. 4503 // Load the two arguments.
4504 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left). 4504 // First argument (left).
4505 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right). 4505 __ movq(rax, Operand(rsp, kPCOnStackSize + 1 * kPointerSize));
4506 __ movq(rdx, Operand(rsp, kPCOnStackSize)); // Second argument (right).
4506 4507
4507 // Make sure that both arguments are strings if not known in advance. 4508 // Make sure that both arguments are strings if not known in advance.
4508 // Otherwise, at least one of the arguments is definitely a string, 4509 // Otherwise, at least one of the arguments is definitely a string,
4509 // and we convert the one that is not known to be a string. 4510 // and we convert the one that is not known to be a string.
4510 if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) { 4511 if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
4511 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT); 4512 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
4512 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT); 4513 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
4513 __ JumpIfSmi(rax, &call_runtime); 4514 __ JumpIfSmi(rax, &call_runtime);
4514 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8); 4515 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
4515 __ j(above_equal, &call_runtime); 4516 __ j(above_equal, &call_runtime);
(...skipping 986 matching lines...)
5502 5503
5503 5504
5504 void StringCompareStub::Generate(MacroAssembler* masm) { 5505 void StringCompareStub::Generate(MacroAssembler* masm) {
5505 Label runtime; 5506 Label runtime;
5506 5507
5507 // Stack frame on entry. 5508 // Stack frame on entry.
5508 // rsp[0] : return address 5509 // rsp[0] : return address
5509 // rsp[8] : right string 5510 // rsp[8] : right string
5510 // rsp[16] : left string 5511 // rsp[16] : left string
5511 5512
5512 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left 5513 __ movq(rdx, Operand(rsp, kPCOnStackSize + 1 * kPointerSize)); // left
5513 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right 5514 __ movq(rax, Operand(rsp, kPCOnStackSize)); // right
5514 5515
5515 // Check for identity. 5516 // Check for identity.
5516 Label not_same; 5517 Label not_same;
5517 __ cmpq(rdx, rax); 5518 __ cmpq(rdx, rax);
5518 __ j(not_equal, &not_same, Label::kNear); 5519 __ j(not_equal, &not_same, Label::kNear);
5519 __ Move(rax, Smi::FromInt(EQUAL)); 5520 __ Move(rax, Smi::FromInt(EQUAL));
5520 Counters* counters = masm->isolate()->counters(); 5521 Counters* counters = masm->isolate()->counters();
5521 __ IncrementCounter(counters->string_compare_native(), 1); 5522 __ IncrementCounter(counters->string_compare_native(), 1);
5522 __ ret(2 * kPointerSize); 5523 __ ret(2 * kPointerSize);
5523 5524
(...skipping 468 matching lines...)
5992 __ testq(r0, r0); 5993 __ testq(r0, r0);
5993 __ j(zero, miss); 5994 __ j(zero, miss);
5994 __ jmp(done); 5995 __ jmp(done);
5995 } 5996 }
5996 5997
5997 5998
5998 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { 5999 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
5999 // This stub overrides SometimesSetsUpAFrame() to return false. That means 6000 // This stub overrides SometimesSetsUpAFrame() to return false. That means
6000 // we cannot call anything that could cause a GC from this stub. 6001 // we cannot call anything that could cause a GC from this stub.
6001 // Stack frame on entry: 6002 // Stack frame on entry:
6002 // rsp[0 * kPointerSize] : return address. 6003 // rsp[0] : return address.
6003 // rsp[1 * kPointerSize] : key's hash. 6004 // rsp[kPCOnStackSize] : key's hash.
6004 // rsp[2 * kPointerSize] : key. 6005 // rsp[kPCOnStackSize + 1 * kPointerSize] : key.
6005 // Registers: 6006 // Registers:
6006 // dictionary_: NameDictionary to probe. 6007 // dictionary_: NameDictionary to probe.
6007 // result_: used as scratch. 6008 // result_: used as scratch.
6008 // index_: will hold an index of entry if lookup is successful. 6009 // index_: will hold an index of entry if lookup is successful.
6009 // might alias with result_. 6010 // might alias with result_.
6010 // Returns: 6011 // Returns:
6011 // result_ is zero if lookup failed, non zero otherwise. 6012 // result_ is zero if lookup failed, non zero otherwise.
6012 6013
6013 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; 6014 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
6014 6015
6015 Register scratch = result_; 6016 Register scratch = result_;
6016 6017
6017 __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset)); 6018 __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset));
6018 __ decl(scratch); 6019 __ decl(scratch);
6019 __ push(scratch); 6020 __ push(scratch);
6020 6021
6021 // If names of slots in range from 1 to kProbes - 1 for the hash value are 6022 // If names of slots in range from 1 to kProbes - 1 for the hash value are
6022 // not equal to the name and kProbes-th slot is not used (its name is the 6023 // not equal to the name and kProbes-th slot is not used (its name is the
6023 // undefined value), it guarantees the hash table doesn't contain the 6024 // undefined value), it guarantees the hash table doesn't contain the
6024 // property. It's true even if some slots represent deleted properties 6025 // property. It's true even if some slots represent deleted properties
6025 // (their names are the null value). 6026 // (their names are the null value).
6026 for (int i = kInlinedProbes; i < kTotalProbes; i++) { 6027 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
6027 // Compute the masked index: (hash + i + i * i) & mask. 6028 // Compute the masked index: (hash + i + i * i) & mask.
6028 __ movq(scratch, Operand(rsp, 2 * kPointerSize)); 6029 __ movq(scratch, Operand(rsp, kPCOnStackSize + 1 * kPointerSize));
6029 if (i > 0) { 6030 if (i > 0) {
6030 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i))); 6031 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
6031 } 6032 }
6032 __ and_(scratch, Operand(rsp, 0)); 6033 __ and_(scratch, Operand(rsp, 0));
6033 6034
6034 // Scale the index by multiplying by the entry size. 6035 // Scale the index by multiplying by the entry size.
6035 ASSERT(NameDictionary::kEntrySize == 3); 6036 ASSERT(NameDictionary::kEntrySize == 3);
6036 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. 6037 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3.
6037 6038
6038 // Having undefined at this place means the name is not contained. 6039 // Having undefined at this place means the name is not contained.
6039 __ movq(scratch, Operand(dictionary_, 6040 __ movq(scratch, Operand(dictionary_,
6040 index_, 6041 index_,
6041 times_pointer_size, 6042 times_pointer_size,
6042 kElementsStartOffset - kHeapObjectTag)); 6043 kElementsStartOffset - kHeapObjectTag));
6043 6044
6044 __ Cmp(scratch, masm->isolate()->factory()->undefined_value()); 6045 __ Cmp(scratch, masm->isolate()->factory()->undefined_value());
6045 __ j(equal, &not_in_dictionary); 6046 __ j(equal, &not_in_dictionary);
6046 6047
6047 // Stop if found the property. 6048 // Stop if found the property.
6048 __ cmpq(scratch, Operand(rsp, 3 * kPointerSize)); 6049 __ cmpq(scratch, Operand(rsp, kPCOnStackSize + 2 * kPointerSize));
6049 __ j(equal, &in_dictionary); 6050 __ j(equal, &in_dictionary);
6050 6051
6051 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { 6052 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
6052 // If we hit a key that is not a unique name during negative 6053 // If we hit a key that is not a unique name during negative
6053 // lookup we have to bailout as this key might be equal to the 6054 // lookup we have to bailout as this key might be equal to the
6054 // key we are looking for. 6055 // key we are looking for.
6055 6056
6056 // Check if the entry name is not a unique name. 6057 // Check if the entry name is not a unique name.
6057 __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); 6058 __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
6058 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset), 6059 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
(...skipping 331 matching lines...)
6390 // clobbers rbx, rdx, rdi 6391 // clobbers rbx, rdx, rdi
6391 // ----------------------------------- 6392 // -----------------------------------
6392 6393
6393 Label element_done; 6394 Label element_done;
6394 Label double_elements; 6395 Label double_elements;
6395 Label smi_element; 6396 Label smi_element;
6396 Label slow_elements; 6397 Label slow_elements;
6397 Label fast_elements; 6398 Label fast_elements;
6398 6399
6399 // Get array literal index, array literal and its map. 6400 // Get array literal index, array literal and its map.
6400 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); 6401 __ movq(rdx, Operand(rsp, kPCOnStackSize));
6401 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); 6402 __ movq(rbx, Operand(rsp, kPCOnStackSize + 1 * kPointerSize));
6402 __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset)); 6403 __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));
6403 6404
6404 __ CheckFastElements(rdi, &double_elements); 6405 __ CheckFastElements(rdi, &double_elements);
6405 6406
6406 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS 6407 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
6407 __ JumpIfSmi(rax, &smi_element); 6408 __ JumpIfSmi(rax, &smi_element);
6408 __ CheckFastSmiElements(rdi, &fast_elements); 6409 __ CheckFastSmiElements(rdi, &fast_elements);
6409 6410
6410 // Store into the array literal requires a elements transition. Call into 6411 // Store into the array literal requires a elements transition. Call into
6411 // the runtime. 6412 // the runtime.
(...skipping 148 matching lines...)
6560 Handle<Object> undefined_sentinel( 6561 Handle<Object> undefined_sentinel(
6561 masm->isolate()->heap()->undefined_value(), 6562 masm->isolate()->heap()->undefined_value(),
6562 masm->isolate()); 6563 masm->isolate());
6563 6564
6564 // is the low bit set? If so, we are holey and that is good. 6565 // is the low bit set? If so, we are holey and that is good.
6565 __ testb(rdx, Immediate(1)); 6566 __ testb(rdx, Immediate(1));
6566 Label normal_sequence; 6567 Label normal_sequence;
6567 __ j(not_zero, &normal_sequence); 6568 __ j(not_zero, &normal_sequence);
6568 6569
6569 // look at the first argument 6570 // look at the first argument
6570 __ movq(rcx, Operand(rsp, kPointerSize)); 6571 __ movq(rcx, Operand(rsp, kPCOnStackSize));
6571 __ testq(rcx, rcx); 6572 __ testq(rcx, rcx);
6572 __ j(zero, &normal_sequence); 6573 __ j(zero, &normal_sequence);
6573 6574
6574 // We are going to create a holey array, but our kind is non-holey. 6575 // We are going to create a holey array, but our kind is non-holey.
6575 // Fix kind and retry (only if we have an allocation site in the cell). 6576 // Fix kind and retry (only if we have an allocation site in the cell).
6576 __ incl(rdx); 6577 __ incl(rdx);
6577 __ Cmp(rbx, undefined_sentinel); 6578 __ Cmp(rbx, undefined_sentinel);
6578 __ j(equal, &normal_sequence); 6579 __ j(equal, &normal_sequence);
6579 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); 6580 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset));
6580 Handle<Map> allocation_site_map( 6581 Handle<Map> allocation_site_map(
(...skipping 158 matching lines...)
6739 InternalArrayNoArgumentConstructorStub stub0(kind); 6740 InternalArrayNoArgumentConstructorStub stub0(kind);
6740 __ TailCallStub(&stub0); 6741 __ TailCallStub(&stub0);
6741 6742
6742 __ bind(&not_zero_case); 6743 __ bind(&not_zero_case);
6743 __ cmpl(rax, Immediate(1)); 6744 __ cmpl(rax, Immediate(1));
6744 __ j(greater, &not_one_case); 6745 __ j(greater, &not_one_case);
6745 6746
6746 if (IsFastPackedElementsKind(kind)) { 6747 if (IsFastPackedElementsKind(kind)) {
6747 // We might need to create a holey array 6748 // We might need to create a holey array
6748 // look at the first argument 6749 // look at the first argument
6749 __ movq(rcx, Operand(rsp, kPointerSize)); 6750 __ movq(rcx, Operand(rsp, kPCOnStackSize));
6750 __ testq(rcx, rcx); 6751 __ testq(rcx, rcx);
6751 __ j(zero, &normal_sequence); 6752 __ j(zero, &normal_sequence);
6752 6753
6753 InternalArraySingleArgumentConstructorStub 6754 InternalArraySingleArgumentConstructorStub
6754 stub1_holey(GetHoleyElementsKind(kind)); 6755 stub1_holey(GetHoleyElementsKind(kind));
6755 __ TailCallStub(&stub1_holey); 6756 __ TailCallStub(&stub1_holey);
6756 } 6757 }
6757 6758
6758 __ bind(&normal_sequence); 6759 __ bind(&normal_sequence);
6759 InternalArraySingleArgumentConstructorStub stub1(kind); 6760 InternalArraySingleArgumentConstructorStub stub1(kind);
(...skipping 56 matching lines...)
6816 __ bind(&fast_elements_case); 6817 __ bind(&fast_elements_case);
6817 GenerateCase(masm, FAST_ELEMENTS); 6818 GenerateCase(masm, FAST_ELEMENTS);
6818 } 6819 }
6819 6820
6820 6821
6821 #undef __ 6822 #undef __
6822 6823
6823 } } // namespace v8::internal 6824 } } // namespace v8::internal
6824 6825
6825 #endif // V8_TARGET_ARCH_X64 6826 #endif // V8_TARGET_ARCH_X64