Chromium Code Reviews

Diff: src/x64/code-stubs-x64.cc

Issue 21123008: Introduce StackArgumentsAccessor class for X64 (Closed) | Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed nits (created 7 years, 4 months ago)
Other files in this patch: src/x64/builtins-x64.cc | src/x64/codegen-x64.h
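This patch replaces raw Operand(rsp, n * kPointerSize) stack accesses with a named accessor. The class itself is declared in src/x64/codegen-x64.h (listed in the file navigation above), not in this file. As a reading aid only, here is a minimal hypothetical sketch that is consistent with every call site in the diff below; the enum spelling, the defaulted constructor parameters, and the offset arithmetic are inferred from usage, not copied from the actual declaration:

    // Hypothetical sketch, reverse-engineered from the call sites in this file.
    // Assumes the base register is rsp with the return address at rsp + 0, so
    // the argument closest to the return address lives at rsp + kPointerSize.
    enum StackArgumentsAccessorReceiverMode {
      ARGUMENTS_CONTAIN_RECEIVER,      // a receiver slot lies beyond the arguments
      ARGUMENTS_DONT_CONTAIN_RECEIVER  // the stack holds only the listed arguments
    };

    class StackArgumentsAccessor {
     public:
      StackArgumentsAccessor(Register base, int argument_count,
                             StackArgumentsAccessorReceiverMode receiver_mode =
                                 ARGUMENTS_CONTAIN_RECEIVER,
                             int extra_displacement = 0)
          : base_(base),
            argument_count_(argument_count),
            receiver_mode_(receiver_mode),
            extra_displacement_(extra_displacement) {}

      // Index 0 names the slot deepest in the argument area (the receiver when
      // one is present, otherwise the first argument in source order); higher
      // indices step toward the return address.
      Operand GetArgumentOperand(int index) const {
        int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0;
        return Operand(base_,
                       (argument_count_ + receiver - index) * kPointerSize +
                           extra_displacement_);
      }

      // The receiver slot: base + (argument_count + 1) * kPointerSize.
      Operand GetReceiverOperand() const {
        ASSERT(receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER);
        return GetArgumentOperand(0);
      }

     private:
      const Register base_;
      const int argument_count_;
      const StackArgumentsAccessorReceiverMode receiver_mode_;
      const int extra_displacement_;
    };

Under this sketch, a receiver-less three-argument stub gets GetArgumentOperand(0) == Operand(rsp, 3 * kPointerSize) and GetArgumentOperand(2) == Operand(rsp, 1 * kPointerSize), which matches every before/after pair in the chunks below.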
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 299 matching lines...)
   // Create a new closure from the given function info in new
   // space. Set the context to the current context in rsi.
   Counters* counters = masm->isolate()->counters();

   Label gc;
   __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);

   __ IncrementCounter(counters->fast_new_closure_total(), 1);

   // Get the function info from the stack.
-  __ movq(rdx, Operand(rsp, 1 * kPointerSize));
+  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+  __ movq(rdx, args.GetArgumentOperand(0));

   int map_index = Context::FunctionMapIndex(language_mode_, is_generator_);

   // Compute the function map in the current native context and set that
   // as the map of the allocated object.
   __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
   __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index)));
   __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx);
(...skipping 87 matching lines...)
       rcx,
       Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST),
       rdx,
       rbx,
       kDontSaveFPRegs);

   // Return and remove the on-stack parameter.
   __ ret(1 * kPointerSize);

   __ bind(&restore);
-  __ movq(rdx, Operand(rsp, 1 * kPointerSize));
+  __ movq(rdx, args.GetArgumentOperand(0));
   __ jmp(&install_unoptimized);

   // Create a new closure through the slower runtime call.
   __ bind(&gc);
   __ pop(rcx);  // Temporarily remove return address.
   __ pop(rdx);
   __ push(rsi);
   __ push(rdx);
   __ PushRoot(Heap::kFalseValueRootIndex);
   __ push(rcx);  // Restore return address.
   __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
 }


 void FastNewContextStub::Generate(MacroAssembler* masm) {
   // Try to allocate the context in new space.
   Label gc;
   int length = slots_ + Context::MIN_CONTEXT_SLOTS;
   __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize,
               rax, rbx, rcx, &gc, TAG_OBJECT);

   // Get the function from the stack.
-  __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+  __ movq(rcx, args.GetArgumentOperand(0));

   // Set up the object header.
   __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex);
   __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
   __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

   // Set up the fixed slots.
   __ Set(rbx, 0);  // Set to NULL.
   __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
   __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi);
(...skipping 25 matching lines...)
   // [rsp + (1 * kPointerSize)] : function
   // [rsp + (2 * kPointerSize)] : serialized scope info

   // Try to allocate the context in new space.
   Label gc;
   int length = slots_ + Context::MIN_CONTEXT_SLOTS;
   __ Allocate(FixedArray::SizeFor(length),
               rax, rbx, rcx, &gc, TAG_OBJECT);

   // Get the function from the stack.
-  __ movq(rcx, Operand(rsp, 1 * kPointerSize));
-
+  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+  __ movq(rcx, args.GetArgumentOperand(1));
   // Get the serialized scope info from the stack.
-  __ movq(rbx, Operand(rsp, 2 * kPointerSize));
+  __ movq(rbx, args.GetArgumentOperand(0));

   // Set up the object header.
   __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
   __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
   __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

   // If this block context is nested in the native context we get a smi
   // sentinel instead of a function. The block context should get the
   // canonical empty function of the native context as its closure which
   // we still have to look up.
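The chunks above also fix the index direction: the accessor counts arguments in source order, so indices run opposite to raw rsp displacements. A worked mapping for FastNewBlockContextStub, assuming the sketch above:

    // Entry layout (from the stack-layout comment above) and the
    // equivalences this chunk relies on:
    //   rsp[0]  : return address
    //   rsp[8]  : function               == args.GetArgumentOperand(1)
    //   rsp[16] : serialized scope info  == args.GetArgumentOperand(0)
    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    // i.e. GetArgumentOperand(i) == Operand(rsp, (2 - i) * kPointerSize)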
(...skipping 758 matching lines...)
   // Output:
   //   xmm1 : untagged double result.

   Label runtime_call;
   Label runtime_call_clear_stack;
   Label skip_cache;
   const bool tagged = (argument_type_ == TAGGED);
   if (tagged) {
     Label input_not_smi, loaded;
     // Test that rax is a number.
-    __ movq(rax, Operand(rsp, kPointerSize));
+    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+    __ movq(rax, args.GetArgumentOperand(0));
     __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear);
     // Input is a smi. Untag and load it onto the FPU stack.
     // Then load the bits of the double into rbx.
     __ SmiToInteger32(rax, rax);
     __ subq(rsp, Immediate(kDoubleSize));
     __ cvtlsi2sd(xmm1, rax);
     __ movsd(Operand(rsp, 0), xmm1);
     __ movq(rbx, xmm1);
     __ movq(rdx, xmm1);
     __ fld_d(Operand(rsp, 0));
(...skipping 512 matching lines...)

   // Save 1 in double_result - we need this several times later on.
   __ movq(scratch, Immediate(1));
   __ cvtlsi2sd(double_result, scratch);

   if (exponent_type_ == ON_STACK) {
     Label base_is_smi, unpack_exponent;
     // The exponent and base are supplied as arguments on the stack.
     // This can only happen if the stub is called from non-optimized code.
     // Load input parameters from stack.
-    __ movq(base, Operand(rsp, 2 * kPointerSize));
-    __ movq(exponent, Operand(rsp, 1 * kPointerSize));
+    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+    __ movq(base, args.GetArgumentOperand(0));
+    __ movq(exponent, args.GetArgumentOperand(1));
     __ JumpIfSmi(base, &base_is_smi, Label::kNear);
     __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
                    Heap::kHeapNumberMapRootIndex);
     __ j(not_equal, &call_runtime);

     __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
     __ jmp(&unpack_exponent, Label::kNear);

     __ bind(&base_is_smi);
     __ SmiToInteger32(base, base);
(...skipping 412 matching lines...)
   // rsp[0] : return address
   // rsp[8] : number of parameters (tagged)
   // rsp[16] : receiver displacement
   // rsp[24] : function
   // Registers used over the whole function:
   //  rbx: the mapped parameter count (untagged)
   //  rax: the allocated object (tagged).

   Factory* factory = masm->isolate()->factory();

-  __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize));
+  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+  __ SmiToInteger64(rbx, args.GetArgumentOperand(2));
   // rbx = parameter count (untagged)

   // Check if the calling frame is an arguments adaptor frame.
   Label runtime;
   Label adaptor_frame, try_allocate;
   __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
   __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(equal, &adaptor_frame);

   // No adaptor, parameter count = argument count.
   __ movq(rcx, rbx);
   __ jmp(&try_allocate, Label::kNear);

   // We have an adaptor frame. Patch the parameters pointer.
   __ bind(&adaptor_frame);
   __ SmiToInteger64(rcx,
                     Operand(rdx,
                             ArgumentsAdaptorFrameConstants::kLengthOffset));
   __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
-  __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+  __ movq(args.GetArgumentOperand(1), rdx);

   // rbx = parameter count (untagged)
   // rcx = argument count (untagged)
   // Compute the mapped parameter count = min(rbx, rcx) in rbx.
   __ cmpq(rbx, rcx);
   __ j(less_equal, &try_allocate, Label::kNear);
   __ movq(rbx, rcx);

   __ bind(&try_allocate);

(...skipping 40 matching lines...)
   // rcx = argument count (untagged)
   // rdi = address of boilerplate object (tagged)
   // Copy the JS object part.
   for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
     __ movq(rdx, FieldOperand(rdi, i));
     __ movq(FieldOperand(rax, i), rdx);
   }

   // Set up the callee in-object property.
   STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
-  __ movq(rdx, Operand(rsp, 3 * kPointerSize));
+  __ movq(rdx, args.GetArgumentOperand(0));
   __ movq(FieldOperand(rax, JSObject::kHeaderSize +
                        Heap::kArgumentsCalleeIndex * kPointerSize),
           rdx);

   // Use the length (smi tagged) and set that as an in-object property too.
   // Note: rcx is tagged from here on.
   STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
   __ Integer32ToSmi(rcx, rcx);
   __ movq(FieldOperand(rax, JSObject::kHeaderSize +
                        Heap::kArgumentsLengthIndex * kPointerSize),
(...skipping 30 matching lines...)
   //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
   // The mapped parameter thus need to get indices
   //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
   //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
   // We loop from right to left.
   Label parameters_loop, parameters_test;

   // Load tagged parameter count into r9.
   __ Integer32ToSmi(r9, rbx);
   __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
-  __ addq(r8, Operand(rsp, 1 * kPointerSize));
+  __ addq(r8, args.GetArgumentOperand(2));
   __ subq(r8, r9);
   __ Move(r11, factory->the_hole_value());
   __ movq(rdx, rdi);
   __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
   // r9 = loop variable (tagged)
   // r8 = mapping index (tagged)
   // r11 = the hole value
   // rdx = address of parameter map (tagged)
   // rdi = address of backing store (tagged)
   __ jmp(&parameters_test, Label::kNear);
(...skipping 18 matching lines...)

   // rcx = argument count (tagged)
   // rdi = address of backing store (tagged)
   // Copy arguments header and remaining slots (if there are any).
   __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
           factory->fixed_array_map());
   __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);

   Label arguments_loop, arguments_test;
   __ movq(r8, rbx);
-  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+  __ movq(rdx, args.GetArgumentOperand(1));
   // Untag rcx for the loop below.
   __ SmiToInteger64(rcx, rcx);
   __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0));
   __ subq(rdx, kScratchRegister);
   __ jmp(&arguments_test, Label::kNear);

   __ bind(&arguments_loop);
   __ subq(rdx, Immediate(kPointerSize));
   __ movq(r9, Operand(rdx, 0));
   __ movq(FieldOperand(rdi, r8,
                        times_pointer_size,
                        FixedArray::kHeaderSize),
           r9);
   __ addq(r8, Immediate(1));

   __ bind(&arguments_test);
   __ cmpq(r8, rcx);
   __ j(less, &arguments_loop, Label::kNear);

   // Return and remove the on-stack parameters.
   __ ret(3 * kPointerSize);

   // Do the runtime call to allocate the arguments object.
   // rcx = argument count (untagged)
   __ bind(&runtime);
   __ Integer32ToSmi(rcx, rcx);
-  __ movq(Operand(rsp, 1 * kPointerSize), rcx);  // Patch argument count.
+  __ movq(args.GetArgumentOperand(2), rcx);  // Patch argument count.
   __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
 }


 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
   // rsp[0] : return address
   // rsp[8] : number of parameters
   // rsp[16] : receiver displacement
   // rsp[24] : function

   // Check if the calling frame is an arguments adaptor frame.
   Label runtime;
   __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
   __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(not_equal, &runtime);

   // Patch the arguments.length and the parameters pointer.
+  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
   __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
-  __ movq(Operand(rsp, 1 * kPointerSize), rcx);
+  __ movq(args.GetArgumentOperand(2), rcx);
   __ SmiToInteger64(rcx, rcx);
   __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
-  __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+  __ movq(args.GetArgumentOperand(1), rdx);

   __ bind(&runtime);
   __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
 }


 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
   // rsp[0] : return address
   // rsp[8] : number of parameters
   // rsp[16] : receiver displacement
   // rsp[24] : function

   // Check if the calling frame is an arguments adaptor frame.
   Label adaptor_frame, try_allocate, runtime;
   __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
   __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(equal, &adaptor_frame);

   // Get the length from the frame.
-  __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+  __ movq(rcx, args.GetArgumentOperand(2));
   __ SmiToInteger64(rcx, rcx);
   __ jmp(&try_allocate);

   // Patch the arguments.length and the parameters pointer.
   __ bind(&adaptor_frame);
   __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
-  __ movq(Operand(rsp, 1 * kPointerSize), rcx);
+  __ movq(args.GetArgumentOperand(2), rcx);
   __ SmiToInteger64(rcx, rcx);
   __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
-  __ movq(Operand(rsp, 2 * kPointerSize), rdx);
+  __ movq(args.GetArgumentOperand(1), rdx);

   // Try the new space allocation. Start out with computing the size of
   // the arguments object and the elements array.
   Label add_arguments_object;
   __ bind(&try_allocate);
   __ testq(rcx, rcx);
   __ j(zero, &add_arguments_object, Label::kNear);
   __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
   __ bind(&add_arguments_object);
   __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict));

   // Do the allocation of both objects in one go.
   __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);

   // Get the arguments boilerplate from the current native context.
   __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
   const int offset =
       Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
   __ movq(rdi, Operand(rdi, offset));

   // Copy the JS object part.
   for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
     __ movq(rbx, FieldOperand(rdi, i));
     __ movq(FieldOperand(rax, i), rbx);
   }

   // Get the length (smi tagged) and set that as an in-object property too.
   STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
-  __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+  __ movq(rcx, args.GetArgumentOperand(2));
   __ movq(FieldOperand(rax, JSObject::kHeaderSize +
                        Heap::kArgumentsLengthIndex * kPointerSize),
           rcx);

   // If there are no actual arguments, we're done.
   Label done;
   __ testq(rcx, rcx);
   __ j(zero, &done);

   // Get the parameters pointer from the stack.
-  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+  __ movq(rdx, args.GetArgumentOperand(1));

   // Set up the elements pointer in the allocated arguments object and
   // initialize the header in the elements fixed array.
   __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict));
   __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
   __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
   __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);


   __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
(...skipping 462 matching lines...)
   __ movq(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
   __ jmp(&check_underlying);
 #endif  // V8_INTERPRETED_REGEXP
 }


 void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
   const int kMaxInlineLength = 100;
   Label slowcase;
   Label done;
-  __ movq(r8, Operand(rsp, kPointerSize * 3));
+  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+  __ movq(r8, args.GetArgumentOperand(0));
   __ JumpIfNotSmi(r8, &slowcase);
   __ SmiToInteger32(rbx, r8);
   __ cmpl(rbx, Immediate(kMaxInlineLength));
   __ j(above, &slowcase);
   // Smi-tagging is equivalent to multiplying by 2.
   STATIC_ASSERT(kSmiTag == 0);
   STATIC_ASSERT(kSmiTagSize == 1);
   // Allocate RegExpResult followed by FixedArray with size in rbx.
   // JSArray:   [Map][empty properties][Elements][Length-smi][index][input]
   // Elements:  [Map][Length][..elements..]
(...skipping 17 matching lines...)

   // Set empty properties FixedArray.
   __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
   __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);

   // Set elements to point to FixedArray allocated right after the JSArray.
   __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
   __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);

   // Set input, index and length fields from arguments.
-  __ movq(r8, Operand(rsp, kPointerSize * 1));
+  __ movq(r8, args.GetArgumentOperand(2));
   __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
-  __ movq(r8, Operand(rsp, kPointerSize * 2));
+  __ movq(r8, args.GetArgumentOperand(1));
   __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
-  __ movq(r8, Operand(rsp, kPointerSize * 3));
+  __ movq(r8, args.GetArgumentOperand(0));
   __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);

   // Fill out the elements FixedArray.
   // rax: JSArray.
   // rcx: FixedArray.
   // rbx: Number of elements in array as int32.

   // Set map.
   __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
   __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister);
(...skipping 110 matching lines...)
   // but times_twice_pointer_size (multiplication by 16) scale factor
   // is not supported by addrmode on x64 platform.
   // So we have to premultiply entry index before lookup.
   __ shl(hash, Immediate(kPointerSizeLog2 + 1));
 }


 void NumberToStringStub::Generate(MacroAssembler* masm) {
   Label runtime;

-  __ movq(rbx, Operand(rsp, kPointerSize));
+  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+  __ movq(rbx, args.GetArgumentOperand(0));

   // Generate code to lookup number in the number string cache.
   GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, &runtime);
   __ ret(1 * kPointerSize);

   __ bind(&runtime);
   // Handle number to string in the runtime system if not found in the cache.
   __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
 }

(...skipping 391 matching lines...)

   __ bind(&done);
 }


 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // rbx : cache cell for call target
   // rdi : the function to call
   Isolate* isolate = masm->isolate();
   Label slow, non_function;
+  StackArgumentsAccessor args(rsp, argc_);

   // The receiver might implicitly be the global object. This is
   // indicated by passing the hole as the receiver to the call
   // function stub.
   if (ReceiverMightBeImplicit()) {
     Label call;
     // Get the receiver from the stack.
-    // +1 ~ return address
-    __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
+    __ movq(rax, args.GetReceiverOperand());
     // Call as function is indicated with the hole.
     __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
     __ j(not_equal, &call, Label::kNear);
     // Patch the receiver on the stack with the global receiver object.
     __ movq(rcx, GlobalObjectOperand());
     __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
-    __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rcx);
+    __ movq(args.GetReceiverOperand(), rcx);
     __ bind(&call);
   }

   // Check that the function really is a JavaScript function.
   __ JumpIfSmi(rdi, &non_function);
   // Goto slow case if we do not have a function.
   __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
   __ j(not_equal, &slow);

   if (RecordCallTarget()) {
(...skipping 41 matching lines...)
   __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
   {
     Handle<Code> adaptor =
         masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
     __ jmp(adaptor, RelocInfo::CODE_TARGET);
   }

   // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
   // of the original receiver from the call site).
   __ bind(&non_function);
-  __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
+  __ movq(args.GetReceiverOperand(), rdi);
   __ Set(rax, argc_);
   __ Set(rbx, 0);
   __ SetCallKind(rcx, CALL_AS_METHOD);
   __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
   Handle<Code> adaptor =
       Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline();
   __ Jump(adaptor, RelocInfo::CODE_TARGET);
 }

(...skipping 514 matching lines...)
   // Move(kScratchRegister, Factory::the_hole_value())
   // in front of the hole value address.
   static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
   // The last 4 bytes of the instruction sequence
   //   __ j(not_equal, &cache_miss);
   //   __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
   // before the offset of the hole value in the root array.
   static const unsigned int kWordBeforeResultValue = 0x458B4909;
   // Only the inline check flag is supported on X64.
   ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck());
-  int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0;
+  int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0;

   // Get the object - go slow case if it's a smi.
   Label slow;
-
-  __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space));
+  StackArgumentsAccessor args(rsp, 2 + extra_argument_offset,
+                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
+  __ movq(rax, args.GetArgumentOperand(0));
   __ JumpIfSmi(rax, &slow);

   // Check that the left hand is a JS object. Leave its map in rax.
   __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
   __ j(below, &slow);
   __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
   __ j(above, &slow);

   // Get the prototype of the function.
-  __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space));
+  __ movq(rdx, args.GetArgumentOperand(1));
   // rdx is function, rax is map.

   // If there is a call site cache don't look in the global cache, but do the
   // real lookup and update the call site cache.
   if (!HasCallSiteInlineCheck()) {
     // Look up the function and the map in the instanceof cache.
     Label miss;
     __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
     __ j(not_equal, &miss, Label::kNear);
     __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
(...skipping 14 matching lines...)

   // Register mapping:
   //   rax is object map.
   //   rdx is function.
   //   rbx is function prototype.
   if (!HasCallSiteInlineCheck()) {
     __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
     __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
   } else {
     // Get return address and delta to inlined map check.
-    __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
-    __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+    __ movq(kScratchRegister, StackOperandForReturnAddress(0));
+    __ subq(kScratchRegister, args.GetArgumentOperand(2));
     if (FLAG_debug_code) {
       __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
       __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
       __ Assert(equal, "InstanceofStub unexpected call site cache (check).");
     }
     __ movq(kScratchRegister,
             Operand(kScratchRegister, kOffsetToMapCheckValue));
     __ movq(Operand(kScratchRegister, 0), rax);
   }

(...skipping 19 matching lines...)
     // Store bitwise zero in the cache. This is a Smi in GC terms.
     STATIC_ASSERT(kSmiTag == 0);
     __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
   } else {
     // Store offset of true in the root array at the inline check site.
     int true_offset = 0x100 +
         (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
     // Assert it is a 1-byte signed value.
     ASSERT(true_offset >= 0 && true_offset < 0x100);
     __ movl(rax, Immediate(true_offset));
-    __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
-    __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+    __ movq(kScratchRegister, StackOperandForReturnAddress(0));
+    __ subq(kScratchRegister, args.GetArgumentOperand(2));
     __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
     if (FLAG_debug_code) {
       __ movl(rax, Immediate(kWordBeforeResultValue));
       __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
       __ Assert(equal, "InstanceofStub unexpected call site cache (mov).");
     }
     __ Set(rax, 0);
   }
-  __ ret(2 * kPointerSize + extra_stack_space);
+  __ ret((2 + extra_argument_offset) * kPointerSize);

   __ bind(&is_not_instance);
   if (!HasCallSiteInlineCheck()) {
     // We have to store a non-zero value in the cache.
     __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
   } else {
     // Store offset of false in the root array at the inline check site.
     int false_offset = 0x100 +
         (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
     // Assert it is a 1-byte signed value.
     ASSERT(false_offset >= 0 && false_offset < 0x100);
     __ movl(rax, Immediate(false_offset));
-    __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
-    __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
+    __ movq(kScratchRegister, StackOperandForReturnAddress(0));
+    __ subq(kScratchRegister, args.GetArgumentOperand(2));
     __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
     if (FLAG_debug_code) {
       __ movl(rax, Immediate(kWordBeforeResultValue));
       __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
       __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
     }
   }
-  __ ret(2 * kPointerSize + extra_stack_space);
+  __ ret((2 + extra_argument_offset) * kPointerSize);

   // Slow-case: Go through the JavaScript implementation.
   __ bind(&slow);
   if (HasCallSiteInlineCheck()) {
     // Remove extra value from the stack.
     __ pop(rcx);
     __ pop(rax);
     __ push(rcx);
   }
   __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
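The rename from extra_stack_space to extra_argument_offset reflects a unit change: the extra word pushed for the call-site inline check is now counted as one more argument instead of a byte displacement. With the inline check enabled, assuming the sketch above:

    StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    // args.GetArgumentOperand(0) == Operand(rsp, 3 * kPointerSize)  // object
    // args.GetArgumentOperand(1) == Operand(rsp, 2 * kPointerSize)  // function
    // args.GetArgumentOperand(2) == Operand(rsp, 1 * kPointerSize)  // inline-check word
    // __ ret((2 + 1) * kPointerSize) then pops all three slots, as before.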
(...skipping 137 matching lines...)

   __ Abort("Unexpected fallthrough from CharFromCode slow case");
 }


 void StringAddStub::Generate(MacroAssembler* masm) {
   Label call_runtime, call_builtin;
   Builtins::JavaScript builtin_id = Builtins::ADD;

   // Load the two arguments.
-  __ movq(rax, Operand(rsp, 2 * kPointerSize));  // First argument (left).
-  __ movq(rdx, Operand(rsp, 1 * kPointerSize));  // Second argument (right).
+  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+  __ movq(rax, args.GetArgumentOperand(0));  // First argument (left).
+  __ movq(rdx, args.GetArgumentOperand(1));  // Second argument (right).

   // Make sure that both arguments are strings if not known in advance.
   // Otherwise, at least one of the arguments is definitely a string,
   // and we convert the one that is not known to be a string.
   if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
     ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
     ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
     __ JumpIfSmi(rax, &call_runtime);
     __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
     __ j(above_equal, &call_runtime);
(...skipping 986 matching lines...)


 void StringCompareStub::Generate(MacroAssembler* masm) {
   Label runtime;

   // Stack frame on entry.
   //   rsp[0]  : return address
   //   rsp[8]  : right string
   //   rsp[16] : left string

-  __ movq(rdx, Operand(rsp, 2 * kPointerSize));  // left
-  __ movq(rax, Operand(rsp, 1 * kPointerSize));  // right
+  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
+  __ movq(rdx, args.GetArgumentOperand(0));  // left
+  __ movq(rax, args.GetArgumentOperand(1));  // right

   // Check for identity.
   Label not_same;
   __ cmpq(rdx, rax);
   __ j(not_equal, &not_same, Label::kNear);
   __ Move(rax, Smi::FromInt(EQUAL));
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->string_compare_native(), 1);
   __ ret(2 * kPointerSize);

(...skipping 492 matching lines...)
6016 6028
6017 __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset)); 6029 __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset));
6018 __ decl(scratch); 6030 __ decl(scratch);
6019 __ push(scratch); 6031 __ push(scratch);
6020 6032
6021 // If names of slots in range from 1 to kProbes - 1 for the hash value are 6033 // If names of slots in range from 1 to kProbes - 1 for the hash value are
6022 // not equal to the name and kProbes-th slot is not used (its name is the 6034 // not equal to the name and kProbes-th slot is not used (its name is the
6023 // undefined value), it guarantees the hash table doesn't contain the 6035 // undefined value), it guarantees the hash table doesn't contain the
6024 // property. It's true even if some slots represent deleted properties 6036 // property. It's true even if some slots represent deleted properties
6025 // (their names are the null value). 6037 // (their names are the null value).
6038 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
6039 kPointerSize);
6026 for (int i = kInlinedProbes; i < kTotalProbes; i++) { 6040 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
6027 // Compute the masked index: (hash + i + i * i) & mask. 6041 // Compute the masked index: (hash + i + i * i) & mask.
6028 __ movq(scratch, Operand(rsp, 2 * kPointerSize)); 6042 __ movq(scratch, args.GetArgumentOperand(1));
6029 if (i > 0) { 6043 if (i > 0) {
6030 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i))); 6044 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
6031 } 6045 }
6032 __ and_(scratch, Operand(rsp, 0)); 6046 __ and_(scratch, Operand(rsp, 0));
6033 6047
6034 // Scale the index by multiplying by the entry size. 6048 // Scale the index by multiplying by the entry size.
6035 ASSERT(NameDictionary::kEntrySize == 3); 6049 ASSERT(NameDictionary::kEntrySize == 3);
6036 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. 6050 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3.
6037 6051
6038 // Having undefined at this place means the name is not contained. 6052 // Having undefined at this place means the name is not contained.
6039 __ movq(scratch, Operand(dictionary_, 6053 __ movq(scratch, Operand(dictionary_,
6040 index_, 6054 index_,
6041 times_pointer_size, 6055 times_pointer_size,
6042 kElementsStartOffset - kHeapObjectTag)); 6056 kElementsStartOffset - kHeapObjectTag));
6043 6057
6044 __ Cmp(scratch, masm->isolate()->factory()->undefined_value()); 6058 __ Cmp(scratch, masm->isolate()->factory()->undefined_value());
6045 __ j(equal, &not_in_dictionary); 6059 __ j(equal, &not_in_dictionary);
6046 6060
6047 // Stop if we found the property. 6061 // Stop if we found the property.
6048 __ cmpq(scratch, Operand(rsp, 3 * kPointerSize)); 6062 __ cmpq(scratch, args.GetArgumentOperand(0));
6049 __ j(equal, &in_dictionary); 6063 __ j(equal, &in_dictionary);
6050 6064
6051 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { 6065 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
6052 // If we hit a key that is not a unique name during negative 6066 // If we hit a key that is not a unique name during negative
6053 // lookup we have to bailout as this key might be equal to the 6067 // lookup we have to bailout as this key might be equal to the
6054 // key we are looking for. 6068 // key we are looking for.
6055 6069
6056 // Check if the entry name is not a unique name. 6070 // Check if the entry name is not a unique name.
6057 __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); 6071 __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
6058 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset), 6072 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
(...skipping 331 matching lines...)
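Note: the extra kPointerSize handed to the accessor in this loop compensates for the mask pushed just before it. The stack layout inside the loop, as implied by the operands above:

    // rsp[0]  : capacity - 1 (the hash mask pushed before the loop)
    // rsp[8]  : return address
    // rsp[16] : key's hash  -> args.GetArgumentOperand(1)
    // rsp[24] : key         -> args.GetArgumentOperand(0)

Each iteration then performs one probe step as described in the comment, schematically (hypothetical C++ mirroring the assembly):

    int entry = (hash + NameDictionary::GetProbeOffset(i)) & mask;
    int index = entry * 3;  // kEntrySize == 3, computed by the lea as
                            // scratch + scratch * 2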
6390 // clobbers rbx, rdx, rdi 6404 // clobbers rbx, rdx, rdi
6391 // ----------------------------------- 6405 // -----------------------------------
6392 6406
6393 Label element_done; 6407 Label element_done;
6394 Label double_elements; 6408 Label double_elements;
6395 Label smi_element; 6409 Label smi_element;
6396 Label slow_elements; 6410 Label slow_elements;
6397 Label fast_elements; 6411 Label fast_elements;
6398 6412
6399 // Get array literal index, array literal and its map. 6413 // Get array literal index, array literal and its map.
6400 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); 6414 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
6401 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); 6415 __ movq(rdx, args.GetArgumentOperand(1));
6416 __ movq(rbx, args.GetArgumentOperand(0));
6402 __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset)); 6417 __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));
6403 6418
6404 __ CheckFastElements(rdi, &double_elements); 6419 __ CheckFastElements(rdi, &double_elements);
6405 6420
6406 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS 6421 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
6407 __ JumpIfSmi(rax, &smi_element); 6422 __ JumpIfSmi(rax, &smi_element);
6408 __ CheckFastSmiElements(rdi, &fast_elements); 6423 __ CheckFastSmiElements(rdi, &fast_elements);
6409 6424
6410 // Store into the array literal requires an elements transition. Call into 6425 // Store into the array literal requires an elements transition. Call into
6411 // the runtime. 6426 // the runtime.
(...skipping 148 matching lines...)
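Note: the three branches above split the store into four cases; schematically (hypothetical pseudo-code, not the stub itself):

    if (IsFastDoubleElementsKind(kind)) goto double_elements;  // unboxed store
    if (value->IsSmi()) goto smi_element;  // no write barrier needed
    if (!IsFastSmiElementsKind(kind)) goto fast_elements;  // object store with
                                                           // write barrier
    goto slow_elements;  // smi-only backing store receiving a non-Smi:
                         // needs an elements transition, call the runtime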
6560 Handle<Object> undefined_sentinel( 6575 Handle<Object> undefined_sentinel(
6561 masm->isolate()->heap()->undefined_value(), 6576 masm->isolate()->heap()->undefined_value(),
6562 masm->isolate()); 6577 masm->isolate());
6563 6578
6564 // is the low bit set? If so, we are holey and that is good. 6579 // is the low bit set? If so, we are holey and that is good.
6565 __ testb(rdx, Immediate(1)); 6580 __ testb(rdx, Immediate(1));
6566 Label normal_sequence; 6581 Label normal_sequence;
6567 __ j(not_zero, &normal_sequence); 6582 __ j(not_zero, &normal_sequence);
6568 6583
6569 // look at the first argument 6584 // look at the first argument
6570 __ movq(rcx, Operand(rsp, kPointerSize)); 6585 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
6586 __ movq(rcx, args.GetArgumentOperand(0));
6571 __ testq(rcx, rcx); 6587 __ testq(rcx, rcx);
6572 __ j(zero, &normal_sequence); 6588 __ j(zero, &normal_sequence);
6573 6589
6574 // We are going to create a holey array, but our kind is non-holey. 6590 // We are going to create a holey array, but our kind is non-holey.
6575 // Fix kind and retry (only if we have an allocation site in the cell). 6591 // Fix kind and retry (only if we have an allocation site in the cell).
6576 __ incl(rdx); 6592 __ incl(rdx);
6577 __ Cmp(rbx, undefined_sentinel); 6593 __ Cmp(rbx, undefined_sentinel);
6578 __ j(equal, &normal_sequence); 6594 __ j(equal, &normal_sequence);
6579 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); 6595 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset));
6580 Handle<Map> allocation_site_map( 6596 Handle<Map> allocation_site_map(
(...skipping 158 matching lines...)
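Note: the testb/incl pair relies on the fast ElementsKind numbering, in which each holey kind directly follows its packed counterpart (FAST_SMI_ELEMENTS = 0, FAST_HOLEY_SMI_ELEMENTS = 1, FAST_ELEMENTS = 2, FAST_HOLEY_ELEMENTS = 3, ...): the low bit marks holeyness, and incrementing the kind converts packed to holey. A minimal sketch under that assumption:

    // Illustrative; depends on the packed/holey pairing noted above.
    bool IsHoley(ElementsKind kind) { return (kind & 1) != 0; }
    ElementsKind ToHoley(ElementsKind packed) {
      return static_cast<ElementsKind>(packed + 1);  // what incl(rdx) does
    }

The fix-up is only attempted when rbx holds a real AllocationSite cell rather than the undefined sentinel, since otherwise there is no site on which to record the kind transition.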
6739 InternalArrayNoArgumentConstructorStub stub0(kind); 6755 InternalArrayNoArgumentConstructorStub stub0(kind);
6740 __ TailCallStub(&stub0); 6756 __ TailCallStub(&stub0);
6741 6757
6742 __ bind(&not_zero_case); 6758 __ bind(&not_zero_case);
6743 __ cmpl(rax, Immediate(1)); 6759 __ cmpl(rax, Immediate(1));
6744 __ j(greater, &not_one_case); 6760 __ j(greater, &not_one_case);
6745 6761
6746 if (IsFastPackedElementsKind(kind)) { 6762 if (IsFastPackedElementsKind(kind)) {
6747 // We might need to create a holey array 6763 // We might need to create a holey array
6748 // look at the first argument 6764 // look at the first argument
6749 __ movq(rcx, Operand(rsp, kPointerSize)); 6765 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
6766 __ movq(rcx, args.GetArgumentOperand(0));
6750 __ testq(rcx, rcx); 6767 __ testq(rcx, rcx);
6751 __ j(zero, &normal_sequence); 6768 __ j(zero, &normal_sequence);
6752 6769
6753 InternalArraySingleArgumentConstructorStub 6770 InternalArraySingleArgumentConstructorStub
6754 stub1_holey(GetHoleyElementsKind(kind)); 6771 stub1_holey(GetHoleyElementsKind(kind));
6755 __ TailCallStub(&stub1_holey); 6772 __ TailCallStub(&stub1_holey);
6756 } 6773 }
6757 6774
6758 __ bind(&normal_sequence); 6775 __ bind(&normal_sequence);
6759 InternalArraySingleArgumentConstructorStub stub1(kind); 6776 InternalArraySingleArgumentConstructorStub stub1(kind);
(...skipping 56 matching lines...)
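Note: taken together, the comparisons above dispatch three ways on the argument count in rax, with the packed one-argument case additionally peeking at the requested length to choose between the packed and holey stubs. Schematically (hypothetical pseudo-code; the greater-than-one case sits in the elided lines):

    if (argc == 0) return InternalArrayNoArgumentConstructorStub(kind);
    if (argc == 1) {
      // new InternalArray(n) with n != 0 starts life with n holes.
      if (IsFastPackedElementsKind(kind) && length != 0) {
        return InternalArraySingleArgumentConstructorStub(
            GetHoleyElementsKind(kind));
      }
      return InternalArraySingleArgumentConstructorStub(kind);
    }
    // argc > 1: handled by the N-arguments stub (not shown in this hunk).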
6816 __ bind(&fast_elements_case); 6833 __ bind(&fast_elements_case);
6817 GenerateCase(masm, FAST_ELEMENTS); 6834 GenerateCase(masm, FAST_ELEMENTS);
6818 } 6835 }
6819 6836
6820 6837
6821 #undef __ 6838 #undef __
6822 6839
6823 } } // namespace v8::internal 6840 } } // namespace v8::internal
6824 6841
6825 #endif // V8_TARGET_ARCH_X64 6842 #endif // V8_TARGET_ARCH_X64