Chromium Code Reviews

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 21123008: Introduce StackArgumentsAccessor class for X64 (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed danno's comments Created 7 years, 4 months ago
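
The patch replaces raw stack-argument loads such as Operand(rsp, n * kPointerSize) with named accessors (StackOperandForArgument, StackOperandForReceiver, StackOperandForReversedArgument, StackOperandForReturnAddress), so the offset of each argument relative to rsp is computed in one place rather than hand-counted at every call site. The accessor definitions are introduced elsewhere in this CL and do not appear on this page; the standalone sketch below only models the offset arithmetic implied by the call sites in this diff, and its names, signatures, and indexing convention are assumptions made for illustration, not the CL's actual code.

// Hypothetical model of the offset arithmetic implied by the rewritten
// call sites in this diff. kPointerSize is 8 on x64; rsp[0] holds the
// return address, and argument 0 is the argument farthest from rsp.
constexpr int kPointerSize = 8;

constexpr int StackOffsetForArgument(int index, int argc,
                                     int extra_displacement = 0) {
  // Argument `index` (0-based, counted from the farthest slot) lives
  // (argc - index) words above the return address.
  return (argc - index) * kPointerSize + extra_displacement;
}

constexpr int StackOffsetForReceiver(int argc) {
  // The receiver sits one slot above the last explicit argument.
  return (argc + 1) * kPointerSize;
}

constexpr int StackOffsetForReversedArgument(int index) {
  // "Reversed" indexing counts up from the return address instead.
  return (index + 1) * kPointerSize;
}

// A few old/new pairs taken from this diff, checked at compile time:
// Operand(rsp, 1 * kPointerSize)            -> StackOperandForArgument(0, 1)
static_assert(StackOffsetForArgument(0, 1) == 1 * kPointerSize, "arg 0 of 1");
// Operand(rsp, 2 * kPointerSize)            -> StackOperandForArgument(0, 2)
static_assert(StackOffsetForArgument(0, 2) == 2 * kPointerSize, "arg 0 of 2");
// Operand(rsp, 2 * kPointerSize), one word pushed
//                                           -> StackOperandForArgument(1, 2, kPointerSize)
static_assert(StackOffsetForArgument(1, 2, kPointerSize) == 2 * kPointerSize,
              "arg 1 of 2 after an extra push");
// Operand(rsp, (argc_ + 1) * kPointerSize)  -> StackOperandForReceiver(argc_)
static_assert(StackOffsetForReceiver(3) == 4 * kPointerSize, "receiver, argc 3");
// Operand(rsp, 1 * kPointerSize)            -> StackOperandForReversedArgument(0)
static_assert(StackOffsetForReversedArgument(0) == 1 * kPointerSize,
              "reversed arg 0");
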
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 299 matching lines...)
310 // Create a new closure from the given function info in new 310 // Create a new closure from the given function info in new
311 // space. Set the context to the current context in rsi. 311 // space. Set the context to the current context in rsi.
312 Counters* counters = masm->isolate()->counters(); 312 Counters* counters = masm->isolate()->counters();
313 313
314 Label gc; 314 Label gc;
315 __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT); 315 __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
316 316
317 __ IncrementCounter(counters->fast_new_closure_total(), 1); 317 __ IncrementCounter(counters->fast_new_closure_total(), 1);
318 318
319 // Get the function info from the stack. 319 // Get the function info from the stack.
320 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); 320 __ movq(rdx, StackOperandForArgument(0, 1));
321 321
322 int map_index = Context::FunctionMapIndex(language_mode_, is_generator_); 322 int map_index = Context::FunctionMapIndex(language_mode_, is_generator_);
323 323
324 // Compute the function map in the current native context and set that 324 // Compute the function map in the current native context and set that
325 // as the map of the allocated object. 325 // as the map of the allocated object.
326 __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 326 __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
327 __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset)); 327 __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
328 __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index))); 328 __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index)));
329 __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx); 329 __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx);
330 330
(...skipping 87 matching lines...)
418 rcx, 418 rcx,
419 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST), 419 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST),
420 rdx, 420 rdx,
421 rbx, 421 rbx,
422 kDontSaveFPRegs); 422 kDontSaveFPRegs);
423 423
424 // Return and remove the on-stack parameter. 424 // Return and remove the on-stack parameter.
425 __ ret(1 * kPointerSize); 425 __ ret(1 * kPointerSize);
426 426
427 __ bind(&restore); 427 __ bind(&restore);
428 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); 428 __ movq(rdx, StackOperandForArgument(0, 1));
429 __ jmp(&install_unoptimized); 429 __ jmp(&install_unoptimized);
430 430
431 // Create a new closure through the slower runtime call. 431 // Create a new closure through the slower runtime call.
432 __ bind(&gc); 432 __ bind(&gc);
433 __ pop(rcx); // Temporarily remove return address. 433 __ pop(rcx); // Temporarily remove return address.
434 __ pop(rdx); 434 __ pop(rdx);
435 __ push(rsi); 435 __ push(rsi);
436 __ push(rdx); 436 __ push(rdx);
437 __ PushRoot(Heap::kFalseValueRootIndex); 437 __ PushRoot(Heap::kFalseValueRootIndex);
438 __ push(rcx); // Restore return address. 438 __ push(rcx); // Restore return address.
439 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); 439 __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
440 } 440 }
441 441
442 442
443 void FastNewContextStub::Generate(MacroAssembler* masm) { 443 void FastNewContextStub::Generate(MacroAssembler* masm) {
444 // Try to allocate the context in new space. 444 // Try to allocate the context in new space.
445 Label gc; 445 Label gc;
446 int length = slots_ + Context::MIN_CONTEXT_SLOTS; 446 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
447 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, 447 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize,
448 rax, rbx, rcx, &gc, TAG_OBJECT); 448 rax, rbx, rcx, &gc, TAG_OBJECT);
449 449
450 // Get the function from the stack. 450 // Get the function from the stack.
451 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); 451 __ movq(rcx, StackOperandForArgument(0, 1));
452 452
453 // Set up the object header. 453 // Set up the object header.
454 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); 454 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex);
455 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); 455 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
456 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); 456 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
457 457
458 // Set up the fixed slots. 458 // Set up the fixed slots.
459 __ Set(rbx, 0); // Set to NULL. 459 __ Set(rbx, 0); // Set to NULL.
460 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); 460 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
461 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi); 461 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi);
(...skipping 25 matching lines...)
487 // [rsp + (1 * kPointerSize)] : function 487 // [rsp + (1 * kPointerSize)] : function
488 // [rsp + (2 * kPointerSize)] : serialized scope info 488 // [rsp + (2 * kPointerSize)] : serialized scope info
489 489
490 // Try to allocate the context in new space. 490 // Try to allocate the context in new space.
491 Label gc; 491 Label gc;
492 int length = slots_ + Context::MIN_CONTEXT_SLOTS; 492 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
493 __ Allocate(FixedArray::SizeFor(length), 493 __ Allocate(FixedArray::SizeFor(length),
494 rax, rbx, rcx, &gc, TAG_OBJECT); 494 rax, rbx, rcx, &gc, TAG_OBJECT);
495 495
496 // Get the function from the stack. 496 // Get the function from the stack.
497 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); 497 __ movq(rcx, StackOperandForArgument(1, 2));
498 498
499 // Get the serialized scope info from the stack. 499 // Get the serialized scope info from the stack.
500 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); 500 __ movq(rbx, StackOperandForArgument(0, 2));
501 501
502 // Set up the object header. 502 // Set up the object header.
503 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); 503 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
504 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); 504 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
505 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); 505 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
506 506
507 // If this block context is nested in the native context we get a smi 507 // If this block context is nested in the native context we get a smi
508 // sentinel instead of a function. The block context should get the 508 // sentinel instead of a function. The block context should get the
509 // canonical empty function of the native context as its closure which 509 // canonical empty function of the native context as its closure which
510 // we still have to look up. 510 // we still have to look up.
(...skipping 758 matching lines...)
1269 // Output: 1269 // Output:
1270 // xmm1 : untagged double result. 1270 // xmm1 : untagged double result.
1271 1271
1272 Label runtime_call; 1272 Label runtime_call;
1273 Label runtime_call_clear_stack; 1273 Label runtime_call_clear_stack;
1274 Label skip_cache; 1274 Label skip_cache;
1275 const bool tagged = (argument_type_ == TAGGED); 1275 const bool tagged = (argument_type_ == TAGGED);
1276 if (tagged) { 1276 if (tagged) {
1277 Label input_not_smi, loaded; 1277 Label input_not_smi, loaded;
1278 // Test that rax is a number. 1278 // Test that rax is a number.
1279 __ movq(rax, Operand(rsp, kPointerSize)); 1279 __ movq(rax, StackOperandForArgument(0, 1));
1280 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); 1280 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear);
1281 // Input is a smi. Untag and load it onto the FPU stack. 1281 // Input is a smi. Untag and load it onto the FPU stack.
1282 // Then load the bits of the double into rbx. 1282 // Then load the bits of the double into rbx.
1283 __ SmiToInteger32(rax, rax); 1283 __ SmiToInteger32(rax, rax);
1284 __ subq(rsp, Immediate(kDoubleSize)); 1284 __ subq(rsp, Immediate(kDoubleSize));
1285 __ cvtlsi2sd(xmm1, rax); 1285 __ cvtlsi2sd(xmm1, rax);
1286 __ movsd(Operand(rsp, 0), xmm1); 1286 __ movsd(Operand(rsp, 0), xmm1);
1287 __ movq(rbx, xmm1); 1287 __ movq(rbx, xmm1);
1288 __ movq(rdx, xmm1); 1288 __ movq(rdx, xmm1);
1289 __ fld_d(Operand(rsp, 0)); 1289 __ fld_d(Operand(rsp, 0));
(...skipping 512 matching lines...)
1802 1802
1803 // Save 1 in double_result - we need this several times later on. 1803 // Save 1 in double_result - we need this several times later on.
1804 __ movq(scratch, Immediate(1)); 1804 __ movq(scratch, Immediate(1));
1805 __ cvtlsi2sd(double_result, scratch); 1805 __ cvtlsi2sd(double_result, scratch);
1806 1806
1807 if (exponent_type_ == ON_STACK) { 1807 if (exponent_type_ == ON_STACK) {
1808 Label base_is_smi, unpack_exponent; 1808 Label base_is_smi, unpack_exponent;
1809 // The exponent and base are supplied as arguments on the stack. 1809 // The exponent and base are supplied as arguments on the stack.
1810 // This can only happen if the stub is called from non-optimized code. 1810 // This can only happen if the stub is called from non-optimized code.
1811 // Load input parameters from stack. 1811 // Load input parameters from stack.
1812 __ movq(base, Operand(rsp, 2 * kPointerSize)); 1812 __ movq(base, StackOperandForArgument(0, 2));
1813 __ movq(exponent, Operand(rsp, 1 * kPointerSize)); 1813 __ movq(exponent, StackOperandForArgument(1, 2));
1814 __ JumpIfSmi(base, &base_is_smi, Label::kNear); 1814 __ JumpIfSmi(base, &base_is_smi, Label::kNear);
1815 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset), 1815 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
1816 Heap::kHeapNumberMapRootIndex); 1816 Heap::kHeapNumberMapRootIndex);
1817 __ j(not_equal, &call_runtime); 1817 __ j(not_equal, &call_runtime);
1818 1818
1819 __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset)); 1819 __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
1820 __ jmp(&unpack_exponent, Label::kNear); 1820 __ jmp(&unpack_exponent, Label::kNear);
1821 1821
1822 __ bind(&base_is_smi); 1822 __ bind(&base_is_smi);
1823 __ SmiToInteger32(base, base); 1823 __ SmiToInteger32(base, base);
(...skipping 412 matching lines...)
2236 // rsp[0] : return address 2236 // rsp[0] : return address
2237 // rsp[8] : number of parameters (tagged) 2237 // rsp[8] : number of parameters (tagged)
2238 // rsp[16] : receiver displacement 2238 // rsp[16] : receiver displacement
2239 // rsp[24] : function 2239 // rsp[24] : function
2240 // Registers used over the whole function: 2240 // Registers used over the whole function:
2241 // rbx: the mapped parameter count (untagged) 2241 // rbx: the mapped parameter count (untagged)
2242 // rax: the allocated object (tagged). 2242 // rax: the allocated object (tagged).
2243 2243
2244 Factory* factory = masm->isolate()->factory(); 2244 Factory* factory = masm->isolate()->factory();
2245 2245
2246 __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize)); 2246 __ SmiToInteger64(rbx, StackOperandForArgument(2, 3));
2247 // rbx = parameter count (untagged) 2247 // rbx = parameter count (untagged)
2248 2248
2249 // Check if the calling frame is an arguments adaptor frame. 2249 // Check if the calling frame is an arguments adaptor frame.
2250 Label runtime; 2250 Label runtime;
2251 Label adaptor_frame, try_allocate; 2251 Label adaptor_frame, try_allocate;
2252 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2252 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2253 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); 2253 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
2254 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 2254 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2255 __ j(equal, &adaptor_frame); 2255 __ j(equal, &adaptor_frame);
2256 2256
2257 // No adaptor, parameter count = argument count. 2257 // No adaptor, parameter count = argument count.
2258 __ movq(rcx, rbx); 2258 __ movq(rcx, rbx);
2259 __ jmp(&try_allocate, Label::kNear); 2259 __ jmp(&try_allocate, Label::kNear);
2260 2260
2261 // We have an adaptor frame. Patch the parameters pointer. 2261 // We have an adaptor frame. Patch the parameters pointer.
2262 __ bind(&adaptor_frame); 2262 __ bind(&adaptor_frame);
2263 __ SmiToInteger64(rcx, 2263 __ SmiToInteger64(rcx,
2264 Operand(rdx, 2264 Operand(rdx,
2265 ArgumentsAdaptorFrameConstants::kLengthOffset)); 2265 ArgumentsAdaptorFrameConstants::kLengthOffset));
2266 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, 2266 __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
2267 StandardFrameConstants::kCallerSPOffset)); 2267 StandardFrameConstants::kCallerSPOffset));
2268 __ movq(Operand(rsp, 2 * kPointerSize), rdx); 2268 __ movq(StackOperandForArgument(1, 3), rdx);
2269 2269
2270 // rbx = parameter count (untagged) 2270 // rbx = parameter count (untagged)
2271 // rcx = argument count (untagged) 2271 // rcx = argument count (untagged)
2272 // Compute the mapped parameter count = min(rbx, rcx) in rbx. 2272 // Compute the mapped parameter count = min(rbx, rcx) in rbx.
2273 __ cmpq(rbx, rcx); 2273 __ cmpq(rbx, rcx);
2274 __ j(less_equal, &try_allocate, Label::kNear); 2274 __ j(less_equal, &try_allocate, Label::kNear);
2275 __ movq(rbx, rcx); 2275 __ movq(rbx, rcx);
2276 2276
2277 __ bind(&try_allocate); 2277 __ bind(&try_allocate);
2278 2278
(...skipping 40 matching lines...)
2319 // rcx = argument count (untagged) 2319 // rcx = argument count (untagged)
2320 // rdi = address of boilerplate object (tagged) 2320 // rdi = address of boilerplate object (tagged)
2321 // Copy the JS object part. 2321 // Copy the JS object part.
2322 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { 2322 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
2323 __ movq(rdx, FieldOperand(rdi, i)); 2323 __ movq(rdx, FieldOperand(rdi, i));
2324 __ movq(FieldOperand(rax, i), rdx); 2324 __ movq(FieldOperand(rax, i), rdx);
2325 } 2325 }
2326 2326
2327 // Set up the callee in-object property. 2327 // Set up the callee in-object property.
2328 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); 2328 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
2329 __ movq(rdx, Operand(rsp, 3 * kPointerSize)); 2329 __ movq(rdx, StackOperandForArgument(0, 3));
2330 __ movq(FieldOperand(rax, JSObject::kHeaderSize + 2330 __ movq(FieldOperand(rax, JSObject::kHeaderSize +
2331 Heap::kArgumentsCalleeIndex * kPointerSize), 2331 Heap::kArgumentsCalleeIndex * kPointerSize),
2332 rdx); 2332 rdx);
2333 2333
2334 // Use the length (smi tagged) and set that as an in-object property too. 2334 // Use the length (smi tagged) and set that as an in-object property too.
2335 // Note: rcx is tagged from here on. 2335 // Note: rcx is tagged from here on.
2336 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); 2336 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
2337 __ Integer32ToSmi(rcx, rcx); 2337 __ Integer32ToSmi(rcx, rcx);
2338 __ movq(FieldOperand(rax, JSObject::kHeaderSize + 2338 __ movq(FieldOperand(rax, JSObject::kHeaderSize +
2339 Heap::kArgumentsLengthIndex * kPointerSize), 2339 Heap::kArgumentsLengthIndex * kPointerSize),
(...skipping 30 matching lines...)
2370 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 2370 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
2371 // The mapped parameter thus need to get indices 2371 // The mapped parameter thus need to get indices
2372 // MIN_CONTEXT_SLOTS+parameter_count-1 .. 2372 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
2373 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count 2373 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
2374 // We loop from right to left. 2374 // We loop from right to left.
2375 Label parameters_loop, parameters_test; 2375 Label parameters_loop, parameters_test;
2376 2376
2377 // Load tagged parameter count into r9. 2377 // Load tagged parameter count into r9.
2378 __ Integer32ToSmi(r9, rbx); 2378 __ Integer32ToSmi(r9, rbx);
2379 __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS)); 2379 __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
2380 __ addq(r8, Operand(rsp, 1 * kPointerSize)); 2380 __ addq(r8, StackOperandForArgument(2, 3));
2381 __ subq(r8, r9); 2381 __ subq(r8, r9);
2382 __ Move(r11, factory->the_hole_value()); 2382 __ Move(r11, factory->the_hole_value());
2383 __ movq(rdx, rdi); 2383 __ movq(rdx, rdi);
2384 __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); 2384 __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
2385 // r9 = loop variable (tagged) 2385 // r9 = loop variable (tagged)
2386 // r8 = mapping index (tagged) 2386 // r8 = mapping index (tagged)
2387 // r11 = the hole value 2387 // r11 = the hole value
2388 // rdx = address of parameter map (tagged) 2388 // rdx = address of parameter map (tagged)
2389 // rdi = address of backing store (tagged) 2389 // rdi = address of backing store (tagged)
2390 __ jmp(&parameters_test, Label::kNear); 2390 __ jmp(&parameters_test, Label::kNear);
(...skipping 18 matching lines...)
2409 2409
2410 // rcx = argument count (tagged) 2410 // rcx = argument count (tagged)
2411 // rdi = address of backing store (tagged) 2411 // rdi = address of backing store (tagged)
2412 // Copy arguments header and remaining slots (if there are any). 2412 // Copy arguments header and remaining slots (if there are any).
2413 __ Move(FieldOperand(rdi, FixedArray::kMapOffset), 2413 __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
2414 factory->fixed_array_map()); 2414 factory->fixed_array_map());
2415 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); 2415 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
2416 2416
2417 Label arguments_loop, arguments_test; 2417 Label arguments_loop, arguments_test;
2418 __ movq(r8, rbx); 2418 __ movq(r8, rbx);
2419 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); 2419 __ movq(rdx, StackOperandForArgument(1, 3));
2420 // Untag rcx for the loop below. 2420 // Untag rcx for the loop below.
2421 __ SmiToInteger64(rcx, rcx); 2421 __ SmiToInteger64(rcx, rcx);
2422 __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0)); 2422 __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0));
2423 __ subq(rdx, kScratchRegister); 2423 __ subq(rdx, kScratchRegister);
2424 __ jmp(&arguments_test, Label::kNear); 2424 __ jmp(&arguments_test, Label::kNear);
2425 2425
2426 __ bind(&arguments_loop); 2426 __ bind(&arguments_loop);
2427 __ subq(rdx, Immediate(kPointerSize)); 2427 __ subq(rdx, Immediate(kPointerSize));
2428 __ movq(r9, Operand(rdx, 0)); 2428 __ movq(r9, Operand(rdx, 0));
2429 __ movq(FieldOperand(rdi, r8, 2429 __ movq(FieldOperand(rdi, r8,
2430 times_pointer_size, 2430 times_pointer_size,
2431 FixedArray::kHeaderSize), 2431 FixedArray::kHeaderSize),
2432 r9); 2432 r9);
2433 __ addq(r8, Immediate(1)); 2433 __ addq(r8, Immediate(1));
2434 2434
2435 __ bind(&arguments_test); 2435 __ bind(&arguments_test);
2436 __ cmpq(r8, rcx); 2436 __ cmpq(r8, rcx);
2437 __ j(less, &arguments_loop, Label::kNear); 2437 __ j(less, &arguments_loop, Label::kNear);
2438 2438
2439 // Return and remove the on-stack parameters. 2439 // Return and remove the on-stack parameters.
2440 __ ret(3 * kPointerSize); 2440 __ ret(3 * kPointerSize);
2441 2441
2442 // Do the runtime call to allocate the arguments object. 2442 // Do the runtime call to allocate the arguments object.
2443 // rcx = argument count (untagged) 2443 // rcx = argument count (untagged)
2444 __ bind(&runtime); 2444 __ bind(&runtime);
2445 __ Integer32ToSmi(rcx, rcx); 2445 __ Integer32ToSmi(rcx, rcx);
2446 __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count. 2446 __ movq(StackOperandForArgument(2, 3), rcx); // Patch argument count.
2447 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); 2447 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
2448 } 2448 }
2449 2449
2450 2450
2451 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { 2451 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
2452 // rsp[0] : return address 2452 // rsp[0] : return address
2453 // rsp[8] : number of parameters 2453 // rsp[8] : number of parameters
2454 // rsp[16] : receiver displacement 2454 // rsp[16] : receiver displacement
2455 // rsp[24] : function 2455 // rsp[24] : function
2456 2456
2457 // Check if the calling frame is an arguments adaptor frame. 2457 // Check if the calling frame is an arguments adaptor frame.
2458 Label runtime; 2458 Label runtime;
2459 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2459 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2460 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); 2460 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
2461 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 2461 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2462 __ j(not_equal, &runtime); 2462 __ j(not_equal, &runtime);
2463 2463
2464 // Patch the arguments.length and the parameters pointer. 2464 // Patch the arguments.length and the parameters pointer.
2465 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2465 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2466 __ movq(Operand(rsp, 1 * kPointerSize), rcx); 2466 __ movq(StackOperandForArgument(2, 3), rcx);
2467 __ SmiToInteger64(rcx, rcx); 2467 __ SmiToInteger64(rcx, rcx);
2468 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, 2468 __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
2469 StandardFrameConstants::kCallerSPOffset)); 2469 StandardFrameConstants::kCallerSPOffset));
2470 __ movq(Operand(rsp, 2 * kPointerSize), rdx); 2470 __ movq(StackOperandForArgument(1, 3), rdx);
2471 2471
2472 __ bind(&runtime); 2472 __ bind(&runtime);
2473 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); 2473 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
2474 } 2474 }
2475 2475
2476 2476
2477 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { 2477 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
2478 // rsp[0] : return address 2478 // rsp[0] : return address
2479 // rsp[8] : number of parameters 2479 // rsp[8] : number of parameters
2480 // rsp[16] : receiver displacement 2480 // rsp[16] : receiver displacement
2481 // rsp[24] : function 2481 // rsp[24] : function
2482 2482
2483 // Check if the calling frame is an arguments adaptor frame. 2483 // Check if the calling frame is an arguments adaptor frame.
2484 Label adaptor_frame, try_allocate, runtime; 2484 Label adaptor_frame, try_allocate, runtime;
2485 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2485 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2486 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); 2486 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
2487 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 2487 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2488 __ j(equal, &adaptor_frame); 2488 __ j(equal, &adaptor_frame);
2489 2489
2490 // Get the length from the frame. 2490 // Get the length from the frame.
2491 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); 2491 __ movq(rcx, StackOperandForArgument(2, 3));
2492 __ SmiToInteger64(rcx, rcx); 2492 __ SmiToInteger64(rcx, rcx);
2493 __ jmp(&try_allocate); 2493 __ jmp(&try_allocate);
2494 2494
2495 // Patch the arguments.length and the parameters pointer. 2495 // Patch the arguments.length and the parameters pointer.
2496 __ bind(&adaptor_frame); 2496 __ bind(&adaptor_frame);
2497 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2497 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2498 __ movq(Operand(rsp, 1 * kPointerSize), rcx); 2498 __ movq(StackOperandForArgument(2, 3), rcx);
2499 __ SmiToInteger64(rcx, rcx); 2499 __ SmiToInteger64(rcx, rcx);
2500 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, 2500 __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
2501 StandardFrameConstants::kCallerSPOffset)); 2501 StandardFrameConstants::kCallerSPOffset));
2502 __ movq(Operand(rsp, 2 * kPointerSize), rdx); 2502 __ movq(StackOperandForArgument(1, 3), rdx);
2503 2503
2504 // Try the new space allocation. Start out with computing the size of 2504 // Try the new space allocation. Start out with computing the size of
2505 // the arguments object and the elements array. 2505 // the arguments object and the elements array.
2506 Label add_arguments_object; 2506 Label add_arguments_object;
2507 __ bind(&try_allocate); 2507 __ bind(&try_allocate);
2508 __ testq(rcx, rcx); 2508 __ testq(rcx, rcx);
2509 __ j(zero, &add_arguments_object, Label::kNear); 2509 __ j(zero, &add_arguments_object, Label::kNear);
2510 __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize)); 2510 __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
2511 __ bind(&add_arguments_object); 2511 __ bind(&add_arguments_object);
2512 __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict)); 2512 __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict));
2513 2513
2514 // Do the allocation of both objects in one go. 2514 // Do the allocation of both objects in one go.
2515 __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT); 2515 __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
2516 2516
2517 // Get the arguments boilerplate from the current native context. 2517 // Get the arguments boilerplate from the current native context.
2518 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 2518 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2519 __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset)); 2519 __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
2520 const int offset = 2520 const int offset =
2521 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX); 2521 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
2522 __ movq(rdi, Operand(rdi, offset)); 2522 __ movq(rdi, Operand(rdi, offset));
2523 2523
2524 // Copy the JS object part. 2524 // Copy the JS object part.
2525 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { 2525 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
2526 __ movq(rbx, FieldOperand(rdi, i)); 2526 __ movq(rbx, FieldOperand(rdi, i));
2527 __ movq(FieldOperand(rax, i), rbx); 2527 __ movq(FieldOperand(rax, i), rbx);
2528 } 2528 }
2529 2529
2530 // Get the length (smi tagged) and set that as an in-object property too. 2530 // Get the length (smi tagged) and set that as an in-object property too.
2531 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); 2531 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
2532 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); 2532 __ movq(rcx, StackOperandForArgument(2, 3));
2533 __ movq(FieldOperand(rax, JSObject::kHeaderSize + 2533 __ movq(FieldOperand(rax, JSObject::kHeaderSize +
2534 Heap::kArgumentsLengthIndex * kPointerSize), 2534 Heap::kArgumentsLengthIndex * kPointerSize),
2535 rcx); 2535 rcx);
2536 2536
2537 // If there are no actual arguments, we're done. 2537 // If there are no actual arguments, we're done.
2538 Label done; 2538 Label done;
2539 __ testq(rcx, rcx); 2539 __ testq(rcx, rcx);
2540 __ j(zero, &done); 2540 __ j(zero, &done);
2541 2541
2542 // Get the parameters pointer from the stack. 2542 // Get the parameters pointer from the stack.
2543 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); 2543 __ movq(rdx, StackOperandForArgument(1, 3));
2544 2544
2545 // Set up the elements pointer in the allocated arguments object and 2545 // Set up the elements pointer in the allocated arguments object and
2546 // initialize the header in the elements fixed array. 2546 // initialize the header in the elements fixed array.
2547 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict)); 2547 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict));
2548 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); 2548 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
2549 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); 2549 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
2550 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); 2550 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
2551 2551
2552 2552
2553 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); 2553 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
(...skipping 462 matching lines...)
3016 __ movq(rdi, FieldOperand(rdi, SlicedString::kParentOffset)); 3016 __ movq(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
3017 __ jmp(&check_underlying); 3017 __ jmp(&check_underlying);
3018 #endif // V8_INTERPRETED_REGEXP 3018 #endif // V8_INTERPRETED_REGEXP
3019 } 3019 }
3020 3020
3021 3021
3022 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { 3022 void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
3023 const int kMaxInlineLength = 100; 3023 const int kMaxInlineLength = 100;
3024 Label slowcase; 3024 Label slowcase;
3025 Label done; 3025 Label done;
3026 __ movq(r8, Operand(rsp, kPointerSize * 3)); 3026 __ movq(r8, StackOperandForArgument(0, 3));
3027 __ JumpIfNotSmi(r8, &slowcase); 3027 __ JumpIfNotSmi(r8, &slowcase);
3028 __ SmiToInteger32(rbx, r8); 3028 __ SmiToInteger32(rbx, r8);
3029 __ cmpl(rbx, Immediate(kMaxInlineLength)); 3029 __ cmpl(rbx, Immediate(kMaxInlineLength));
3030 __ j(above, &slowcase); 3030 __ j(above, &slowcase);
3031 // Smi-tagging is equivalent to multiplying by 2. 3031 // Smi-tagging is equivalent to multiplying by 2.
3032 STATIC_ASSERT(kSmiTag == 0); 3032 STATIC_ASSERT(kSmiTag == 0);
3033 STATIC_ASSERT(kSmiTagSize == 1); 3033 STATIC_ASSERT(kSmiTagSize == 1);
3034 // Allocate RegExpResult followed by FixedArray with size in rbx. 3034 // Allocate RegExpResult followed by FixedArray with size in rbx.
3035 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] 3035 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
3036 // Elements: [Map][Length][..elements..] 3036 // Elements: [Map][Length][..elements..]
(...skipping 17 matching lines...)
3054 3054
3055 // Set empty properties FixedArray. 3055 // Set empty properties FixedArray.
3056 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); 3056 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
3057 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); 3057 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
3058 3058
3059 // Set elements to point to FixedArray allocated right after the JSArray. 3059 // Set elements to point to FixedArray allocated right after the JSArray.
3060 __ lea(rcx, Operand(rax, JSRegExpResult::kSize)); 3060 __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
3061 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx); 3061 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
3062 3062
3063 // Set input, index and length fields from arguments. 3063 // Set input, index and length fields from arguments.
3064 __ movq(r8, Operand(rsp, kPointerSize * 1)); 3064 __ movq(r8, StackOperandForArgument(2, 3));
3065 __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8); 3065 __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
3066 __ movq(r8, Operand(rsp, kPointerSize * 2)); 3066 __ movq(r8, StackOperandForArgument(1, 3));
3067 __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8); 3067 __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
3068 __ movq(r8, Operand(rsp, kPointerSize * 3)); 3068 __ movq(r8, StackOperandForArgument(0, 3));
3069 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8); 3069 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);
3070 3070
3071 // Fill out the elements FixedArray. 3071 // Fill out the elements FixedArray.
3072 // rax: JSArray. 3072 // rax: JSArray.
3073 // rcx: FixedArray. 3073 // rcx: FixedArray.
3074 // rbx: Number of elements in array as int32. 3074 // rbx: Number of elements in array as int32.
3075 3075
3076 // Set map. 3076 // Set map.
3077 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); 3077 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
3078 __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister); 3078 __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister);
(...skipping 110 matching lines...)
3189 // but times_twice_pointer_size (multiplication by 16) scale factor 3189 // but times_twice_pointer_size (multiplication by 16) scale factor
3190 // is not supported by addrmode on x64 platform. 3190 // is not supported by addrmode on x64 platform.
3191 // So we have to premultiply entry index before lookup. 3191 // So we have to premultiply entry index before lookup.
3192 __ shl(hash, Immediate(kPointerSizeLog2 + 1)); 3192 __ shl(hash, Immediate(kPointerSizeLog2 + 1));
3193 } 3193 }
3194 3194
3195 3195
3196 void NumberToStringStub::Generate(MacroAssembler* masm) { 3196 void NumberToStringStub::Generate(MacroAssembler* masm) {
3197 Label runtime; 3197 Label runtime;
3198 3198
3199 __ movq(rbx, Operand(rsp, kPointerSize)); 3199 __ movq(rbx, StackOperandForArgument(0, 1));
3200 3200
3201 // Generate code to lookup number in the number string cache. 3201 // Generate code to lookup number in the number string cache.
3202 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, &runtime); 3202 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, &runtime);
3203 __ ret(1 * kPointerSize); 3203 __ ret(1 * kPointerSize);
3204 3204
3205 __ bind(&runtime); 3205 __ bind(&runtime);
3206 // Handle number to string in the runtime system if not found in the cache. 3206 // Handle number to string in the runtime system if not found in the cache.
3207 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); 3207 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
3208 } 3208 }
3209 3209
(...skipping 398 matching lines...)
3608 // rdi : the function to call 3608 // rdi : the function to call
3609 Isolate* isolate = masm->isolate(); 3609 Isolate* isolate = masm->isolate();
3610 Label slow, non_function; 3610 Label slow, non_function;
3611 3611
3612 // The receiver might implicitly be the global object. This is 3612 // The receiver might implicitly be the global object. This is
3613 // indicated by passing the hole as the receiver to the call 3613 // indicated by passing the hole as the receiver to the call
3614 // function stub. 3614 // function stub.
3615 if (ReceiverMightBeImplicit()) { 3615 if (ReceiverMightBeImplicit()) {
3616 Label call; 3616 Label call;
3617 // Get the receiver from the stack. 3617 // Get the receiver from the stack.
3618 // +1 ~ return address 3618 __ movq(rax, StackOperandForReceiver(argc_));
3619 __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
3620 // Call as function is indicated with the hole. 3619 // Call as function is indicated with the hole.
3621 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); 3620 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
3622 __ j(not_equal, &call, Label::kNear); 3621 __ j(not_equal, &call, Label::kNear);
3623 // Patch the receiver on the stack with the global receiver object. 3622 // Patch the receiver on the stack with the global receiver object.
3624 __ movq(rcx, GlobalObjectOperand()); 3623 __ movq(rcx, GlobalObjectOperand());
3625 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset)); 3624 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
3626 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rcx); 3625 __ movq(StackOperandForReceiver(argc_), rcx);
3627 __ bind(&call); 3626 __ bind(&call);
3628 } 3627 }
3629 3628
3630 // Check that the function really is a JavaScript function. 3629 // Check that the function really is a JavaScript function.
3631 __ JumpIfSmi(rdi, &non_function); 3630 __ JumpIfSmi(rdi, &non_function);
3632 // Goto slow case if we do not have a function. 3631 // Goto slow case if we do not have a function.
3633 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 3632 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
3634 __ j(not_equal, &slow); 3633 __ j(not_equal, &slow);
3635 3634
3636 if (RecordCallTarget()) { 3635 if (RecordCallTarget()) {
(...skipping 41 matching lines...)
3678 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); 3677 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
3679 { 3678 {
3680 Handle<Code> adaptor = 3679 Handle<Code> adaptor =
3681 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); 3680 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
3682 __ jmp(adaptor, RelocInfo::CODE_TARGET); 3681 __ jmp(adaptor, RelocInfo::CODE_TARGET);
3683 } 3682 }
3684 3683
3685 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 3684 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
3686 // of the original receiver from the call site). 3685 // of the original receiver from the call site).
3687 __ bind(&non_function); 3686 __ bind(&non_function);
3688 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi); 3687 __ movq(StackOperandForReceiver(argc_), rdi);
3689 __ Set(rax, argc_); 3688 __ Set(rax, argc_);
3690 __ Set(rbx, 0); 3689 __ Set(rbx, 0);
3691 __ SetCallKind(rcx, CALL_AS_METHOD); 3690 __ SetCallKind(rcx, CALL_AS_METHOD);
3692 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); 3691 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
3693 Handle<Code> adaptor = 3692 Handle<Code> adaptor =
3694 Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline(); 3693 Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline();
3695 __ Jump(adaptor, RelocInfo::CODE_TARGET); 3694 __ Jump(adaptor, RelocInfo::CODE_TARGET);
3696 } 3695 }
3697 3696
3698 3697
(...skipping 514 matching lines...)
4213 // Move(kScratchRegister, Factory::the_hole_value()) 4212 // Move(kScratchRegister, Factory::the_hole_value())
4214 // in front of the hole value address. 4213 // in front of the hole value address.
4215 static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78; 4214 static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
4216 // The last 4 bytes of the instruction sequence 4215 // The last 4 bytes of the instruction sequence
4217 // __ j(not_equal, &cache_miss); 4216 // __ j(not_equal, &cache_miss);
4218 // __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); 4217 // __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
4219 // before the offset of the hole value in the root array. 4218 // before the offset of the hole value in the root array.
4220 static const unsigned int kWordBeforeResultValue = 0x458B4909; 4219 static const unsigned int kWordBeforeResultValue = 0x458B4909;
4221 // Only the inline check flag is supported on X64. 4220 // Only the inline check flag is supported on X64.
4222 ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck()); 4221 ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck());
4223 int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0; 4222 int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0;
4224 4223
4225 // Get the object - go slow case if it's a smi. 4224 // Get the object - go slow case if it's a smi.
4226 Label slow; 4225 Label slow;
4227 4226
4228 __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space)); 4227 __ movq(rax, StackOperandForReversedArgument(1 + extra_argument_offset));
4229 __ JumpIfSmi(rax, &slow); 4228 __ JumpIfSmi(rax, &slow);
4230 4229
4231 // Check that the left hand is a JS object. Leave its map in rax. 4230 // Check that the left hand is a JS object. Leave its map in rax.
4232 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax); 4231 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
4233 __ j(below, &slow); 4232 __ j(below, &slow);
4234 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE); 4233 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
4235 __ j(above, &slow); 4234 __ j(above, &slow);
4236 4235
4237 // Get the prototype of the function. 4236 // Get the prototype of the function.
4238 __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space)); 4237 __ movq(rdx, StackOperandForReversedArgument(extra_argument_offset));
4239 // rdx is function, rax is map. 4238 // rdx is function, rax is map.
4240 4239
4241 // If there is a call site cache don't look in the global cache, but do the 4240 // If there is a call site cache don't look in the global cache, but do the
4242 // real lookup and update the call site cache. 4241 // real lookup and update the call site cache.
4243 if (!HasCallSiteInlineCheck()) { 4242 if (!HasCallSiteInlineCheck()) {
4244 // Look up the function and the map in the instanceof cache. 4243 // Look up the function and the map in the instanceof cache.
4245 Label miss; 4244 Label miss;
4246 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); 4245 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
4247 __ j(not_equal, &miss, Label::kNear); 4246 __ j(not_equal, &miss, Label::kNear);
4248 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); 4247 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
(...skipping 14 matching lines...)
4263 4262
4264 // Register mapping: 4263 // Register mapping:
4265 // rax is object map. 4264 // rax is object map.
4266 // rdx is function. 4265 // rdx is function.
4267 // rbx is function prototype. 4266 // rbx is function prototype.
4268 if (!HasCallSiteInlineCheck()) { 4267 if (!HasCallSiteInlineCheck()) {
4269 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); 4268 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
4270 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); 4269 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
4271 } else { 4270 } else {
4272 // Get return address and delta to inlined map check. 4271 // Get return address and delta to inlined map check.
4273 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); 4272 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
4274 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); 4273 __ subq(kScratchRegister, StackOperandForReversedArgument(0));
4275 if (FLAG_debug_code) { 4274 if (FLAG_debug_code) {
4276 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); 4275 __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
4277 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); 4276 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
4278 __ Assert(equal, "InstanceofStub unexpected call site cache (check)."); 4277 __ Assert(equal, "InstanceofStub unexpected call site cache (check).");
4279 } 4278 }
4280 __ movq(kScratchRegister, 4279 __ movq(kScratchRegister,
4281 Operand(kScratchRegister, kOffsetToMapCheckValue)); 4280 Operand(kScratchRegister, kOffsetToMapCheckValue));
4282 __ movq(Operand(kScratchRegister, 0), rax); 4281 __ movq(Operand(kScratchRegister, 0), rax);
4283 } 4282 }
4284 4283
(...skipping 19 matching lines...)
4304 // Store bitwise zero in the cache. This is a Smi in GC terms. 4303 // Store bitwise zero in the cache. This is a Smi in GC terms.
4305 STATIC_ASSERT(kSmiTag == 0); 4304 STATIC_ASSERT(kSmiTag == 0);
4306 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); 4305 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
4307 } else { 4306 } else {
4308 // Store offset of true in the root array at the inline check site. 4307 // Store offset of true in the root array at the inline check site.
4309 int true_offset = 0x100 + 4308 int true_offset = 0x100 +
4310 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; 4309 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
4311 // Assert it is a 1-byte signed value. 4310 // Assert it is a 1-byte signed value.
4312 ASSERT(true_offset >= 0 && true_offset < 0x100); 4311 ASSERT(true_offset >= 0 && true_offset < 0x100);
4313 __ movl(rax, Immediate(true_offset)); 4312 __ movl(rax, Immediate(true_offset));
4314 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); 4313 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
4315 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); 4314 __ subq(kScratchRegister, StackOperandForReversedArgument(0));
4316 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); 4315 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
4317 if (FLAG_debug_code) { 4316 if (FLAG_debug_code) {
4318 __ movl(rax, Immediate(kWordBeforeResultValue)); 4317 __ movl(rax, Immediate(kWordBeforeResultValue));
4319 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); 4318 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
4320 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)."); 4319 __ Assert(equal, "InstanceofStub unexpected call site cache (mov).");
4321 } 4320 }
4322 __ Set(rax, 0); 4321 __ Set(rax, 0);
4323 } 4322 }
4324 __ ret(2 * kPointerSize + extra_stack_space); 4323 __ ret((2 + extra_argument_offset) * kPointerSize);
4325 4324
4326 __ bind(&is_not_instance); 4325 __ bind(&is_not_instance);
4327 if (!HasCallSiteInlineCheck()) { 4326 if (!HasCallSiteInlineCheck()) {
4328 // We have to store a non-zero value in the cache. 4327 // We have to store a non-zero value in the cache.
4329 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); 4328 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
4330 } else { 4329 } else {
4331 // Store offset of false in the root array at the inline check site. 4330 // Store offset of false in the root array at the inline check site.
4332 int false_offset = 0x100 + 4331 int false_offset = 0x100 +
4333 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; 4332 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
4334 // Assert it is a 1-byte signed value. 4333 // Assert it is a 1-byte signed value.
4335 ASSERT(false_offset >= 0 && false_offset < 0x100); 4334 ASSERT(false_offset >= 0 && false_offset < 0x100);
4336 __ movl(rax, Immediate(false_offset)); 4335 __ movl(rax, Immediate(false_offset));
4337 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); 4336 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
4338 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); 4337 __ subq(kScratchRegister, StackOperandForReversedArgument(0));
4339 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); 4338 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
4340 if (FLAG_debug_code) { 4339 if (FLAG_debug_code) {
4341 __ movl(rax, Immediate(kWordBeforeResultValue)); 4340 __ movl(rax, Immediate(kWordBeforeResultValue));
4342 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); 4341 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
4343 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); 4342 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
4344 } 4343 }
4345 } 4344 }
4346 __ ret(2 * kPointerSize + extra_stack_space); 4345 __ ret((2 + extra_argument_offset) * kPointerSize);
4347 4346
4348 // Slow-case: Go through the JavaScript implementation. 4347 // Slow-case: Go through the JavaScript implementation.
4349 __ bind(&slow); 4348 __ bind(&slow);
4350 if (HasCallSiteInlineCheck()) { 4349 if (HasCallSiteInlineCheck()) {
4351 // Remove extra value from the stack. 4350 // Remove extra value from the stack.
4352 __ pop(rcx); 4351 __ pop(rcx);
4353 __ pop(rax); 4352 __ pop(rax);
4354 __ push(rcx); 4353 __ push(rcx);
4355 } 4354 }
4356 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); 4355 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
(...skipping 137 matching lines...)
4494 4493
4495 __ Abort("Unexpected fallthrough from CharFromCode slow case"); 4494 __ Abort("Unexpected fallthrough from CharFromCode slow case");
4496 } 4495 }
4497 4496
4498 4497
4499 void StringAddStub::Generate(MacroAssembler* masm) { 4498 void StringAddStub::Generate(MacroAssembler* masm) {
4500 Label call_runtime, call_builtin; 4499 Label call_runtime, call_builtin;
4501 Builtins::JavaScript builtin_id = Builtins::ADD; 4500 Builtins::JavaScript builtin_id = Builtins::ADD;
4502 4501
4503 // Load the two arguments. 4502 // Load the two arguments.
4504 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left). 4503 __ movq(rax, StackOperandForArgument(0, 2)); // First argument (left).
4505 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right). 4504 __ movq(rdx, StackOperandForArgument(1, 2)); // Second argument (right).
4506 4505
4507 // Make sure that both arguments are strings if not known in advance. 4506 // Make sure that both arguments are strings if not known in advance.
4508 // Otherwise, at least one of the arguments is definitely a string, 4507 // Otherwise, at least one of the arguments is definitely a string,
4509 // and we convert the one that is not known to be a string. 4508 // and we convert the one that is not known to be a string.
4510 if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) { 4509 if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
4511 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT); 4510 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
4512 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT); 4511 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
4513 __ JumpIfSmi(rax, &call_runtime); 4512 __ JumpIfSmi(rax, &call_runtime);
4514 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8); 4513 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
4515 __ j(above_equal, &call_runtime); 4514 __ j(above_equal, &call_runtime);
(...skipping 986 matching lines...)
5502 5501
5503 5502
5504 void StringCompareStub::Generate(MacroAssembler* masm) { 5503 void StringCompareStub::Generate(MacroAssembler* masm) {
5505 Label runtime; 5504 Label runtime;
5506 5505
5507 // Stack frame on entry. 5506 // Stack frame on entry.
5508 // rsp[0] : return address 5507 // rsp[0] : return address
5509 // rsp[8] : right string 5508 // rsp[8] : right string
5510 // rsp[16] : left string 5509 // rsp[16] : left string
5511 5510
5512 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left 5511 __ movq(rdx, StackOperandForArgument(0, 2)); // left
5513 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right 5512 __ movq(rax, StackOperandForArgument(1, 2)); // right
5514 5513
5515 // Check for identity. 5514 // Check for identity.
5516 Label not_same; 5515 Label not_same;
5517 __ cmpq(rdx, rax); 5516 __ cmpq(rdx, rax);
5518 __ j(not_equal, &not_same, Label::kNear); 5517 __ j(not_equal, &not_same, Label::kNear);
5519 __ Move(rax, Smi::FromInt(EQUAL)); 5518 __ Move(rax, Smi::FromInt(EQUAL));
5520 Counters* counters = masm->isolate()->counters(); 5519 Counters* counters = masm->isolate()->counters();
5521 __ IncrementCounter(counters->string_compare_native(), 1); 5520 __ IncrementCounter(counters->string_compare_native(), 1);
5522 __ ret(2 * kPointerSize); 5521 __ ret(2 * kPointerSize);
5523 5522
(...skipping 494 matching lines...)
6018 __ decl(scratch); 6017 __ decl(scratch);
6019 __ push(scratch); 6018 __ push(scratch);
6020 6019
6021 // If names of slots in range from 1 to kProbes - 1 for the hash value are 6020 // If names of slots in range from 1 to kProbes - 1 for the hash value are
6022 // not equal to the name and kProbes-th slot is not used (its name is the 6021 // not equal to the name and kProbes-th slot is not used (its name is the
6023 // undefined value), it guarantees the hash table doesn't contain the 6022 // undefined value), it guarantees the hash table doesn't contain the
6024 // property. It's true even if some slots represent deleted properties 6023 // property. It's true even if some slots represent deleted properties
6025 // (their names are the null value). 6024 // (their names are the null value).
6026 for (int i = kInlinedProbes; i < kTotalProbes; i++) { 6025 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
6027 // Compute the masked index: (hash + i + i * i) & mask. 6026 // Compute the masked index: (hash + i + i * i) & mask.
6028 __ movq(scratch, Operand(rsp, 2 * kPointerSize)); 6027 __ movq(scratch, StackOperandForArgument(1, 2, kPointerSize));
haitao.feng 2013/08/02 15:04:58 There is a push instruction between the function start and this load, so the argument offsets are shifted by one extra kPointerSize.
6029 if (i > 0) { 6028 if (i > 0) {
6030 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i))); 6029 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
6031 } 6030 }
6032 __ and_(scratch, Operand(rsp, 0)); 6031 __ and_(scratch, Operand(rsp, 0));
6033 6032
6034 // Scale the index by multiplying by the entry size. 6033 // Scale the index by multiplying by the entry size.
6035 ASSERT(NameDictionary::kEntrySize == 3); 6034 ASSERT(NameDictionary::kEntrySize == 3);
6036 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. 6035 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3.
6037 6036
6038 // Having undefined at this place means the name is not contained. 6037 // Having undefined at this place means the name is not contained.
6039 __ movq(scratch, Operand(dictionary_, 6038 __ movq(scratch, Operand(dictionary_,
6040 index_, 6039 index_,
6041 times_pointer_size, 6040 times_pointer_size,
6042 kElementsStartOffset - kHeapObjectTag)); 6041 kElementsStartOffset - kHeapObjectTag));
6043 6042
6044 __ Cmp(scratch, masm->isolate()->factory()->undefined_value()); 6043 __ Cmp(scratch, masm->isolate()->factory()->undefined_value());
6045 __ j(equal, &not_in_dictionary); 6044 __ j(equal, &not_in_dictionary);
6046 6045
6047 // Stop if found the property. 6046 // Stop if found the property.
6048 __ cmpq(scratch, Operand(rsp, 3 * kPointerSize)); 6047 __ cmpq(scratch, StackOperandForArgument(0, 2, kPointerSize));
haitao.feng 2013/08/02 15:04:58 Same explanation above.
6049 __ j(equal, &in_dictionary); 6048 __ j(equal, &in_dictionary);
6050 6049
6051 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { 6050 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
6052 // If we hit a key that is not a unique name during negative 6051 // If we hit a key that is not a unique name during negative
6053 // lookup we have to bailout as this key might be equal to the 6052 // lookup we have to bailout as this key might be equal to the
6054 // key we are looking for. 6053 // key we are looking for.
6055 6054
6056 // Check if the entry name is not a unique name. 6055 // Check if the entry name is not a unique name.
6057 __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); 6056 __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
6058 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset), 6057 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
(...skipping 331 matching lines...)
6390 // clobbers rbx, rdx, rdi 6389 // clobbers rbx, rdx, rdi
6391 // ----------------------------------- 6390 // -----------------------------------
6392 6391
6393 Label element_done; 6392 Label element_done;
6394 Label double_elements; 6393 Label double_elements;
6395 Label smi_element; 6394 Label smi_element;
6396 Label slow_elements; 6395 Label slow_elements;
6397 Label fast_elements; 6396 Label fast_elements;
6398 6397
6399 // Get array literal index, array literal and its map. 6398 // Get array literal index, array literal and its map.
6400 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); 6399 __ movq(rdx, StackOperandForArgument(1, 2));
6401 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); 6400 __ movq(rbx, StackOperandForArgument(0, 2));
6402 __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset)); 6401 __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));
6403 6402
6404 __ CheckFastElements(rdi, &double_elements); 6403 __ CheckFastElements(rdi, &double_elements);
6405 6404
6406 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS 6405 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
6407 __ JumpIfSmi(rax, &smi_element); 6406 __ JumpIfSmi(rax, &smi_element);
6408 __ CheckFastSmiElements(rdi, &fast_elements); 6407 __ CheckFastSmiElements(rdi, &fast_elements);
6409 6408
6410 // Store into the array literal requires a elements transition. Call into 6409 // Store into the array literal requires a elements transition. Call into
6411 // the runtime. 6410 // the runtime.
(...skipping 148 matching lines...)
6560 Handle<Object> undefined_sentinel( 6559 Handle<Object> undefined_sentinel(
6561 masm->isolate()->heap()->undefined_value(), 6560 masm->isolate()->heap()->undefined_value(),
6562 masm->isolate()); 6561 masm->isolate());
6563 6562
6564 // is the low bit set? If so, we are holey and that is good. 6563 // is the low bit set? If so, we are holey and that is good.
6565 __ testb(rdx, Immediate(1)); 6564 __ testb(rdx, Immediate(1));
6566 Label normal_sequence; 6565 Label normal_sequence;
6567 __ j(not_zero, &normal_sequence); 6566 __ j(not_zero, &normal_sequence);
6568 6567
6569 // look at the first argument 6568 // look at the first argument
6570 __ movq(rcx, Operand(rsp, kPointerSize)); 6569 __ movq(rcx, StackOperandForArgument(0, 1));
6571 __ testq(rcx, rcx); 6570 __ testq(rcx, rcx);
6572 __ j(zero, &normal_sequence); 6571 __ j(zero, &normal_sequence);
6573 6572
6574 // We are going to create a holey array, but our kind is non-holey. 6573 // We are going to create a holey array, but our kind is non-holey.
6575 // Fix kind and retry (only if we have an allocation site in the cell). 6574 // Fix kind and retry (only if we have an allocation site in the cell).
6576 __ incl(rdx); 6575 __ incl(rdx);
6577 __ Cmp(rbx, undefined_sentinel); 6576 __ Cmp(rbx, undefined_sentinel);
6578 __ j(equal, &normal_sequence); 6577 __ j(equal, &normal_sequence);
6579 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); 6578 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset));
6580 Handle<Map> allocation_site_map( 6579 Handle<Map> allocation_site_map(
(...skipping 158 matching lines...)
6739 InternalArrayNoArgumentConstructorStub stub0(kind); 6738 InternalArrayNoArgumentConstructorStub stub0(kind);
6740 __ TailCallStub(&stub0); 6739 __ TailCallStub(&stub0);
6741 6740
6742 __ bind(&not_zero_case); 6741 __ bind(&not_zero_case);
6743 __ cmpl(rax, Immediate(1)); 6742 __ cmpl(rax, Immediate(1));
6744 __ j(greater, &not_one_case); 6743 __ j(greater, &not_one_case);
6745 6744
6746 if (IsFastPackedElementsKind(kind)) { 6745 if (IsFastPackedElementsKind(kind)) {
6747 // We might need to create a holey array 6746 // We might need to create a holey array
6748 // look at the first argument 6747 // look at the first argument
6749 __ movq(rcx, Operand(rsp, kPointerSize)); 6748 __ movq(rcx, StackOperandForArgument(0, 1));
6750 __ testq(rcx, rcx); 6749 __ testq(rcx, rcx);
6751 __ j(zero, &normal_sequence); 6750 __ j(zero, &normal_sequence);
6752 6751
6753 InternalArraySingleArgumentConstructorStub 6752 InternalArraySingleArgumentConstructorStub
6754 stub1_holey(GetHoleyElementsKind(kind)); 6753 stub1_holey(GetHoleyElementsKind(kind));
6755 __ TailCallStub(&stub1_holey); 6754 __ TailCallStub(&stub1_holey);
6756 } 6755 }
6757 6756
6758 __ bind(&normal_sequence); 6757 __ bind(&normal_sequence);
6759 InternalArraySingleArgumentConstructorStub stub1(kind); 6758 InternalArraySingleArgumentConstructorStub stub1(kind);
(...skipping 56 matching lines...)
6816 __ bind(&fast_elements_case); 6815 __ bind(&fast_elements_case);
6817 GenerateCase(masm, FAST_ELEMENTS); 6816 GenerateCase(masm, FAST_ELEMENTS);
6818 } 6817 }
6819 6818
6820 6819
6821 #undef __ 6820 #undef __
6822 6821
6823 } } // namespace v8::internal 6822 } } // namespace v8::internal
6824 6823
6825 #endif // V8_TARGET_ARCH_X64 6824 #endif // V8_TARGET_ARCH_X64
