OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 236 matching lines...)
247 } | 247 } |
248 ExternalReference miss = descriptor->miss_handler(); | 248 ExternalReference miss = descriptor->miss_handler(); |
249 __ CallExternalReference(miss, descriptor->register_param_count_); | 249 __ CallExternalReference(miss, descriptor->register_param_count_); |
250 } | 250 } |
251 | 251 |
252 __ Ret(); | 252 __ Ret(); |
253 } | 253 } |
254 | 254 |
255 | 255 |
256 void ToNumberStub::Generate(MacroAssembler* masm) { | 256 void ToNumberStub::Generate(MacroAssembler* masm) { |
257 // The ToNumber stub takes one argument in eax. | 257 // The ToNumber stub takes one argument in rax. |
258 Label check_heap_number, call_builtin; | 258 Label check_heap_number, call_builtin; |
259 __ SmiTest(rax); | 259 __ SmiTest(rax); |
260 __ j(not_zero, &check_heap_number, Label::kNear); | 260 __ j(not_zero, &check_heap_number, Label::kNear); |
261 __ Ret(); | 261 __ Ret(); |
262 | 262 |
263 __ bind(&check_heap_number); | 263 __ bind(&check_heap_number); |
264 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | 264 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
265 Heap::kHeapNumberMapRootIndex); | 265 Heap::kHeapNumberMapRootIndex); |
266 __ j(not_equal, &call_builtin, Label::kNear); | 266 __ j(not_equal, &call_builtin, Label::kNear); |
267 __ Ret(); | 267 __ Ret(); |
(...skipping 58 matching lines...)
326 __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); | 326 __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); |
327 __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx); | 327 __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx); |
328 | 328 |
329 // Return and remove the on-stack parameter. | 329 // Return and remove the on-stack parameter. |
330 __ ret(1 * kPointerSize); | 330 __ ret(1 * kPointerSize); |
331 | 331 |
332 __ bind(&check_optimized); | 332 __ bind(&check_optimized); |
333 | 333 |
334 __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1); | 334 __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1); |
335 | 335 |
336 // rcx holds native context, ebx points to fixed array of 3-element entries | 336 // rcx holds native context, rbx points to fixed array of 3-element entries |
337 // (native context, optimized code, literals). | 337 // (native context, optimized code, literals). |
338 // The optimized code map must never be empty, so check the first elements. | 338 // The optimized code map must never be empty, so check the first elements. |
339 Label install_optimized; | 339 Label install_optimized; |
340 // Speculatively move code object into edx. | 340 // Speculatively move code object into rdx. |
341 __ movq(rdx, FieldOperand(rbx, SharedFunctionInfo::kFirstCodeSlot)); | 341 __ movq(rdx, FieldOperand(rbx, SharedFunctionInfo::kFirstCodeSlot)); |
342 __ cmpq(rcx, FieldOperand(rbx, SharedFunctionInfo::kFirstContextSlot)); | 342 __ cmpq(rcx, FieldOperand(rbx, SharedFunctionInfo::kFirstContextSlot)); |
343 __ j(equal, &install_optimized); | 343 __ j(equal, &install_optimized); |
344 | 344 |
345 // Iterate through the rest of map backwards. rdx holds an index. | 345 // Iterate through the rest of map backwards. rdx holds an index. |
346 Label loop; | 346 Label loop; |
(...skipping 98 matching lines...)
445 | 445 |
446 // Need to collect. Call into runtime system. | 446 // Need to collect. Call into runtime system. |
447 __ bind(&gc); | 447 __ bind(&gc); |
448 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); | 448 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); |
449 } | 449 } |
450 | 450 |
451 | 451 |
452 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { | 452 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { |
453 // Stack layout on entry: | 453 // Stack layout on entry: |
454 // | 454 // |
455 // [rsp + (1 * kPointerSize)]: function | 455 // [rsp + (1 * kPointerSize)] : function |
456 // [rsp + (2 * kPointerSize)]: serialized scope info | 456 // [rsp + (2 * kPointerSize)] : serialized scope info |
457 | 457 |
458 // Try to allocate the context in new space. | 458 // Try to allocate the context in new space. |
459 Label gc; | 459 Label gc; |
460 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 460 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
461 __ Allocate(FixedArray::SizeFor(length), | 461 __ Allocate(FixedArray::SizeFor(length), |
462 rax, rbx, rcx, &gc, TAG_OBJECT); | 462 rax, rbx, rcx, &gc, TAG_OBJECT); |
463 | 463 |
464 // Get the function from the stack. | 464 // Get the function from the stack. |
465 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 465 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
466 | 466 |
(...skipping 950 matching lines...)
1417 | 1417 |
1418 static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm, | 1418 static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm, |
1419 Label* alloc_failure, | 1419 Label* alloc_failure, |
1420 OverwriteMode mode) { | 1420 OverwriteMode mode) { |
1421 Label skip_allocation; | 1421 Label skip_allocation; |
1422 switch (mode) { | 1422 switch (mode) { |
1423 case OVERWRITE_LEFT: { | 1423 case OVERWRITE_LEFT: { |
1424 // If the argument in rdx is already an object, we skip the | 1424 // If the argument in rdx is already an object, we skip the |
1425 // allocation of a heap number. | 1425 // allocation of a heap number. |
1426 __ JumpIfNotSmi(rdx, &skip_allocation); | 1426 __ JumpIfNotSmi(rdx, &skip_allocation); |
1427 // Allocate a heap number for the result. Keep eax and edx intact | 1427 // Allocate a heap number for the result. Keep rax and rdx intact |
1428 // for the possible runtime call. | 1428 // for the possible runtime call. |
1429 __ AllocateHeapNumber(rbx, rcx, alloc_failure); | 1429 __ AllocateHeapNumber(rbx, rcx, alloc_failure); |
1430 // Now rdx can be overwritten losing one of the arguments as we are | 1430 // Now rdx can be overwritten losing one of the arguments as we are |
1431 // now done and will not need it any more. | 1431 // now done and will not need it any more. |
1432 __ movq(rdx, rbx); | 1432 __ movq(rdx, rbx); |
1433 __ bind(&skip_allocation); | 1433 __ bind(&skip_allocation); |
1434 // Use object in rdx as a result holder | 1434 // Use object in rdx as a result holder |
1435 __ movq(rax, rdx); | 1435 __ movq(rax, rdx); |
1436 break; | 1436 break; |
1437 } | 1437 } |
(...skipping 18 matching lines...)
1456 | 1456 |
1457 void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { | 1457 void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { |
1458 __ push(rdx); | 1458 __ push(rdx); |
1459 __ push(rax); | 1459 __ push(rax); |
1460 } | 1460 } |
1461 | 1461 |
1462 | 1462 |
1463 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { | 1463 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { |
1464 // TAGGED case: | 1464 // TAGGED case: |
1465 // Input: | 1465 // Input: |
1466 // rsp[8]: argument (should be number). | 1466 // rsp[8] : argument (should be number). |
1467 // rsp[0]: return address. | 1467 // rsp[0] : return address. |
1468 // Output: | 1468 // Output: |
1469 // rax: tagged double result. | 1469 // rax: tagged double result. |
1470 // UNTAGGED case: | 1470 // UNTAGGED case: |
1471 // Input:: | 1471 // Input: |
1472 // rsp[0]: return address. | 1472 // rsp[0] : return address. |
1473 // xmm1: untagged double input argument | 1473 // xmm1 : untagged double input argument |
1474 // Output: | 1474 // Output: |
1475 // xmm1: untagged double result. | 1475 // xmm1 : untagged double result. |
1476 | 1476 |
1477 Label runtime_call; | 1477 Label runtime_call; |
1478 Label runtime_call_clear_stack; | 1478 Label runtime_call_clear_stack; |
1479 Label skip_cache; | 1479 Label skip_cache; |
1480 const bool tagged = (argument_type_ == TAGGED); | 1480 const bool tagged = (argument_type_ == TAGGED); |
1481 if (tagged) { | 1481 if (tagged) { |
1482 Label input_not_smi, loaded; | 1482 Label input_not_smi, loaded; |
1483 // Test that rax is a number. | 1483 // Test that rax is a number. |
1484 __ movq(rax, Operand(rsp, kPointerSize)); | 1484 __ movq(rax, Operand(rsp, kPointerSize)); |
1485 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); | 1485 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); |
(...skipping 720 matching lines...)
2206 __ cvtlsi2sd(double_exponent, exponent); | 2206 __ cvtlsi2sd(double_exponent, exponent); |
2207 | 2207 |
2208 // Returning or bailing out. | 2208 // Returning or bailing out. |
2209 Counters* counters = masm->isolate()->counters(); | 2209 Counters* counters = masm->isolate()->counters(); |
2210 if (exponent_type_ == ON_STACK) { | 2210 if (exponent_type_ == ON_STACK) { |
2211 // The arguments are still on the stack. | 2211 // The arguments are still on the stack. |
2212 __ bind(&call_runtime); | 2212 __ bind(&call_runtime); |
2213 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); | 2213 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); |
2214 | 2214 |
2215 // The stub is called from non-optimized code, which expects the result | 2215 // The stub is called from non-optimized code, which expects the result |
2216 // as heap number in eax. | 2216 // as heap number in rax. |
2217 __ bind(&done); | 2217 __ bind(&done); |
2218 __ AllocateHeapNumber(rax, rcx, &call_runtime); | 2218 __ AllocateHeapNumber(rax, rcx, &call_runtime); |
2219 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result); | 2219 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result); |
2220 __ IncrementCounter(counters->math_pow(), 1); | 2220 __ IncrementCounter(counters->math_pow(), 1); |
2221 __ ret(2 * kPointerSize); | 2221 __ ret(2 * kPointerSize); |
2222 } else { | 2222 } else { |
2223 __ bind(&call_runtime); | 2223 __ bind(&call_runtime); |
2224 // Move base to the correct argument register. Exponent is already in xmm1. | 2224 // Move base to the correct argument register. Exponent is already in xmm1. |
2225 __ movsd(xmm0, double_base); | 2225 __ movsd(xmm0, double_base); |
2226 ASSERT(double_exponent.is(xmm1)); | 2226 ASSERT(double_exponent.is(xmm1)); |
(...skipping 190 matching lines...)
2417 __ bind(&slow); | 2417 __ bind(&slow); |
2418 __ pop(rbx); // Return address. | 2418 __ pop(rbx); // Return address. |
2419 __ push(rdx); | 2419 __ push(rdx); |
2420 __ push(rbx); | 2420 __ push(rbx); |
2421 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); | 2421 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); |
2422 } | 2422 } |
2423 | 2423 |
2424 | 2424 |
2425 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { | 2425 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { |
2426 // Stack layout: | 2426 // Stack layout: |
2427 // rsp[0] : return address | 2427 // rsp[0] : return address |
2428 // rsp[8] : number of parameters (tagged) | 2428 // rsp[8] : number of parameters (tagged) |
2429 // rsp[16] : receiver displacement | 2429 // rsp[16] : receiver displacement |
2430 // rsp[24] : function | 2430 // rsp[24] : function |
2431 // Registers used over the whole function: | 2431 // Registers used over the whole function: |
2432 // rbx: the mapped parameter count (untagged) | 2432 // rbx: the mapped parameter count (untagged) |
2433 // rax: the allocated object (tagged). | 2433 // rax: the allocated object (tagged). |
2434 | 2434 |
2435 Factory* factory = masm->isolate()->factory(); | 2435 Factory* factory = masm->isolate()->factory(); |
2436 | 2436 |
2437 __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize)); | 2437 __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize)); |
2438 // rbx = parameter count (untagged) | 2438 // rbx = parameter count (untagged) |
(...skipping 194 matching lines...)
2633 // Do the runtime call to allocate the arguments object. | 2633 // Do the runtime call to allocate the arguments object. |
2634 // rcx = argument count (untagged) | 2634 // rcx = argument count (untagged) |
2635 __ bind(&runtime); | 2635 __ bind(&runtime); |
2636 __ Integer32ToSmi(rcx, rcx); | 2636 __ Integer32ToSmi(rcx, rcx); |
2637 __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count. | 2637 __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count. |
2638 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); | 2638 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); |
2639 } | 2639 } |
2640 | 2640 |
2641 | 2641 |
2642 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { | 2642 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { |
2643 // esp[0] : return address | 2643 // rsp[0] : return address |
2644 // esp[8] : number of parameters | 2644 // rsp[8] : number of parameters |
2645 // esp[16] : receiver displacement | 2645 // rsp[16] : receiver displacement |
2646 // esp[24] : function | 2646 // rsp[24] : function |
2647 | 2647 |
2648 // Check if the calling frame is an arguments adaptor frame. | 2648 // Check if the calling frame is an arguments adaptor frame. |
2649 Label runtime; | 2649 Label runtime; |
2650 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 2650 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
2651 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 2651 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
2652 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 2652 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
2653 __ j(not_equal, &runtime); | 2653 __ j(not_equal, &runtime); |
2654 | 2654 |
2655 // Patch the arguments.length and the parameters pointer. | 2655 // Patch the arguments.length and the parameters pointer. |
2656 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2656 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
2657 __ movq(Operand(rsp, 1 * kPointerSize), rcx); | 2657 __ movq(Operand(rsp, 1 * kPointerSize), rcx); |
2658 __ SmiToInteger64(rcx, rcx); | 2658 __ SmiToInteger64(rcx, rcx); |
2659 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, | 2659 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, |
2660 StandardFrameConstants::kCallerSPOffset)); | 2660 StandardFrameConstants::kCallerSPOffset)); |
2661 __ movq(Operand(rsp, 2 * kPointerSize), rdx); | 2661 __ movq(Operand(rsp, 2 * kPointerSize), rdx); |
2662 | 2662 |
2663 __ bind(&runtime); | 2663 __ bind(&runtime); |
2664 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); | 2664 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); |
2665 } | 2665 } |
2666 | 2666 |
2667 | 2667 |
2668 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | 2668 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { |
2669 // rsp[0] : return address | 2669 // rsp[0] : return address |
2670 // rsp[8] : number of parameters | 2670 // rsp[8] : number of parameters |
2671 // rsp[16] : receiver displacement | 2671 // rsp[16] : receiver displacement |
2672 // rsp[24] : function | 2672 // rsp[24] : function |
2673 | 2673 |
2674 // Check if the calling frame is an arguments adaptor frame. | 2674 // Check if the calling frame is an arguments adaptor frame. |
2675 Label adaptor_frame, try_allocate, runtime; | 2675 Label adaptor_frame, try_allocate, runtime; |
2676 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 2676 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
2677 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 2677 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
2678 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 2678 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
2679 __ j(equal, &adaptor_frame); | 2679 __ j(equal, &adaptor_frame); |
2680 | 2680 |
(...skipping 86 matching lines...)
2767 | 2767 |
2768 void RegExpExecStub::Generate(MacroAssembler* masm) { | 2768 void RegExpExecStub::Generate(MacroAssembler* masm) { |
2769 // Just jump directly to runtime if native RegExp is not selected at compile | 2769 // Just jump directly to runtime if native RegExp is not selected at compile |
2770 // time or if regexp entry in generated code is turned off runtime switch or | 2770 // time or if regexp entry in generated code is turned off by a runtime switch or |
2771 // at compilation. | 2771 // at compilation. |
2772 #ifdef V8_INTERPRETED_REGEXP | 2772 #ifdef V8_INTERPRETED_REGEXP |
2773 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 2773 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); |
2774 #else // V8_INTERPRETED_REGEXP | 2774 #else // V8_INTERPRETED_REGEXP |
2775 | 2775 |
2776 // Stack frame on entry. | 2776 // Stack frame on entry. |
2777 // rsp[0]: return address | 2777 // rsp[0] : return address |
2778 // rsp[8]: last_match_info (expected JSArray) | 2778 // rsp[8] : last_match_info (expected JSArray) |
2779 // rsp[16]: previous index | 2779 // rsp[16] : previous index |
2780 // rsp[24]: subject string | 2780 // rsp[24] : subject string |
2781 // rsp[32]: JSRegExp object | 2781 // rsp[32] : JSRegExp object |
2782 | 2782 |
2783 static const int kLastMatchInfoOffset = 1 * kPointerSize; | 2783 static const int kLastMatchInfoOffset = 1 * kPointerSize; |
2784 static const int kPreviousIndexOffset = 2 * kPointerSize; | 2784 static const int kPreviousIndexOffset = 2 * kPointerSize; |
2785 static const int kSubjectOffset = 3 * kPointerSize; | 2785 static const int kSubjectOffset = 3 * kPointerSize; |
2786 static const int kJSRegExpOffset = 4 * kPointerSize; | 2786 static const int kJSRegExpOffset = 4 * kPointerSize; |
2787 | 2787 |
2788 Label runtime; | 2788 Label runtime; |
2789 // Ensure that a RegExp stack is allocated. | 2789 // Ensure that a RegExp stack is allocated. |
2790 Isolate* isolate = masm->isolate(); | 2790 Isolate* isolate = masm->isolate(); |
2791 ExternalReference address_of_regexp_stack_memory_address = | 2791 ExternalReference address_of_regexp_stack_memory_address = |
(...skipping 1584 matching lines...)
4376 | 4376 |
4377 // Restore frame pointer and return. | 4377 // Restore frame pointer and return. |
4378 __ pop(rbp); | 4378 __ pop(rbp); |
4379 __ ret(0); | 4379 __ ret(0); |
4380 } | 4380 } |
4381 | 4381 |
4382 | 4382 |
4383 void InstanceofStub::Generate(MacroAssembler* masm) { | 4383 void InstanceofStub::Generate(MacroAssembler* masm) { |
4384 // Implements "value instanceof function" operator. | 4384 // Implements "value instanceof function" operator. |
4385 // Expected input state with no inline cache: | 4385 // Expected input state with no inline cache: |
4386 // rsp[0] : return address | 4386 // rsp[0] : return address |
4387 // rsp[1] : function pointer | 4387 // rsp[8] : function pointer |
4388 // rsp[2] : value | 4388 // rsp[16] : value |
4389 // Expected input state with an inline one-element cache: | 4389 // Expected input state with an inline one-element cache: |
4390 // rsp[0] : return address | 4390 // rsp[0] : return address |
4391 // rsp[1] : offset from return address to location of inline cache | 4391 // rsp[8] : offset from return address to location of inline cache |
4392 // rsp[2] : function pointer | 4392 // rsp[16] : function pointer |
4393 // rsp[3] : value | 4393 // rsp[24] : value |
4394 // Returns a bitwise zero to indicate that the value | 4394 // Returns a bitwise zero to indicate that the value |
4395 // is and instance of the function and anything else to | 4395 // is an instance of the function and anything else to |
4396 // indicate that the value is not an instance. | 4396 // indicate that the value is not an instance. |
4397 | 4397 |
4398 static const int kOffsetToMapCheckValue = 2; | 4398 static const int kOffsetToMapCheckValue = 2; |
4399 static const int kOffsetToResultValue = 18; | 4399 static const int kOffsetToResultValue = 18; |
4400 // The last 4 bytes of the instruction sequence | 4400 // The last 4 bytes of the instruction sequence |
4401 // movq(rdi, FieldOperand(rax, HeapObject::kMapOffset)) | 4401 // movq(rdi, FieldOperand(rax, HeapObject::kMapOffset)) |
4402 // Move(kScratchRegister, Factory::the_hole_value()) | 4402 // Move(kScratchRegister, Factory::the_hole_value()) |
4403 // in front of the hole value address. | 4403 // in front of the hole value address. |
(...skipping 921 matching lines...)
5325 Label hash_not_zero; | 5325 Label hash_not_zero; |
5326 __ j(not_zero, &hash_not_zero); | 5326 __ j(not_zero, &hash_not_zero); |
5327 __ Set(hash, StringHasher::kZeroHash); | 5327 __ Set(hash, StringHasher::kZeroHash); |
5328 __ bind(&hash_not_zero); | 5328 __ bind(&hash_not_zero); |
5329 } | 5329 } |
5330 | 5330 |
5331 void SubStringStub::Generate(MacroAssembler* masm) { | 5331 void SubStringStub::Generate(MacroAssembler* masm) { |
5332 Label runtime; | 5332 Label runtime; |
5333 | 5333 |
5334 // Stack frame on entry. | 5334 // Stack frame on entry. |
5335 // rsp[0]: return address | 5335 // rsp[0] : return address |
5336 // rsp[8]: to | 5336 // rsp[8] : to |
5337 // rsp[16]: from | 5337 // rsp[16] : from |
5338 // rsp[24]: string | 5338 // rsp[24] : string |
5339 | 5339 |
5340 const int kToOffset = 1 * kPointerSize; | 5340 const int kToOffset = 1 * kPointerSize; |
5341 const int kFromOffset = kToOffset + kPointerSize; | 5341 const int kFromOffset = kToOffset + kPointerSize; |
5342 const int kStringOffset = kFromOffset + kPointerSize; | 5342 const int kStringOffset = kFromOffset + kPointerSize; |
5343 const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset; | 5343 const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset; |
5344 | 5344 |
5345 // Make sure first argument is a string. | 5345 // Make sure first argument is a string. |
5346 __ movq(rax, Operand(rsp, kStringOffset)); | 5346 __ movq(rax, Operand(rsp, kStringOffset)); |
5347 STATIC_ASSERT(kSmiTag == 0); | 5347 STATIC_ASSERT(kSmiTag == 0); |
5348 __ testl(rax, Immediate(kSmiTagMask)); | 5348 __ testl(rax, Immediate(kSmiTagMask)); |
(...skipping 338 matching lines...)
5687 __ j(not_equal, chars_not_equal, near_jump); | 5687 __ j(not_equal, chars_not_equal, near_jump); |
5688 __ incq(index); | 5688 __ incq(index); |
5689 __ j(not_zero, &loop); | 5689 __ j(not_zero, &loop); |
5690 } | 5690 } |
5691 | 5691 |
5692 | 5692 |
5693 void StringCompareStub::Generate(MacroAssembler* masm) { | 5693 void StringCompareStub::Generate(MacroAssembler* masm) { |
5694 Label runtime; | 5694 Label runtime; |
5695 | 5695 |
5696 // Stack frame on entry. | 5696 // Stack frame on entry. |
5697 // rsp[0]: return address | 5697 // rsp[0] : return address |
5698 // rsp[8]: right string | 5698 // rsp[8] : right string |
5699 // rsp[16]: left string | 5699 // rsp[16] : left string |
5700 | 5700 |
5701 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left | 5701 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left |
5702 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right | 5702 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right |
5703 | 5703 |
5704 // Check for identity. | 5704 // Check for identity. |
5705 Label not_same; | 5705 Label not_same; |
5706 __ cmpq(rdx, rax); | 5706 __ cmpq(rdx, rax); |
5707 __ j(not_equal, ¬_same, Label::kNear); | 5707 __ j(not_equal, ¬_same, Label::kNear); |
5708 __ Move(rax, Smi::FromInt(EQUAL)); | 5708 __ Move(rax, Smi::FromInt(EQUAL)); |
5709 Counters* counters = masm->isolate()->counters(); | 5709 Counters* counters = masm->isolate()->counters(); |
(...skipping 476 matching lines...)
6186 __ testq(r0, r0); | 6186 __ testq(r0, r0); |
6187 __ j(zero, miss); | 6187 __ j(zero, miss); |
6188 __ jmp(done); | 6188 __ jmp(done); |
6189 } | 6189 } |
6190 | 6190 |
6191 | 6191 |
6192 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { | 6192 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { |
6193 // This stub overrides SometimesSetsUpAFrame() to return false. That means | 6193 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
6194 // we cannot call anything that could cause a GC from this stub. | 6194 // we cannot call anything that could cause a GC from this stub. |
6195 // Stack frame on entry: | 6195 // Stack frame on entry: |
6196 // esp[0 * kPointerSize]: return address. | 6196 // rsp[0 * kPointerSize] : return address. |
6197 // esp[1 * kPointerSize]: key's hash. | 6197 // rsp[1 * kPointerSize] : key's hash. |
6198 // esp[2 * kPointerSize]: key. | 6198 // rsp[2 * kPointerSize] : key. |
6199 // Registers: | 6199 // Registers: |
6200 // dictionary_: NameDictionary to probe. | 6200 // dictionary_: NameDictionary to probe. |
6201 // result_: used as scratch. | 6201 // result_: used as scratch. |
6202 // index_: will hold an index of entry if lookup is successful. | 6202 // index_: will hold an index of entry if lookup is successful. |
6203 // might alias with result_. | 6203 // might alias with result_. |
6204 // Returns: | 6204 // Returns: |
6205 // result_ is zero if lookup failed, non zero otherwise. | 6205 // result_ is zero if lookup failed, non zero otherwise. |
6206 | 6206 |
6207 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; | 6207 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; |
6208 | 6208 |
(...skipping 360 matching lines...)
6569 __ pop(regs_.object()); | 6569 __ pop(regs_.object()); |
6570 | 6570 |
6571 __ bind(&need_incremental); | 6571 __ bind(&need_incremental); |
6572 | 6572 |
6573 // Fall through when we need to inform the incremental marker. | 6573 // Fall through when we need to inform the incremental marker. |
6574 } | 6574 } |
6575 | 6575 |
6576 | 6576 |
6577 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) { | 6577 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) { |
6578 // ----------- S t a t e ------------- | 6578 // ----------- S t a t e ------------- |
6579 // -- rax : element value to store | 6579 // -- rax : element value to store |
6580 // -- rcx : element index as smi | 6580 // -- rcx : element index as smi |
6581 // -- rsp[0] : return address | 6581 // -- rsp[0] : return address |
6582 // -- rsp[8] : array literal index in function | 6582 // -- rsp[8] : array literal index in function |
6583 // -- rsp[16]: array literal | 6583 // -- rsp[16] : array literal |
6584 // clobbers rbx, rdx, rdi | 6584 // clobbers rbx, rdx, rdi |
6585 // ----------------------------------- | 6585 // ----------------------------------- |
6586 | 6586 |
6587 Label element_done; | 6587 Label element_done; |
6588 Label double_elements; | 6588 Label double_elements; |
6589 Label smi_element; | 6589 Label smi_element; |
6590 Label slow_elements; | 6590 Label slow_elements; |
6591 Label fast_elements; | 6591 Label fast_elements; |
6592 | 6592 |
6593 // Get array literal index, array literal and its map. | 6593 // Get array literal index, array literal and its map. |
(...skipping 141 matching lines...)
6735 // If we reached this point there is a problem. | 6735 // If we reached this point there is a problem. |
6736 __ Abort("Unexpected ElementsKind in array constructor"); | 6736 __ Abort("Unexpected ElementsKind in array constructor"); |
6737 } | 6737 } |
6738 | 6738 |
6739 | 6739 |
6740 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { | 6740 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { |
6741 // rbx - type info cell | 6741 // rbx - type info cell |
6742 // rdx - kind | 6742 // rdx - kind |
6743 // rax - number of arguments | 6743 // rax - number of arguments |
6744 // rdi - constructor? | 6744 // rdi - constructor? |
6745 // esp[0] - return address | 6745 // rsp[0] - return address |
6746 // esp[4] - last argument | 6746 // rsp[8] - last argument |
6747 ASSERT(FAST_SMI_ELEMENTS == 0); | 6747 ASSERT(FAST_SMI_ELEMENTS == 0); |
6748 ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | 6748 ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); |
6749 ASSERT(FAST_ELEMENTS == 2); | 6749 ASSERT(FAST_ELEMENTS == 2); |
6750 ASSERT(FAST_HOLEY_ELEMENTS == 3); | 6750 ASSERT(FAST_HOLEY_ELEMENTS == 3); |
6751 ASSERT(FAST_DOUBLE_ELEMENTS == 4); | 6751 ASSERT(FAST_DOUBLE_ELEMENTS == 4); |
6752 ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); | 6752 ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); |
6753 | 6753 |
6754 Handle<Object> undefined_sentinel( | 6754 Handle<Object> undefined_sentinel( |
6755 masm->isolate()->heap()->undefined_value(), | 6755 masm->isolate()->heap()->undefined_value(), |
6756 masm->isolate()); | 6756 masm->isolate()); |
(...skipping 77 matching lines...)
6834 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); | 6834 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); |
6835 stubh2.GetCode(isolate)->set_is_pregenerated(true); | 6835 stubh2.GetCode(isolate)->set_is_pregenerated(true); |
6836 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); | 6836 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); |
6837 stubh3.GetCode(isolate)->set_is_pregenerated(true); | 6837 stubh3.GetCode(isolate)->set_is_pregenerated(true); |
6838 } | 6838 } |
6839 } | 6839 } |
6840 | 6840 |
6841 | 6841 |
6842 void ArrayConstructorStub::Generate(MacroAssembler* masm) { | 6842 void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
6843 // ----------- S t a t e ------------- | 6843 // ----------- S t a t e ------------- |
6844 // -- rax : argc | 6844 // -- rax : argc |
6845 // -- rbx : type info cell | 6845 // -- rbx : type info cell |
6846 // -- rdi : constructor | 6846 // -- rdi : constructor |
6847 // -- rsp[0] : return address | 6847 // -- rsp[0] : return address |
6848 // -- rsp[4] : last argument | 6848 // -- rsp[8] : last argument |
6849 // ----------------------------------- | 6849 // ----------------------------------- |
6850 Handle<Object> undefined_sentinel( | 6850 Handle<Object> undefined_sentinel( |
6851 masm->isolate()->heap()->undefined_value(), | 6851 masm->isolate()->heap()->undefined_value(), |
6852 masm->isolate()); | 6852 masm->isolate()); |
6853 | 6853 |
6854 if (FLAG_debug_code) { | 6854 if (FLAG_debug_code) { |
6855 // The array construct code is only set for the global and natives | 6855 // The array construct code is only set for the global and natives |
6856 // builtin Array functions which always have maps. | 6856 // builtin Array functions which always have maps. |
6857 | 6857 |
6858 // Initial map for the builtin Array function should be a map. | 6858 // Initial map for the builtin Array function should be a map. |
6859 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 6859 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
6860 // Will both indicate a NULL and a Smi. | 6860 // Will both indicate a NULL and a Smi. |
6861 STATIC_ASSERT(kSmiTag == 0); | 6861 STATIC_ASSERT(kSmiTag == 0); |
6862 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); | 6862 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); |
6863 __ Check(not_smi, "Unexpected initial map for Array function"); | 6863 __ Check(not_smi, "Unexpected initial map for Array function"); |
6864 __ CmpObjectType(rcx, MAP_TYPE, rcx); | 6864 __ CmpObjectType(rcx, MAP_TYPE, rcx); |
6865 __ Check(equal, "Unexpected initial map for Array function"); | 6865 __ Check(equal, "Unexpected initial map for Array function"); |
6866 | 6866 |
6867 // We should either have undefined in ebx or a valid cell | 6867 // We should either have undefined in rbx or a valid cell |
6868 Label okay_here; | 6868 Label okay_here; |
6869 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); | 6869 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); |
6870 __ Cmp(rbx, undefined_sentinel); | 6870 __ Cmp(rbx, undefined_sentinel); |
6871 __ j(equal, &okay_here); | 6871 __ j(equal, &okay_here); |
6872 __ Cmp(FieldOperand(rbx, 0), cell_map); | 6872 __ Cmp(FieldOperand(rbx, 0), cell_map); |
6873 __ Assert(equal, "Expected property cell in register rbx"); | 6873 __ Assert(equal, "Expected property cell in register rbx"); |
6874 __ bind(&okay_here); | 6874 __ bind(&okay_here); |
6875 } | 6875 } |
6876 | 6876 |
6877 Label no_info, switch_ready; | 6877 Label no_info, switch_ready; |
(...skipping 64 matching lines...)
6942 __ TailCallStub(&stub1); | 6942 __ TailCallStub(&stub1); |
6943 | 6943 |
6944 __ bind(¬_one_case); | 6944 __ bind(¬_one_case); |
6945 InternalArrayNArgumentsConstructorStub stubN(kind); | 6945 InternalArrayNArgumentsConstructorStub stubN(kind); |
6946 __ TailCallStub(&stubN); | 6946 __ TailCallStub(&stubN); |
6947 } | 6947 } |
6948 | 6948 |
6949 | 6949 |
6950 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { | 6950 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { |
6951 // ----------- S t a t e ------------- | 6951 // ----------- S t a t e ------------- |
6952 // -- eax : argc | 6952 // -- rax : argc |
6953 // -- ebx : type info cell | 6953 // -- rbx : type info cell |
6954 // -- edi : constructor | 6954 // -- rdi : constructor |
6955 // -- esp[0] : return address | 6955 // -- rsp[0] : return address |
6956 // -- esp[4] : last argument | 6956 // -- rsp[8] : last argument |
6957 // ----------------------------------- | 6957 // ----------------------------------- |
6958 | 6958 |
6959 if (FLAG_debug_code) { | 6959 if (FLAG_debug_code) { |
6960 // The array construct code is only set for the global and natives | 6960 // The array construct code is only set for the global and natives |
6961 // builtin Array functions which always have maps. | 6961 // builtin Array functions which always have maps. |
6962 | 6962 |
6963 // Initial map for the builtin Array function should be a map. | 6963 // Initial map for the builtin Array function should be a map. |
6964 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 6964 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
6965 // Will both indicate a NULL and a Smi. | 6965 // Will both indicate a NULL and a Smi. |
6966 STATIC_ASSERT(kSmiTag == 0); | 6966 STATIC_ASSERT(kSmiTag == 0); |
(...skipping 31 matching lines...)
6998 __ bind(&fast_elements_case); | 6998 __ bind(&fast_elements_case); |
6999 GenerateCase(masm, FAST_ELEMENTS); | 6999 GenerateCase(masm, FAST_ELEMENTS); |
7000 } | 7000 } |
7001 | 7001 |
7002 | 7002 |
7003 #undef __ | 7003 #undef __ |
7004 | 7004 |
7005 } } // namespace v8::internal | 7005 } } // namespace v8::internal |
7006 | 7006 |
7007 #endif // V8_TARGET_ARCH_X64 | 7007 #endif // V8_TARGET_ARCH_X64 |