OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
11 // with the distribution. | 11 // with the distribution. |
12 // * Neither the name of Google Inc. nor the names of its | 12 // * Neither the name of Google Inc. nor the names of its |
13 // contributors may be used to endorse or promote products derived | 13 // contributors may be used to endorse or promote products derived |
14 // from this software without specific prior written permission. | 14 // from this software without specific prior written permission. |
15 // | 15 // |
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include "v8.h" | 28 #include "v8.h" |
29 | 29 |
30 #if defined(V8_TARGET_ARCH_X64) | 30 #if V8_TARGET_ARCH_X64 |
31 | 31 |
32 #include "bootstrapper.h" | 32 #include "bootstrapper.h" |
33 #include "code-stubs.h" | 33 #include "code-stubs.h" |
34 #include "regexp-macro-assembler.h" | 34 #include "regexp-macro-assembler.h" |
35 #include "stub-cache.h" | 35 #include "stub-cache.h" |
36 #include "runtime.h" | 36 #include "runtime.h" |
37 | 37 |
38 namespace v8 { | 38 namespace v8 { |
39 namespace internal { | 39 namespace internal { |
40 | 40 |
(...skipping 13 matching lines...) |
54 Isolate* isolate, | 54 Isolate* isolate, |
55 CodeStubInterfaceDescriptor* descriptor) { | 55 CodeStubInterfaceDescriptor* descriptor) { |
56 static Register registers[] = { rax, rbx, rcx, rdx }; | 56 static Register registers[] = { rax, rbx, rcx, rdx }; |
57 descriptor->register_param_count_ = 4; | 57 descriptor->register_param_count_ = 4; |
58 descriptor->register_params_ = registers; | 58 descriptor->register_params_ = registers; |
59 descriptor->deoptimization_handler_ = | 59 descriptor->deoptimization_handler_ = |
60 Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry; | 60 Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry; |
61 } | 61 } |
62 | 62 |
63 | 63 |
| 64 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( |
| 65 Isolate* isolate, |
| 66 CodeStubInterfaceDescriptor* descriptor) { |
| 67 static Register registers[] = { rbx }; |
| 68 descriptor->register_param_count_ = 1; |
| 69 descriptor->register_params_ = registers; |
| 70 descriptor->deoptimization_handler_ = NULL; |
| 71 } |
| 72 |
| 73 |
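Note: the new CreateAllocationSiteStub descriptor follows the same pattern as the other Hydrogen stub descriptors in this file: a static register array records where the parameters arrive, and the deoptimization handler names the runtime entry to fall back to (NULL here, since this stub never deoptimizes). A minimal standalone sketch of that shape, using placeholder types that stand in for the real V8 classes:

    #include <cstddef>

    // Placeholder stand-ins for v8::internal::Register and the descriptor;
    // the real definitions live in code-stubs.h.
    struct Register { int code; };
    struct CodeStubInterfaceDescriptor {
      int register_param_count_;
      Register* register_params_;
      void* deoptimization_handler_;
    };

    void InitializeDescriptorSketch(CodeStubInterfaceDescriptor* descriptor) {
      static Register registers[] = { { 3 /* rbx */ } };
      descriptor->register_param_count_ = 1;
      descriptor->register_params_ = registers;
      descriptor->deoptimization_handler_ = NULL;  // no deopt path for this stub
    }

    int main() {
      CodeStubInterfaceDescriptor descriptor;
      InitializeDescriptorSketch(&descriptor);
      return descriptor.register_param_count_ == 1 ? 0 : 1;
    }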
64 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( | 74 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( |
65 Isolate* isolate, | 75 Isolate* isolate, |
66 CodeStubInterfaceDescriptor* descriptor) { | 76 CodeStubInterfaceDescriptor* descriptor) { |
67 static Register registers[] = { rdx, rax }; | 77 static Register registers[] = { rdx, rax }; |
68 descriptor->register_param_count_ = 2; | 78 descriptor->register_param_count_ = 2; |
69 descriptor->register_params_ = registers; | 79 descriptor->register_params_ = registers; |
70 descriptor->deoptimization_handler_ = | 80 descriptor->deoptimization_handler_ = |
71 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); | 81 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); |
72 } | 82 } |
73 | 83 |
(...skipping 141 matching lines...) |
215 | 225 |
216 void ToBooleanStub::InitializeInterfaceDescriptor( | 226 void ToBooleanStub::InitializeInterfaceDescriptor( |
217 Isolate* isolate, | 227 Isolate* isolate, |
218 CodeStubInterfaceDescriptor* descriptor) { | 228 CodeStubInterfaceDescriptor* descriptor) { |
219 static Register registers[] = { rax }; | 229 static Register registers[] = { rax }; |
220 descriptor->register_param_count_ = 1; | 230 descriptor->register_param_count_ = 1; |
221 descriptor->register_params_ = registers; | 231 descriptor->register_params_ = registers; |
222 descriptor->deoptimization_handler_ = | 232 descriptor->deoptimization_handler_ = |
223 FUNCTION_ADDR(ToBooleanIC_Miss); | 233 FUNCTION_ADDR(ToBooleanIC_Miss); |
224 descriptor->SetMissHandler( | 234 descriptor->SetMissHandler( |
225 ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate)); | 235 ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate)); |
226 } | 236 } |
227 | 237 |
228 | 238 |
| 239 void UnaryOpStub::InitializeInterfaceDescriptor( |
| 240 Isolate* isolate, |
| 241 CodeStubInterfaceDescriptor* descriptor) { |
| 242 static Register registers[] = { rax }; |
| 243 descriptor->register_param_count_ = 1; |
| 244 descriptor->register_params_ = registers; |
| 245 descriptor->deoptimization_handler_ = |
| 246 FUNCTION_ADDR(UnaryOpIC_Miss); |
| 247 } |
| 248 |
| 249 |
| 250 void StoreGlobalStub::InitializeInterfaceDescriptor( |
| 251 Isolate* isolate, |
| 252 CodeStubInterfaceDescriptor* descriptor) { |
| 253 static Register registers[] = { rdx, rcx, rax }; |
| 254 descriptor->register_param_count_ = 3; |
| 255 descriptor->register_params_ = registers; |
| 256 descriptor->deoptimization_handler_ = |
| 257 FUNCTION_ADDR(StoreIC_MissFromStubFailure); |
| 258 } |
| 259 |
| 260 |
229 #define __ ACCESS_MASM(masm) | 261 #define __ ACCESS_MASM(masm) |
230 | 262 |
231 | 263 |
232 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { | 264 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { |
233 // Update the static counter each time a new code stub is generated. | 265 // Update the static counter each time a new code stub is generated. |
234 Isolate* isolate = masm->isolate(); | 266 Isolate* isolate = masm->isolate(); |
235 isolate->counters()->code_stubs()->Increment(); | 267 isolate->counters()->code_stubs()->Increment(); |
236 | 268 |
237 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); | 269 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); |
238 int param_count = descriptor->register_param_count_; | 270 int param_count = descriptor->register_param_count_; |
239 { | 271 { |
240 // Call the runtime system in a fresh internal frame. | 272 // Call the runtime system in a fresh internal frame. |
241 FrameScope scope(masm, StackFrame::INTERNAL); | 273 FrameScope scope(masm, StackFrame::INTERNAL); |
242 ASSERT(descriptor->register_param_count_ == 0 || | 274 ASSERT(descriptor->register_param_count_ == 0 || |
243 rax.is(descriptor->register_params_[param_count - 1])); | 275 rax.is(descriptor->register_params_[param_count - 1])); |
244 // Push arguments | 276 // Push arguments |
245 for (int i = 0; i < param_count; ++i) { | 277 for (int i = 0; i < param_count; ++i) { |
246 __ push(descriptor->register_params_[i]); | 278 __ push(descriptor->register_params_[i]); |
247 } | 279 } |
248 ExternalReference miss = descriptor->miss_handler(); | 280 ExternalReference miss = descriptor->miss_handler(); |
249 __ CallExternalReference(miss, descriptor->register_param_count_); | 281 __ CallExternalReference(miss, descriptor->register_param_count_); |
250 } | 282 } |
251 | 283 |
252 __ Ret(); | 284 __ Ret(); |
253 } | 285 } |
254 | 286 |
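Note: GenerateLightweightMiss spills every register parameter into a fresh internal frame and forwards them to the descriptor's miss handler; the ASSERT records the convention that when there are register parameters at all, the last one is rax, which is also where the handler's result is returned. A rough C++ analogy of that protocol (assumed shapes, not V8's actual calling convention):

    #include <cstdio>
    #include <vector>

    typedef long Object;
    typedef Object (*MissHandler)(const Object* args, int count);

    Object LightweightMiss(MissHandler miss, const std::vector<Object>& params) {
      std::vector<Object> frame(params.begin(), params.end());  // "push" each param
      const Object* base = frame.empty() ? NULL : &frame[0];
      return miss(base, static_cast<int>(frame.size()));  // result lands in "rax"
    }

    static Object DemoMissHandler(const Object* args, int count) {
      Object sum = 0;
      for (int i = 0; i < count; ++i) sum += args[i];
      return sum;
    }

    int main() {
      std::vector<Object> params;
      params.push_back(1);
      params.push_back(2);
      printf("%ld\n", LightweightMiss(DemoMissHandler, params));  // prints 3
      return 0;
    }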
255 | 287 |
256 void ToNumberStub::Generate(MacroAssembler* masm) { | 288 void ToNumberStub::Generate(MacroAssembler* masm) { |
257 // The ToNumber stub takes one argument in eax. | 289 // The ToNumber stub takes one argument in rax. |
258 Label check_heap_number, call_builtin; | 290 Label check_heap_number, call_builtin; |
259 __ SmiTest(rax); | 291 __ SmiTest(rax); |
260 __ j(not_zero, &check_heap_number, Label::kNear); | 292 __ j(not_zero, &check_heap_number, Label::kNear); |
261 __ Ret(); | 293 __ Ret(); |
262 | 294 |
263 __ bind(&check_heap_number); | 295 __ bind(&check_heap_number); |
264 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | 296 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
265 Heap::kHeapNumberMapRootIndex); | 297 Heap::kHeapNumberMapRootIndex); |
266 __ j(not_equal, &call_builtin, Label::kNear); | 298 __ j(not_equal, &call_builtin, Label::kNear); |
267 __ Ret(); | 299 __ Ret(); |
(...skipping 58 matching lines...) |
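Note: ToNumberStub can return immediately for smis because of the pointer-tagging scheme the SmiTest relies on: on x64 a smi keeps a 32-bit payload in the upper half of the word with tag bit 0 clear, while heap object pointers have bit 0 set. A standalone sketch of that encoding (assuming the usual 32-bit-payload layout):

    #include <cassert>
    #include <stdint.h>

    const uint64_t kSmiTagMask = 1;  // low bit: 0 for smis, 1 for heap objects

    inline bool IsSmi(uint64_t value) { return (value & kSmiTagMask) == 0; }
    inline uint64_t SmiFromInt(int32_t i) {
      return static_cast<uint64_t>(static_cast<uint32_t>(i)) << 32;
    }

    int main() {
      assert(IsSmi(SmiFromInt(-7)));   // smi: already a number, just return
      assert(!IsSmi(0x12345679));      // tagged heap pointer: check its map
      return 0;
    }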
326 __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); | 358 __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); |
327 __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx); | 359 __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx); |
328 | 360 |
329 // Return and remove the on-stack parameter. | 361 // Return and remove the on-stack parameter. |
330 __ ret(1 * kPointerSize); | 362 __ ret(1 * kPointerSize); |
331 | 363 |
332 __ bind(&check_optimized); | 364 __ bind(&check_optimized); |
333 | 365 |
334 __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1); | 366 __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1); |
335 | 367 |
336 // rcx holds native context, ebx points to fixed array of 3-element entries | 368 // rcx holds native context, rbx points to fixed array of 3-element entries |
337 // (native context, optimized code, literals). | 369 // (native context, optimized code, literals). |
338 // The optimized code map must never be empty, so check the first elements. | 370 // The optimized code map must never be empty, so check the first elements. |
339 Label install_optimized; | 371 Label install_optimized; |
340 // Speculatively move code object into edx. | 372 // Speculatively move code object into rdx. |
341 __ movq(rdx, FieldOperand(rbx, SharedFunctionInfo::kFirstCodeSlot)); | 373 __ movq(rdx, FieldOperand(rbx, SharedFunctionInfo::kFirstCodeSlot)); |
342 __ cmpq(rcx, FieldOperand(rbx, SharedFunctionInfo::kFirstContextSlot)); | 374 __ cmpq(rcx, FieldOperand(rbx, SharedFunctionInfo::kFirstContextSlot)); |
343 __ j(equal, &install_optimized); | 375 __ j(equal, &install_optimized); |
344 | 376 |
345 // Iterate through the rest of map backwards. rdx holds an index. | 377 // Iterate through the rest of map backwards. rdx holds an index. |
346 Label loop; | 378 Label loop; |
(...skipping 98 matching lines...) |
445 | 477 |
446 // Need to collect. Call into runtime system. | 478 // Need to collect. Call into runtime system. |
447 __ bind(&gc); | 479 __ bind(&gc); |
448 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); | 480 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); |
449 } | 481 } |
450 | 482 |
451 | 483 |
452 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { | 484 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { |
453 // Stack layout on entry: | 485 // Stack layout on entry: |
454 // | 486 // |
455 // [rsp + (1 * kPointerSize)]: function | 487 // [rsp + (1 * kPointerSize)] : function |
456 // [rsp + (2 * kPointerSize)]: serialized scope info | 488 // [rsp + (2 * kPointerSize)] : serialized scope info |
457 | 489 |
458 // Try to allocate the context in new space. | 490 // Try to allocate the context in new space. |
459 Label gc; | 491 Label gc; |
460 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 492 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
461 __ Allocate(FixedArray::SizeFor(length), | 493 __ Allocate(FixedArray::SizeFor(length), |
462 rax, rbx, rcx, &gc, TAG_OBJECT); | 494 rax, rbx, rcx, &gc, TAG_OBJECT); |
463 | 495 |
464 // Get the function from the stack. | 496 // Get the function from the stack. |
465 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 497 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
466 | 498 |
(...skipping 169 matching lines...) |
636 // As the then-branch, but move double-value to result before shifting. | 668 // As the then-branch, but move double-value to result before shifting. |
637 __ xorl(result, double_value); | 669 __ xorl(result, double_value); |
638 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1)); | 670 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1)); |
639 __ shll_cl(result); | 671 __ shll_cl(result); |
640 } | 672 } |
641 | 673 |
642 __ bind(&done); | 674 __ bind(&done); |
643 } | 675 } |
644 | 676 |
645 | 677 |
646 void UnaryOpStub::Generate(MacroAssembler* masm) { | |
647 switch (operand_type_) { | |
648 case UnaryOpIC::UNINITIALIZED: | |
649 GenerateTypeTransition(masm); | |
650 break; | |
651 case UnaryOpIC::SMI: | |
652 GenerateSmiStub(masm); | |
653 break; | |
654 case UnaryOpIC::NUMBER: | |
655 GenerateNumberStub(masm); | |
656 break; | |
657 case UnaryOpIC::GENERIC: | |
658 GenerateGenericStub(masm); | |
659 break; | |
660 } | |
661 } | |
662 | |
663 | |
664 void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { | |
665 __ pop(rcx); // Save return address. | |
666 | |
667 __ push(rax); // the operand | |
668 __ Push(Smi::FromInt(op_)); | |
669 __ Push(Smi::FromInt(mode_)); | |
670 __ Push(Smi::FromInt(operand_type_)); | |
671 | |
672 __ push(rcx); // Push return address. | |
673 | |
674 // Patch the caller to an appropriate specialized stub and return the | |
675 // operation result to the caller of the stub. | |
676 __ TailCallExternalReference( | |
677 ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1); | |
678 } | |
679 | |
680 | |
681 // TODO(svenpanne): Use virtual functions instead of switch. | |
682 void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { | |
683 switch (op_) { | |
684 case Token::SUB: | |
685 GenerateSmiStubSub(masm); | |
686 break; | |
687 case Token::BIT_NOT: | |
688 GenerateSmiStubBitNot(masm); | |
689 break; | |
690 default: | |
691 UNREACHABLE(); | |
692 } | |
693 } | |
694 | |
695 | |
696 void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { | |
697 Label slow; | |
698 GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear); | |
699 __ bind(&slow); | |
700 GenerateTypeTransition(masm); | |
701 } | |
702 | |
703 | |
704 void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { | |
705 Label non_smi; | |
706 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); | |
707 __ bind(&non_smi); | |
708 GenerateTypeTransition(masm); | |
709 } | |
710 | |
711 | |
712 void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, | |
713 Label* non_smi, | |
714 Label* slow, | |
715 Label::Distance non_smi_near, | |
716 Label::Distance slow_near) { | |
717 Label done; | |
718 __ JumpIfNotSmi(rax, non_smi, non_smi_near); | |
719 __ SmiNeg(rax, rax, &done, Label::kNear); | |
720 __ jmp(slow, slow_near); | |
721 __ bind(&done); | |
722 __ ret(0); | |
723 } | |
724 | |
725 | |
726 void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm, | |
727 Label* non_smi, | |
728 Label::Distance non_smi_near) { | |
729 __ JumpIfNotSmi(rax, non_smi, non_smi_near); | |
730 __ SmiNot(rax, rax); | |
731 __ ret(0); | |
732 } | |
733 | |
734 | |
735 // TODO(svenpanne): Use virtual functions instead of switch. | |
736 void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) { | |
737 switch (op_) { | |
738 case Token::SUB: | |
739 GenerateNumberStubSub(masm); | |
740 break; | |
741 case Token::BIT_NOT: | |
742 GenerateNumberStubBitNot(masm); | |
743 break; | |
744 default: | |
745 UNREACHABLE(); | |
746 } | |
747 } | |
748 | |
749 | |
750 void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) { | |
751 Label non_smi, slow, call_builtin; | |
752 GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear); | |
753 __ bind(&non_smi); | |
754 GenerateHeapNumberCodeSub(masm, &slow); | |
755 __ bind(&slow); | |
756 GenerateTypeTransition(masm); | |
757 __ bind(&call_builtin); | |
758 GenerateGenericCodeFallback(masm); | |
759 } | |
760 | |
761 | |
762 void UnaryOpStub::GenerateNumberStubBitNot( | |
763 MacroAssembler* masm) { | |
764 Label non_smi, slow; | |
765 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); | |
766 __ bind(&non_smi); | |
767 GenerateHeapNumberCodeBitNot(masm, &slow); | |
768 __ bind(&slow); | |
769 GenerateTypeTransition(masm); | |
770 } | |
771 | |
772 | |
773 void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, | |
774 Label* slow) { | |
775 // Check if the operand is a heap number. | |
776 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | |
777 Heap::kHeapNumberMapRootIndex); | |
778 __ j(not_equal, slow); | |
779 | |
780 // Operand is a float, negate its value by flipping the sign bit. | |
781 if (mode_ == UNARY_OVERWRITE) { | |
782 __ Set(kScratchRegister, 0x01); | |
783 __ shl(kScratchRegister, Immediate(63)); | |
784 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); | |
785 } else { | |
786 // Allocate a heap number before calculating the answer, | |
787 // so we don't have an untagged double around during GC. | |
788 Label slow_allocate_heapnumber, heapnumber_allocated; | |
789 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); | |
790 __ jmp(&heapnumber_allocated); | |
791 | |
792 __ bind(&slow_allocate_heapnumber); | |
793 { | |
794 FrameScope scope(masm, StackFrame::INTERNAL); | |
795 __ push(rax); | |
796 __ CallRuntime(Runtime::kNumberAlloc, 0); | |
797 __ movq(rcx, rax); | |
798 __ pop(rax); | |
799 } | |
800 __ bind(&heapnumber_allocated); | |
801 // rcx: allocated 'empty' number | |
802 | |
803 // Copy the double value to the new heap number, flipping the sign. | |
804 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); | |
805 __ Set(kScratchRegister, 0x01); | |
806 __ shl(kScratchRegister, Immediate(63)); | |
807 __ xor_(rdx, kScratchRegister); // Flip sign. | |
808 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); | |
809 __ movq(rax, rcx); | |
810 } | |
811 __ ret(0); | |
812 } | |
813 | |
814 | |
815 void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm, | |
816 Label* slow) { | |
817 // Check if the operand is a heap number. | |
818 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | |
819 Heap::kHeapNumberMapRootIndex); | |
820 __ j(not_equal, slow); | |
821 | |
822 // Convert the heap number in rax to an untagged integer in rcx. | |
823 IntegerConvert(masm, rax, rax); | |
824 | |
825 // Do the bitwise operation and smi tag the result. | |
826 __ notl(rax); | |
827 __ Integer32ToSmi(rax, rax); | |
828 __ ret(0); | |
829 } | |
830 | |
831 | |
832 // TODO(svenpanne): Use virtual functions instead of switch. | |
833 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { | |
834 switch (op_) { | |
835 case Token::SUB: | |
836 GenerateGenericStubSub(masm); | |
837 break; | |
838 case Token::BIT_NOT: | |
839 GenerateGenericStubBitNot(masm); | |
840 break; | |
841 default: | |
842 UNREACHABLE(); | |
843 } | |
844 } | |
845 | |
846 | |
847 void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { | |
848 Label non_smi, slow; | |
849 GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear); | |
850 __ bind(&non_smi); | |
851 GenerateHeapNumberCodeSub(masm, &slow); | |
852 __ bind(&slow); | |
853 GenerateGenericCodeFallback(masm); | |
854 } | |
855 | |
856 | |
857 void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { | |
858 Label non_smi, slow; | |
859 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); | |
860 __ bind(&non_smi); | |
861 GenerateHeapNumberCodeBitNot(masm, &slow); | |
862 __ bind(&slow); | |
863 GenerateGenericCodeFallback(masm); | |
864 } | |
865 | |
866 | |
867 void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) { | |
868 // Handle the slow case by jumping to the JavaScript builtin. | |
869 __ pop(rcx); // pop return address | |
870 __ push(rax); | |
871 __ push(rcx); // push return address | |
872 switch (op_) { | |
873 case Token::SUB: | |
874 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); | |
875 break; | |
876 case Token::BIT_NOT: | |
877 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); | |
878 break; | |
879 default: | |
880 UNREACHABLE(); | |
881 } | |
882 } | |
883 | |
884 | |
885 void UnaryOpStub::PrintName(StringStream* stream) { | |
886 const char* op_name = Token::Name(op_); | |
887 const char* overwrite_name = NULL; // Make g++ happy. | |
888 switch (mode_) { | |
889 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; | |
890 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; | |
891 } | |
892 stream->Add("UnaryOpStub_%s_%s_%s", | |
893 op_name, | |
894 overwrite_name, | |
895 UnaryOpIC::GetName(operand_type_)); | |
896 } | |
897 | |
898 | |
899 void BinaryOpStub::Initialize() {} | 678 void BinaryOpStub::Initialize() {} |
900 | 679 |
901 | 680 |
902 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { | 681 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
903 __ pop(rcx); // Save return address. | 682 __ pop(rcx); // Save return address. |
904 __ push(rdx); | 683 __ push(rdx); |
905 __ push(rax); | 684 __ push(rax); |
906 // Left and right arguments are now on top. | 685 // Left and right arguments are now on top. |
907 __ Push(Smi::FromInt(MinorKey())); | 686 __ Push(Smi::FromInt(MinorKey())); |
908 | 687 |
(...skipping 508 matching lines...) |
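Note: GenerateTypeTransition is the generic self-patching path: the stub pushes its two operands plus its own MinorKey and (in the elided lines) tail-calls a runtime utility that installs a more specialized BinaryOpStub at the call site before redoing the operation. A toy, dependency-free illustration of that patch-then-retry idea, not V8's API:

    #include <cstdio>

    typedef long (*BinOp)(long a, long b);

    static long AddSlow(long a, long b);               // generic, self-replacing
    static long AddSmi(long a, long b) { return a + b; }

    static BinOp add_stub = AddSlow;                   // the patchable "call site"

    static long AddSlow(long a, long b) {
      add_stub = AddSmi;    // "type transition": install the specialized stub
      return AddSmi(a, b);  // and complete the current operation
    }

    int main() {
      printf("%ld\n", add_stub(1, 2));  // slow path; patches itself
      printf("%ld\n", add_stub(3, 4));  // now hits the specialized stub
      return 0;
    }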
1417 | 1196 |
1418 static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm, | 1197 static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm, |
1419 Label* alloc_failure, | 1198 Label* alloc_failure, |
1420 OverwriteMode mode) { | 1199 OverwriteMode mode) { |
1421 Label skip_allocation; | 1200 Label skip_allocation; |
1422 switch (mode) { | 1201 switch (mode) { |
1423 case OVERWRITE_LEFT: { | 1202 case OVERWRITE_LEFT: { |
1424 // If the argument in rdx is already an object, we skip the | 1203 // If the argument in rdx is already an object, we skip the |
1425 // allocation of a heap number. | 1204 // allocation of a heap number. |
1426 __ JumpIfNotSmi(rdx, &skip_allocation); | 1205 __ JumpIfNotSmi(rdx, &skip_allocation); |
1427 // Allocate a heap number for the result. Keep eax and edx intact | 1206 // Allocate a heap number for the result. Keep rax and rdx intact |
1428 // for the possible runtime call. | 1207 // for the possible runtime call. |
1429 __ AllocateHeapNumber(rbx, rcx, alloc_failure); | 1208 __ AllocateHeapNumber(rbx, rcx, alloc_failure); |
1430 // Now rdx can be overwritten losing one of the arguments as we are | 1209 // Now rdx can be overwritten losing one of the arguments as we are |
1431 // now done and will not need it any more. | 1210 // now done and will not need it any more. |
1432 __ movq(rdx, rbx); | 1211 __ movq(rdx, rbx); |
1433 __ bind(&skip_allocation); | 1212 __ bind(&skip_allocation); |
1434 // Use object in rdx as a result holder | 1213 // Use object in rdx as a result holder |
1435 __ movq(rax, rdx); | 1214 __ movq(rax, rdx); |
1436 break; | 1215 break; |
1437 } | 1216 } |
(...skipping 18 matching lines...) |
1456 | 1235 |
1457 void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { | 1236 void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { |
1458 __ push(rdx); | 1237 __ push(rdx); |
1459 __ push(rax); | 1238 __ push(rax); |
1460 } | 1239 } |
1461 | 1240 |
1462 | 1241 |
1463 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { | 1242 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { |
1464 // TAGGED case: | 1243 // TAGGED case: |
1465 // Input: | 1244 // Input: |
1466 // rsp[8]: argument (should be number). | 1245 // rsp[8] : argument (should be number). |
1467 // rsp[0]: return address. | 1246 // rsp[0] : return address. |
1468 // Output: | 1247 // Output: |
1469 // rax: tagged double result. | 1248 // rax: tagged double result. |
1470 // UNTAGGED case: | 1249 // UNTAGGED case: |
1471 // Input:: | 1250 // Input: |
1472 // rsp[0]: return address. | 1251 // rsp[0] : return address. |
1473 // xmm1: untagged double input argument | 1252 // xmm1 : untagged double input argument |
1474 // Output: | 1253 // Output: |
1475 // xmm1: untagged double result. | 1254 // xmm1 : untagged double result. |
1476 | 1255 |
1477 Label runtime_call; | 1256 Label runtime_call; |
1478 Label runtime_call_clear_stack; | 1257 Label runtime_call_clear_stack; |
1479 Label skip_cache; | 1258 Label skip_cache; |
1480 const bool tagged = (argument_type_ == TAGGED); | 1259 const bool tagged = (argument_type_ == TAGGED); |
1481 if (tagged) { | 1260 if (tagged) { |
1482 Label input_not_smi, loaded; | 1261 Label input_not_smi, loaded; |
1483 // Test that rax is a number. | 1262 // Test that rax is a number. |
1484 __ movq(rax, Operand(rsp, kPointerSize)); | 1263 __ movq(rax, Operand(rsp, kPointerSize)); |
1485 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); | 1264 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); |
(...skipping 65 matching lines...) |
1551 #ifdef DEBUG | 1330 #ifdef DEBUG |
1552 // Check that the layout of cache elements match expectations. | 1331 // Check that the layout of cache elements matches expectations. |
1553 { // NOLINT - doesn't like a single brace on a line. | 1332 { // NOLINT - doesn't like a single brace on a line. |
1554 TranscendentalCache::SubCache::Element test_elem[2]; | 1333 TranscendentalCache::SubCache::Element test_elem[2]; |
1555 char* elem_start = reinterpret_cast<char*>(&test_elem[0]); | 1334 char* elem_start = reinterpret_cast<char*>(&test_elem[0]); |
1556 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]); | 1335 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]); |
1557 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0])); | 1336 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0])); |
1558 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1])); | 1337 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1])); |
1559 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output)); | 1338 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output)); |
1560 // Two uint_32's and a pointer per element. | 1339 // Two uint32_t's and a pointer per element. |
1561 CHECK_EQ(16, static_cast<int>(elem2_start - elem_start)); | 1340 CHECK_EQ(2 * kIntSize + 1 * kPointerSize, |
| 1341 static_cast<int>(elem2_start - elem_start)); |
1562 CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start)); | 1342 CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start)); |
1563 CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start)); | 1343 CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start)); |
1564 CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start)); | 1344 CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start)); |
1565 } | 1345 } |
1566 #endif | 1346 #endif |
1567 // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16]. | 1347 // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16]. |
1568 __ addl(rcx, rcx); | 1348 __ addl(rcx, rcx); |
1569 __ lea(rcx, Operand(rax, rcx, times_8, 0)); | 1349 __ lea(rcx, Operand(rax, rcx, times_8, 0)); |
1570 // Check if cache matches: Double value is stored in uint32_t[2] array. | 1350 // Check if cache matches: Double value is stored in uint32_t[2] array. |
1571 Label cache_miss; | 1351 Label cache_miss; |
(...skipping 633 matching lines...) |
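Note: the rewritten CHECK_EQ spells out where the old magic 16 came from: each cache element holds the two uint32_t halves of the input double followed by an output pointer, so on x64 the element size is 2 * kIntSize + 1 * kPointerSize = 2 * 4 + 8 = 16 bytes, and the index computation above (addl doubles rcx, lea then scales by 8) is exactly &rax[rcx * 16]. A standalone check of the same layout arithmetic (assumes an LP64 target):

    #include <cassert>
    #include <stdint.h>

    struct Element {
      uint32_t in[2];  // the two halves of the input double
      void* output;    // pointer to the cached result object
    };

    int main() {
      // Mirrors the DEBUG checks: 2 * kIntSize + 1 * kPointerSize.
      assert(sizeof(Element) == 2 * sizeof(uint32_t) + sizeof(void*));
      assert(sizeof(Element) == 16);
      // Index math from the stub: (rcx + rcx) * 8 == rcx * 16.
      for (int rcx = 0; rcx < 4; ++rcx) assert((rcx + rcx) * 8 == rcx * 16);
      return 0;
    }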
2205 __ cvtlsi2sd(double_exponent, exponent); | 1985 __ cvtlsi2sd(double_exponent, exponent); |
2206 | 1986 |
2207 // Returning or bailing out. | 1987 // Returning or bailing out. |
2208 Counters* counters = masm->isolate()->counters(); | 1988 Counters* counters = masm->isolate()->counters(); |
2209 if (exponent_type_ == ON_STACK) { | 1989 if (exponent_type_ == ON_STACK) { |
2210 // The arguments are still on the stack. | 1990 // The arguments are still on the stack. |
2211 __ bind(&call_runtime); | 1991 __ bind(&call_runtime); |
2212 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); | 1992 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); |
2213 | 1993 |
2214 // The stub is called from non-optimized code, which expects the result | 1994 // The stub is called from non-optimized code, which expects the result |
2215 // as heap number in eax. | 1995 // as heap number in rax. |
2216 __ bind(&done); | 1996 __ bind(&done); |
2217 __ AllocateHeapNumber(rax, rcx, &call_runtime); | 1997 __ AllocateHeapNumber(rax, rcx, &call_runtime); |
2218 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result); | 1998 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result); |
2219 __ IncrementCounter(counters->math_pow(), 1); | 1999 __ IncrementCounter(counters->math_pow(), 1); |
2220 __ ret(2 * kPointerSize); | 2000 __ ret(2 * kPointerSize); |
2221 } else { | 2001 } else { |
2222 __ bind(&call_runtime); | 2002 __ bind(&call_runtime); |
2223 // Move base to the correct argument register. Exponent is already in xmm1. | 2003 // Move base to the correct argument register. Exponent is already in xmm1. |
2224 __ movsd(xmm0, double_base); | 2004 __ movsd(xmm0, double_base); |
2225 ASSERT(double_exponent.is(xmm1)); | 2005 ASSERT(double_exponent.is(xmm1)); |
(...skipping 190 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2416 __ bind(&slow); | 2196 __ bind(&slow); |
2417 __ pop(rbx); // Return address. | 2197 __ pop(rbx); // Return address. |
2418 __ push(rdx); | 2198 __ push(rdx); |
2419 __ push(rbx); | 2199 __ push(rbx); |
2420 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); | 2200 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); |
2421 } | 2201 } |
2422 | 2202 |
2423 | 2203 |
2424 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { | 2204 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { |
2425 // Stack layout: | 2205 // Stack layout: |
2426 // rsp[0] : return address | 2206 // rsp[0] : return address |
2427 // rsp[8] : number of parameters (tagged) | 2207 // rsp[8] : number of parameters (tagged) |
2428 // rsp[16] : receiver displacement | 2208 // rsp[16] : receiver displacement |
2429 // rsp[24] : function | 2209 // rsp[24] : function |
2430 // Registers used over the whole function: | 2210 // Registers used over the whole function: |
2431 // rbx: the mapped parameter count (untagged) | 2211 // rbx: the mapped parameter count (untagged) |
2432 // rax: the allocated object (tagged). | 2212 // rax: the allocated object (tagged). |
2433 | 2213 |
2434 Factory* factory = masm->isolate()->factory(); | 2214 Factory* factory = masm->isolate()->factory(); |
2435 | 2215 |
2436 __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize)); | 2216 __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize)); |
2437 // rbx = parameter count (untagged) | 2217 // rbx = parameter count (untagged) |
(...skipping 194 matching lines...) |
2632 // Do the runtime call to allocate the arguments object. | 2412 // Do the runtime call to allocate the arguments object. |
2633 // rcx = argument count (untagged) | 2413 // rcx = argument count (untagged) |
2634 __ bind(&runtime); | 2414 __ bind(&runtime); |
2635 __ Integer32ToSmi(rcx, rcx); | 2415 __ Integer32ToSmi(rcx, rcx); |
2636 __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count. | 2416 __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count. |
2637 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); | 2417 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); |
2638 } | 2418 } |
2639 | 2419 |
2640 | 2420 |
2641 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { | 2421 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { |
2642 // esp[0] : return address | 2422 // rsp[0] : return address |
2643 // esp[8] : number of parameters | 2423 // rsp[8] : number of parameters |
2644 // esp[16] : receiver displacement | 2424 // rsp[16] : receiver displacement |
2645 // esp[24] : function | 2425 // rsp[24] : function |
2646 | 2426 |
2647 // Check if the calling frame is an arguments adaptor frame. | 2427 // Check if the calling frame is an arguments adaptor frame. |
2648 Label runtime; | 2428 Label runtime; |
2649 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 2429 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
2650 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 2430 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
2651 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 2431 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
2652 __ j(not_equal, &runtime); | 2432 __ j(not_equal, &runtime); |
2653 | 2433 |
2654 // Patch the arguments.length and the parameters pointer. | 2434 // Patch the arguments.length and the parameters pointer. |
2655 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2435 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
2656 __ movq(Operand(rsp, 1 * kPointerSize), rcx); | 2436 __ movq(Operand(rsp, 1 * kPointerSize), rcx); |
2657 __ SmiToInteger64(rcx, rcx); | 2437 __ SmiToInteger64(rcx, rcx); |
2658 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, | 2438 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, |
2659 StandardFrameConstants::kCallerSPOffset)); | 2439 StandardFrameConstants::kCallerSPOffset)); |
2660 __ movq(Operand(rsp, 2 * kPointerSize), rdx); | 2440 __ movq(Operand(rsp, 2 * kPointerSize), rdx); |
2661 | 2441 |
2662 __ bind(&runtime); | 2442 __ bind(&runtime); |
2663 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); | 2443 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); |
2664 } | 2444 } |
2665 | 2445 |
2666 | 2446 |
2667 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | 2447 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { |
2668 // rsp[0] : return address | 2448 // rsp[0] : return address |
2669 // rsp[8] : number of parameters | 2449 // rsp[8] : number of parameters |
2670 // rsp[16] : receiver displacement | 2450 // rsp[16] : receiver displacement |
2671 // rsp[24] : function | 2451 // rsp[24] : function |
2672 | 2452 |
2673 // Check if the calling frame is an arguments adaptor frame. | 2453 // Check if the calling frame is an arguments adaptor frame. |
2674 Label adaptor_frame, try_allocate, runtime; | 2454 Label adaptor_frame, try_allocate, runtime; |
2675 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 2455 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
2676 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 2456 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
2677 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 2457 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
2678 __ j(equal, &adaptor_frame); | 2458 __ j(equal, &adaptor_frame); |
2679 | 2459 |
(...skipping 86 matching lines...) |
2766 | 2546 |
2767 void RegExpExecStub::Generate(MacroAssembler* masm) { | 2547 void RegExpExecStub::Generate(MacroAssembler* masm) { |
2768 // Just jump directly to runtime if native RegExp is not selected at compile | 2548 // Just jump directly to runtime if native RegExp is not selected at compile |
2769 // time or if regexp entry in generated code is turned off runtime switch or | 2549 // time or if the regexp entry in generated code is turned off by a runtime |
2770 // at compilation. | 2550 // switch or at compilation. |
2771 #ifdef V8_INTERPRETED_REGEXP | 2551 #ifdef V8_INTERPRETED_REGEXP |
2772 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 2552 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); |
2773 #else // V8_INTERPRETED_REGEXP | 2553 #else // V8_INTERPRETED_REGEXP |
2774 | 2554 |
2775 // Stack frame on entry. | 2555 // Stack frame on entry. |
2776 // rsp[0]: return address | 2556 // rsp[0] : return address |
2777 // rsp[8]: last_match_info (expected JSArray) | 2557 // rsp[8] : last_match_info (expected JSArray) |
2778 // rsp[16]: previous index | 2558 // rsp[16] : previous index |
2779 // rsp[24]: subject string | 2559 // rsp[24] : subject string |
2780 // rsp[32]: JSRegExp object | 2560 // rsp[32] : JSRegExp object |
2781 | 2561 |
2782 static const int kLastMatchInfoOffset = 1 * kPointerSize; | 2562 static const int kLastMatchInfoOffset = 1 * kPointerSize; |
2783 static const int kPreviousIndexOffset = 2 * kPointerSize; | 2563 static const int kPreviousIndexOffset = 2 * kPointerSize; |
2784 static const int kSubjectOffset = 3 * kPointerSize; | 2564 static const int kSubjectOffset = 3 * kPointerSize; |
2785 static const int kJSRegExpOffset = 4 * kPointerSize; | 2565 static const int kJSRegExpOffset = 4 * kPointerSize; |
2786 | 2566 |
2787 Label runtime; | 2567 Label runtime; |
2788 // Ensure that a RegExp stack is allocated. | 2568 // Ensure that a RegExp stack is allocated. |
2789 Isolate* isolate = masm->isolate(); | 2569 Isolate* isolate = masm->isolate(); |
2790 ExternalReference address_of_regexp_stack_memory_address = | 2570 ExternalReference address_of_regexp_stack_memory_address = |
(...skipping 937 matching lines...) |
3728 // Load the cache state into rcx. | 3508 // Load the cache state into rcx. |
3729 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); | 3509 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); |
3730 | 3510 |
3731 // A monomorphic cache hit or an already megamorphic state: invoke the | 3511 // A monomorphic cache hit or an already megamorphic state: invoke the |
3732 // function without changing the state. | 3512 // function without changing the state. |
3733 __ cmpq(rcx, rdi); | 3513 __ cmpq(rcx, rdi); |
3734 __ j(equal, &done); | 3514 __ j(equal, &done); |
3735 __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate)); | 3515 __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate)); |
3736 __ j(equal, &done); | 3516 __ j(equal, &done); |
3737 | 3517 |
3738 // Special handling of the Array() function, which caches not only the | 3518 // If we came here, we need to see if we are the array function. |
3739 // monomorphic Array function but the initial ElementsKind with special | 3519 // If we didn't have a matching function, and we didn't find the megamorphic |
3740 // sentinels | 3520 // sentinel, then we have in the cell either some other function or an |
3741 Handle<Object> terminal_kind_sentinel = | 3521 // AllocationSite. Do a map check on the object in rcx. |
3742 TypeFeedbackCells::MonomorphicArraySentinel(isolate, | 3522 Handle<Map> allocation_site_map( |
3743 LAST_FAST_ELEMENTS_KIND); | 3523 masm->isolate()->heap()->allocation_site_map(), |
3744 __ JumpIfNotSmi(rcx, &miss); | 3524 masm->isolate()); |
3745 __ Cmp(rcx, terminal_kind_sentinel); | 3525 __ Cmp(FieldOperand(rcx, 0), allocation_site_map); |
3746 __ j(above, &miss); | 3526 __ j(not_equal, &miss); |
| 3527 |
3747 // Make sure the function is the Array() function | 3528 // Make sure the function is the Array() function |
3748 __ LoadArrayFunction(rcx); | 3529 __ LoadArrayFunction(rcx); |
3749 __ cmpq(rdi, rcx); | 3530 __ cmpq(rdi, rcx); |
3750 __ j(not_equal, &megamorphic); | 3531 __ j(not_equal, &megamorphic); |
3751 __ jmp(&done); | 3532 __ jmp(&done); |
3752 | 3533 |
3753 __ bind(&miss); | 3534 __ bind(&miss); |
3754 | 3535 |
3755 // A monomorphic miss (i.e, here the cache is not uninitialized) goes | 3536 // A monomorphic miss (i.e., here the cache is not uninitialized) goes |
3756 // megamorphic. | 3537 // megamorphic. |
3757 __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate)); | 3538 __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate)); |
3758 __ j(equal, &initialize); | 3539 __ j(equal, &initialize); |
3759 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 3540 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
3760 // write-barrier is needed. | 3541 // write-barrier is needed. |
3761 __ bind(&megamorphic); | 3542 __ bind(&megamorphic); |
3762 __ Move(FieldOperand(rbx, Cell::kValueOffset), | 3543 __ Move(FieldOperand(rbx, Cell::kValueOffset), |
3763 TypeFeedbackCells::MegamorphicSentinel(isolate)); | 3544 TypeFeedbackCells::MegamorphicSentinel(isolate)); |
3764 __ jmp(&done, Label::kNear); | 3545 __ jmp(&done); |
3765 | 3546 |
3766 // An uninitialized cache is patched with the function or sentinel to | 3547 // An uninitialized cache is patched with the function or sentinel to |
3767 // indicate the ElementsKind if function is the Array constructor. | 3548 // indicate the ElementsKind if function is the Array constructor. |
3768 __ bind(&initialize); | 3549 __ bind(&initialize); |
3769 // Make sure the function is the Array() function | 3550 // Make sure the function is the Array() function |
3770 __ LoadArrayFunction(rcx); | 3551 __ LoadArrayFunction(rcx); |
3771 __ cmpq(rdi, rcx); | 3552 __ cmpq(rdi, rcx); |
3772 __ j(not_equal, ¬_array_function); | 3553 __ j(not_equal, ¬_array_function); |
3773 | 3554 |
3774 // The target function is the Array constructor, install a sentinel value in | 3555 // The target function is the Array constructor. |
3775 // the constructor's type info cell that will track the initial ElementsKind | 3556 // Create an AllocationSite if we don't already have it; store it in the cell. |
3776 // that should be used for the array when its constructed. | 3557 { |
3777 Handle<Object> initial_kind_sentinel = | 3558 FrameScope scope(masm, StackFrame::INTERNAL); |
3778 TypeFeedbackCells::MonomorphicArraySentinel(isolate, | 3559 |
3779 GetInitialFastElementsKind()); | 3560 __ push(rax); |
3780 __ Move(FieldOperand(rbx, Cell::kValueOffset), | 3561 __ push(rdi); |
3781 initial_kind_sentinel); | 3562 __ push(rbx); |
| 3563 |
| 3564 CreateAllocationSiteStub create_stub; |
| 3565 __ CallStub(&create_stub); |
| 3566 |
| 3567 __ pop(rbx); |
| 3568 __ pop(rdi); |
| 3569 __ pop(rax); |
| 3570 } |
3782 __ jmp(&done); | 3571 __ jmp(&done); |
3783 | 3572 |
3784 __ bind(¬_array_function); | 3573 __ bind(¬_array_function); |
3785 __ movq(FieldOperand(rbx, Cell::kValueOffset), rdi); | 3574 __ movq(FieldOperand(rbx, Cell::kValueOffset), rdi); |
3786 // No need for a write barrier here - cells are rescanned. | 3575 // No need for a write barrier here - cells are rescanned. |
3787 | 3576 |
3788 __ bind(&done); | 3577 __ bind(&done); |
3789 } | 3578 } |
3790 | 3579 |
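Note: after this change the type-feedback cell holds one of four values instead of a smi sentinel per ElementsKind: the uninitialized sentinel, the megamorphic sentinel, a cached JSFunction, or, for the Array function, an AllocationSite, which the stub recognizes by comparing the object's map against allocation_site_map. A rough state-machine reading of the update logic above, with assumed names:

    #include <cstdio>

    enum CellState { UNINITIALIZED, MONOMORPHIC, ALLOCATION_SITE, MEGAMORPHIC };

    CellState UpdateCell(CellState state, bool same_function, bool is_array_ctor) {
      switch (state) {
        case MONOMORPHIC:
          return same_function ? MONOMORPHIC : MEGAMORPHIC;      // hit, or go generic
        case ALLOCATION_SITE:
          return is_array_ctor ? ALLOCATION_SITE : MEGAMORPHIC;  // the map check above
        case MEGAMORPHIC:
          return MEGAMORPHIC;                                    // terminal state
        case UNINITIALIZED:
          return is_array_ctor ? ALLOCATION_SITE                 // create the site
                               : MONOMORPHIC;                    // cache the function
      }
      return MEGAMORPHIC;
    }

    int main() {
      printf("%d\n", UpdateCell(UNINITIALIZED, false, true));  // 2: ALLOCATION_SITE
      return 0;
    }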
3791 | 3580 |
(...skipping 149 matching lines...) |
3941 } | 3730 } |
3942 | 3731 |
3943 | 3732 |
3944 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 3733 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
3945 CEntryStub::GenerateAheadOfTime(isolate); | 3734 CEntryStub::GenerateAheadOfTime(isolate); |
3946 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 3735 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
3947 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 3736 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
3948 // It is important that the store buffer overflow stubs are generated first. | 3737 // It is important that the store buffer overflow stubs are generated first. |
3949 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); | 3738 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); |
3950 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 3739 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
| 3740 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
3951 } | 3741 } |
3952 | 3742 |
3953 | 3743 |
3954 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 3744 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
3955 } | 3745 } |
3956 | 3746 |
3957 | 3747 |
3958 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { | 3748 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { |
3959 CEntryStub stub(1, kDontSaveFPRegs); | 3749 CEntryStub stub(1, kDontSaveFPRegs); |
3960 stub.GetCode(isolate)->set_is_pregenerated(true); | 3750 stub.GetCode(isolate)->set_is_pregenerated(true); |
(...skipping 161 matching lines...) |
4122 // rbx: pointer to C function (C callee-saved) | 3912 // rbx: pointer to C function (C callee-saved) |
4123 // rbp: frame pointer of calling JS frame (restored after C call) | 3913 // rbp: frame pointer of calling JS frame (restored after C call) |
4124 // rsp: stack pointer (restored after C call) | 3914 // rsp: stack pointer (restored after C call) |
4125 // rsi: current context (restored) | 3915 // rsi: current context (restored) |
4126 | 3916 |
4127 // NOTE: Invocations of builtins may return failure objects | 3917 // NOTE: Invocations of builtins may return failure objects |
4128 // instead of a proper result. The builtin entry handles | 3918 // instead of a proper result. The builtin entry handles |
4129 // this by performing a garbage collection and retrying the | 3919 // this by performing a garbage collection and retrying the |
4130 // builtin once. | 3920 // builtin once. |
4131 | 3921 |
| 3922 ProfileEntryHookStub::MaybeCallEntryHook(masm); |
| 3923 |
4132 // Enter the exit frame that transitions from JavaScript to C++. | 3924 // Enter the exit frame that transitions from JavaScript to C++. |
4133 #ifdef _WIN64 | 3925 #ifdef _WIN64 |
4134 int arg_stack_space = (result_size_ < 2 ? 2 : 4); | 3926 int arg_stack_space = (result_size_ < 2 ? 2 : 4); |
4135 #else | 3927 #else |
4136 int arg_stack_space = 0; | 3928 int arg_stack_space = 0; |
4137 #endif | 3929 #endif |
4138 __ EnterExitFrame(arg_stack_space, save_doubles_); | 3930 __ EnterExitFrame(arg_stack_space, save_doubles_); |
4139 | 3931 |
4140 // rax: Holds the context at this point, but should not be used. | 3932 // rax: Holds the context at this point, but should not be used. |
4141 // On entry to code generated by GenerateCore, it must hold | 3933 // On entry to code generated by GenerateCore, it must hold |
(...skipping 60 matching lines...) |
4202 | 3994 |
4203 __ bind(&throw_normal_exception); | 3995 __ bind(&throw_normal_exception); |
4204 __ Throw(rax); | 3996 __ Throw(rax); |
4205 } | 3997 } |
4206 | 3998 |
4207 | 3999 |
4208 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { | 4000 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { |
4209 Label invoke, handler_entry, exit; | 4001 Label invoke, handler_entry, exit; |
4210 Label not_outermost_js, not_outermost_js_2; | 4002 Label not_outermost_js, not_outermost_js_2; |
4211 | 4003 |
| 4004 ProfileEntryHookStub::MaybeCallEntryHook(masm); |
| 4005 |
4212 { // NOLINT. Scope block confuses linter. | 4006 { // NOLINT. Scope block confuses linter. |
4213 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm); | 4007 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm); |
4214 // Set up frame. | 4008 // Set up frame. |
4215 __ push(rbp); | 4009 __ push(rbp); |
4216 __ movq(rbp, rsp); | 4010 __ movq(rbp, rsp); |
4217 | 4011 |
4218 // Push the stack frame type marker twice. | 4012 // Push the stack frame type marker twice. |
4219 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; | 4013 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; |
4220 // Scratch register is neither callee-save, nor an argument register on any | 4014 // Scratch register is neither callee-save, nor an argument register on any |
4221 // platform. It's free to use at this point. | 4015 // platform. It's free to use at this point. |
(...skipping 146 matching lines...) |
4368 | 4162 |
4369 // Restore frame pointer and return. | 4163 // Restore frame pointer and return. |
4370 __ pop(rbp); | 4164 __ pop(rbp); |
4371 __ ret(0); | 4165 __ ret(0); |
4372 } | 4166 } |
4373 | 4167 |
4374 | 4168 |
4375 void InstanceofStub::Generate(MacroAssembler* masm) { | 4169 void InstanceofStub::Generate(MacroAssembler* masm) { |
4376 // Implements "value instanceof function" operator. | 4170 // Implements "value instanceof function" operator. |
4377 // Expected input state with no inline cache: | 4171 // Expected input state with no inline cache: |
4378 // rsp[0] : return address | 4172 // rsp[0] : return address |
4379 // rsp[1] : function pointer | 4173 // rsp[8] : function pointer |
4380 // rsp[2] : value | 4174 // rsp[16] : value |
4381 // Expected input state with an inline one-element cache: | 4175 // Expected input state with an inline one-element cache: |
4382 // rsp[0] : return address | 4176 // rsp[0] : return address |
4383 // rsp[1] : offset from return address to location of inline cache | 4177 // rsp[8] : offset from return address to location of inline cache |
4384 // rsp[2] : function pointer | 4178 // rsp[16] : function pointer |
4385 // rsp[3] : value | 4179 // rsp[24] : value |
4386 // Returns a bitwise zero to indicate that the value | 4180 // Returns a bitwise zero to indicate that the value |
4387 // is and instance of the function and anything else to | 4181 // is an instance of the function and anything else to |
4388 // indicate that the value is not an instance. | 4182 // indicate that the value is not an instance. |
4389 | 4183 |
4390 static const int kOffsetToMapCheckValue = 2; | 4184 static const int kOffsetToMapCheckValue = 2; |
4391 static const int kOffsetToResultValue = 18; | 4185 static const int kOffsetToResultValue = 18; |
4392 // The last 4 bytes of the instruction sequence | 4186 // The last 4 bytes of the instruction sequence |
4393 // movq(rdi, FieldOperand(rax, HeapObject::kMapOffset)) | 4187 // movq(rdi, FieldOperand(rax, HeapObject::kMapOffset)) |
4394 // Move(kScratchRegister, Factory::the_hole_value()) | 4188 // Move(kScratchRegister, Factory::the_hole_value()) |
4395 // in front of the hole value address. | 4189 // in front of the hole value address. |
(...skipping 701 matching lines...) |
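Note: the corrected comment above reads in byte offsets rather than slot indices: each stack slot on x64 is kPointerSize = 8 bytes, so slot i sits at rsp + i * 8. That gives rsp[0] for the return address, rsp[8] for the function pointer, and rsp[16] for the value; in the variant with an inline one-element cache, the cache offset occupies rsp[8] and the other two arguments move down one slot each.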
5097 __ j(zero, &done, Label::kNear); | 4891 __ j(zero, &done, Label::kNear); |
5098 | 4892 |
5099 // Make count the number of bytes to copy. | 4893 // Make count the number of bytes to copy. |
5100 if (!ascii) { | 4894 if (!ascii) { |
5101 STATIC_ASSERT(2 == sizeof(uc16)); | 4895 STATIC_ASSERT(2 == sizeof(uc16)); |
5102 __ addl(count, count); | 4896 __ addl(count, count); |
5103 } | 4897 } |
5104 | 4898 |
5105 // Don't enter the rep movs if there are less than 4 bytes to copy. | 4899 // Don't enter the rep movs if there are fewer than kPointerSize bytes to copy. |
5106 Label last_bytes; | 4900 Label last_bytes; |
5107 __ testl(count, Immediate(~7)); | 4901 __ testl(count, Immediate(~(kPointerSize - 1))); |
5108 __ j(zero, &last_bytes, Label::kNear); | 4902 __ j(zero, &last_bytes, Label::kNear); |
5109 | 4903 |
5110 // Copy from edi to esi using rep movs instruction. | 4904 // Copy whole quadwords using the rep movsq instruction. |
5111 __ movl(kScratchRegister, count); | 4905 __ movl(kScratchRegister, count); |
5112 __ shr(count, Immediate(3)); // Number of doublewords to copy. | 4906 __ shr(count, Immediate(kPointerSizeLog2)); // Number of quadwords to copy. |
5113 __ repmovsq(); | 4907 __ repmovsq(); |
5114 | 4908 |
5115 // Find number of bytes left. | 4909 // Find number of bytes left. |
5116 __ movl(count, kScratchRegister); | 4910 __ movl(count, kScratchRegister); |
5117 __ and_(count, Immediate(7)); | 4911 __ and_(count, Immediate(kPointerSize - 1)); |
5118 | 4912 |
5119 // Check if there are more bytes to copy. | 4913 // Check if there are more bytes to copy. |
5120 __ bind(&last_bytes); | 4914 __ bind(&last_bytes); |
5121 __ testl(count, count); | 4915 __ testl(count, count); |
5122 __ j(zero, &done, Label::kNear); | 4916 __ j(zero, &done, Label::kNear); |
5123 | 4917 |
5124 // Copy remaining characters. | 4918 // Copy remaining characters. |
5125 Label loop; | 4919 Label loop; |
5126 __ bind(&loop); | 4920 __ bind(&loop); |
5127 __ movb(kScratchRegister, Operand(src, 0)); | 4921 __ movb(kScratchRegister, Operand(src, 0)); |
(...skipping 185 matching lines...) |
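Note: the rewritten masks in the copy loop above say exactly what the old literals did, since kPointerSize = 8 and kPointerSizeLog2 = 3 on x64: testl(count, ~(kPointerSize - 1)) is testl(count, ~7) and checks whether at least one whole 8-byte word remains, shr(count, kPointerSizeLog2) turns the byte count into a quadword count for rep movsq, and and_(count, kPointerSize - 1) leaves the 0-7 tail bytes for the byte loop. For example, count = 19 copies 2 quadwords (16 bytes) with rep movsq and then 3 single bytes.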
5313 | 5107 |
5314 __ andl(hash, Immediate(String::kHashBitMask)); | 5108 __ andl(hash, Immediate(String::kHashBitMask)); |
5315 | 5109 |
5316 // if (hash == 0) hash = 27; | 5110 // if (hash == 0) hash = 27; |
5317 Label hash_not_zero; | 5111 Label hash_not_zero; |
5318 __ j(not_zero, &hash_not_zero); | 5112 __ j(not_zero, &hash_not_zero); |
5319 __ Set(hash, StringHasher::kZeroHash); | 5113 __ Set(hash, StringHasher::kZeroHash); |
5320 __ bind(&hash_not_zero); | 5114 __ bind(&hash_not_zero); |
5321 } | 5115 } |
5322 | 5116 |
| 5117 |
5323 void SubStringStub::Generate(MacroAssembler* masm) { | 5118 void SubStringStub::Generate(MacroAssembler* masm) { |
5324 Label runtime; | 5119 Label runtime; |
5325 | 5120 |
5326 // Stack frame on entry. | 5121 // Stack frame on entry. |
5327 // rsp[0]: return address | 5122 // rsp[0] : return address |
5328 // rsp[8]: to | 5123 // rsp[8] : to |
5329 // rsp[16]: from | 5124 // rsp[16] : from |
5330 // rsp[24]: string | 5125 // rsp[24] : string |
5331 | 5126 |
5332 const int kToOffset = 1 * kPointerSize; | 5127 const int kToOffset = 1 * kPointerSize; |
5333 const int kFromOffset = kToOffset + kPointerSize; | 5128 const int kFromOffset = kToOffset + kPointerSize; |
5334 const int kStringOffset = kFromOffset + kPointerSize; | 5129 const int kStringOffset = kFromOffset + kPointerSize; |
5335 const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset; | 5130 const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset; |
5336 | 5131 |
5337 // Make sure first argument is a string. | 5132 // Make sure first argument is a string. |
5338 __ movq(rax, Operand(rsp, kStringOffset)); | 5133 __ movq(rax, Operand(rsp, kStringOffset)); |
5339 STATIC_ASSERT(kSmiTag == 0); | 5134 STATIC_ASSERT(kSmiTag == 0); |
5340 __ testl(rax, Immediate(kSmiTagMask)); | 5135 __ testl(rax, Immediate(kSmiTagMask)); |
(...skipping 338 matching lines...) |
5679 __ j(not_equal, chars_not_equal, near_jump); | 5474 __ j(not_equal, chars_not_equal, near_jump); |
5680 __ incq(index); | 5475 __ incq(index); |
5681 __ j(not_zero, &loop); | 5476 __ j(not_zero, &loop); |
5682 } | 5477 } |
5683 | 5478 |
5684 | 5479 |
5685 void StringCompareStub::Generate(MacroAssembler* masm) { | 5480 void StringCompareStub::Generate(MacroAssembler* masm) { |
5686 Label runtime; | 5481 Label runtime; |
5687 | 5482 |
5688 // Stack frame on entry. | 5483 // Stack frame on entry. |
5689 // rsp[0]: return address | 5484 // rsp[0] : return address |
5690 // rsp[8]: right string | 5485 // rsp[8] : right string |
5691 // rsp[16]: left string | 5486 // rsp[16] : left string |
5692 | 5487 |
5693 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left | 5488 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left |
5694 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right | 5489 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right |
5695 | 5490 |
5696 // Check for identity. | 5491 // Check for identity. |
5697 Label not_same; | 5492 Label not_same; |
5698 __ cmpq(rdx, rax); | 5493 __ cmpq(rdx, rax); |
5699 __ j(not_equal, ¬_same, Label::kNear); | 5494 __ j(not_equal, ¬_same, Label::kNear); |
5700 __ Move(rax, Smi::FromInt(EQUAL)); | 5495 __ Move(rax, Smi::FromInt(EQUAL)); |
5701 Counters* counters = masm->isolate()->counters(); | 5496 Counters* counters = masm->isolate()->counters(); |
(...skipping 476 matching lines...)
6178 __ testq(r0, r0); | 5973 __ testq(r0, r0); |
6179 __ j(zero, miss); | 5974 __ j(zero, miss); |
6180 __ jmp(done); | 5975 __ jmp(done); |
6181 } | 5976 } |
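The helper's contract is visible in its last three instructions: r0 is zero on a miss and non-zero on a hit. A hedged C++ sketch with the same contract (the linear probe step here is illustrative only; V8's NameDictionary uses its own probing sequence and capacity handling):

#include <cstdint>
#include <cstring>

// Returns a non-zero value (entry index + 1) on a hit, 0 on a miss.
uint32_t Probe(const char* const* keys, uint32_t capacity_mask,
               uint32_t hash, const char* key) {
  for (uint32_t i = 0; i <= capacity_mask; ++i) {
    uint32_t entry = (hash + i) & capacity_mask;  // stay within capacity
    if (keys[entry] == nullptr) return 0;         // empty slot: miss
    if (std::strcmp(keys[entry], key) == 0) return entry + 1;  // hit
  }
  return 0;  // table exhausted
}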
6182 | 5977 |
6183 | 5978 |
6184 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { | 5979 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { |
6185 // This stub overrides SometimesSetsUpAFrame() to return false. That means | 5980 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
6186 // we cannot call anything that could cause a GC from this stub. | 5981 // we cannot call anything that could cause a GC from this stub. |
6187 // Stack frame on entry: | 5982 // Stack frame on entry: |
6188 // esp[0 * kPointerSize]: return address. | 5983 // rsp[0 * kPointerSize] : return address. |
6189 // esp[1 * kPointerSize]: key's hash. | 5984 // rsp[1 * kPointerSize] : key's hash. |
6190 // esp[2 * kPointerSize]: key. | 5985 // rsp[2 * kPointerSize] : key. |
6191 // Registers: | 5986 // Registers: |
6192 // dictionary_: NameDictionary to probe. | 5987 // dictionary_: NameDictionary to probe. |
6193 // result_: used as scratch. | 5988 // result_: used as scratch. |
6194 // index_: will hold an index of entry if lookup is successful. | 5989 // index_: will hold an index of entry if lookup is successful. |
6195 // might alias with result_. | 5990 // might alias with result_. |
6196 // Returns: | 5991 // Returns: |
6197 // result_ is zero if lookup failed, non-zero otherwise. | 5992 // result_ is zero if lookup failed, non-zero otherwise. |
6198 | 5993 |
6199 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; | 5994 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; |
6200 | 5995 |
(...skipping 360 matching lines...)
6561 __ pop(regs_.object()); | 6356 __ pop(regs_.object()); |
6562 | 6357 |
6563 __ bind(&need_incremental); | 6358 __ bind(&need_incremental); |
6564 | 6359 |
6565 // Fall through when we need to inform the incremental marker. | 6360 // Fall through when we need to inform the incremental marker. |
6566 } | 6361 } |
6567 | 6362 |
6568 | 6363 |
6569 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) { | 6364 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) { |
6570 // ----------- S t a t e ------------- | 6365 // ----------- S t a t e ------------- |
6571 // -- rax : element value to store | 6366 // -- rax : element value to store |
6572 // -- rcx : element index as smi | 6367 // -- rcx : element index as smi |
6573 // -- rsp[0] : return address | 6368 // -- rsp[0] : return address |
6574 // -- rsp[8] : array literal index in function | 6369 // -- rsp[8] : array literal index in function |
6575 // -- rsp[16]: array literal | 6370 // -- rsp[16] : array literal |
6576 // clobbers rbx, rdx, rdi | 6371 // clobbers rbx, rdx, rdi |
6577 // ----------------------------------- | 6372 // ----------------------------------- |
6578 | 6373 |
6579 Label element_done; | 6374 Label element_done; |
6580 Label double_elements; | 6375 Label double_elements; |
6581 Label smi_element; | 6376 Label smi_element; |
6582 Label slow_elements; | 6377 Label slow_elements; |
6583 Label fast_elements; | 6378 Label fast_elements; |
6584 | 6379 |
6585 // Get array literal index, array literal and its map. | 6380 // Get array literal index, array literal and its map. |
(...skipping 70 matching lines...)
6656 __ pop(rcx); | 6451 __ pop(rcx); |
6657 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE | 6452 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE |
6658 ? kPointerSize | 6453 ? kPointerSize |
6659 : 0; | 6454 : 0; |
6660 __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); | 6455 __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); |
6661 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. | 6456 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. |
6662 } | 6457 } |
6663 | 6458 |
6664 | 6459 |
6665 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 6460 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
6666 if (entry_hook_ != NULL) { | 6461 if (masm->isolate()->function_entry_hook() != NULL) { |
| 6462 // It's always safe to call the entry hook stub, as the hook itself |
| 6463 // is not allowed to call back to V8. |
| 6464 AllowStubCallsScope allow_stub_calls(masm, true); |
| 6465 |
6667 ProfileEntryHookStub stub; | 6466 ProfileEntryHookStub stub; |
6668 masm->CallStub(&stub); | 6467 masm->CallStub(&stub); |
6669 } | 6468 } |
6670 } | 6469 } |
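The new guard reads the hook from the isolate rather than a static field, and the added comment carries the key invariant: the hook may not call back into V8, which is why the stub call is safe from anywhere. Sketched as plain C++ (the hook signature mirrors v8's FunctionEntryHook; the wrapper name is illustrative):

#include <cstdint>

typedef void (*FunctionEntryHook)(uintptr_t function,
                                  uintptr_t return_addr_location);

void MaybeCallEntryHook(FunctionEntryHook hook, uintptr_t function,
                        uintptr_t return_addr_location) {
  if (hook != nullptr) hook(function, return_addr_location);  // guard + call
}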
6671 | 6470 |
6672 | 6471 |
6673 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { | 6472 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { |
6674 // Save volatile registers. | 6473 // This stub can be called from essentially anywhere, so it needs to save |
6675 // Live registers at this point are the same as at the start of any | 6474 // all volatile and callee-save registers. |
6676 // JS function: | 6475 const size_t kNumSavedRegisters = 2; |
6677 // o rdi: the JS function object being called (i.e. ourselves) | 6476 __ push(arg_reg_1); |
6678 // o rsi: our context | 6477 __ push(arg_reg_2); |
6679 // o rbp: our caller's frame pointer | |
6680 // o rsp: stack pointer (pointing to return address) | |
6681 // o rcx: rcx is zero for method calls and non-zero for function calls. | |
6682 #ifdef _WIN64 | |
6683 const int kNumSavedRegisters = 1; | |
6684 | |
6685 __ push(rcx); | |
6686 #else | |
6687 const int kNumSavedRegisters = 3; | |
6688 | |
6689 __ push(rcx); | |
6690 __ push(rdi); | |
6691 __ push(rsi); | |
6692 #endif | |
6693 | 6478 |
6694 // Calculate the original stack pointer and store it in the second arg. | 6479 // Calculate the original stack pointer and store it in the second arg. |
6695 #ifdef _WIN64 | 6480 __ lea(arg_reg_2, Operand(rsp, (kNumSavedRegisters + 1) * kPointerSize)); |
6696 __ lea(rdx, Operand(rsp, (kNumSavedRegisters + 1) * kPointerSize)); | |
6697 #else | |
6698 __ lea(rsi, Operand(rsp, (kNumSavedRegisters + 1) * kPointerSize)); | |
6699 #endif | |
6700 | 6481 |
6701 // Calculate the function address to the first arg. | 6482 // Calculate the function address to the first arg. |
6702 #ifdef _WIN64 | 6483 __ movq(arg_reg_1, Operand(rsp, kNumSavedRegisters * kPointerSize)); |
6703 __ movq(rcx, Operand(rsp, kNumSavedRegisters * kPointerSize)); | 6484 __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)); |
6704 __ subq(rcx, Immediate(Assembler::kShortCallInstructionLength)); | 6485 |
6705 #else | 6486 // Save the remainder of the volatile registers. |
6706 __ movq(rdi, Operand(rsp, kNumSavedRegisters * kPointerSize)); | 6487 masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); |
6707 __ subq(rdi, Immediate(Assembler::kShortCallInstructionLength)); | |
6708 #endif | |
6709 | 6488 |
6710 // Call the entry hook function. | 6489 // Call the entry hook function. |
6711 __ movq(rax, &entry_hook_, RelocInfo::NONE64); | 6490 __ movq(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()), |
6712 __ movq(rax, Operand(rax, 0)); | 6491 RelocInfo::NONE64); |
6713 | 6492 |
6714 AllowExternalCallThatCantCauseGC scope(masm); | 6493 AllowExternalCallThatCantCauseGC scope(masm); |
6715 | 6494 |
6716 const int kArgumentCount = 2; | 6495 const int kArgumentCount = 2; |
6717 __ PrepareCallCFunction(kArgumentCount); | 6496 __ PrepareCallCFunction(kArgumentCount); |
6718 __ CallCFunction(rax, kArgumentCount); | 6497 __ CallCFunction(rax, kArgumentCount); |
6719 | 6498 |
6720 // Restore volatile regs. | 6499 // Restore volatile regs. |
6721 #ifdef _WIN64 | 6500 masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); |
6722 __ pop(rcx); | 6501 __ pop(arg_reg_2); |
6723 #else | 6502 __ pop(arg_reg_1); |
6724 __ pop(rsi); | |
6725 __ pop(rdi); | |
6726 __ pop(rcx); | |
6727 #endif | |
6728 | 6503 |
6729 __ Ret(); | 6504 __ Ret(); |
6730 } | 6505 } |
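The first hook argument is the address of the call instruction itself: the return address sitting above the saved registers, minus the length of the short call that reached the stub (5 bytes for an E8 rel32 call on x64). The address arithmetic, sketched:

#include <cstdint>

constexpr int kShortCallInstructionLength = 5;  // E8 + 32-bit displacement

// Mirrors subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)).
uintptr_t CallSiteFromReturnAddress(uintptr_t return_address) {
  return return_address - kShortCallInstructionLength;
}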
6731 | 6506 |
6732 | 6507 |
6733 template<class T> | 6508 template<class T> |
6734 static void CreateArrayDispatch(MacroAssembler* masm) { | 6509 static void CreateArrayDispatch(MacroAssembler* masm) { |
6735 int last_index = GetSequenceIndexFromFastElementsKind( | 6510 int last_index = GetSequenceIndexFromFastElementsKind( |
6736 TERMINAL_FAST_ELEMENTS_KIND); | 6511 TERMINAL_FAST_ELEMENTS_KIND); |
6737 for (int i = 0; i <= last_index; ++i) { | 6512 for (int i = 0; i <= last_index; ++i) { |
6738 Label next; | 6513 Label next; |
6739 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 6514 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
6740 __ cmpl(rdx, Immediate(kind)); | 6515 __ cmpl(rdx, Immediate(kind)); |
6741 __ j(not_equal, &next); | 6516 __ j(not_equal, &next); |
6742 T stub(kind); | 6517 T stub(kind); |
6743 __ TailCallStub(&stub); | 6518 __ TailCallStub(&stub); |
6744 __ bind(&next); | 6519 __ bind(&next); |
6745 } | 6520 } |
6746 | 6521 |
6747 // If we reached this point there is a problem. | 6522 // If we reached this point there is a problem. |
6748 __ Abort("Unexpected ElementsKind in array constructor"); | 6523 __ Abort("Unexpected ElementsKind in array constructor"); |
6749 } | 6524 } |
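The template emits a compare/tail-call chain over the fast ElementsKind sequence and aborts if nothing matches. The same dispatch shape in standalone C++ (enum values mirror the ASSERTs later in this file; DummyStub is a hypothetical stand-in for the real stub types):

#include <cstdlib>

enum ElementsKind {
  FAST_SMI_ELEMENTS = 0,
  FAST_HOLEY_SMI_ELEMENTS = 1,
  FAST_ELEMENTS = 2,
  FAST_HOLEY_ELEMENTS = 3,
  FAST_DOUBLE_ELEMENTS = 4,
  FAST_HOLEY_DOUBLE_ELEMENTS = 5  // terminal fast kind in this sketch
};

struct DummyStub {
  explicit DummyStub(ElementsKind) {}
  void TailCall() {}
};

template <class T>
void CreateArrayDispatch(ElementsKind kind) {
  for (int i = FAST_SMI_ELEMENTS; i <= FAST_HOLEY_DOUBLE_ELEMENTS; ++i) {
    if (kind == static_cast<ElementsKind>(i)) {
      T stub(static_cast<ElementsKind>(i));
      stub.TailCall();  // __ TailCallStub(&stub)
      return;
    }
  }
  std::abort();  // "Unexpected ElementsKind in array constructor"
}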
6750 | 6525 |
6751 | 6526 |
6752 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { | 6527 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { |
6753 // rbx - type info cell | 6528 // rbx - type info cell |
6754 // rdx - kind | 6529 // rdx - kind |
6755 // rax - number of arguments | 6530 // rax - number of arguments |
6756 // rdi - constructor? | 6531 // rdi - constructor? |
6757 // esp[0] - return address | 6532 // rsp[0] - return address |
6758 // esp[4] - last argument | 6533 // rsp[8] - last argument |
6759 ASSERT(FAST_SMI_ELEMENTS == 0); | 6534 ASSERT(FAST_SMI_ELEMENTS == 0); |
6760 ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | 6535 ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); |
6761 ASSERT(FAST_ELEMENTS == 2); | 6536 ASSERT(FAST_ELEMENTS == 2); |
6762 ASSERT(FAST_HOLEY_ELEMENTS == 3); | 6537 ASSERT(FAST_HOLEY_ELEMENTS == 3); |
6763 ASSERT(FAST_DOUBLE_ELEMENTS == 4); | 6538 ASSERT(FAST_DOUBLE_ELEMENTS == 4); |
6764 ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); | 6539 ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); |
6765 | 6540 |
6766 Handle<Object> undefined_sentinel( | 6541 Handle<Object> undefined_sentinel( |
6767 masm->isolate()->heap()->undefined_value(), | 6542 masm->isolate()->heap()->undefined_value(), |
6768 masm->isolate()); | 6543 masm->isolate()); |
6769 | 6544 |
6770 // Is the low bit set? If so, we are holey and that is good. | 6545 // Is the low bit set? If so, we are holey and that is good. |
6771 __ testb(rdx, Immediate(1)); | 6546 __ testb(rdx, Immediate(1)); |
6772 Label normal_sequence; | 6547 Label normal_sequence; |
6773 __ j(not_zero, &normal_sequence); | 6548 __ j(not_zero, &normal_sequence); |
6774 | 6549 |
6775 // look at the first argument | 6550 // look at the first argument |
6776 __ movq(rcx, Operand(rsp, kPointerSize)); | 6551 __ movq(rcx, Operand(rsp, kPointerSize)); |
6777 __ testq(rcx, rcx); | 6552 __ testq(rcx, rcx); |
6778 __ j(zero, &normal_sequence); | 6553 __ j(zero, &normal_sequence); |
6779 | 6554 |
6780 // We are going to create a holey array, but our kind is non-holey. | 6555 // We are going to create a holey array, but our kind is non-holey. |
6781 // Fix kind and retry | 6556 // Fix kind and retry (only if we have an allocation site in the cell). |
6782 __ incl(rdx); | 6557 __ incl(rdx); |
6783 __ Cmp(rbx, undefined_sentinel); | 6558 __ Cmp(rbx, undefined_sentinel); |
6784 __ j(equal, &normal_sequence); | 6559 __ j(equal, &normal_sequence); |
| 6560 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); |
| 6561 Handle<Map> allocation_site_map( |
| 6562 masm->isolate()->heap()->allocation_site_map(), |
| 6563 masm->isolate()); |
| 6564 __ Cmp(FieldOperand(rcx, 0), allocation_site_map); |
| 6565 __ j(not_equal, &normal_sequence); |
6785 | 6566 |
6786 // Save the resulting elements kind in type info | 6567 // Save the resulting elements kind in type info |
6787 __ Integer32ToSmi(rdx, rdx); | 6568 __ Integer32ToSmi(rdx, rdx); |
6788 __ movq(FieldOperand(rbx, kPointerSize), rdx); | 6569 __ movq(FieldOperand(rcx, AllocationSite::kPayloadOffset), rdx); |
6789 __ SmiToInteger32(rdx, rdx); | 6570 __ SmiToInteger32(rdx, rdx); |
6790 | 6571 |
6791 __ bind(&normal_sequence); | 6572 __ bind(&normal_sequence); |
6792 int last_index = GetSequenceIndexFromFastElementsKind( | 6573 int last_index = GetSequenceIndexFromFastElementsKind( |
6793 TERMINAL_FAST_ELEMENTS_KIND); | 6574 TERMINAL_FAST_ELEMENTS_KIND); |
6794 for (int i = 0; i <= last_index; ++i) { | 6575 for (int i = 0; i <= last_index; ++i) { |
6795 Label next; | 6576 Label next; |
6796 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 6577 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
6797 __ cmpl(rdx, Immediate(kind)); | 6578 __ cmpl(rdx, Immediate(kind)); |
6798 __ j(not_equal, &next); | 6579 __ j(not_equal, &next); |
6799 ArraySingleArgumentConstructorStub stub(kind); | 6580 ArraySingleArgumentConstructorStub stub(kind); |
6800 __ TailCallStub(&stub); | 6581 __ TailCallStub(&stub); |
6801 __ bind(&next); | 6582 __ bind(&next); |
6802 } | 6583 } |
6803 | 6584 |
6804 // If we reached this point there is a problem. | 6585 // If we reached this point there is a problem. |
6805 __ Abort("Unexpected ElementsKind in array constructor"); | 6586 __ Abort("Unexpected ElementsKind in array constructor"); |
6806 } | 6587 } |
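The holey upgrade above leans on the kind numbering: packed kinds are even and their holey counterparts follow at the next odd value (see the ASSERTs), so incl(rdx) is all it takes. As a one-line C++ sketch:

// Packed kinds are even; the holey variant is the next odd value, so the
// upgrade is a single increment, as incl(rdx) relies on.
int ToHoleyKind(int kind) {
  if ((kind & 1) == 0) ++kind;  // testb(rdx, Immediate(1)) path
  return kind;
}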
6807 | 6588 |
6808 | 6589 |
6809 template<class T> | 6590 template<class T> |
6810 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { | 6591 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { |
6811 int to_index = GetSequenceIndexFromFastElementsKind( | 6592 int to_index = GetSequenceIndexFromFastElementsKind( |
6812 TERMINAL_FAST_ELEMENTS_KIND); | 6593 TERMINAL_FAST_ELEMENTS_KIND); |
6813 for (int i = 0; i <= to_index; ++i) { | 6594 for (int i = 0; i <= to_index; ++i) { |
6814 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 6595 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
6815 T stub(kind); | 6596 T stub(kind); |
6816 stub.GetCode(isolate)->set_is_pregenerated(true); | 6597 stub.GetCode(isolate)->set_is_pregenerated(true); |
6817 if (AllocationSiteInfo::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { | 6598 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { |
6818 T stub1(kind, true); | 6599 T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES); |
6819 stub1.GetCode(isolate)->set_is_pregenerated(true); | 6600 stub1.GetCode(isolate)->set_is_pregenerated(true); |
6820 } | 6601 } |
6821 } | 6602 } |
6822 } | 6603 } |
6823 | 6604 |
6824 | 6605 |
6825 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { | 6606 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { |
6826 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( | 6607 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( |
6827 isolate); | 6608 isolate); |
6828 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( | 6609 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( |
(...skipping 13 matching lines...)
6842 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); | 6623 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); |
6843 stubh2.GetCode(isolate)->set_is_pregenerated(true); | 6624 stubh2.GetCode(isolate)->set_is_pregenerated(true); |
6844 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); | 6625 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); |
6845 stubh3.GetCode(isolate)->set_is_pregenerated(true); | 6626 stubh3.GetCode(isolate)->set_is_pregenerated(true); |
6846 } | 6627 } |
6847 } | 6628 } |
6848 | 6629 |
6849 | 6630 |
6850 void ArrayConstructorStub::Generate(MacroAssembler* masm) { | 6631 void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
6851 // ----------- S t a t e ------------- | 6632 // ----------- S t a t e ------------- |
6852 // -- rax : argc | 6633 // -- rax : argc |
6853 // -- rbx : type info cell | 6634 // -- rbx : type info cell |
6854 // -- rdi : constructor | 6635 // -- rdi : constructor |
6855 // -- rsp[0] : return address | 6636 // -- rsp[0] : return address |
6856 // -- rsp[4] : last argument | 6637 // -- rsp[8] : last argument |
6857 // ----------------------------------- | 6638 // ----------------------------------- |
6858 Handle<Object> undefined_sentinel( | 6639 Handle<Object> undefined_sentinel( |
6859 masm->isolate()->heap()->undefined_value(), | 6640 masm->isolate()->heap()->undefined_value(), |
6860 masm->isolate()); | 6641 masm->isolate()); |
6861 | 6642 |
6862 if (FLAG_debug_code) { | 6643 if (FLAG_debug_code) { |
6863 // The array construct code is only set for the global and natives | 6644 // The array construct code is only set for the global and natives |
6864 // builtin Array functions which always have maps. | 6645 // builtin Array functions which always have maps. |
6865 | 6646 |
6866 // Initial map for the builtin Array function should be a map. | 6647 // Initial map for the builtin Array function should be a map. |
6867 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 6648 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
6868 // Will both indicate a NULL and a Smi. | 6649 // Will both indicate a NULL and a Smi. |
6869 STATIC_ASSERT(kSmiTag == 0); | 6650 STATIC_ASSERT(kSmiTag == 0); |
6870 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); | 6651 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); |
6871 __ Check(not_smi, "Unexpected initial map for Array function"); | 6652 __ Check(not_smi, "Unexpected initial map for Array function"); |
6872 __ CmpObjectType(rcx, MAP_TYPE, rcx); | 6653 __ CmpObjectType(rcx, MAP_TYPE, rcx); |
6873 __ Check(equal, "Unexpected initial map for Array function"); | 6654 __ Check(equal, "Unexpected initial map for Array function"); |
6874 | 6655 |
6875 // We should either have undefined in ebx or a valid cell | 6656 // We should either have undefined in rbx or a valid cell |
6876 Label okay_here; | 6657 Label okay_here; |
6877 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); | 6658 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); |
6878 __ Cmp(rbx, undefined_sentinel); | 6659 __ Cmp(rbx, undefined_sentinel); |
6879 __ j(equal, &okay_here); | 6660 __ j(equal, &okay_here); |
6880 __ Cmp(FieldOperand(rbx, 0), cell_map); | 6661 __ Cmp(FieldOperand(rbx, 0), cell_map); |
6881 __ Assert(equal, "Expected property cell in register rbx"); | 6662 __ Assert(equal, "Expected property cell in register rbx"); |
6882 __ bind(&okay_here); | 6663 __ bind(&okay_here); |
6883 } | 6664 } |
6884 | 6665 |
6885 Label no_info, switch_ready; | 6666 Label no_info, switch_ready; |
6886 // Get the elements kind and case on that. | 6667 // Get the elements kind and case on that. |
6887 __ Cmp(rbx, undefined_sentinel); | 6668 __ Cmp(rbx, undefined_sentinel); |
6888 __ j(equal, &no_info); | 6669 __ j(equal, &no_info); |
6889 __ movq(rdx, FieldOperand(rbx, Cell::kValueOffset)); | 6670 __ movq(rdx, FieldOperand(rbx, Cell::kValueOffset)); |
6890 __ JumpIfNotSmi(rdx, &no_info); | 6671 |
| 6672 // The type cell may have undefined in its value. |
| 6673 __ Cmp(rdx, undefined_sentinel); |
| 6674 __ j(equal, &no_info); |
| 6675 |
| 6676 // We should have an allocation site object |
| 6677 if (FLAG_debug_code) { |
| 6678 __ Cmp(FieldOperand(rdx, 0), |
| 6679 Handle<Map>(masm->isolate()->heap()->allocation_site_map())); |
| 6680 __ Assert(equal, "Expected AllocationSite object in register rdx"); |
| 6681 } |
| 6682 |
| 6683 __ movq(rdx, FieldOperand(rdx, AllocationSite::kPayloadOffset)); |
6891 __ SmiToInteger32(rdx, rdx); | 6684 __ SmiToInteger32(rdx, rdx); |
6892 __ jmp(&switch_ready); | 6685 __ jmp(&switch_ready); |
6893 __ bind(&no_info); | 6686 __ bind(&no_info); |
6894 __ movq(rdx, Immediate(GetInitialFastElementsKind())); | 6687 __ movq(rdx, Immediate(GetInitialFastElementsKind())); |
6895 __ bind(&switch_ready); | 6688 __ bind(&switch_ready); |
6896 | 6689 |
6897 if (argument_count_ == ANY) { | 6690 if (argument_count_ == ANY) { |
6898 Label not_zero_case, not_one_case; | 6691 Label not_zero_case, not_one_case; |
6899 __ testq(rax, rax); | 6692 __ testq(rax, rax); |
6900 __ j(not_zero, ¬_zero_case); | 6693 __ j(not_zero, ¬_zero_case); |
(...skipping 49 matching lines...)
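Before the elided dispatch, the elements kind is chosen with a fallback: use the kind recorded in the AllocationSite's payload when the cell holds one, otherwise GetInitialFastElementsKind(). A hedged sketch of that selection (AllocationSite here is a stand-in struct, not V8's layout):

struct AllocationSite { int payload_kind; };

int ElementsKindFor(const AllocationSite* site, int initial_kind) {
  if (site == nullptr) return initial_kind;  // the no_info fallback
  return site->payload_kind;  // load from AllocationSite::kPayloadOffset
}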
6950 __ TailCallStub(&stub1); | 6743 __ TailCallStub(&stub1); |
6951 | 6744 |
6952 __ bind(¬_one_case); | 6745 __ bind(¬_one_case); |
6953 InternalArrayNArgumentsConstructorStub stubN(kind); | 6746 InternalArrayNArgumentsConstructorStub stubN(kind); |
6954 __ TailCallStub(&stubN); | 6747 __ TailCallStub(&stubN); |
6955 } | 6748 } |
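Both the external and internal array constructors pick among three specialized stubs by argument count: zero, exactly one (where the holey heuristics above apply), or N. The selection, sketched:

#include <cstddef>

enum StubChoice { kNoArgument, kSingleArgument, kNArguments };

StubChoice ChooseConstructorStub(size_t argc) {
  if (argc == 0) return kNoArgument;      // the not_zero_case branch skipped
  if (argc == 1) return kSingleArgument;  // one-argument fast path
  return kNArguments;                     // not_one_case
}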
6956 | 6749 |
6957 | 6750 |
6958 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { | 6751 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { |
6959 // ----------- S t a t e ------------- | 6752 // ----------- S t a t e ------------- |
6960 // -- eax : argc | 6753 // -- rax : argc |
6961 // -- ebx : type info cell | 6754 // -- rbx : type info cell |
6962 // -- edi : constructor | 6755 // -- rdi : constructor |
6963 // -- esp[0] : return address | 6756 // -- rsp[0] : return address |
6964 // -- esp[4] : last argument | 6757 // -- rsp[8] : last argument |
6965 // ----------------------------------- | 6758 // ----------------------------------- |
6966 | 6759 |
6967 if (FLAG_debug_code) { | 6760 if (FLAG_debug_code) { |
6968 // The array construct code is only set for the global and natives | 6761 // The array construct code is only set for the global and natives |
6969 // builtin Array functions which always have maps. | 6762 // builtin Array functions which always have maps. |
6970 | 6763 |
6971 // Initial map for the builtin Array function should be a map. | 6764 // Initial map for the builtin Array function should be a map. |
6972 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 6765 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
6973 // Will both indicate a NULL and a Smi. | 6766 // Will both indicate a NULL and a Smi. |
6974 STATIC_ASSERT(kSmiTag == 0); | 6767 STATIC_ASSERT(kSmiTag == 0); |
(...skipping 31 matching lines...)
7006 __ bind(&fast_elements_case); | 6799 __ bind(&fast_elements_case); |
7007 GenerateCase(masm, FAST_ELEMENTS); | 6800 GenerateCase(masm, FAST_ELEMENTS); |
7008 } | 6801 } |
7009 | 6802 |
7010 | 6803 |
7011 #undef __ | 6804 #undef __ |
7012 | 6805 |
7013 } } // namespace v8::internal | 6806 } } // namespace v8::internal |
7014 | 6807 |
7015 #endif // V8_TARGET_ARCH_X64 | 6808 #endif // V8_TARGET_ARCH_X64 |