OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 137 matching lines...)
148 } | 148 } |
149 | 149 |
150 | 150 |
151 bool LGapResolver::CanReach(LGapNode* a, LGapNode* b) { | 151 bool LGapResolver::CanReach(LGapNode* a, LGapNode* b) { |
152 ASSERT(a != b); | 152 ASSERT(a != b); |
153 return CanReach(a, b, next_visited_id_++); | 153 return CanReach(a, b, next_visited_id_++); |
154 } | 154 } |
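
Aside on the CanReach hunk above: the next_visited_id_++ argument implements the visited-stamp idiom. Rather than clearing a visited flag on every node before each traversal, each traversal gets a fresh id, and a node counts as visited only if it carries the current id, so all old stamps go stale at once in O(1). A minimal standalone sketch of the idiom, with hypothetical names rather than V8's actual classes:

    #include <vector>

    struct Node {
      int visited_id = -1;            // stamp of the last traversal that saw us
      std::vector<Node*> successors;  // outgoing edges (assumed structure)
    };

    // Depth-first reachability; passing a fresh `id` per query replaces
    // an O(n) clearing pass over all nodes.
    bool Reaches(Node* a, Node* b, int id) {
      if (a == b) return true;
      if (a->visited_id == id) return false;  // already seen in this query
      a->visited_id = id;
      for (Node* s : a->successors) {
        if (Reaches(s, b, id)) return true;
      }
      return false;
    }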
155 | 155 |
156 | 156 |
157 void LGapResolver::RegisterMove(LMoveOperands move) { | 157 void LGapResolver::RegisterMove(LMoveOperands move) { |
158 if (move.from()->IsConstantOperand()) { | 158 if (move.source()->IsConstantOperand()) { |
159 // Constant moves should be last in the machine code. Therefore add them | 159 // Constant moves should be last in the machine code. Therefore add them |
160 // first to the result set. | 160 // first to the result set. |
161 AddResultMove(move.from(), move.to()); | 161 AddResultMove(move.source(), move.destination()); |
162 } else { | 162 } else { |
163 LGapNode* from = LookupNode(move.from()); | 163 LGapNode* from = LookupNode(move.source()); |
164 LGapNode* to = LookupNode(move.to()); | 164 LGapNode* to = LookupNode(move.destination()); |
165 if (to->IsAssigned() && to->assigned_from() == from) { | 165 if (to->IsAssigned() && to->assigned_from() == from) { |
166 move.Eliminate(); | 166 move.Eliminate(); |
167 return; | 167 return; |
168 } | 168 } |
169 ASSERT(!to->IsAssigned()); | 169 ASSERT(!to->IsAssigned()); |
170 if (CanReach(from, to)) { | 170 if (CanReach(from, to)) { |
171 // This introduces a cycle. Save. | 171 // This introduces a cycle. Save. |
172 identified_cycles_.Add(from); | 172 identified_cycles_.Add(from); |
173 } | 173 } |
174 to->set_assigned_from(from); | 174 to->set_assigned_from(from); |
(...skipping 156 matching lines...)
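
Aside on the RegisterMove hunk above: a parallel move such as {r0 <- r1, r1 <- r0} has no valid serial ordering, since whichever plain move is emitted first clobbers the other's source. That is why a move whose source can already reach its destination is recorded in identified_cycles_ and later broken with a scratch location. A self-contained sketch of breaking one such cycle (illustrative only, not V8's resolver):

    #include <cstdio>
    #include <vector>

    // Rotate values along a cycle of "registers" using one scratch slot:
    // regs[c[0]] <- regs[c[1]] <- ... <- regs[c.back()] <- old regs[c[0]].
    void PerformCycle(std::vector<int>& regs, const std::vector<int>& c) {
      int scratch = regs[c[0]];               // save the first destination
      for (size_t i = 0; i + 1 < c.size(); ++i) {
        regs[c[i]] = regs[c[i + 1]];          // shift each value one step
      }
      regs[c.back()] = scratch;               // close the cycle
    }

    int main() {
      std::vector<int> regs = {10, 20, 30};
      PerformCycle(regs, {0, 1, 2});          // r0<-r1, r1<-r2, r2<-r0
      std::printf("%d %d %d\n", regs[0], regs[1], regs[2]);  // 20 30 10
    }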
331 } | 331 } |
332 | 332 |
333 // Deferred code is the last part of the instruction sequence. Mark | 333 // Deferred code is the last part of the instruction sequence. Mark |
334 // the generated code as done unless we bailed out. | 334 // the generated code as done unless we bailed out. |
335 if (!is_aborted()) status_ = DONE; | 335 if (!is_aborted()) status_ = DONE; |
336 return !is_aborted(); | 336 return !is_aborted(); |
337 } | 337 } |
338 | 338 |
339 | 339 |
340 bool LCodeGen::GenerateSafepointTable() { | 340 bool LCodeGen::GenerateSafepointTable() { |
341 Abort("Unimplemented: %s", "GeneratePrologue"); | 341 ASSERT(is_done()); |
342 return false; | 342 safepoints_.Emit(masm(), StackSlotCount()); |
| 343 return !is_aborted(); |
343 } | 344 } |
344 | 345 |
345 | 346 |
346 Register LCodeGen::ToRegister(int index) const { | 347 Register LCodeGen::ToRegister(int index) const { |
347 return Register::FromAllocationIndex(index); | 348 return Register::FromAllocationIndex(index); |
348 } | 349 } |
349 | 350 |
350 | 351 |
351 XMMRegister LCodeGen::ToDoubleRegister(int index) const { | 352 XMMRegister LCodeGen::ToDoubleRegister(int index) const { |
352 return XMMRegister::FromAllocationIndex(index); | 353 return XMMRegister::FromAllocationIndex(index); |
(...skipping 132 matching lines...)
485 translation->StoreLiteral(src_index); | 486 translation->StoreLiteral(src_index); |
486 } else { | 487 } else { |
487 UNREACHABLE(); | 488 UNREACHABLE(); |
488 } | 489 } |
489 } | 490 } |
490 | 491 |
491 | 492 |
492 void LCodeGen::CallCode(Handle<Code> code, | 493 void LCodeGen::CallCode(Handle<Code> code, |
493 RelocInfo::Mode mode, | 494 RelocInfo::Mode mode, |
494 LInstruction* instr) { | 495 LInstruction* instr) { |
495 Abort("Unimplemented: %s", "CallCode"); | 496 if (instr != NULL) { |
| 497 LPointerMap* pointers = instr->pointer_map(); |
| 498 RecordPosition(pointers->position()); |
| 499 __ call(code, mode); |
| 500 RegisterLazyDeoptimization(instr); |
| 501 } else { |
| 502 LPointerMap no_pointers(0); |
| 503 RecordPosition(no_pointers.position()); |
| 504 __ call(code, mode); |
| 505 RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex); |
| 506 } |
| 507 |
| 508 // Signal that we don't inline smi code before these stubs in the |
| 509 // optimizing code generator. |
| 510 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || |
| 511 code->kind() == Code::COMPARE_IC) { |
| 512 __ nop(); |
| 513 } |
496 } | 514 } |
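
Aside on the CallCode hunk above: the trailing nop after calls to the type-recording binary-op and compare stubs serves as a one-byte marker at the call site, letting code that later inspects the site conclude that no inlined smi fast path was emitted there. A hypothetical consumer of such a marker, purely illustrative and not V8's actual patching code:

    #include <cstdint>

    // Inspect the byte just past a call instruction: a 0x90 (the
    // single-byte x64 nop) marker is read here as "no inlined smi
    // code at this call site".
    bool SiteHasInlinedSmiCode(const uint8_t* byte_after_call) {
      const uint8_t kNopByte = 0x90;
      return *byte_after_call != kNopByte;
    }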
497 | 515 |
498 | 516 |
499 void LCodeGen::CallRuntime(Runtime::Function* function, | 517 void LCodeGen::CallRuntime(Runtime::Function* function, |
500 int num_arguments, | 518 int num_arguments, |
501 LInstruction* instr) { | 519 LInstruction* instr) { |
502 Abort("Unimplemented: %s", "CallRuntime"); | 520 Abort("Unimplemented: %s", "CallRuntime"); |
503 } | 521 } |
504 | 522 |
505 | 523 |
506 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { | 524 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { |
507 // Create the environment to bail out to. If the call has side effects, | 525 // Create the environment to bail out to. If the call has side effects, |
508 // execution has to continue after the call; otherwise execution can | 526 // execution has to continue after the call; otherwise execution can |
509 // continue from a previous bailout point, repeating the call. | 527 // continue from a previous bailout point, repeating the call. |
510 LEnvironment* deoptimization_environment; | 528 LEnvironment* deoptimization_environment; |
511 if (instr->HasDeoptimizationEnvironment()) { | 529 if (instr->HasDeoptimizationEnvironment()) { |
512 deoptimization_environment = instr->deoptimization_environment(); | 530 deoptimization_environment = instr->deoptimization_environment(); |
513 } else { | 531 } else { |
514 deoptimization_environment = instr->environment(); | 532 deoptimization_environment = instr->environment(); |
515 } | 533 } |
516 | 534 |
517 RegisterEnvironmentForDeoptimization(deoptimization_environment); | 535 RegisterEnvironmentForDeoptimization(deoptimization_environment); |
518 RecordSafepoint(instr->pointer_map(), | 536 RecordSafepoint(instr->pointer_map(), |
519 deoptimization_environment->deoptimization_index()); | 537 deoptimization_environment->deoptimization_index()); |
520 } | 538 } |
521 | 539 |
522 | 540 |
523 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { | 541 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { |
524 Abort("Unimplemented: %s", "RegisterEnvironmentForDeoptimization"); | 542 if (!environment->HasBeenRegistered()) { |
| 543 // Physical stack frame layout: |
| 544 // -x ............. -4 0 ..................................... y |
| 545 // [incoming arguments] [spill slots] [pushed outgoing arguments] |
| 546 |
| 547 // Layout of the environment: |
| 548 // 0 ..................................................... size-1 |
| 549 // [parameters] [locals] [expression stack including arguments] |
| 550 |
| 551 // Layout of the translation: |
| 552 // 0 ........................................................ size - 1 + 4 |
| 553 // [expression stack including arguments] [locals] [4 words] [parameters] |
| 554 // |>------------ translation_size ------------<| |
| 555 |
| 556 int frame_count = 0; |
| 557 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { |
| 558 ++frame_count; |
| 559 } |
| 560 Translation translation(&translations_, frame_count); |
| 561 WriteTranslation(environment, &translation); |
| 562 int deoptimization_index = deoptimizations_.length(); |
| 563 environment->Register(deoptimization_index, translation.index()); |
| 564 deoptimizations_.Add(environment); |
| 565 } |
525 } | 566 } |
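
Aside on the hunk above: the counting loop exists because a Translation must be told up front how many frames it describes; each environment on the outer() chain corresponds to one (possibly inlined) frame, innermost first. An illustrative model of that walk, with hypothetical types:

    // One node per frame, linked innermost to outermost via `outer`.
    struct Env {
      const Env* outer;
    };

    int FrameCount(const Env* e) {
      int n = 0;
      for (; e != nullptr; e = e->outer) ++n;
      return n;
    }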
526 | 567 |
527 | 568 |
528 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { | 569 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { |
529 Abort("Unimplemented: %s", "Deoptimiz"); | 570 Abort("Unimplemented: %s", "Deoptimiz"); |
530 } | 571 } |
531 | 572 |
532 | 573 |
533 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 574 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { |
534 int length = deoptimizations_.length(); | 575 int length = deoptimizations_.length(); |
(...skipping 109 matching lines...)
644 // xmm0 must always be a scratch register. | 685 // xmm0 must always be a scratch register. |
645 XMMRegister xmm_scratch = xmm0; | 686 XMMRegister xmm_scratch = xmm0; |
646 LUnallocated marker_operand(LUnallocated::NONE); | 687 LUnallocated marker_operand(LUnallocated::NONE); |
647 | 688 |
648 Register cpu_scratch = kScratchRegister; | 689 Register cpu_scratch = kScratchRegister; |
649 | 690 |
650 const ZoneList<LMoveOperands>* moves = | 691 const ZoneList<LMoveOperands>* moves = |
651 resolver_.Resolve(move->move_operands(), &marker_operand); | 692 resolver_.Resolve(move->move_operands(), &marker_operand); |
652 for (int i = moves->length() - 1; i >= 0; --i) { | 693 for (int i = moves->length() - 1; i >= 0; --i) { |
653 LMoveOperands move = moves->at(i); | 694 LMoveOperands move = moves->at(i); |
654 LOperand* from = move.from(); | 695 LOperand* from = move.source(); |
655 LOperand* to = move.to(); | 696 LOperand* to = move.destination(); |
656 ASSERT(!from->IsDoubleRegister() || | 697 ASSERT(!from->IsDoubleRegister() || |
657 !ToDoubleRegister(from).is(xmm_scratch)); | 698 !ToDoubleRegister(from).is(xmm_scratch)); |
658 ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(xmm_scratch)); | 699 ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(xmm_scratch)); |
659 ASSERT(!from->IsRegister() || !ToRegister(from).is(cpu_scratch)); | 700 ASSERT(!from->IsRegister() || !ToRegister(from).is(cpu_scratch)); |
660 ASSERT(!to->IsRegister() || !ToRegister(to).is(cpu_scratch)); | 701 ASSERT(!to->IsRegister() || !ToRegister(to).is(cpu_scratch)); |
661 if (from->IsConstantOperand()) { | 702 if (from->IsConstantOperand()) { |
662 LConstantOperand* constant_from = LConstantOperand::cast(from); | 703 LConstantOperand* constant_from = LConstantOperand::cast(from); |
663 if (to->IsRegister()) { | 704 if (to->IsRegister()) { |
664 if (IsInteger32Constant(constant_from)) { | 705 if (IsInteger32Constant(constant_from)) { |
665 __ movl(ToRegister(to), Immediate(ToInteger32(constant_from))); | 706 __ movl(ToRegister(to), Immediate(ToInteger32(constant_from))); |
(...skipping 111 matching lines...)
777 Abort("Unimplemented: %s", "DoShiftI"); | 818 Abort("Unimplemented: %s", "DoShiftI"); |
778 } | 819 } |
779 | 820 |
780 | 821 |
781 void LCodeGen::DoSubI(LSubI* instr) { | 822 void LCodeGen::DoSubI(LSubI* instr) { |
782 Abort("Unimplemented: %s", "DoSubI"); | 823 Abort("Unimplemented: %s", "DoSubI"); |
783 } | 824 } |
784 | 825 |
785 | 826 |
786 void LCodeGen::DoConstantI(LConstantI* instr) { | 827 void LCodeGen::DoConstantI(LConstantI* instr) { |
787 Abort("Unimplemented: %s", "DoConstantI"); | 828 ASSERT(instr->result()->IsRegister()); |
| 829 __ movl(ToRegister(instr->result()), Immediate(instr->value())); |
788 } | 830 } |
789 | 831 |
790 | 832 |
791 void LCodeGen::DoConstantD(LConstantD* instr) { | 833 void LCodeGen::DoConstantD(LConstantD* instr) { |
792 Abort("Unimplemented: %s", "DoConstantI"); | 834 ASSERT(instr->result()->IsDoubleRegister()); |
| 835 XMMRegister res = ToDoubleRegister(instr->result()); |
| 836 double v = instr->value(); |
| 837 // Use xor to produce +0.0 in a fast and compact way, but avoid |
| 838 // doing so if the constant is -0.0. |
| 839 if (BitCast<uint64_t, double>(v) == 0) { |
| 840 __ xorpd(res, res); |
| 841 } else { |
| 842 Register tmp = ToRegister(instr->TempAt(0)); |
| 843 int32_t v_int32 = static_cast<int32_t>(v); |
| 844 if (static_cast<double>(v_int32) == v) { |
| 845 __ movl(tmp, Immediate(v_int32)); |
| 846 __ cvtlsi2sd(res, tmp); |
| 847 } else { |
| 848 uint64_t int_val = BitCast<uint64_t, double>(v); |
| 849 __ Set(tmp, int_val); |
| 850 __ movd(res, tmp); |
| 851 } |
| 852 } |
793 } | 853 } |
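
Aside on the DoConstantD hunk above: the fast path keys on the bit pattern rather than on `v == 0.0` because +0.0 and -0.0 compare equal as doubles, yet only +0.0 is all-zero bits, and xorpd of a register with itself produces exactly all-zero bits. A quick standalone check of the two patterns:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    uint64_t Bits(double d) {
      uint64_t u;
      std::memcpy(&u, &d, sizeof u);  // portable bit cast
      return u;
    }

    int main() {
      std::printf("%016llx\n", (unsigned long long)Bits(0.0));   // 0000000000000000
      std::printf("%016llx\n", (unsigned long long)Bits(-0.0));  // 8000000000000000
      std::printf("%d\n", 0.0 == -0.0);                          // 1: they compare equal
    }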
794 | 854 |
795 | 855 |
796 void LCodeGen::DoConstantT(LConstantT* instr) { | 856 void LCodeGen::DoConstantT(LConstantT* instr) { |
797 ASSERT(instr->result()->IsRegister()); | 857 ASSERT(instr->result()->IsRegister()); |
798 __ Move(ToRegister(instr->result()), instr->value()); | 858 __ Move(ToRegister(instr->result()), instr->value()); |
799 } | 859 } |
800 | 860 |
801 | 861 |
802 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) { | 862 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) { |
(...skipping 15 matching lines...)
818 Abort("Unimplemented: %s", "DoBitNotI"); | 878 Abort("Unimplemented: %s", "DoBitNotI"); |
819 } | 879 } |
820 | 880 |
821 | 881 |
822 void LCodeGen::DoThrow(LThrow* instr) { | 882 void LCodeGen::DoThrow(LThrow* instr) { |
823 Abort("Unimplemented: %s", "DoThrow"); | 883 Abort("Unimplemented: %s", "DoThrow"); |
824 } | 884 } |
825 | 885 |
826 | 886 |
827 void LCodeGen::DoAddI(LAddI* instr) { | 887 void LCodeGen::DoAddI(LAddI* instr) { |
828 Abort("Unimplemented: %s", "DoAddI"); | 888 LOperand* left = instr->InputAt(0); |
| 889 LOperand* right = instr->InputAt(1); |
| 890 ASSERT(left->Equals(instr->result())); |
| 891 |
| 892 if (right->IsConstantOperand()) { |
| 893 __ addl(ToRegister(left), |
| 894 Immediate(ToInteger32(LConstantOperand::cast(right)))); |
| 895 } else if (right->IsRegister()) { |
| 896 __ addl(ToRegister(left), ToRegister(right)); |
| 897 } else { |
| 898 __ addl(ToRegister(left), ToOperand(right)); |
| 899 } |
| 900 |
| 901 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 902 DeoptimizeIf(overflow, instr->environment()); |
| 903 } |
829 } | 904 } |
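
Aside on the DoAddI hunk above: addl sets the CPU's overflow flag on signed 32-bit wraparound, and DeoptimizeIf(overflow, ...) turns that flag into a bailout so the unoptimized code can redo the add generically. A C++ analogue of the condition being tested, using the GCC/Clang builtin:

    #include <cstdint>
    #include <cstdio>

    // True exactly when a + b wraps around as a signed 32-bit add,
    // i.e. when the addl above would set the overflow flag.
    bool AddWouldDeopt(int32_t a, int32_t b, int32_t* sum) {
      return __builtin_add_overflow(a, b, sum);
    }

    int main() {
      int32_t r;
      std::printf("%d\n", AddWouldDeopt(INT32_MAX, 1, &r));  // 1: would deopt
      std::printf("%d\n", AddWouldDeopt(1, 2, &r));          // 0: r == 3
    }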
830 | 905 |
831 | 906 |
832 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { | 907 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { |
833 Abort("Unimplemented: %s", "DoArithmeticD"); | 908 Abort("Unimplemented: %s", "DoArithmeticD"); |
834 } | 909 } |
835 | 910 |
836 | 911 |
837 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 912 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
838 Abort("Unimplemented: %s", "DoArithmeticT"); | 913 ASSERT(ToRegister(instr->InputAt(0)).is(rdx)); |
| 914 ASSERT(ToRegister(instr->InputAt(1)).is(rax)); |
| 915 ASSERT(ToRegister(instr->result()).is(rax)); |
| 916 |
| 917 GenericBinaryOpStub stub(instr->op(), NO_OVERWRITE, NO_GENERIC_BINARY_FLAGS); |
| 918 stub.SetArgsInRegisters(); |
| 919 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
839 } | 920 } |
840 | 921 |
841 | 922 |
842 int LCodeGen::GetNextEmittedBlock(int block) { | 923 int LCodeGen::GetNextEmittedBlock(int block) { |
843 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { | 924 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { |
844 LLabel* label = chunk_->GetLabel(i); | 925 LLabel* label = chunk_->GetLabel(i); |
845 if (!label->HasReplacement()) return i; | 926 if (!label->HasReplacement()) return i; |
846 } | 927 } |
847 return -1; | 928 return -1; |
848 } | 929 } |
849 | 930 |
850 | 931 |
851 void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) { | 932 void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) { |
852 Abort("Unimplemented: %s", "EmitBranch"); | 933 Abort("Unimplemented: %s", "EmitBranch"); |
853 } | 934 } |
854 | 935 |
855 | 936 |
856 void LCodeGen::DoBranch(LBranch* instr) { | 937 void LCodeGen::DoBranch(LBranch* instr) { |
857 Abort("Unimplemented: %s", "DoBranch"); | 938 Abort("Unimplemented: %s", "DoBranch"); |
858 } | 939 } |
859 | 940 |
860 | 941 |
861 void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) { | 942 void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) { |
862 Abort("Unimplemented: %s", "EmitGoto"); | 943 block = chunk_->LookupDestination(block); |
| 944 int next_block = GetNextEmittedBlock(current_block_); |
| 945 if (block != next_block) { |
| 946 // Perform stack overflow check if this goto needs it before jumping. |
| 947 if (deferred_stack_check != NULL) { |
| 948 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 949 __ j(above_equal, chunk_->GetAssemblyLabel(block)); |
| 950 __ jmp(deferred_stack_check->entry()); |
| 951 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); |
| 952 } else { |
| 953 __ jmp(chunk_->GetAssemblyLabel(block)); |
| 954 } |
| 955 } |
863 } | 956 } |
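
Aside on the EmitGoto hunk above: the block != next_block test elides jumps to the block that will be emitted immediately afterwards, so straight-line control flow falls through for free; only genuinely non-adjacent edges (optionally detoured through a deferred stack check) cost a jmp. The decision distilled, illustrative only:

    // A goto needs an explicit jmp only when its destination is not the
    // next block in emission order; equal blocks simply fall through.
    bool NeedsJump(int destination_block, int next_emitted_block) {
      return destination_block != next_emitted_block;
    }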
864 | 957 |
865 | 958 |
866 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { | 959 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { |
867 Abort("Unimplemented: %s", "DoDeferredStackCheck"); | 960 Abort("Unimplemented: %s", "DoDeferredStackCheck"); |
868 } | 961 } |
869 | 962 |
870 | 963 |
871 void LCodeGen::DoGoto(LGoto* instr) { | 964 void LCodeGen::DoGoto(LGoto* instr) { |
872 class DeferredStackCheck: public LDeferredCode { | 965 class DeferredStackCheck: public LDeferredCode { |
(...skipping 99 matching lines...)
972 void LCodeGen::DoIsSmi(LIsSmi* instr) { | 1065 void LCodeGen::DoIsSmi(LIsSmi* instr) { |
973 Abort("Unimplemented: %s", "DoIsSmi"); | 1066 Abort("Unimplemented: %s", "DoIsSmi"); |
974 } | 1067 } |
975 | 1068 |
976 | 1069 |
977 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) { | 1070 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) { |
978 Abort("Unimplemented: %s", "DoIsSmiAndBranch"); | 1071 Abort("Unimplemented: %s", "DoIsSmiAndBranch"); |
979 } | 1072 } |
980 | 1073 |
981 | 1074 |
982 InstanceType LHasInstanceType::TestType() { | |
983 InstanceType from = hydrogen()->from(); | |
984 InstanceType to = hydrogen()->to(); | |
985 if (from == FIRST_TYPE) return to; | |
986 ASSERT(from == to || to == LAST_TYPE); | |
987 return from; | |
988 } | |
989 | |
990 | |
991 | |
992 Condition LHasInstanceType::BranchCondition() { | |
993 InstanceType from = hydrogen()->from(); | |
994 InstanceType to = hydrogen()->to(); | |
995 if (from == to) return equal; | |
996 if (to == LAST_TYPE) return above_equal; | |
997 if (from == FIRST_TYPE) return below_equal; | |
998 UNREACHABLE(); | |
999 return equal; | |
1000 } | |
1001 | |
1002 | |
1003 void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) { | 1075 void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) { |
1004 Abort("Unimplemented: %s", "DoHasInstanceType"); | 1076 Abort("Unimplemented: %s", "DoHasInstanceType"); |
1005 } | 1077 } |
1006 | 1078 |
1007 | 1079 |
1008 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) { | 1080 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) { |
1009 Abort("Unimplemented: %s", "DoHasInstanceTypeAndBranch"); | 1081 Abort("Unimplemented: %s", "DoHasInstanceTypeAndBranch"); |
1010 } | 1082 } |
1011 | 1083 |
1012 | 1084 |
(...skipping 453 matching lines...)
1466 | 1538 |
1467 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 1539 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { |
1468 Abort("Unimplemented: %s", "DoOsrEntry"); | 1540 Abort("Unimplemented: %s", "DoOsrEntry"); |
1469 } | 1541 } |
1470 | 1542 |
1471 #undef __ | 1543 #undef __ |
1472 | 1544 |
1473 } } // namespace v8::internal | 1545 } } // namespace v8::internal |
1474 | 1546 |
1475 #endif // V8_TARGET_ARCH_X64 | 1547 #endif // V8_TARGET_ARCH_X64 |