| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 224 matching lines...) |
| 235 // Update the write barrier. This clobbers a3 and a0. | 235 // Update the write barrier. This clobbers a3 and a0. |
| 236 __ RecordWriteContextSlot( | 236 __ RecordWriteContextSlot( |
| 237 cp, target.offset(), a0, a3, GetRAState(), kSaveFPRegs); | 237 cp, target.offset(), a0, a3, GetRAState(), kSaveFPRegs); |
| 238 } | 238 } |
| 239 } | 239 } |
| 240 Comment(";;; End allocate local context"); | 240 Comment(";;; End allocate local context"); |
| 241 } | 241 } |
| 242 | 242 |
| 243 // Trace the call. | 243 // Trace the call. |
| 244 if (FLAG_trace && info()->IsOptimizing()) { | 244 if (FLAG_trace && info()->IsOptimizing()) { |
| 245 // We have not executed any compiled code yet, so cp still holds the |
| 246 // incoming context. |
| 245 __ CallRuntime(Runtime::kTraceEnter, 0); | 247 __ CallRuntime(Runtime::kTraceEnter, 0); |
| 246 } | 248 } |
| 247 return !is_aborted(); | 249 return !is_aborted(); |
| 248 } | 250 } |
| 249 | 251 |
| 250 | 252 |
| 251 void LCodeGen::GenerateOsrPrologue() { | 253 void LCodeGen::GenerateOsrPrologue() { |
| 252 // Generate the OSR entry prologue at the first unknown OSR value, or if there | 254 // Generate the OSR entry prologue at the first unknown OSR value, or if there |
| 253 // are none, at the OSR entrypoint instruction. | 255 // are none, at the OSR entrypoint instruction. |
| 254 if (osr_pc_offset_ >= 0) return; | 256 if (osr_pc_offset_ >= 0) return; |
| (...skipping 461 matching lines...) |
| 716 ASSERT(instr != NULL); | 718 ASSERT(instr != NULL); |
| 717 LPointerMap* pointers = instr->pointer_map(); | 719 LPointerMap* pointers = instr->pointer_map(); |
| 718 ASSERT(pointers != NULL); | 720 ASSERT(pointers != NULL); |
| 719 RecordPosition(pointers->position()); | 721 RecordPosition(pointers->position()); |
| 720 | 722 |
| 721 __ CallRuntime(function, num_arguments); | 723 __ CallRuntime(function, num_arguments); |
| 722 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); | 724 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
| 723 } | 725 } |
| 724 | 726 |
| 725 | 727 |
| 728 void LCodeGen::LoadContextFromDeferred(LOperand* context) { |
| 729 if (context->IsRegister()) { |
| 730 __ Move(cp, ToRegister(context)); |
| 731 } else if (context->IsStackSlot()) { |
| 732 __ lw(cp, ToMemOperand(context)); |
| 733 } else { |
| 734 UNREACHABLE(); |
| 735 } |
| 736 } |
| 737 |
| 738 |
| 726 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, | 739 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, |
| 727 int argc, | 740 int argc, |
| 728 LInstruction* instr) { | 741 LInstruction* instr, |
| 742 LOperand* context) { |
| 743 LoadContextFromDeferred(context); |
| 729 __ CallRuntimeSaveDoubles(id); | 744 __ CallRuntimeSaveDoubles(id); |
| 730 RecordSafepointWithRegisters( | 745 RecordSafepointWithRegisters( |
| 731 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt); | 746 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt); |
| 732 } | 747 } |
| 733 | 748 |
| 734 | 749 |
| 735 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, | 750 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, |
| 736 Safepoint::DeoptMode mode) { | 751 Safepoint::DeoptMode mode) { |
| 737 if (!environment->HasBeenRegistered()) { | 752 if (!environment->HasBeenRegistered()) { |
| 738 // Physical stack frame layout: | 753 // Physical stack frame layout: |
| (...skipping 204 matching lines...) |
| 943 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | 958 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), |
| 944 kind, arguments, deopt_mode); | 959 kind, arguments, deopt_mode); |
| 945 for (int i = 0; i < operands->length(); i++) { | 960 for (int i = 0; i < operands->length(); i++) { |
| 946 LOperand* pointer = operands->at(i); | 961 LOperand* pointer = operands->at(i); |
| 947 if (pointer->IsStackSlot()) { | 962 if (pointer->IsStackSlot()) { |
| 948 safepoint.DefinePointerSlot(pointer->index(), zone()); | 963 safepoint.DefinePointerSlot(pointer->index(), zone()); |
| 949 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 964 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
| 950 safepoint.DefinePointerRegister(ToRegister(pointer), zone()); | 965 safepoint.DefinePointerRegister(ToRegister(pointer), zone()); |
| 951 } | 966 } |
| 952 } | 967 } |
| 953 if (kind & Safepoint::kWithRegisters) { | |
| 954 // Register cp always contains a pointer to the context. | |
| 955 safepoint.DefinePointerRegister(cp, zone()); | |
| 956 } | |
| 957 } | 968 } |
| 958 | 969 |
| 959 | 970 |
| 960 void LCodeGen::RecordSafepoint(LPointerMap* pointers, | 971 void LCodeGen::RecordSafepoint(LPointerMap* pointers, |
| 961 Safepoint::DeoptMode deopt_mode) { | 972 Safepoint::DeoptMode deopt_mode) { |
| 962 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode); | 973 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode); |
| 963 } | 974 } |
| 964 | 975 |
| 965 | 976 |
| 966 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) { | 977 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) { |
| (...skipping 72 matching lines...) |
| 1039 DoGap(instr); | 1050 DoGap(instr); |
| 1040 } | 1051 } |
| 1041 | 1052 |
| 1042 | 1053 |
| 1043 void LCodeGen::DoParameter(LParameter* instr) { | 1054 void LCodeGen::DoParameter(LParameter* instr) { |
| 1044 // Nothing to do. | 1055 // Nothing to do. |
| 1045 } | 1056 } |
| 1046 | 1057 |
| 1047 | 1058 |
| 1048 void LCodeGen::DoCallStub(LCallStub* instr) { | 1059 void LCodeGen::DoCallStub(LCallStub* instr) { |
| 1060 ASSERT(ToRegister(instr->context()).is(cp)); |
| 1049 ASSERT(ToRegister(instr->result()).is(v0)); | 1061 ASSERT(ToRegister(instr->result()).is(v0)); |
| 1050 switch (instr->hydrogen()->major_key()) { | 1062 switch (instr->hydrogen()->major_key()) { |
| 1051 case CodeStub::RegExpConstructResult: { | 1063 case CodeStub::RegExpConstructResult: { |
| 1052 RegExpConstructResultStub stub; | 1064 RegExpConstructResultStub stub; |
| 1053 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 1065 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 1054 break; | 1066 break; |
| 1055 } | 1067 } |
| 1056 case CodeStub::RegExpExec: { | 1068 case CodeStub::RegExpExec: { |
| 1057 RegExpExecStub stub; | 1069 RegExpExecStub stub; |
| 1058 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 1070 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| (...skipping 762 matching lines...) |
| 1821 __ Addu(scratch, string, scratch); | 1833 __ Addu(scratch, string, scratch); |
| 1822 __ sh(value, FieldMemOperand(scratch, SeqString::kHeaderSize)); | 1834 __ sh(value, FieldMemOperand(scratch, SeqString::kHeaderSize)); |
| 1823 } | 1835 } |
| 1824 } | 1836 } |
| 1825 } | 1837 } |
| 1826 | 1838 |
| 1827 | 1839 |
| 1828 void LCodeGen::DoThrow(LThrow* instr) { | 1840 void LCodeGen::DoThrow(LThrow* instr) { |
| 1829 Register input_reg = EmitLoadRegister(instr->value(), at); | 1841 Register input_reg = EmitLoadRegister(instr->value(), at); |
| 1830 __ push(input_reg); | 1842 __ push(input_reg); |
| 1843 ASSERT(ToRegister(instr->context()).is(cp)); |
| 1831 CallRuntime(Runtime::kThrow, 1, instr); | 1844 CallRuntime(Runtime::kThrow, 1, instr); |
| 1832 | 1845 |
| 1833 if (FLAG_debug_code) { | 1846 if (FLAG_debug_code) { |
| 1834 __ stop("Unreachable code."); | 1847 __ stop("Unreachable code."); |
| 1835 } | 1848 } |
| 1836 } | 1849 } |
| 1837 | 1850 |
| 1838 | 1851 |
| 1839 void LCodeGen::DoAddI(LAddI* instr) { | 1852 void LCodeGen::DoAddI(LAddI* instr) { |
| 1840 LOperand* left = instr->left(); | 1853 LOperand* left = instr->left(); |
| (...skipping 131 matching lines...) |
| 1972 break; | 1985 break; |
| 1973 } | 1986 } |
| 1974 default: | 1987 default: |
| 1975 UNREACHABLE(); | 1988 UNREACHABLE(); |
| 1976 break; | 1989 break; |
| 1977 } | 1990 } |
| 1978 } | 1991 } |
| 1979 | 1992 |
| 1980 | 1993 |
| 1981 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1994 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 1995 ASSERT(ToRegister(instr->context()).is(cp)); |
| 1982 ASSERT(ToRegister(instr->left()).is(a1)); | 1996 ASSERT(ToRegister(instr->left()).is(a1)); |
| 1983 ASSERT(ToRegister(instr->right()).is(a0)); | 1997 ASSERT(ToRegister(instr->right()).is(a0)); |
| 1984 ASSERT(ToRegister(instr->result()).is(v0)); | 1998 ASSERT(ToRegister(instr->result()).is(v0)); |
| 1985 | 1999 |
| 1986 BinaryOpStub stub(instr->op(), NO_OVERWRITE); | 2000 BinaryOpStub stub(instr->op(), NO_OVERWRITE); |
| 1987 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2001 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 1988 // Other archs use a nop here to signal that there is no inlined | 2002 // Other archs use a nop here to signal that there is no inlined |
| 1989 // patchable code. Mips does not need the nop, since our marker | 2003 // patchable code. Mips does not need the nop, since our marker |
| 1990 // instruction (andi zero_reg) will never be used in normal code. | 2004 // instruction (andi zero_reg) will never be used in normal code. |
| 1991 } | 2005 } |
| (...skipping 426 matching lines...) |
| 2418 case Token::GTE: | 2432 case Token::GTE: |
| 2419 return ge; | 2433 return ge; |
| 2420 default: | 2434 default: |
| 2421 UNREACHABLE(); | 2435 UNREACHABLE(); |
| 2422 return kNoCondition; | 2436 return kNoCondition; |
| 2423 } | 2437 } |
| 2424 } | 2438 } |
| 2425 | 2439 |
| 2426 | 2440 |
| 2427 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { | 2441 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { |
| 2442 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2428 Token::Value op = instr->op(); | 2443 Token::Value op = instr->op(); |
| 2429 | 2444 |
| 2430 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); | 2445 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); |
| 2431 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2446 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2432 | 2447 |
| 2433 Condition condition = ComputeCompareCondition(op); | 2448 Condition condition = ComputeCompareCondition(op); |
| 2434 | 2449 |
| 2435 EmitBranch(instr, condition, v0, Operand(zero_reg)); | 2450 EmitBranch(instr, condition, v0, Operand(zero_reg)); |
| 2436 } | 2451 } |
| 2437 | 2452 |
| (...skipping 139 matching lines...) |
| 2577 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { | 2592 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { |
| 2578 Register reg = ToRegister(instr->value()); | 2593 Register reg = ToRegister(instr->value()); |
| 2579 Register temp = ToRegister(instr->temp()); | 2594 Register temp = ToRegister(instr->temp()); |
| 2580 | 2595 |
| 2581 __ lw(temp, FieldMemOperand(reg, HeapObject::kMapOffset)); | 2596 __ lw(temp, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 2582 EmitBranch(instr, eq, temp, Operand(instr->map())); | 2597 EmitBranch(instr, eq, temp, Operand(instr->map())); |
| 2583 } | 2598 } |
| 2584 | 2599 |
| 2585 | 2600 |
| 2586 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 2601 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
| 2602 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2587 Label true_label, done; | 2603 Label true_label, done; |
| 2588 ASSERT(ToRegister(instr->left()).is(a0)); // Object is in a0. | 2604 ASSERT(ToRegister(instr->left()).is(a0)); // Object is in a0. |
| 2589 ASSERT(ToRegister(instr->right()).is(a1)); // Function is in a1. | 2605 ASSERT(ToRegister(instr->right()).is(a1)); // Function is in a1. |
| 2590 Register result = ToRegister(instr->result()); | 2606 Register result = ToRegister(instr->result()); |
| 2591 ASSERT(result.is(v0)); | 2607 ASSERT(result.is(v0)); |
| 2592 | 2608 |
| 2593 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 2609 InstanceofStub stub(InstanceofStub::kArgsInRegisters); |
| 2594 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2610 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 2595 | 2611 |
| 2596 __ Branch(&true_label, eq, result, Operand(zero_reg)); | 2612 __ Branch(&true_label, eq, result, Operand(zero_reg)); |
| (...skipping 90 matching lines...) |
| 2687 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | 2703 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; |
| 2688 flags = static_cast<InstanceofStub::Flags>( | 2704 flags = static_cast<InstanceofStub::Flags>( |
| 2689 flags | InstanceofStub::kArgsInRegisters); | 2705 flags | InstanceofStub::kArgsInRegisters); |
| 2690 flags = static_cast<InstanceofStub::Flags>( | 2706 flags = static_cast<InstanceofStub::Flags>( |
| 2691 flags | InstanceofStub::kCallSiteInlineCheck); | 2707 flags | InstanceofStub::kCallSiteInlineCheck); |
| 2692 flags = static_cast<InstanceofStub::Flags>( | 2708 flags = static_cast<InstanceofStub::Flags>( |
| 2693 flags | InstanceofStub::kReturnTrueFalseObject); | 2709 flags | InstanceofStub::kReturnTrueFalseObject); |
| 2694 InstanceofStub stub(flags); | 2710 InstanceofStub stub(flags); |
| 2695 | 2711 |
| 2696 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 2712 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 2713 LoadContextFromDeferred(instr->context()); |
| 2697 | 2714 |
| 2698 // Get the temp register reserved by the instruction. This needs to be t0 as | 2715 // Get the temp register reserved by the instruction. This needs to be t0 as |
| 2699 // its slot of the pushing of safepoint registers is used to communicate the | 2716 // its slot of the pushing of safepoint registers is used to communicate the |
| 2700 // offset to the location of the map check. | 2717 // offset to the location of the map check. |
| 2701 Register temp = ToRegister(instr->temp()); | 2718 Register temp = ToRegister(instr->temp()); |
| 2702 ASSERT(temp.is(t0)); | 2719 ASSERT(temp.is(t0)); |
| 2703 __ LoadHeapObject(InstanceofStub::right(), instr->function()); | 2720 __ LoadHeapObject(InstanceofStub::right(), instr->function()); |
| 2704 static const int kAdditionalDelta = 7; | 2721 static const int kAdditionalDelta = 7; |
| 2705 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; | 2722 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; |
| 2706 Label before_push_delta; | 2723 Label before_push_delta; |
| (...skipping 17 matching lines...) |
| 2724 | 2741 |
| 2725 void LCodeGen::DoInstanceSize(LInstanceSize* instr) { | 2742 void LCodeGen::DoInstanceSize(LInstanceSize* instr) { |
| 2726 Register object = ToRegister(instr->object()); | 2743 Register object = ToRegister(instr->object()); |
| 2727 Register result = ToRegister(instr->result()); | 2744 Register result = ToRegister(instr->result()); |
| 2728 __ lw(result, FieldMemOperand(object, HeapObject::kMapOffset)); | 2745 __ lw(result, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 2729 __ lbu(result, FieldMemOperand(result, Map::kInstanceSizeOffset)); | 2746 __ lbu(result, FieldMemOperand(result, Map::kInstanceSizeOffset)); |
| 2730 } | 2747 } |
| 2731 | 2748 |
| 2732 | 2749 |
| 2733 void LCodeGen::DoCmpT(LCmpT* instr) { | 2750 void LCodeGen::DoCmpT(LCmpT* instr) { |
| 2751 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2734 Token::Value op = instr->op(); | 2752 Token::Value op = instr->op(); |
| 2735 | 2753 |
| 2736 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); | 2754 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); |
| 2737 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2755 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2738 // On MIPS there is no need for a "no inlined smi code" marker (nop). | 2756 // On MIPS there is no need for a "no inlined smi code" marker (nop). |
| 2739 | 2757 |
| 2740 Condition condition = ComputeCompareCondition(op); | 2758 Condition condition = ComputeCompareCondition(op); |
| 2741 // A minor optimization that relies on LoadRoot always emitting one | 2759 // A minor optimization that relies on LoadRoot always emitting one |
| 2742 // instruction. | 2760 // instruction. |
| 2743 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm()); | 2761 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm()); |
| 2744 Label done, check; | 2762 Label done, check; |
| 2745 __ Branch(USE_DELAY_SLOT, &done, condition, v0, Operand(zero_reg)); | 2763 __ Branch(USE_DELAY_SLOT, &done, condition, v0, Operand(zero_reg)); |
| 2746 __ bind(&check); | 2764 __ bind(&check); |
| 2747 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex); | 2765 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex); |
| 2748 ASSERT_EQ(1, masm()->InstructionsGeneratedSince(&check)); | 2766 ASSERT_EQ(1, masm()->InstructionsGeneratedSince(&check)); |
| 2749 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); | 2767 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); |
| 2750 __ bind(&done); | 2768 __ bind(&done); |
| 2751 } | 2769 } |
| 2752 | 2770 |
| 2753 | 2771 |
| 2754 void LCodeGen::DoReturn(LReturn* instr) { | 2772 void LCodeGen::DoReturn(LReturn* instr) { |
| 2755 if (FLAG_trace && info()->IsOptimizing()) { | 2773 if (FLAG_trace && info()->IsOptimizing()) { |
| 2756 // Push the return value on the stack as the parameter. | 2774 // Push the return value on the stack as the parameter. |
| 2757 // Runtime::TraceExit returns its parameter in v0. | 2775 // Runtime::TraceExit returns its parameter in v0. We're leaving the code |
| 2776 // managed by the register allocator and tearing down the frame, so it's |
| 2777 // safe to write to the context register. |
| 2758 __ push(v0); | 2778 __ push(v0); |
| 2779 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2759 __ CallRuntime(Runtime::kTraceExit, 1); | 2780 __ CallRuntime(Runtime::kTraceExit, 1); |
| 2760 } | 2781 } |
| 2761 if (info()->saves_caller_doubles()) { | 2782 if (info()->saves_caller_doubles()) { |
| 2762 ASSERT(NeedsEagerFrame()); | 2783 ASSERT(NeedsEagerFrame()); |
| 2763 BitVector* doubles = chunk()->allocated_double_registers(); | 2784 BitVector* doubles = chunk()->allocated_double_registers(); |
| 2764 BitVector::Iterator save_iterator(doubles); | 2785 BitVector::Iterator save_iterator(doubles); |
| 2765 int count = 0; | 2786 int count = 0; |
| 2766 while (!save_iterator.Done()) { | 2787 while (!save_iterator.Done()) { |
| 2767 __ ldc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()), | 2788 __ ldc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()), |
| 2768 MemOperand(sp, count * kDoubleSize)); | 2789 MemOperand(sp, count * kDoubleSize)); |
| (...skipping 34 matching lines...) |
| 2803 __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell().handle()))); | 2824 __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell().handle()))); |
| 2804 __ lw(result, FieldMemOperand(at, Cell::kValueOffset)); | 2825 __ lw(result, FieldMemOperand(at, Cell::kValueOffset)); |
| 2805 if (instr->hydrogen()->RequiresHoleCheck()) { | 2826 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2806 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 2827 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 2807 DeoptimizeIf(eq, instr->environment(), result, Operand(at)); | 2828 DeoptimizeIf(eq, instr->environment(), result, Operand(at)); |
| 2808 } | 2829 } |
| 2809 } | 2830 } |
| 2810 | 2831 |
| 2811 | 2832 |
| 2812 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { | 2833 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { |
| 2834 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2813 ASSERT(ToRegister(instr->global_object()).is(a0)); | 2835 ASSERT(ToRegister(instr->global_object()).is(a0)); |
| 2814 ASSERT(ToRegister(instr->result()).is(v0)); | 2836 ASSERT(ToRegister(instr->result()).is(v0)); |
| 2815 | 2837 |
| 2816 __ li(a2, Operand(instr->name())); | 2838 __ li(a2, Operand(instr->name())); |
| 2817 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET | 2839 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET |
| 2818 : RelocInfo::CODE_TARGET_CONTEXT; | 2840 : RelocInfo::CODE_TARGET_CONTEXT; |
| 2819 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 2841 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
| 2820 CallCode(ic, mode, instr); | 2842 CallCode(ic, mode, instr); |
| 2821 } | 2843 } |
| 2822 | 2844 |
| (...skipping 17 matching lines...) |
| 2840 DeoptimizeIf(eq, instr->environment(), payload, Operand(at)); | 2862 DeoptimizeIf(eq, instr->environment(), payload, Operand(at)); |
| 2841 } | 2863 } |
| 2842 | 2864 |
| 2843 // Store the value. | 2865 // Store the value. |
| 2844 __ sw(value, FieldMemOperand(cell, Cell::kValueOffset)); | 2866 __ sw(value, FieldMemOperand(cell, Cell::kValueOffset)); |
| 2845 // Cells are always rescanned, so no write barrier here. | 2867 // Cells are always rescanned, so no write barrier here. |
| 2846 } | 2868 } |
| 2847 | 2869 |
| 2848 | 2870 |
| 2849 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { | 2871 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { |
| 2872 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2850 ASSERT(ToRegister(instr->global_object()).is(a1)); | 2873 ASSERT(ToRegister(instr->global_object()).is(a1)); |
| 2851 ASSERT(ToRegister(instr->value()).is(a0)); | 2874 ASSERT(ToRegister(instr->value()).is(a0)); |
| 2852 | 2875 |
| 2853 __ li(a2, Operand(instr->name())); | 2876 __ li(a2, Operand(instr->name())); |
| 2854 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) | 2877 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) |
| 2855 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 2878 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 2856 : isolate()->builtins()->StoreIC_Initialize(); | 2879 : isolate()->builtins()->StoreIC_Initialize(); |
| 2857 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); | 2880 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); |
| 2858 } | 2881 } |
| 2859 | 2882 |
| (...skipping 77 matching lines...) |
| 2937 if (access.IsInobject()) { | 2960 if (access.IsInobject()) { |
| 2938 __ lw(result, FieldMemOperand(object, offset)); | 2961 __ lw(result, FieldMemOperand(object, offset)); |
| 2939 } else { | 2962 } else { |
| 2940 __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 2963 __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 2941 __ lw(result, FieldMemOperand(result, offset)); | 2964 __ lw(result, FieldMemOperand(result, offset)); |
| 2942 } | 2965 } |
| 2943 } | 2966 } |
| 2944 | 2967 |
| 2945 | 2968 |
| 2946 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 2969 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
| 2970 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2947 ASSERT(ToRegister(instr->object()).is(a0)); | 2971 ASSERT(ToRegister(instr->object()).is(a0)); |
| 2948 ASSERT(ToRegister(instr->result()).is(v0)); | 2972 ASSERT(ToRegister(instr->result()).is(v0)); |
| 2949 | 2973 |
| 2950 // Name is always in a2. | 2974 // Name is always in a2. |
| 2951 __ li(a2, Operand(instr->name())); | 2975 __ li(a2, Operand(instr->name())); |
| 2952 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 2976 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
| 2953 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2977 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2954 } | 2978 } |
| 2955 | 2979 |
| 2956 | 2980 |
| (...skipping 285 matching lines...) |
| 3242 } else { | 3266 } else { |
| 3243 ASSERT_EQ(-1, shift_size); | 3267 ASSERT_EQ(-1, shift_size); |
| 3244 __ srl(scratch0(), scratch0(), 1); | 3268 __ srl(scratch0(), scratch0(), 1); |
| 3245 __ Addu(scratch0(), base, scratch0()); | 3269 __ Addu(scratch0(), base, scratch0()); |
| 3246 return MemOperand(scratch0()); | 3270 return MemOperand(scratch0()); |
| 3247 } | 3271 } |
| 3248 } | 3272 } |
| 3249 | 3273 |
| 3250 | 3274 |
| 3251 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { | 3275 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { |
| 3276 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3252 ASSERT(ToRegister(instr->object()).is(a1)); | 3277 ASSERT(ToRegister(instr->object()).is(a1)); |
| 3253 ASSERT(ToRegister(instr->key()).is(a0)); | 3278 ASSERT(ToRegister(instr->key()).is(a0)); |
| 3254 | 3279 |
| 3255 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); | 3280 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); |
| 3256 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3281 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 3257 } | 3282 } |
| 3258 | 3283 |
| 3259 | 3284 |
| 3260 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 3285 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
| 3261 Register scratch = scratch0(); | 3286 Register scratch = scratch0(); |
| (...skipping 125 matching lines...) |
| 3387 ASSERT(instr->HasPointerMap()); | 3412 ASSERT(instr->HasPointerMap()); |
| 3388 LPointerMap* pointers = instr->pointer_map(); | 3413 LPointerMap* pointers = instr->pointer_map(); |
| 3389 RecordPosition(pointers->position()); | 3414 RecordPosition(pointers->position()); |
| 3390 SafepointGenerator safepoint_generator( | 3415 SafepointGenerator safepoint_generator( |
| 3391 this, pointers, Safepoint::kLazyDeopt); | 3416 this, pointers, Safepoint::kLazyDeopt); |
| 3392 // The number of arguments is stored in receiver which is a0, as expected | 3417 // The number of arguments is stored in receiver which is a0, as expected |
| 3393 // by InvokeFunction. | 3418 // by InvokeFunction. |
| 3394 ParameterCount actual(receiver); | 3419 ParameterCount actual(receiver); |
| 3395 __ InvokeFunction(function, actual, CALL_FUNCTION, | 3420 __ InvokeFunction(function, actual, CALL_FUNCTION, |
| 3396 safepoint_generator, CALL_AS_METHOD); | 3421 safepoint_generator, CALL_AS_METHOD); |
| 3397 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 3398 } | 3422 } |
| 3399 | 3423 |
| 3400 | 3424 |
| 3401 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 3425 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
| 3402 LOperand* argument = instr->value(); | 3426 LOperand* argument = instr->value(); |
| 3403 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { | 3427 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { |
| 3404 Abort(kDoPushArgumentNotImplementedForDoubleType); | 3428 Abort(kDoPushArgumentNotImplementedForDoubleType); |
| 3405 } else { | 3429 } else { |
| 3406 Register argument_reg = EmitLoadRegister(argument, at); | 3430 Register argument_reg = EmitLoadRegister(argument, at); |
| 3407 __ push(argument_reg); | 3431 __ push(argument_reg); |
| 3408 } | 3432 } |
| 3409 } | 3433 } |
| 3410 | 3434 |
| 3411 | 3435 |
| 3412 void LCodeGen::DoDrop(LDrop* instr) { | 3436 void LCodeGen::DoDrop(LDrop* instr) { |
| 3413 __ Drop(instr->count()); | 3437 __ Drop(instr->count()); |
| 3414 } | 3438 } |
| 3415 | 3439 |
| 3416 | 3440 |
| 3417 void LCodeGen::DoThisFunction(LThisFunction* instr) { | 3441 void LCodeGen::DoThisFunction(LThisFunction* instr) { |
| 3418 Register result = ToRegister(instr->result()); | 3442 Register result = ToRegister(instr->result()); |
| 3419 __ lw(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 3443 __ lw(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 3420 } | 3444 } |
| 3421 | 3445 |
| 3422 | 3446 |
| 3423 void LCodeGen::DoContext(LContext* instr) { | 3447 void LCodeGen::DoContext(LContext* instr) { |
| 3424 // If there is a non-return use, the context must be moved to a register. | 3448 // If there is a non-return use, the context must be moved to a register. |
| 3425 Register result = ToRegister(instr->result()); | 3449 Register result = ToRegister(instr->result()); |
| 3426 for (HUseIterator it(instr->hydrogen()->uses()); !it.Done(); it.Advance()) { | 3450 if (info()->IsOptimizing()) { |
| 3427 if (!it.value()->IsReturn()) { | 3451 __ lw(result, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 3428 __ mov(result, cp); | 3452 } else { |
| 3429 return; | 3453 // If there is no frame, the context must be in cp. |
| 3430 } | 3454 ASSERT(result.is(cp)); |
| 3431 } | 3455 } |
| 3432 } | 3456 } |
| 3433 | 3457 |
| 3434 | 3458 |
| 3435 void LCodeGen::DoOuterContext(LOuterContext* instr) { | 3459 void LCodeGen::DoOuterContext(LOuterContext* instr) { |
| 3436 Register context = ToRegister(instr->context()); | 3460 Register context = ToRegister(instr->context()); |
| 3437 Register result = ToRegister(instr->result()); | 3461 Register result = ToRegister(instr->result()); |
| 3438 __ lw(result, | 3462 __ lw(result, |
| 3439 MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 3463 MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
| 3440 } | 3464 } |
| 3441 | 3465 |
| 3442 | 3466 |
| 3443 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { | 3467 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { |
| 3468 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3444 __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs()); | 3469 __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs()); |
| 3445 __ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags()))); | 3470 __ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags()))); |
| 3446 // The context is the first argument. | 3471 // The context is the first argument. |
| 3447 __ Push(cp, scratch0(), scratch1()); | 3472 __ Push(cp, scratch0(), scratch1()); |
| 3448 CallRuntime(Runtime::kDeclareGlobals, 3, instr); | 3473 CallRuntime(Runtime::kDeclareGlobals, 3, instr); |
| 3449 } | 3474 } |
| 3450 | 3475 |
| 3451 | 3476 |
| 3452 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { | 3477 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { |
| 3478 Register context = ToRegister(instr->context()); |
| 3453 Register result = ToRegister(instr->result()); | 3479 Register result = ToRegister(instr->result()); |
| 3454 __ lw(result, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | 3480 __ lw(result, ContextOperand(context, Context::GLOBAL_OBJECT_INDEX)); |
| 3455 } | 3481 } |
| 3456 | 3482 |
| 3457 | 3483 |
| 3458 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { | 3484 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { |
| 3459 Register global = ToRegister(instr->global_object()); | 3485 Register global = ToRegister(instr->global_object()); |
| 3460 Register result = ToRegister(instr->result()); | 3486 Register result = ToRegister(instr->result()); |
| 3461 __ lw(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset)); | 3487 __ lw(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset)); |
| 3462 } | 3488 } |
| 3463 | 3489 |
| 3464 | 3490 |
| (...skipping 32 matching lines...) |
| 3497 | 3523 |
| 3498 // Set up deoptimization. | 3524 // Set up deoptimization. |
| 3499 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); | 3525 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
| 3500 } else { | 3526 } else { |
| 3501 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); | 3527 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
| 3502 ParameterCount count(arity); | 3528 ParameterCount count(arity); |
| 3503 ParameterCount expected(formal_parameter_count); | 3529 ParameterCount expected(formal_parameter_count); |
| 3504 __ InvokeFunction( | 3530 __ InvokeFunction( |
| 3505 function, expected, count, CALL_FUNCTION, generator, call_kind); | 3531 function, expected, count, CALL_FUNCTION, generator, call_kind); |
| 3506 } | 3532 } |
| 3507 | |
| 3508 // Restore context. | |
| 3509 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 3510 } | 3533 } |
| 3511 | 3534 |
| 3512 | 3535 |
| 3513 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 3536 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
| 3514 ASSERT(ToRegister(instr->result()).is(v0)); | 3537 ASSERT(ToRegister(instr->result()).is(v0)); |
| 3515 __ mov(a0, v0); | 3538 __ mov(a0, v0); |
| 3516 CallKnownFunction(instr->hydrogen()->function(), | 3539 CallKnownFunction(instr->hydrogen()->function(), |
| 3517 instr->hydrogen()->formal_parameter_count(), | 3540 instr->hydrogen()->formal_parameter_count(), |
| 3518 instr->arity(), | 3541 instr->arity(), |
| 3519 instr, | 3542 instr, |
| 3520 CALL_AS_METHOD, | 3543 CALL_AS_METHOD, |
| 3521 A1_UNINITIALIZED); | 3544 A1_UNINITIALIZED); |
| 3522 } | 3545 } |
| 3523 | 3546 |
| 3524 | 3547 |
| 3525 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { | 3548 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { |
| 3549 ASSERT(instr->context() != NULL); |
| 3550 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3526 Register input = ToRegister(instr->value()); | 3551 Register input = ToRegister(instr->value()); |
| 3527 Register result = ToRegister(instr->result()); | 3552 Register result = ToRegister(instr->result()); |
| 3528 Register scratch = scratch0(); | 3553 Register scratch = scratch0(); |
| 3529 | 3554 |
| 3530 // Deoptimize if not a heap number. | 3555 // Deoptimize if not a heap number. |
| 3531 __ lw(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); | 3556 __ lw(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); |
| 3532 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 3557 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
| 3533 DeoptimizeIf(ne, instr->environment(), scratch, Operand(at)); | 3558 DeoptimizeIf(ne, instr->environment(), scratch, Operand(at)); |
| 3534 | 3559 |
| 3535 Label done; | 3560 Label done; |
| (...skipping 21 matching lines...) |
| 3557 // exponent: floating point exponent value. | 3582 // exponent: floating point exponent value. |
| 3558 | 3583 |
| 3559 Label allocated, slow; | 3584 Label allocated, slow; |
| 3560 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex); | 3585 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex); |
| 3561 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow); | 3586 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow); |
| 3562 __ Branch(&allocated); | 3587 __ Branch(&allocated); |
| 3563 | 3588 |
| 3564 // Slow case: Call the runtime system to do the number allocation. | 3589 // Slow case: Call the runtime system to do the number allocation. |
| 3565 __ bind(&slow); | 3590 __ bind(&slow); |
| 3566 | 3591 |
| 3567 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); | 3592 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr, |
| 3593 instr->context()); |
| 3568 // Set the pointer to the new heap number in tmp. | 3594 // Set the pointer to the new heap number in tmp. |
| 3569 if (!tmp1.is(v0)) | 3595 if (!tmp1.is(v0)) |
| 3570 __ mov(tmp1, v0); | 3596 __ mov(tmp1, v0); |
| 3571 // Restore input_reg after call to runtime. | 3597 // Restore input_reg after call to runtime. |
| 3572 __ LoadFromSafepointRegisterSlot(input, input); | 3598 __ LoadFromSafepointRegisterSlot(input, input); |
| 3573 __ lw(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); | 3599 __ lw(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); |
| 3574 | 3600 |
| 3575 __ bind(&allocated); | 3601 __ bind(&allocated); |
| 3576 // exponent: floating point exponent value. | 3602 // exponent: floating point exponent value. |
| 3577 // tmp1: allocated heap number. | 3603 // tmp1: allocated heap number. |
| (...skipping 297 matching lines...) |
| 3875 Register temp2 = ToRegister(instr->temp2()); | 3901 Register temp2 = ToRegister(instr->temp2()); |
| 3876 | 3902 |
| 3877 MathExpGenerator::EmitMathExp( | 3903 MathExpGenerator::EmitMathExp( |
| 3878 masm(), input, result, double_scratch1, double_scratch2, | 3904 masm(), input, result, double_scratch1, double_scratch2, |
| 3879 temp1, temp2, scratch0()); | 3905 temp1, temp2, scratch0()); |
| 3880 } | 3906 } |
| 3881 | 3907 |
| 3882 | 3908 |
| 3883 void LCodeGen::DoMathLog(LMathLog* instr) { | 3909 void LCodeGen::DoMathLog(LMathLog* instr) { |
| 3884 ASSERT(ToDoubleRegister(instr->result()).is(f4)); | 3910 ASSERT(ToDoubleRegister(instr->result()).is(f4)); |
| 3911 // Set the context register to a GC-safe fake value. Clobbering it is |
| 3912 // OK because this instruction is marked as a call. |
| 3913 __ mov(cp, zero_reg); |
| 3885 TranscendentalCacheStub stub(TranscendentalCache::LOG, | 3914 TranscendentalCacheStub stub(TranscendentalCache::LOG, |
| 3886 TranscendentalCacheStub::UNTAGGED); | 3915 TranscendentalCacheStub::UNTAGGED); |
| 3887 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3916 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3888 } | 3917 } |
| 3889 | 3918 |
| 3890 | 3919 |
| 3891 void LCodeGen::DoMathTan(LMathTan* instr) { | 3920 void LCodeGen::DoMathTan(LMathTan* instr) { |
| 3892 ASSERT(ToDoubleRegister(instr->result()).is(f4)); | 3921 ASSERT(ToDoubleRegister(instr->result()).is(f4)); |
| 3922 // Set the context register to a GC-safe fake value. Clobbering it is |
| 3923 // OK because this instruction is marked as a call. |
| 3924 __ mov(cp, zero_reg); |
| 3893 TranscendentalCacheStub stub(TranscendentalCache::TAN, | 3925 TranscendentalCacheStub stub(TranscendentalCache::TAN, |
| 3894 TranscendentalCacheStub::UNTAGGED); | 3926 TranscendentalCacheStub::UNTAGGED); |
| 3895 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3927 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3896 } | 3928 } |
| 3897 | 3929 |
| 3898 | 3930 |
| 3899 void LCodeGen::DoMathCos(LMathCos* instr) { | 3931 void LCodeGen::DoMathCos(LMathCos* instr) { |
| 3900 ASSERT(ToDoubleRegister(instr->result()).is(f4)); | 3932 ASSERT(ToDoubleRegister(instr->result()).is(f4)); |
| 3933 // Set the context register to a GC-safe fake value. Clobbering it is |
| 3934 // OK because this instruction is marked as a call. |
| 3935 __ mov(cp, zero_reg); |
| 3901 TranscendentalCacheStub stub(TranscendentalCache::COS, | 3936 TranscendentalCacheStub stub(TranscendentalCache::COS, |
| 3902 TranscendentalCacheStub::UNTAGGED); | 3937 TranscendentalCacheStub::UNTAGGED); |
| 3903 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3938 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3904 } | 3939 } |
| 3905 | 3940 |
| 3906 | 3941 |
| 3907 void LCodeGen::DoMathSin(LMathSin* instr) { | 3942 void LCodeGen::DoMathSin(LMathSin* instr) { |
| 3908 ASSERT(ToDoubleRegister(instr->result()).is(f4)); | 3943 ASSERT(ToDoubleRegister(instr->result()).is(f4)); |
| 3944 // Set the context register to a GC-safe fake value. Clobbering it is |
| 3945 // OK because this instruction is marked as a call. |
| 3946 __ mov(cp, zero_reg); |
| 3909 TranscendentalCacheStub stub(TranscendentalCache::SIN, | 3947 TranscendentalCacheStub stub(TranscendentalCache::SIN, |
| 3910 TranscendentalCacheStub::UNTAGGED); | 3948 TranscendentalCacheStub::UNTAGGED); |
| 3911 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3949 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3912 } | 3950 } |
| 3913 | 3951 |
| 3914 | 3952 |
| 3915 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 3953 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { |
| 3954 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3916 ASSERT(ToRegister(instr->function()).is(a1)); | 3955 ASSERT(ToRegister(instr->function()).is(a1)); |
| 3917 ASSERT(instr->HasPointerMap()); | 3956 ASSERT(instr->HasPointerMap()); |
| 3918 | 3957 |
| 3919 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); | 3958 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); |
| 3920 if (known_function.is_null()) { | 3959 if (known_function.is_null()) { |
| 3921 LPointerMap* pointers = instr->pointer_map(); | 3960 LPointerMap* pointers = instr->pointer_map(); |
| 3922 RecordPosition(pointers->position()); | 3961 RecordPosition(pointers->position()); |
| 3923 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); | 3962 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
| 3924 ParameterCount count(instr->arity()); | 3963 ParameterCount count(instr->arity()); |
| 3925 __ InvokeFunction(a1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); | 3964 __ InvokeFunction(a1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); |
| 3926 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 3927 } else { | 3965 } else { |
| 3928 CallKnownFunction(known_function, | 3966 CallKnownFunction(known_function, |
| 3929 instr->hydrogen()->formal_parameter_count(), | 3967 instr->hydrogen()->formal_parameter_count(), |
| 3930 instr->arity(), | 3968 instr->arity(), |
| 3931 instr, | 3969 instr, |
| 3932 CALL_AS_METHOD, | 3970 CALL_AS_METHOD, |
| 3933 A1_CONTAINS_TARGET); | 3971 A1_CONTAINS_TARGET); |
| 3934 } | 3972 } |
| 3935 } | 3973 } |
| 3936 | 3974 |
| 3937 | 3975 |
| 3938 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { | 3976 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { |
| 3977 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3939 ASSERT(ToRegister(instr->result()).is(v0)); | 3978 ASSERT(ToRegister(instr->result()).is(v0)); |
| 3940 | 3979 |
| 3941 int arity = instr->arity(); | 3980 int arity = instr->arity(); |
| 3942 Handle<Code> ic = | 3981 Handle<Code> ic = |
| 3943 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity); | 3982 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity); |
| 3944 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3983 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 3945 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 3946 } | 3984 } |
| 3947 | 3985 |
| 3948 | 3986 |
| 3949 void LCodeGen::DoCallNamed(LCallNamed* instr) { | 3987 void LCodeGen::DoCallNamed(LCallNamed* instr) { |
| 3988 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3950 ASSERT(ToRegister(instr->result()).is(v0)); | 3989 ASSERT(ToRegister(instr->result()).is(v0)); |
| 3951 | 3990 |
| 3952 int arity = instr->arity(); | 3991 int arity = instr->arity(); |
| 3953 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; | 3992 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; |
| 3954 Handle<Code> ic = | 3993 Handle<Code> ic = |
| 3955 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); | 3994 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); |
| 3956 __ li(a2, Operand(instr->name())); | 3995 __ li(a2, Operand(instr->name())); |
| 3957 CallCode(ic, mode, instr); | 3996 CallCode(ic, mode, instr); |
| 3958 // Restore context register. | |
| 3959 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 3960 } | 3997 } |
| 3961 | 3998 |
| 3962 | 3999 |
| 3963 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 4000 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
| 4001 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3964 ASSERT(ToRegister(instr->function()).is(a1)); | 4002 ASSERT(ToRegister(instr->function()).is(a1)); |
| 3965 ASSERT(ToRegister(instr->result()).is(v0)); | 4003 ASSERT(ToRegister(instr->result()).is(v0)); |
| 3966 | 4004 |
| 3967 int arity = instr->arity(); | 4005 int arity = instr->arity(); |
| 3968 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS); | 4006 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS); |
| 3969 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 4007 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3970 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 3971 } | 4008 } |
| 3972 | 4009 |
| 3973 | 4010 |
| 3974 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { | 4011 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { |
| 4012 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3975 ASSERT(ToRegister(instr->result()).is(v0)); | 4013 ASSERT(ToRegister(instr->result()).is(v0)); |
| 3976 | 4014 |
| 3977 int arity = instr->arity(); | 4015 int arity = instr->arity(); |
| 3978 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT; | 4016 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT; |
| 3979 Handle<Code> ic = | 4017 Handle<Code> ic = |
| 3980 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); | 4018 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); |
| 3981 __ li(a2, Operand(instr->name())); | 4019 __ li(a2, Operand(instr->name())); |
| 3982 CallCode(ic, mode, instr); | 4020 CallCode(ic, mode, instr); |
| 3983 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 3984 } | 4021 } |
| 3985 | 4022 |
| 3986 | 4023 |
| 3987 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { | 4024 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { |
| 3988 ASSERT(ToRegister(instr->result()).is(v0)); | 4025 ASSERT(ToRegister(instr->result()).is(v0)); |
| 3989 CallKnownFunction(instr->hydrogen()->target(), | 4026 CallKnownFunction(instr->hydrogen()->target(), |
| 3990 instr->hydrogen()->formal_parameter_count(), | 4027 instr->hydrogen()->formal_parameter_count(), |
| 3991 instr->arity(), | 4028 instr->arity(), |
| 3992 instr, | 4029 instr, |
| 3993 CALL_AS_FUNCTION, | 4030 CALL_AS_FUNCTION, |
| 3994 A1_UNINITIALIZED); | 4031 A1_UNINITIALIZED); |
| 3995 } | 4032 } |
| 3996 | 4033 |
| 3997 | 4034 |
| 3998 void LCodeGen::DoCallNew(LCallNew* instr) { | 4035 void LCodeGen::DoCallNew(LCallNew* instr) { |
| 4036 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3999 ASSERT(ToRegister(instr->constructor()).is(a1)); | 4037 ASSERT(ToRegister(instr->constructor()).is(a1)); |
| 4000 ASSERT(ToRegister(instr->result()).is(v0)); | 4038 ASSERT(ToRegister(instr->result()).is(v0)); |
| 4001 | 4039 |
| 4002 __ li(a0, Operand(instr->arity())); | 4040 __ li(a0, Operand(instr->arity())); |
| 4003 // No cell in a2 for construct type feedback in optimized code | 4041 // No cell in a2 for construct type feedback in optimized code |
| 4004 Handle<Object> undefined_value(isolate()->factory()->undefined_value()); | 4042 Handle<Object> undefined_value(isolate()->factory()->undefined_value()); |
| 4005 __ li(a2, Operand(undefined_value)); | 4043 __ li(a2, Operand(undefined_value)); |
| 4006 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); | 4044 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); |
| 4007 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4045 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 4008 } | 4046 } |
| 4009 | 4047 |
| 4010 | 4048 |
| 4011 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { | 4049 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { |
| 4050 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4012 ASSERT(ToRegister(instr->constructor()).is(a1)); | 4051 ASSERT(ToRegister(instr->constructor()).is(a1)); |
| 4013 ASSERT(ToRegister(instr->result()).is(v0)); | 4052 ASSERT(ToRegister(instr->result()).is(v0)); |
| 4014 | 4053 |
| 4015 __ li(a0, Operand(instr->arity())); | 4054 __ li(a0, Operand(instr->arity())); |
| 4016 __ li(a2, Operand(instr->hydrogen()->property_cell())); | 4055 __ li(a2, Operand(instr->hydrogen()->property_cell())); |
| 4017 ElementsKind kind = instr->hydrogen()->elements_kind(); | 4056 ElementsKind kind = instr->hydrogen()->elements_kind(); |
| 4018 AllocationSiteOverrideMode override_mode = | 4057 AllocationSiteOverrideMode override_mode = |
| 4019 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) | 4058 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) |
| 4020 ? DISABLE_ALLOCATION_SITES | 4059 ? DISABLE_ALLOCATION_SITES |
| 4021 : DONT_OVERRIDE; | 4060 : DONT_OVERRIDE; |
| (...skipping 131 matching lines...) |
| 4153 GetRAState(), | 4192 GetRAState(), |
| 4154 kSaveFPRegs, | 4193 kSaveFPRegs, |
| 4155 EMIT_REMEMBERED_SET, | 4194 EMIT_REMEMBERED_SET, |
| 4156 check_needed); | 4195 check_needed); |
| 4157 } | 4196 } |
| 4158 } | 4197 } |
| 4159 } | 4198 } |
| 4160 | 4199 |
| 4161 | 4200 |
| 4162 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { | 4201 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { |
| 4202 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4163 ASSERT(ToRegister(instr->object()).is(a1)); | 4203 ASSERT(ToRegister(instr->object()).is(a1)); |
| 4164 ASSERT(ToRegister(instr->value()).is(a0)); | 4204 ASSERT(ToRegister(instr->value()).is(a0)); |
| 4165 | 4205 |
| 4166 // Name is always in a2. | 4206 // Name is always in a2. |
| 4167 __ li(a2, Operand(instr->name())); | 4207 __ li(a2, Operand(instr->name())); |
| 4168 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) | 4208 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) |
| 4169 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 4209 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 4170 : isolate()->builtins()->StoreIC_Initialize(); | 4210 : isolate()->builtins()->StoreIC_Initialize(); |
| 4171 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 4211 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 4172 } | 4212 } |
| (...skipping 222 matching lines...) |
| 4395 DoStoreKeyedExternalArray(instr); | 4435 DoStoreKeyedExternalArray(instr); |
| 4396 } else if (instr->hydrogen()->value()->representation().IsDouble()) { | 4436 } else if (instr->hydrogen()->value()->representation().IsDouble()) { |
| 4397 DoStoreKeyedFixedDoubleArray(instr); | 4437 DoStoreKeyedFixedDoubleArray(instr); |
| 4398 } else { | 4438 } else { |
| 4399 DoStoreKeyedFixedArray(instr); | 4439 DoStoreKeyedFixedArray(instr); |
| 4400 } | 4440 } |
| 4401 } | 4441 } |
| 4402 | 4442 |
| 4403 | 4443 |
| 4404 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { | 4444 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { |
| 4445 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4405 ASSERT(ToRegister(instr->object()).is(a2)); | 4446 ASSERT(ToRegister(instr->object()).is(a2)); |
| 4406 ASSERT(ToRegister(instr->key()).is(a1)); | 4447 ASSERT(ToRegister(instr->key()).is(a1)); |
| 4407 ASSERT(ToRegister(instr->value()).is(a0)); | 4448 ASSERT(ToRegister(instr->value()).is(a0)); |
| 4408 | 4449 |
| 4409 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) | 4450 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) |
| 4410 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() | 4451 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() |
| 4411 : isolate()->builtins()->KeyedStoreIC_Initialize(); | 4452 : isolate()->builtins()->KeyedStoreIC_Initialize(); |
| 4412 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 4453 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 4413 } | 4454 } |
| 4414 | 4455 |
| (...skipping 12 matching lines...) |
| 4427 __ Branch(&not_applicable, ne, scratch, Operand(from_map)); | 4468 __ Branch(&not_applicable, ne, scratch, Operand(from_map)); |
| 4428 | 4469 |
| 4429 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { | 4470 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { |
| 4430 Register new_map_reg = ToRegister(instr->new_map_temp()); | 4471 Register new_map_reg = ToRegister(instr->new_map_temp()); |
| 4431 __ li(new_map_reg, Operand(to_map)); | 4472 __ li(new_map_reg, Operand(to_map)); |
| 4432 __ sw(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset)); | 4473 __ sw(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset)); |
| 4433 // Write barrier. | 4474 // Write barrier. |
| 4434 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, | 4475 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, |
| 4435 scratch, GetRAState(), kDontSaveFPRegs); | 4476 scratch, GetRAState(), kDontSaveFPRegs); |
| 4436 } else { | 4477 } else { |
| 4478 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4437 PushSafepointRegistersScope scope( | 4479 PushSafepointRegistersScope scope( |
| 4438 this, Safepoint::kWithRegistersAndDoubles); | 4480 this, Safepoint::kWithRegistersAndDoubles); |
| 4439 __ mov(a0, object_reg); | 4481 __ mov(a0, object_reg); |
| 4440 __ li(a1, Operand(to_map)); | 4482 __ li(a1, Operand(to_map)); |
| 4441 TransitionElementsKindStub stub(from_kind, to_kind); | 4483 TransitionElementsKindStub stub(from_kind, to_kind); |
| 4442 __ CallStub(&stub); | 4484 __ CallStub(&stub); |
| 4443 RecordSafepointWithRegistersAndDoubles( | 4485 RecordSafepointWithRegistersAndDoubles( |
| 4444 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); | 4486 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
| 4445 } | 4487 } |
| 4446 __ bind(&not_applicable); | 4488 __ bind(&not_applicable); |
| 4447 } | 4489 } |
| 4448 | 4490 |
| 4449 | 4491 |
| 4450 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4492 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
| 4451 Register object = ToRegister(instr->object()); | 4493 Register object = ToRegister(instr->object()); |
| 4452 Register temp = ToRegister(instr->temp()); | 4494 Register temp = ToRegister(instr->temp()); |
| 4453 Label fail; | 4495 Label fail; |
| 4454 __ TestJSArrayForAllocationMemento(object, temp, ne, &fail); | 4496 __ TestJSArrayForAllocationMemento(object, temp, ne, &fail); |
| 4455 DeoptimizeIf(al, instr->environment()); | 4497 DeoptimizeIf(al, instr->environment()); |
| 4456 __ bind(&fail); | 4498 __ bind(&fail); |
| 4457 } | 4499 } |
| 4458 | 4500 |
| 4459 | 4501 |
| 4460 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 4502 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
| 4503 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4461 __ push(ToRegister(instr->left())); | 4504 __ push(ToRegister(instr->left())); |
| 4462 __ push(ToRegister(instr->right())); | 4505 __ push(ToRegister(instr->right())); |
| 4463 StringAddStub stub(instr->hydrogen()->flags()); | 4506 StringAddStub stub(instr->hydrogen()->flags()); |
| 4464 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 4507 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 4465 } | 4508 } |
| 4466 | 4509 |
| 4467 | 4510 |
| 4468 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { | 4511 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { |
| 4469 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { | 4512 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { |
| 4470 public: | 4513 public: |
| (...skipping 34 matching lines...) |
| 4505 // DoStringCharCodeAt above. | 4548 // DoStringCharCodeAt above. |
| 4506 if (instr->index()->IsConstantOperand()) { | 4549 if (instr->index()->IsConstantOperand()) { |
| 4507 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); | 4550 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); |
| 4508 __ Addu(scratch, zero_reg, Operand(Smi::FromInt(const_index))); | 4551 __ Addu(scratch, zero_reg, Operand(Smi::FromInt(const_index))); |
| 4509 __ push(scratch); | 4552 __ push(scratch); |
| 4510 } else { | 4553 } else { |
| 4511 Register index = ToRegister(instr->index()); | 4554 Register index = ToRegister(instr->index()); |
| 4512 __ SmiTag(index); | 4555 __ SmiTag(index); |
| 4513 __ push(index); | 4556 __ push(index); |
| 4514 } | 4557 } |
| 4515 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr); | 4558 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr, |
| 4559 instr->context()); |
| 4516 __ AssertSmi(v0); | 4560 __ AssertSmi(v0); |
| 4517 __ SmiUntag(v0); | 4561 __ SmiUntag(v0); |
| 4518 __ StoreToSafepointRegisterSlot(v0, result); | 4562 __ StoreToSafepointRegisterSlot(v0, result); |
| 4519 } | 4563 } |
| 4520 | 4564 |
| 4521 | 4565 |
| 4522 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { | 4566 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { |
| 4523 class DeferredStringCharFromCode V8_FINAL : public LDeferredCode { | 4567 class DeferredStringCharFromCode V8_FINAL : public LDeferredCode { |
| 4524 public: | 4568 public: |
| 4525 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr) | 4569 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr) |
| (...skipping 32 matching lines...) |
| 4558 Register result = ToRegister(instr->result()); | 4602 Register result = ToRegister(instr->result()); |
| 4559 | 4603 |
| 4560 // TODO(3095996): Get rid of this. For now, we need to make the | 4604 // TODO(3095996): Get rid of this. For now, we need to make the |
| 4561 // result register contain a valid pointer because it is already | 4605 // result register contain a valid pointer because it is already |
| 4562 // contained in the register pointer map. | 4606 // contained in the register pointer map. |
| 4563 __ mov(result, zero_reg); | 4607 __ mov(result, zero_reg); |
| 4564 | 4608 |
| 4565 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 4609 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 4566 __ SmiTag(char_code); | 4610 __ SmiTag(char_code); |
| 4567 __ push(char_code); | 4611 __ push(char_code); |
| 4568 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr); | 4612 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context()); |
| 4569 __ StoreToSafepointRegisterSlot(v0, result); | 4613 __ StoreToSafepointRegisterSlot(v0, result); |
| 4570 } | 4614 } |
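
The TODO(3095996) pattern above (also used by the number-tagging deferred paths below) exists because the result register is listed in the safepoint's pointer map: whatever it holds at the call must be something the GC can interpret, and zero qualifies because it is a valid smi. A standalone illustration of the distinction, assuming the 32-bit tagging scheme where the low bit is 0 for smis and 1 for heap pointers:

    #include <cassert>
    #include <cstdint>

    constexpr uint32_t kSmiTagMask = 1;  // assumed: low bit distinguishes smi vs. pointer

    bool LooksLikeHeapPointer(uint32_t tagged) { return (tagged & kSmiTagMask) != 0; }

    int main() {
      uint32_t raw_char_code = 65;                  // untagged integer, low bit happens to be 1
      assert(LooksLikeHeapPointer(raw_char_code));  // the GC would try to follow it: unsafe
      uint32_t dummy = 0;                           // zero is a valid smi
      assert(!LooksLikeHeapPointer(dummy));         // safe placeholder until the real result lands
      return 0;
    }
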
| 4571 | 4615 |
| 4572 | 4616 |
| 4573 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { | 4617 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { |
| 4574 LOperand* input = instr->value(); | 4618 LOperand* input = instr->value(); |
| 4575 ASSERT(input->IsRegister() || input->IsStackSlot()); | 4619 ASSERT(input->IsRegister() || input->IsStackSlot()); |
| 4576 LOperand* output = instr->result(); | 4620 LOperand* output = instr->result(); |
| 4577 ASSERT(output->IsDoubleRegister()); | 4621 ASSERT(output->IsDoubleRegister()); |
| 4578 FPURegister single_scratch = double_scratch0().low(); | 4622 FPURegister single_scratch = double_scratch0().low(); |
| (...skipping 119 matching lines...) |
| 4698 __ Branch(&done); | 4742 __ Branch(&done); |
| 4699 } | 4743 } |
| 4700 | 4744 |
| 4701 // Slow case: Call the runtime system to do the number allocation. | 4745 // Slow case: Call the runtime system to do the number allocation. |
| 4702 __ bind(&slow); | 4746 __ bind(&slow); |
| 4703 | 4747 |
| 4704 // TODO(3095996): Put a valid pointer value in the stack slot where the result | 4748 // TODO(3095996): Put a valid pointer value in the stack slot where the result |
| 4705 // register is stored, as this register is in the pointer map, but contains an | 4749 // register is stored, as this register is in the pointer map, but contains an |
| 4706 // integer value. | 4750 // integer value. |
| 4707 __ StoreToSafepointRegisterSlot(zero_reg, dst); | 4751 __ StoreToSafepointRegisterSlot(zero_reg, dst); |
| 4708 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); | 4752 // NumberTagI and NumberTagD use the context from the frame, rather than |
| 4753 // the environment's HContext or HInlinedContext value. |
| 4754 // They only call Runtime::kAllocateHeapNumber. |
| 4755 // The corresponding HChange instructions are added in a phase that does |
| 4756 // not have easy access to the local context. |
| 4757 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 4758 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 4759 RecordSafepointWithRegisters( |
| 4760 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
| 4709 __ Move(dst, v0); | 4761 __ Move(dst, v0); |
| 4710 __ Subu(dst, dst, kHeapObjectTag); | 4762 __ Subu(dst, dst, kHeapObjectTag); |
| 4711 | 4763 |
| 4712 // Done. Put the value in dbl_scratch into the value of the allocated heap | 4764 // Done. Put the value in dbl_scratch into the value of the allocated heap |
| 4713 // number. | 4765 // number. |
| 4714 __ bind(&done); | 4766 __ bind(&done); |
| 4715 __ sdc1(dbl_scratch, MemOperand(dst, HeapNumber::kValueOffset)); | 4767 __ sdc1(dbl_scratch, MemOperand(dst, HeapNumber::kValueOffset)); |
| 4716 __ Addu(dst, dst, kHeapObjectTag); | 4768 __ Addu(dst, dst, kHeapObjectTag); |
| 4717 __ StoreToSafepointRegisterSlot(dst, dst); | 4769 __ StoreToSafepointRegisterSlot(dst, dst); |
| 4718 } | 4770 } |
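
Tag arithmetic in the deferred path above: heap pointers carry kHeapObjectTag in their low bit, so the code strips the tag before storing the double at HeapNumber::kValueOffset with sdc1 and re-adds it afterwards. A tiny standalone check of that arithmetic; kHeapObjectTag matches the 32-bit value I believe V8 uses, while the payload offset here is only an illustration value:

    #include <cassert>
    #include <cstdint>

    constexpr uint32_t kHeapObjectTag = 1;  // low bit set on tagged heap pointers
    constexpr uint32_t kValueOffset = 8;    // assumed payload offset, for illustration only

    int main() {
      uint32_t raw = 0x1000;                   // aligned allocation address
      uint32_t tagged = raw + kHeapObjectTag;  // what a register such as dst holds
      // Untag first (as the code does), then the plain offset addresses the payload.
      uint32_t untagged = tagged - kHeapObjectTag;
      assert(untagged + kValueOffset == raw + kValueOffset);
      // Equivalently, a field operand folds the tag into the displacement.
      assert(tagged + kValueOffset - kHeapObjectTag == raw + kValueOffset);
      return 0;
    }
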
| (...skipping 35 matching lines...) |
| 4754 | 4806 |
| 4755 | 4807 |
| 4756 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { | 4808 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { |
| 4757 // TODO(3095996): Get rid of this. For now, we need to make the | 4809 // TODO(3095996): Get rid of this. For now, we need to make the |
| 4758 // result register contain a valid pointer because it is already | 4810 // result register contain a valid pointer because it is already |
| 4759 // contained in the register pointer map. | 4811 // contained in the register pointer map. |
| 4760 Register reg = ToRegister(instr->result()); | 4812 Register reg = ToRegister(instr->result()); |
| 4761 __ mov(reg, zero_reg); | 4813 __ mov(reg, zero_reg); |
| 4762 | 4814 |
| 4763 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 4815 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 4764 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); | 4816 // NumberTagI and NumberTagD use the context from the frame, rather than |
| 4817 // the environment's HContext or HInlinedContext value. |
| 4818 // They only call Runtime::kAllocateHeapNumber. |
| 4819 // The corresponding HChange instructions are added in a phase that does |
| 4820 // not have easy access to the local context. |
| 4821 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 4822 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 4823 RecordSafepointWithRegisters( |
| 4824 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
| 4765 __ Subu(v0, v0, kHeapObjectTag); | 4825 __ Subu(v0, v0, kHeapObjectTag); |
| 4766 __ StoreToSafepointRegisterSlot(v0, reg); | 4826 __ StoreToSafepointRegisterSlot(v0, reg); |
| 4767 } | 4827 } |
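
Both number-tagging deferred paths reload cp with lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)), i.e. from the context slot of the standard frame rather than from the Lithium environment. The layout sketched below is my reading of the 32-bit standard frame and is an assumption, not copied from the V8 headers:

    #include <cstdint>

    // Assumed 32-bit standard-frame offsets relative to fp, in bytes.
    constexpr int32_t kPointerSize    = 4;
    constexpr int32_t kCallerPCOffset = +1 * kPointerSize;  // return address
    constexpr int32_t kCallerFPOffset = 0;                  // saved caller fp (fp points here)
    constexpr int32_t kContextOffset  = -1 * kPointerSize;  // context slot, what cp reloads
    constexpr int32_t kMarkerOffset   = -2 * kPointerSize;  // function or frame-type marker

    // The deferred code effectively performs: cp = *(fp + kContextOffset).
    uint32_t LoadContextFromFrame(const uint8_t* fp) {
      return *reinterpret_cast<const uint32_t*>(fp + kContextOffset);
    }

    int main() {
      uint32_t frame_words[4] = {0, 0, 0, 0};
      uint8_t* fake_fp = reinterpret_cast<uint8_t*>(&frame_words[2]);  // slots on both sides of fp
      frame_words[1] = 0xC0FFEE;  // the word one pointer below fp, i.e. the context slot
      return LoadContextFromFrame(fake_fp) == 0xC0FFEE ? 0 : 1;
    }
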
| 4768 | 4828 |
| 4769 | 4829 |
| 4770 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 4830 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
| 4771 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); | 4831 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); |
| 4772 __ SmiTag(ToRegister(instr->result()), ToRegister(instr->value())); | 4832 __ SmiTag(ToRegister(instr->result()), ToRegister(instr->value())); |
| 4773 } | 4833 } |
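
DoSmiTag can use a bare shift because the hydrogen value was proven not to overflow 31 bits. On this 32-bit target a smi is the integer shifted left by one with tag bit 0, which is also why the kSmiTagMask tests elsewhere in this file treat a clear low bit as "is a smi". A standalone version of the tag/untag pair, using the layout I believe this port uses:

    #include <cassert>
    #include <cstdint>

    constexpr int kSmiTagSize = 1;
    constexpr int32_t kSmiTag = 0;  // low bit of a smi is 0

    int32_t SmiTag(int32_t value) {
      // Shift via uint32_t to avoid undefined behaviour on negative values.
      return static_cast<int32_t>(static_cast<uint32_t>(value) << kSmiTagSize);
    }

    int32_t SmiUntag(int32_t tagged) { return tagged >> kSmiTagSize; }  // arithmetic shift

    int main() {
      assert(SmiUntag(SmiTag(12345)) == 12345);
      assert(SmiUntag(SmiTag(-7)) == -7);
      assert((SmiTag(42) & 1) == kSmiTag);  // clear low bit marks a smi
      return 0;
    }
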
| 4774 | 4834 |
| (...skipping 324 matching lines...) |
| 5099 DeoptimizeIf(ne, instr->environment(), reg, | 5159 DeoptimizeIf(ne, instr->environment(), reg, |
| 5100 Operand(object)); | 5160 Operand(object)); |
| 5101 } | 5161 } |
| 5102 } | 5162 } |
| 5103 | 5163 |
| 5104 | 5164 |
| 5105 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 5165 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { |
| 5106 { | 5166 { |
| 5107 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 5167 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 5108 __ push(object); | 5168 __ push(object); |
| 5109 CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr); | 5169 __ mov(cp, zero_reg); |
| 5170 __ CallRuntimeSaveDoubles(Runtime::kMigrateInstance); |
| 5171 RecordSafepointWithRegisters( |
| 5172 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); |
| 5110 __ StoreToSafepointRegisterSlot(v0, scratch0()); | 5173 __ StoreToSafepointRegisterSlot(v0, scratch0()); |
| 5111 } | 5174 } |
| 5112 __ And(at, scratch0(), Operand(kSmiTagMask)); | 5175 __ And(at, scratch0(), Operand(kSmiTagMask)); |
| 5113 DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg)); | 5176 DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg)); |
| 5114 } | 5177 } |
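
In DoDeferredInstanceMigration the context is irrelevant to Runtime::kMigrateInstance, so cp is parked on zero_reg, which is safe because zero is a valid smi for the GC to scan; afterwards the And/DeoptimizeIf pair bails out when the runtime handed back a smi rather than a migrated object. A minimal sketch of that result test, assuming the same low-bit tagging convention:

    #include <cassert>
    #include <cstdint>

    constexpr uint32_t kSmiTagMask = 1;  // low bit: 0 = smi, 1 = heap object

    bool IsSmi(uint32_t tagged) { return (tagged & kSmiTagMask) == 0; }

    int main() {
      assert(IsSmi(0));             // zero_reg is a legal smi, hence safe to leave in cp
      uint32_t failed = 0;          // a smi result would trigger DeoptimizeIf(eq, ...)
      uint32_t migrated = 0x2001;   // a tagged heap pointer (low bit 1) falls through
      assert(IsSmi(failed));
      assert(!IsSmi(migrated));
      return 0;
    }
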
| 5115 | 5178 |
| 5116 | 5179 |
| 5117 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 5180 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
| 5118 class DeferredCheckMaps V8_FINAL : public LDeferredCode { | 5181 class DeferredCheckMaps V8_FINAL : public LDeferredCode { |
| 5119 public: | 5182 public: |
| (...skipping 177 matching lines...) |
| 5297 __ SmiTag(size); | 5360 __ SmiTag(size); |
| 5298 __ push(size); | 5361 __ push(size); |
| 5299 } else { | 5362 } else { |
| 5300 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); | 5363 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
| 5301 __ Push(Smi::FromInt(size)); | 5364 __ Push(Smi::FromInt(size)); |
| 5302 } | 5365 } |
| 5303 | 5366 |
| 5304 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { | 5367 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { |
| 5305 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); | 5368 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); |
| 5306 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5369 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
| 5307 CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr); | 5370 CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr, |
| 5371 instr->context()); |
| 5308 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { | 5372 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { |
| 5309 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5373 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
| 5310 CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr); | 5374 CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr, |
| 5375 instr->context()); |
| 5311 } else { | 5376 } else { |
| 5312 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr); | 5377 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr, |
| 5378 instr->context()); |
| 5313 } | 5379 } |
| 5314 __ StoreToSafepointRegisterSlot(v0, result); | 5380 __ StoreToSafepointRegisterSlot(v0, result); |
| 5315 } | 5381 } |
| 5316 | 5382 |
| 5317 | 5383 |
| 5318 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { | 5384 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { |
| 5319 ASSERT(ToRegister(instr->value()).is(a0)); | 5385 ASSERT(ToRegister(instr->value()).is(a0)); |
| 5320 ASSERT(ToRegister(instr->result()).is(v0)); | 5386 ASSERT(ToRegister(instr->result()).is(v0)); |
| 5321 __ push(a0); | 5387 __ push(a0); |
| 5322 CallRuntime(Runtime::kToFastProperties, 1, instr); | 5388 CallRuntime(Runtime::kToFastProperties, 1, instr); |
| 5323 } | 5389 } |
| 5324 | 5390 |
| 5325 | 5391 |
| 5326 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { | 5392 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { |
| 5393 ASSERT(ToRegister(instr->context()).is(cp)); |
| 5327 Label materialized; | 5394 Label materialized; |
| 5328 // Registers will be used as follows: | 5395 // Registers will be used as follows: |
| 5329 // t3 = literals array. | 5396 // t3 = literals array. |
| 5330 // a1 = regexp literal. | 5397 // a1 = regexp literal. |
| 5331 // a0 = regexp literal clone. | 5398 // a0 = regexp literal clone. |
| 5332 // a2 and t0-t2 are used as temporaries. | 5399 // a2 and t0-t2 are used as temporaries. |
| 5333 int literal_offset = | 5400 int literal_offset = |
| 5334 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); | 5401 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); |
| 5335 __ LoadHeapObject(t3, instr->hydrogen()->literals()); | 5402 __ LoadHeapObject(t3, instr->hydrogen()->literals()); |
| 5336 __ lw(a1, FieldMemOperand(t3, literal_offset)); | 5403 __ lw(a1, FieldMemOperand(t3, literal_offset)); |
| (...skipping 32 matching lines...) |
| 5369 __ sw(a2, FieldMemOperand(v0, i + kPointerSize)); | 5436 __ sw(a2, FieldMemOperand(v0, i + kPointerSize)); |
| 5370 } | 5437 } |
| 5371 if ((size % (2 * kPointerSize)) != 0) { | 5438 if ((size % (2 * kPointerSize)) != 0) { |
| 5372 __ lw(a3, FieldMemOperand(a1, size - kPointerSize)); | 5439 __ lw(a3, FieldMemOperand(a1, size - kPointerSize)); |
| 5373 __ sw(a3, FieldMemOperand(v0, size - kPointerSize)); | 5440 __ sw(a3, FieldMemOperand(v0, size - kPointerSize)); |
| 5374 } | 5441 } |
| 5375 } | 5442 } |
| 5376 | 5443 |
| 5377 | 5444 |
| 5378 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 5445 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
| 5446 ASSERT(ToRegister(instr->context()).is(cp)); |
| 5379 // Use the fast case closure allocation code that allocates in new | 5447 // Use the fast case closure allocation code that allocates in new |
| 5380 // space for nested functions that don't need literals cloning. | 5448 // space for nested functions that don't need literals cloning. |
| 5381 bool pretenure = instr->hydrogen()->pretenure(); | 5449 bool pretenure = instr->hydrogen()->pretenure(); |
| 5382 if (!pretenure && instr->hydrogen()->has_no_literals()) { | 5450 if (!pretenure && instr->hydrogen()->has_no_literals()) { |
| 5383 FastNewClosureStub stub(instr->hydrogen()->language_mode(), | 5451 FastNewClosureStub stub(instr->hydrogen()->language_mode(), |
| 5384 instr->hydrogen()->is_generator()); | 5452 instr->hydrogen()->is_generator()); |
| 5385 __ li(a2, Operand(instr->hydrogen()->shared_info())); | 5453 __ li(a2, Operand(instr->hydrogen()->shared_info())); |
| 5386 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 5454 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 5387 } else { | 5455 } else { |
| 5388 __ li(a2, Operand(instr->hydrogen()->shared_info())); | 5456 __ li(a2, Operand(instr->hydrogen()->shared_info())); |
| (...skipping 211 matching lines...) |
| 5600 } | 5668 } |
| 5601 | 5669 |
| 5602 | 5670 |
| 5603 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 5671 void LCodeGen::DoDummyUse(LDummyUse* instr) { |
| 5604 // Nothing to see here, move on! | 5672 // Nothing to see here, move on! |
| 5605 } | 5673 } |
| 5606 | 5674 |
| 5607 | 5675 |
| 5608 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 5676 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
| 5609 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 5677 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 5678 LoadContextFromDeferred(instr->context()); |
| 5610 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 5679 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); |
| 5611 RecordSafepointWithLazyDeopt( | 5680 RecordSafepointWithLazyDeopt( |
| 5612 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 5681 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
| 5613 ASSERT(instr->HasEnvironment()); | 5682 ASSERT(instr->HasEnvironment()); |
| 5614 LEnvironment* env = instr->environment(); | 5683 LEnvironment* env = instr->environment(); |
| 5615 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5684 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5616 } | 5685 } |
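
DoStackCheck below only reaches the StackCheck builtin once sp has dropped under the stack-limit root; the Branch uses hs (unsigned higher-or-same) to skip the call, which is the right sense for a downward-growing stack. A standalone statement of that predicate:

    #include <cassert>
    #include <cstdint>

    // Downward-growing stack: a guard is needed once sp falls below the limit.
    bool NeedsStackGuard(uintptr_t sp, uintptr_t stack_limit) {
      return sp < stack_limit;  // the MIPS code branches past the call when sp >= limit
    }

    int main() {
      uintptr_t limit = 0x10000;
      assert(!NeedsStackGuard(0x20000, limit));  // plenty of headroom: skip the runtime call
      assert(NeedsStackGuard(0x0F000, limit));   // below the limit: take the slow path
      return 0;
    }
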
| 5617 | 5686 |
| 5618 | 5687 |
| 5619 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 5688 void LCodeGen::DoStackCheck(LStackCheck* instr) { |
| (...skipping 11 matching lines...) |
| 5631 | 5700 |
| 5632 ASSERT(instr->HasEnvironment()); | 5701 ASSERT(instr->HasEnvironment()); |
| 5633 LEnvironment* env = instr->environment(); | 5702 LEnvironment* env = instr->environment(); |
| 5634 // There is no LLazyBailout instruction for stack-checks. We have to | 5703 // There is no LLazyBailout instruction for stack-checks. We have to |
| 5635 // prepare for lazy deoptimization explicitly here. | 5704 // prepare for lazy deoptimization explicitly here. |
| 5636 if (instr->hydrogen()->is_function_entry()) { | 5705 if (instr->hydrogen()->is_function_entry()) { |
| 5637 // Perform stack overflow check. | 5706 // Perform stack overflow check. |
| 5638 Label done; | 5707 Label done; |
| 5639 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 5708 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
| 5640 __ Branch(&done, hs, sp, Operand(at)); | 5709 __ Branch(&done, hs, sp, Operand(at)); |
| 5710 ASSERT(instr->context()->IsRegister()); |
| 5711 ASSERT(ToRegister(instr->context()).is(cp)); |
| 5641 CallCode(isolate()->builtins()->StackCheck(), | 5712 CallCode(isolate()->builtins()->StackCheck(), |
| 5642 RelocInfo::CODE_TARGET, | 5713 RelocInfo::CODE_TARGET, |
| 5643 instr); | 5714 instr); |
| 5644 EnsureSpaceForLazyDeopt(); | 5715 EnsureSpaceForLazyDeopt(); |
| 5645 last_lazy_deopt_pc_ = masm()->pc_offset(); | 5716 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5646 __ bind(&done); | 5717 __ bind(&done); |
| 5647 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5718 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5648 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5719 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5649 } else { | 5720 } else { |
| 5650 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5721 ASSERT(instr->hydrogen()->is_backwards_branch()); |
| (...skipping 117 matching lines...) |
| 5768 __ Subu(scratch, result, scratch); | 5839 __ Subu(scratch, result, scratch); |
| 5769 __ lw(result, FieldMemOperand(scratch, | 5840 __ lw(result, FieldMemOperand(scratch, |
| 5770 FixedArray::kHeaderSize - kPointerSize)); | 5841 FixedArray::kHeaderSize - kPointerSize)); |
| 5771 __ bind(&done); | 5842 __ bind(&done); |
| 5772 } | 5843 } |
| 5773 | 5844 |
| 5774 | 5845 |
| 5775 #undef __ | 5846 #undef __ |
| 5776 | 5847 |
| 5777 } } // namespace v8::internal | 5848 } } // namespace v8::internal |