| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 744 matching lines...) | |
| 755 int deoptimization_index = deoptimizations_.length(); | 755 int deoptimization_index = deoptimizations_.length(); |
| 756 int pc_offset = masm()->pc_offset(); | 756 int pc_offset = masm()->pc_offset(); |
| 757 environment->Register(deoptimization_index, | 757 environment->Register(deoptimization_index, |
| 758 translation.index(), | 758 translation.index(), |
| 759 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | 759 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); |
| 760 deoptimizations_.Add(environment, zone()); | 760 deoptimizations_.Add(environment, zone()); |
| 761 } | 761 } |
| 762 } | 762 } |
| 763 | 763 |
| 764 | 764 |
| 765 void LCodeGen::DeoptimizeIf(Condition cc, | 765 void LCodeGen::DeoptimizeIf(Condition condition, |
| 766 LEnvironment* environment, | 766 LEnvironment* environment, |
| 767 Deoptimizer::BailoutType bailout_type, | 767 Deoptimizer::BailoutType bailout_type, |
| 768 Register src1, | 768 Register src1, |
| 769 const Operand& src2) { | 769 const Operand& src2) { |
| 770 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 770 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 771 ASSERT(environment->HasBeenRegistered()); | 771 ASSERT(environment->HasBeenRegistered()); |
| 772 int id = environment->deoptimization_index(); | 772 int id = environment->deoptimization_index(); |
| 773 ASSERT(info()->IsOptimizing() || info()->IsStub()); | 773 ASSERT(info()->IsOptimizing() || info()->IsStub()); |
| 774 Address entry = | 774 Address entry = |
| 775 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 775 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
| 776 if (entry == NULL) { | 776 if (entry == NULL) { |
| 777 Abort(kBailoutWasNotPrepared); | 777 Abort(kBailoutWasNotPrepared); |
| 778 return; | 778 return; |
| 779 } | 779 } |
| 780 | 780 |
| 781 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. | 781 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. |
| 782 if (FLAG_deopt_every_n_times == 1 && | 782 if (FLAG_deopt_every_n_times == 1 && |
| 783 !info()->IsStub() && | 783 !info()->IsStub() && |
| 784 info()->opt_count() == id) { | 784 info()->opt_count() == id) { |
| 785 ASSERT(frame_is_built_); | 785 ASSERT(frame_is_built_); |
| 786 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 786 __ Call(entry, RelocInfo::RUNTIME_ENTRY); |
| 787 return; | 787 return; |
| 788 } | 788 } |
| 789 | 789 |
| 790 if (info()->ShouldTrapOnDeopt()) { | 790 if (info()->ShouldTrapOnDeopt()) { |
| 791 Label skip; | 791 Label skip; |
| 792 if (cc != al) { | 792 if (condition != al) { |
| 793 __ Branch(&skip, NegateCondition(cc), src1, src2); | 793 __ Branch(&skip, NegateCondition(condition), src1, src2); |
| 794 } | 794 } |
| 795 __ stop("trap_on_deopt"); | 795 __ stop("trap_on_deopt"); |
| 796 __ bind(&skip); | 796 __ bind(&skip); |
| 797 } | 797 } |
| 798 | 798 |
| 799 ASSERT(info()->IsStub() || frame_is_built_); | 799 ASSERT(info()->IsStub() || frame_is_built_); |
| 800 if (cc == al && frame_is_built_) { | 800 if (condition == al && frame_is_built_) { |
| 801 __ Call(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2); | 801 __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2); |
| 802 } else { | 802 } else { |
| 803 // We often have several deopts to the same entry, reuse the last | 803 // We often have several deopts to the same entry, reuse the last |
| 804 // jump entry if this is the case. | 804 // jump entry if this is the case. |
| 805 if (deopt_jump_table_.is_empty() || | 805 if (deopt_jump_table_.is_empty() || |
| 806 (deopt_jump_table_.last().address != entry) || | 806 (deopt_jump_table_.last().address != entry) || |
| 807 (deopt_jump_table_.last().bailout_type != bailout_type) || | 807 (deopt_jump_table_.last().bailout_type != bailout_type) || |
| 808 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { | 808 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { |
| 809 Deoptimizer::JumpTableEntry table_entry(entry, | 809 Deoptimizer::JumpTableEntry table_entry(entry, |
| 810 bailout_type, | 810 bailout_type, |
| 811 !frame_is_built_); | 811 !frame_is_built_); |
| 812 deopt_jump_table_.Add(table_entry, zone()); | 812 deopt_jump_table_.Add(table_entry, zone()); |
| 813 } | 813 } |
| 814 __ Branch(&deopt_jump_table_.last().label, cc, src1, src2); | 814 __ Branch(&deopt_jump_table_.last().label, condition, src1, src2); |
| 815 } | 815 } |
| 816 } | 816 } |
| 817 | 817 |
| 818 | 818 |
| 819 void LCodeGen::DeoptimizeIf(Condition cc, | 819 void LCodeGen::DeoptimizeIf(Condition condition, |
| 820 LEnvironment* environment, | 820 LEnvironment* environment, |
| 821 Register src1, | 821 Register src1, |
| 822 const Operand& src2) { | 822 const Operand& src2) { |
| 823 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 823 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
| 824 ? Deoptimizer::LAZY | 824 ? Deoptimizer::LAZY |
| 825 : Deoptimizer::EAGER; | 825 : Deoptimizer::EAGER; |
| 826 DeoptimizeIf(cc, environment, bailout_type, src1, src2); | 826 DeoptimizeIf(condition, environment, bailout_type, src1, src2); |
| 827 } | 827 } |
| 828 | 828 |
| 829 | 829 |
| 830 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { | 830 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { |
| 831 ZoneList<Handle<Map> > maps(1, zone()); | 831 ZoneList<Handle<Map> > maps(1, zone()); |
| 832 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); | 832 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
| 833 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { | 833 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { |
| 834 RelocInfo::Mode mode = it.rinfo()->rmode(); | 834 RelocInfo::Mode mode = it.rinfo()->rmode(); |
| 835 if (mode == RelocInfo::EMBEDDED_OBJECT && | 835 if (mode == RelocInfo::EMBEDDED_OBJECT && |
| 836 it.rinfo()->target_object()->IsMap()) { | 836 it.rinfo()->target_object()->IsMap()) { |
| (...skipping 1149 matching lines...) | |
| 1986 | 1986 |
| 1987 int LCodeGen::GetNextEmittedBlock() const { | 1987 int LCodeGen::GetNextEmittedBlock() const { |
| 1988 for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) { | 1988 for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) { |
| 1989 if (!chunk_->GetLabel(i)->HasReplacement()) return i; | 1989 if (!chunk_->GetLabel(i)->HasReplacement()) return i; |
| 1990 } | 1990 } |
| 1991 return -1; | 1991 return -1; |
| 1992 } | 1992 } |
| 1993 | 1993 |
| 1994 template<class InstrType> | 1994 template<class InstrType> |
| 1995 void LCodeGen::EmitBranch(InstrType instr, | 1995 void LCodeGen::EmitBranch(InstrType instr, |
| 1996 Condition cc, Register src1, const Operand& src2) { | 1996 Condition condition, |
| | 1997 Register src1, |
| | 1998 const Operand& src2) { |
| 1997 int left_block = instr->TrueDestination(chunk_); | 1999 int left_block = instr->TrueDestination(chunk_); |
| 1998 int right_block = instr->FalseDestination(chunk_); | 2000 int right_block = instr->FalseDestination(chunk_); |
| 1999 | 2001 |
| 2000 int next_block = GetNextEmittedBlock(); | 2002 int next_block = GetNextEmittedBlock(); |
| 2001 if (right_block == left_block || cc == al) { | 2003 if (right_block == left_block || condition == al) { |
| 2002 EmitGoto(left_block); | 2004 EmitGoto(left_block); |
| 2003 } else if (left_block == next_block) { | 2005 } else if (left_block == next_block) { |
| 2004 __ Branch(chunk_->GetAssemblyLabel(right_block), | 2006 __ Branch(chunk_->GetAssemblyLabel(right_block), |
| 2005 NegateCondition(cc), src1, src2); | 2007 NegateCondition(condition), src1, src2); |
| 2006 } else if (right_block == next_block) { | 2008 } else if (right_block == next_block) { |
| 2007 __ Branch(chunk_->GetAssemblyLabel(left_block), cc, src1, src2); | 2009 __ Branch(chunk_->GetAssemblyLabel(left_block), condition, src1, src2); |
| 2008 } else { | 2010 } else { |
| 2009 __ Branch(chunk_->GetAssemblyLabel(left_block), cc, src1, src2); | 2011 __ Branch(chunk_->GetAssemblyLabel(left_block), condition, src1, src2); |
| 2010 __ Branch(chunk_->GetAssemblyLabel(right_block)); | 2012 __ Branch(chunk_->GetAssemblyLabel(right_block)); |
| 2011 } | 2013 } |
| 2012 } | 2014 } |
| 2013 | 2015 |
| 2014 | 2016 |
| 2015 template<class InstrType> | 2017 template<class InstrType> |
| 2016 void LCodeGen::EmitBranchF(InstrType instr, | 2018 void LCodeGen::EmitBranchF(InstrType instr, |
| 2017 Condition cc, FPURegister src1, FPURegister src2) { | 2019 Condition condition, |
| | 2020 FPURegister src1, |
| | 2021 FPURegister src2) { |
| 2018 int right_block = instr->FalseDestination(chunk_); | 2022 int right_block = instr->FalseDestination(chunk_); |
| 2019 int left_block = instr->TrueDestination(chunk_); | 2023 int left_block = instr->TrueDestination(chunk_); |
| 2020 | 2024 |
| 2021 int next_block = GetNextEmittedBlock(); | 2025 int next_block = GetNextEmittedBlock(); |
| 2022 if (right_block == left_block) { | 2026 if (right_block == left_block) { |
| 2023 EmitGoto(left_block); | 2027 EmitGoto(left_block); |
| 2024 } else if (left_block == next_block) { | 2028 } else if (left_block == next_block) { |
| 2025 __ BranchF(chunk_->GetAssemblyLabel(right_block), NULL, | 2029 __ BranchF(chunk_->GetAssemblyLabel(right_block), NULL, |
| 2026 NegateCondition(cc), src1, src2); | 2030 NegateCondition(condition), src1, src2); |
| 2027 } else if (right_block == next_block) { | 2031 } else if (right_block == next_block) { |
| 2028 __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, cc, src1, src2); | 2032 __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, |
| | 2033 condition, src1, src2); |
| 2029 } else { | 2034 } else { |
| 2030 __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, cc, src1, src2); | 2035 __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, |
| | 2036 condition, src1, src2); |
| 2031 __ Branch(chunk_->GetAssemblyLabel(right_block)); | 2037 __ Branch(chunk_->GetAssemblyLabel(right_block)); |
| 2032 } | 2038 } |
| 2033 } | 2039 } |
| 2034 | 2040 |
| 2035 | 2041 |
| | 2042 template<class InstrType> |
| | 2043 void LCodeGen::EmitFalseBranchF(InstrType instr, |
| | 2044 Condition condition, |
| | 2045 FPURegister src1, |
| | 2046 FPURegister src2) { |
| | 2047 int false_block = instr->FalseDestination(chunk_); |
| | 2048 __ BranchF(chunk_->GetAssemblyLabel(false_block), NULL, |
| | 2049 condition, src1, src2); |
| | 2050 } |
| | 2051 |
| 2052 |
| 2036 void LCodeGen::DoDebugBreak(LDebugBreak* instr) { | 2053 void LCodeGen::DoDebugBreak(LDebugBreak* instr) { |
| 2037 __ stop("LDebugBreak"); | 2054 __ stop("LDebugBreak"); |
| 2038 } | 2055 } |
| 2039 | 2056 |
| 2040 | 2057 |
| 2041 void LCodeGen::DoIsNumberAndBranch(LIsNumberAndBranch* instr) { | 2058 void LCodeGen::DoIsNumberAndBranch(LIsNumberAndBranch* instr) { |
| 2042 Representation r = instr->hydrogen()->value()->representation(); | 2059 Representation r = instr->hydrogen()->value()->representation(); |
| 2043 if (r.IsSmiOrInteger32() || r.IsDouble()) { | 2060 if (r.IsSmiOrInteger32() || r.IsDouble()) { |
| 2044 EmitBranch(instr, al, zero_reg, Operand(zero_reg)); | 2061 EmitBranch(instr, al, zero_reg, Operand(zero_reg)); |
| 2045 } else { | 2062 } else { |
| (...skipping 240 matching lines...) | |
| 2286 | 2303 |
| 2287 | 2304 |
| 2288 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) { | 2305 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) { |
| 2289 Register left = ToRegister(instr->left()); | 2306 Register left = ToRegister(instr->left()); |
| 2290 Register right = ToRegister(instr->right()); | 2307 Register right = ToRegister(instr->right()); |
| 2291 | 2308 |
| 2292 EmitBranch(instr, eq, left, Operand(right)); | 2309 EmitBranch(instr, eq, left, Operand(right)); |
| 2293 } | 2310 } |
| 2294 | 2311 |
| 2295 | 2312 |
| | 2313 void LCodeGen::DoCmpHoleAndBranch(LCmpHoleAndBranch* instr) { |
| | 2314 if (instr->hydrogen()->representation().IsTagged()) { |
| | 2315 Register input_reg = ToRegister(instr->object()); |
| | 2316 __ li(at, Operand(factory()->the_hole_value())); |
| | 2317 EmitBranch(instr, eq, input_reg, Operand(at)); |
| | 2318 return; |
| | 2319 } |
| | 2320 |
| | 2321 DoubleRegister input_reg = ToDoubleRegister(instr->object()); |
| | 2322 EmitFalseBranchF(instr, eq, input_reg, input_reg); |
| | 2323 |
| | 2324 Register scratch = scratch0(); |
| | 2325 __ FmoveHigh(scratch, input_reg); |
| | 2326 EmitBranch(instr, eq, scratch, Operand(kHoleNanUpper32)); |
| | 2327 } |
| | 2328 |
| | 2329 |
| 2296 Condition LCodeGen::EmitIsObject(Register input, | 2330 Condition LCodeGen::EmitIsObject(Register input, |
| 2297 Register temp1, | 2331 Register temp1, |
| 2298 Register temp2, | 2332 Register temp2, |
| 2299 Label* is_not_object, | 2333 Label* is_not_object, |
| 2300 Label* is_object) { | 2334 Label* is_object) { |
| 2301 __ JumpIfSmi(input, is_not_object); | 2335 __ JumpIfSmi(input, is_not_object); |
| 2302 | 2336 |
| 2303 __ LoadRoot(temp2, Heap::kNullValueRootIndex); | 2337 __ LoadRoot(temp2, Heap::kNullValueRootIndex); |
| 2304 __ Branch(is_object, eq, input, Operand(temp2)); | 2338 __ Branch(is_object, eq, input, Operand(temp2)); |
| 2305 | 2339 |
| (...skipping 1836 matching lines...) | |
| 4142 | 4176 |
| 4143 // Name is always in a2. | 4177 // Name is always in a2. |
| 4144 __ li(a2, Operand(instr->name())); | 4178 __ li(a2, Operand(instr->name())); |
| 4145 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) | 4179 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) |
| 4146 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 4180 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 4147 : isolate()->builtins()->StoreIC_Initialize(); | 4181 : isolate()->builtins()->StoreIC_Initialize(); |
| 4148 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 4182 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 4149 } | 4183 } |
| 4150 | 4184 |
| 4151 | 4185 |
| 4152 void LCodeGen::ApplyCheckIf(Condition cc, | 4186 void LCodeGen::ApplyCheckIf(Condition condition, |
| 4153 LBoundsCheck* check, | 4187 LBoundsCheck* check, |
| 4154 Register src1, | 4188 Register src1, |
| 4155 const Operand& src2) { | 4189 const Operand& src2) { |
| 4156 if (FLAG_debug_code && check->hydrogen()->skip_check()) { | 4190 if (FLAG_debug_code && check->hydrogen()->skip_check()) { |
| 4157 Label done; | 4191 Label done; |
| 4158 __ Branch(&done, NegateCondition(cc), src1, src2); | 4192 __ Branch(&done, NegateCondition(condition), src1, src2); |
| 4159 __ stop("eliminated bounds check failed"); | 4193 __ stop("eliminated bounds check failed"); |
| 4160 __ bind(&done); | 4194 __ bind(&done); |
| 4161 } else { | 4195 } else { |
| 4162 DeoptimizeIf(cc, check->environment(), src1, src2); | 4196 DeoptimizeIf(condition, check->environment(), src1, src2); |
| 4163 } | 4197 } |
| 4164 } | 4198 } |
| 4165 | 4199 |
| 4166 | 4200 |
| 4167 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { | 4201 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { |
| 4168 if (instr->hydrogen()->skip_check()) return; | 4202 if (instr->hydrogen()->skip_check()) return; |
| 4169 | 4203 |
| 4170 Condition condition = instr->hydrogen()->allow_equality() ? hi : hs; | 4204 Condition condition = instr->hydrogen()->allow_equality() ? hi : hs; |
| 4171 if (instr->index()->IsConstantOperand()) { | 4205 if (instr->index()->IsConstantOperand()) { |
| 4172 int constant_index = | 4206 int constant_index = |
| (...skipping 522 matching lines...) | |
| 4695 private: | 4729 private: |
| 4696 LNumberTagD* instr_; | 4730 LNumberTagD* instr_; |
| 4697 }; | 4731 }; |
| 4698 | 4732 |
| 4699 DoubleRegister input_reg = ToDoubleRegister(instr->value()); | 4733 DoubleRegister input_reg = ToDoubleRegister(instr->value()); |
| 4700 Register scratch = scratch0(); | 4734 Register scratch = scratch0(); |
| 4701 Register reg = ToRegister(instr->result()); | 4735 Register reg = ToRegister(instr->result()); |
| 4702 Register temp1 = ToRegister(instr->temp()); | 4736 Register temp1 = ToRegister(instr->temp()); |
| 4703 Register temp2 = ToRegister(instr->temp2()); | 4737 Register temp2 = ToRegister(instr->temp2()); |
| 4704 | 4738 |
| 4705 bool convert_hole = false; | |
| 4706 HValue* change_input = instr->hydrogen()->value(); | |
| 4707 if (change_input->IsLoadKeyed()) { | |
| 4708 HLoadKeyed* load = HLoadKeyed::cast(change_input); | |
| 4709 convert_hole = load->UsesMustHandleHole(); | |
| 4710 } | |
| 4711 | |
| 4712 Label no_special_nan_handling; | |
| 4713 Label done; | |
| 4714 if (convert_hole) { | |
| 4715 DoubleRegister input_reg = ToDoubleRegister(instr->value()); | |
| 4716 __ BranchF(&no_special_nan_handling, NULL, eq, input_reg, input_reg); | |
| 4717 __ Move(reg, scratch0(), input_reg); | |
| 4718 Label canonicalize; | |
| 4719 __ Branch(&canonicalize, ne, scratch0(), Operand(kHoleNanUpper32)); | |
| 4720 __ li(reg, factory()->undefined_value()); | |
| 4721 __ Branch(&done); | |
| 4722 __ bind(&canonicalize); | |
| 4723 __ Move(input_reg, | |
| 4724 FixedDoubleArray::canonical_not_the_hole_nan_as_double()); | |
| 4725 } | |
| 4726 | |
| 4727 __ bind(&no_special_nan_handling); | |
| 4728 DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr); | 4739 DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr); |
| 4729 if (FLAG_inline_new) { | 4740 if (FLAG_inline_new) { |
| 4730 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex); | 4741 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex); |
| 4731 // We want the untagged address first for performance | 4742 // We want the untagged address first for performance |
| 4732 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry(), | 4743 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry(), |
| 4733 DONT_TAG_RESULT); | 4744 DONT_TAG_RESULT); |
| 4734 } else { | 4745 } else { |
| 4735 __ Branch(deferred->entry()); | 4746 __ Branch(deferred->entry()); |
| 4736 } | 4747 } |
| 4737 __ bind(deferred->exit()); | 4748 __ bind(deferred->exit()); |
| 4738 __ sdc1(input_reg, MemOperand(reg, HeapNumber::kValueOffset)); | 4749 __ sdc1(input_reg, MemOperand(reg, HeapNumber::kValueOffset)); |
| 4739 // Now that we have finished with the object's real address tag it | 4750 // Now that we have finished with the object's real address tag it |
| 4740 __ Addu(reg, reg, kHeapObjectTag); | 4751 __ Addu(reg, reg, kHeapObjectTag); |
| 4741 __ bind(&done); | |
| 4742 } | 4752 } |
| 4743 | 4753 |
| 4744 | 4754 |
| 4745 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { | 4755 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { |
| 4746 // TODO(3095996): Get rid of this. For now, we need to make the | 4756 // TODO(3095996): Get rid of this. For now, we need to make the |
| 4747 // result register contain a valid pointer because it is already | 4757 // result register contain a valid pointer because it is already |
| 4748 // contained in the register pointer map. | 4758 // contained in the register pointer map. |
| 4749 Register reg = ToRegister(instr->result()); | 4759 Register reg = ToRegister(instr->result()); |
| 4750 __ mov(reg, zero_reg); | 4760 __ mov(reg, zero_reg); |
| 4751 | 4761 |
| (...skipping 21 matching lines...) | |
| 4773 __ SmiUntag(result, input); | 4783 __ SmiUntag(result, input); |
| 4774 DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg)); | 4784 DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg)); |
| 4775 } else { | 4785 } else { |
| 4776 __ SmiUntag(result, input); | 4786 __ SmiUntag(result, input); |
| 4777 } | 4787 } |
| 4778 } | 4788 } |
| 4779 | 4789 |
| 4780 | 4790 |
| 4781 void LCodeGen::EmitNumberUntagD(Register input_reg, | 4791 void LCodeGen::EmitNumberUntagD(Register input_reg, |
| 4782 DoubleRegister result_reg, | 4792 DoubleRegister result_reg, |
| 4783 bool allow_undefined_as_nan, | 4793 bool can_convert_undefined_to_nan, |
| 4784 bool deoptimize_on_minus_zero, | 4794 bool deoptimize_on_minus_zero, |
| 4785 LEnvironment* env, | 4795 LEnvironment* env, |
| 4786 NumberUntagDMode mode) { | 4796 NumberUntagDMode mode) { |
| 4787 Register scratch = scratch0(); | 4797 Register scratch = scratch0(); |
| 4788 | 4798 |
| 4789 Label load_smi, heap_number, done; | 4799 Label load_smi, heap_number, done; |
| 4790 | 4800 |
| 4791 STATIC_ASSERT(NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE > | 4801 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { |
| 4792 NUMBER_CANDIDATE_IS_ANY_TAGGED); | |
| 4793 if (mode >= NUMBER_CANDIDATE_IS_ANY_TAGGED) { | |
| 4794 // Smi check. | 4802 // Smi check. |
| 4795 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi); | 4803 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi); |
| 4796 | 4804 |
| 4797 // Heap number map check. | 4805 // Heap number map check. |
| 4798 __ lw(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); | 4806 __ lw(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); |
| 4799 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 4807 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
| 4800 if (!allow_undefined_as_nan) { | 4808 if (!can_convert_undefined_to_nan) { |
| 4801 DeoptimizeIf(ne, env, scratch, Operand(at)); | 4809 DeoptimizeIf(ne, env, scratch, Operand(at)); |
| 4802 } else { | 4810 } else { |
| 4803 Label heap_number, convert; | 4811 Label heap_number, convert; |
| 4804 __ Branch(&heap_number, eq, scratch, Operand(at)); | 4812 __ Branch(&heap_number, eq, scratch, Operand(at)); |
| 4805 | 4813 |
| 4806 // Convert undefined (and hole) to NaN. | 4814 // Convert undefined (and hole) to NaN. |
| 4807 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 4815 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 4808 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE) { | |
| 4809 __ Branch(&convert, eq, input_reg, Operand(at)); | |
| 4810 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | |
| 4811 } | |
| 4812 DeoptimizeIf(ne, env, input_reg, Operand(at)); | 4816 DeoptimizeIf(ne, env, input_reg, Operand(at)); |
| 4813 | 4817 |
| 4814 __ bind(&convert); | 4818 __ bind(&convert); |
| 4815 __ LoadRoot(at, Heap::kNanValueRootIndex); | 4819 __ LoadRoot(at, Heap::kNanValueRootIndex); |
| 4816 __ ldc1(result_reg, FieldMemOperand(at, HeapNumber::kValueOffset)); | 4820 __ ldc1(result_reg, FieldMemOperand(at, HeapNumber::kValueOffset)); |
| 4817 __ Branch(&done); | 4821 __ Branch(&done); |
| 4818 | 4822 |
| 4819 __ bind(&heap_number); | 4823 __ bind(&heap_number); |
| 4820 } | 4824 } |
| 4821 // Heap number to double register conversion. | 4825 // Heap number to double register conversion. |
| (...skipping 127 matching lines...) | |
| 4949 | 4953 |
| 4950 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { | 4954 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { |
| 4951 LOperand* input = instr->value(); | 4955 LOperand* input = instr->value(); |
| 4952 ASSERT(input->IsRegister()); | 4956 ASSERT(input->IsRegister()); |
| 4953 LOperand* result = instr->result(); | 4957 LOperand* result = instr->result(); |
| 4954 ASSERT(result->IsDoubleRegister()); | 4958 ASSERT(result->IsDoubleRegister()); |
| 4955 | 4959 |
| 4956 Register input_reg = ToRegister(input); | 4960 Register input_reg = ToRegister(input); |
| 4957 DoubleRegister result_reg = ToDoubleRegister(result); | 4961 DoubleRegister result_reg = ToDoubleRegister(result); |
| 4958 | 4962 |
| 4959 NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED; | |
| 4960 HValue* value = instr->hydrogen()->value(); | 4963 HValue* value = instr->hydrogen()->value(); |
| 4961 if (value->type().IsSmi()) { | 4964 NumberUntagDMode mode = value->representation().IsSmi() |
| 4962 mode = NUMBER_CANDIDATE_IS_SMI; | 4965 ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED; |
| 4963 } else if (value->IsLoadKeyed()) { | |
| 4964 HLoadKeyed* load = HLoadKeyed::cast(value); | |
| 4965 if (load->UsesMustHandleHole()) { | |
| 4966 if (load->hole_mode() == ALLOW_RETURN_HOLE) { | |
| 4967 mode = NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE; | |
| 4968 } | |
| 4969 } | |
| 4970 } | |
| 4971 | 4966 |
| 4972 EmitNumberUntagD(input_reg, result_reg, | 4967 EmitNumberUntagD(input_reg, result_reg, |
| 4973 instr->hydrogen()->allow_undefined_as_nan(), | 4968 instr->hydrogen()->can_convert_undefined_to_nan(), |
| 4974 instr->hydrogen()->deoptimize_on_minus_zero(), | 4969 instr->hydrogen()->deoptimize_on_minus_zero(), |
| 4975 instr->environment(), | 4970 instr->environment(), |
| 4976 mode); | 4971 mode); |
| 4977 } | 4972 } |
| 4978 | 4973 |
| 4979 | 4974 |
| 4980 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { | 4975 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { |
| 4981 Register result_reg = ToRegister(instr->result()); | 4976 Register result_reg = ToRegister(instr->result()); |
| 4982 Register scratch1 = scratch0(); | 4977 Register scratch1 = scratch0(); |
| 4983 Register scratch2 = ToRegister(instr->temp()); | 4978 Register scratch2 = ToRegister(instr->temp()); |
| (...skipping 814 matching lines...) | |
| 5798 __ Subu(scratch, result, scratch); | 5793 __ Subu(scratch, result, scratch); |
| 5799 __ lw(result, FieldMemOperand(scratch, | 5794 __ lw(result, FieldMemOperand(scratch, |
| 5800 FixedArray::kHeaderSize - kPointerSize)); | 5795 FixedArray::kHeaderSize - kPointerSize)); |
| 5801 __ bind(&done); | 5796 __ bind(&done); |
| 5802 } | 5797 } |
| 5803 | 5798 |
| 5804 | 5799 |
| 5805 #undef __ | 5800 #undef __ |
| 5806 | 5801 |
| 5807 } } // namespace v8::internal | 5802 } } // namespace v8::internal |
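Note on the DoCmpHoleAndBranch hunk added above: for an untagged double the new code first branches to the false block when the input compares equal to itself (a non-NaN value can never be the hole), then moves the high word of the FPU register into a scratch register and compares it against kHoleNanUpper32. The standalone C++ sketch below illustrates that check; the IsHoleNan name and the kHoleNanUpper32Sketch sentinel value are assumptions made for illustration, not V8's actual definitions.

#include <cstdint>
#include <cstring>

// Hypothetical sentinel for the hole NaN's upper 32 bits; V8 defines its own
// kHoleNanUpper32 constant, which may differ from this illustrative value.
static const uint32_t kHoleNanUpper32Sketch = 0x7FF7FFFFu;

// Sketch of the test DoCmpHoleAndBranch performs on an untagged double.
bool IsHoleNan(double value) {
  // A value equal to itself is not a NaN, so it cannot be the hole; this
  // mirrors the EmitFalseBranchF(instr, eq, input_reg, input_reg) early exit.
  if (value == value) return false;
  // FmoveHigh reads the high word of the FPU register on MIPS; portably,
  // extract the upper 32 bits of the IEEE-754 representation instead.
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);
  return static_cast<uint32_t>(bits >> 32) == kHoleNanUpper32Sketch;
}

Ordinary NaNs produced by arithmetic fail the high-word comparison, so only the specific hole bit pattern takes the true branch.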