| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 764 matching lines...) |
| 775 int deoptimization_index = deoptimizations_.length(); | 775 int deoptimization_index = deoptimizations_.length(); |
| 776 int pc_offset = masm()->pc_offset(); | 776 int pc_offset = masm()->pc_offset(); |
| 777 environment->Register(deoptimization_index, | 777 environment->Register(deoptimization_index, |
| 778 translation.index(), | 778 translation.index(), |
| 779 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | 779 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); |
| 780 deoptimizations_.Add(environment, zone()); | 780 deoptimizations_.Add(environment, zone()); |
| 781 } | 781 } |
| 782 } | 782 } |
| 783 | 783 |
| 784 | 784 |
| 785 void LCodeGen::DeoptimizeIf(Condition cc, | 785 void LCodeGen::DeoptimizeIf(Condition condition, |
| 786 LEnvironment* environment, | 786 LEnvironment* environment, |
| 787 Deoptimizer::BailoutType bailout_type) { | 787 Deoptimizer::BailoutType bailout_type) { |
| 788 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 788 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 789 ASSERT(environment->HasBeenRegistered()); | 789 ASSERT(environment->HasBeenRegistered()); |
| 790 int id = environment->deoptimization_index(); | 790 int id = environment->deoptimization_index(); |
| 791 ASSERT(info()->IsOptimizing() || info()->IsStub()); | 791 ASSERT(info()->IsOptimizing() || info()->IsStub()); |
| 792 Address entry = | 792 Address entry = |
| 793 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 793 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
| 794 if (entry == NULL) { | 794 if (entry == NULL) { |
| 795 Abort(kBailoutWasNotPrepared); | 795 Abort(kBailoutWasNotPrepared); |
| 796 return; | 796 return; |
| 797 } | 797 } |
| 798 | 798 |
| 799 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on ARM. | 799 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on ARM. |
| 800 if (FLAG_deopt_every_n_times == 1 && | 800 if (FLAG_deopt_every_n_times == 1 && |
| 801 !info()->IsStub() && | 801 !info()->IsStub() && |
| 802 info()->opt_count() == id) { | 802 info()->opt_count() == id) { |
| 803 ASSERT(frame_is_built_); | 803 ASSERT(frame_is_built_); |
| 804 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 804 __ Call(entry, RelocInfo::RUNTIME_ENTRY); |
| 805 return; | 805 return; |
| 806 } | 806 } |
| 807 | 807 |
| 808 if (FLAG_trap_on_deopt && info()->IsOptimizing()) { | 808 if (FLAG_trap_on_deopt && info()->IsOptimizing()) { |
| 809 __ stop("trap_on_deopt", cc); | 809 __ stop("trap_on_deopt", condition); |
| 810 } | 810 } |
| 811 | 811 |
| 812 ASSERT(info()->IsStub() || frame_is_built_); | 812 ASSERT(info()->IsStub() || frame_is_built_); |
| 813 if (cc == al && frame_is_built_) { | 813 if (condition == al && frame_is_built_) { |
| 814 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 814 __ Call(entry, RelocInfo::RUNTIME_ENTRY); |
| 815 } else { | 815 } else { |
| 816 // We often have several deopts to the same entry, reuse the last | 816 // We often have several deopts to the same entry, reuse the last |
| 817 // jump entry if this is the case. | 817 // jump entry if this is the case. |
| 818 if (deopt_jump_table_.is_empty() || | 818 if (deopt_jump_table_.is_empty() || |
| 819 (deopt_jump_table_.last().address != entry) || | 819 (deopt_jump_table_.last().address != entry) || |
| 820 (deopt_jump_table_.last().bailout_type != bailout_type) || | 820 (deopt_jump_table_.last().bailout_type != bailout_type) || |
| 821 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { | 821 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { |
| 822 Deoptimizer::JumpTableEntry table_entry(entry, | 822 Deoptimizer::JumpTableEntry table_entry(entry, |
| 823 bailout_type, | 823 bailout_type, |
| 824 !frame_is_built_); | 824 !frame_is_built_); |
| 825 deopt_jump_table_.Add(table_entry, zone()); | 825 deopt_jump_table_.Add(table_entry, zone()); |
| 826 } | 826 } |
| 827 __ b(cc, &deopt_jump_table_.last().label); | 827 __ b(condition, &deopt_jump_table_.last().label); |
| 828 } | 828 } |
| 829 } | 829 } |
| 830 | 830 |
| 831 | 831 |
| 832 void LCodeGen::DeoptimizeIf(Condition cc, | 832 void LCodeGen::DeoptimizeIf(Condition condition, |
| 833 LEnvironment* environment) { | 833 LEnvironment* environment) { |
| 834 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 834 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
| 835 ? Deoptimizer::LAZY | 835 ? Deoptimizer::LAZY |
| 836 : Deoptimizer::EAGER; | 836 : Deoptimizer::EAGER; |
| 837 DeoptimizeIf(cc, environment, bailout_type); | 837 DeoptimizeIf(condition, environment, bailout_type); |
| 838 } | 838 } |
| 839 | 839 |
| 840 | 840 |
| 841 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { | 841 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { |
| 842 ZoneList<Handle<Map> > maps(1, zone()); | 842 ZoneList<Handle<Map> > maps(1, zone()); |
| 843 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); | 843 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
| 844 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { | 844 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { |
| 845 RelocInfo::Mode mode = it.rinfo()->rmode(); | 845 RelocInfo::Mode mode = it.rinfo()->rmode(); |
| 846 if (mode == RelocInfo::EMBEDDED_OBJECT && | 846 if (mode == RelocInfo::EMBEDDED_OBJECT && |
| 847 it.rinfo()->target_object()->IsMap()) { | 847 it.rinfo()->target_object()->IsMap()) { |
| (...skipping 1283 matching lines...) |
| 2131 | 2131 |
| 2132 | 2132 |
| 2133 int LCodeGen::GetNextEmittedBlock() const { | 2133 int LCodeGen::GetNextEmittedBlock() const { |
| 2134 for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) { | 2134 for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) { |
| 2135 if (!chunk_->GetLabel(i)->HasReplacement()) return i; | 2135 if (!chunk_->GetLabel(i)->HasReplacement()) return i; |
| 2136 } | 2136 } |
| 2137 return -1; | 2137 return -1; |
| 2138 } | 2138 } |
| 2139 | 2139 |
| 2140 template<class InstrType> | 2140 template<class InstrType> |
| 2141 void LCodeGen::EmitBranch(InstrType instr, Condition cc) { | 2141 void LCodeGen::EmitBranch(InstrType instr, Condition condition) { |
| 2142 int left_block = instr->TrueDestination(chunk_); | 2142 int left_block = instr->TrueDestination(chunk_); |
| 2143 int right_block = instr->FalseDestination(chunk_); | 2143 int right_block = instr->FalseDestination(chunk_); |
| 2144 | 2144 |
| 2145 int next_block = GetNextEmittedBlock(); | 2145 int next_block = GetNextEmittedBlock(); |
| 2146 | 2146 |
| 2147 if (right_block == left_block || cc == al) { | 2147 if (right_block == left_block || condition == al) { |
| 2148 EmitGoto(left_block); | 2148 EmitGoto(left_block); |
| 2149 } else if (left_block == next_block) { | 2149 } else if (left_block == next_block) { |
| 2150 __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block)); | 2150 __ b(NegateCondition(condition), chunk_->GetAssemblyLabel(right_block)); |
| 2151 } else if (right_block == next_block) { | 2151 } else if (right_block == next_block) { |
| 2152 __ b(cc, chunk_->GetAssemblyLabel(left_block)); | 2152 __ b(condition, chunk_->GetAssemblyLabel(left_block)); |
| 2153 } else { | 2153 } else { |
| 2154 __ b(cc, chunk_->GetAssemblyLabel(left_block)); | 2154 __ b(condition, chunk_->GetAssemblyLabel(left_block)); |
| 2155 __ b(chunk_->GetAssemblyLabel(right_block)); | 2155 __ b(chunk_->GetAssemblyLabel(right_block)); |
| 2156 } | 2156 } |
| 2157 } | 2157 } |
| 2158 | 2158 |
| 2159 | 2159 |
| 2160 template<class InstrType> |
| 2161 void LCodeGen::EmitFalseBranch(InstrType instr, Condition condition) { |
| 2162 int false_block = instr->FalseDestination(chunk_); |
| 2163 __ b(condition, chunk_->GetAssemblyLabel(false_block)); |
| 2164 } |
| 2165 |
| 2166 |
| 2160 void LCodeGen::DoDebugBreak(LDebugBreak* instr) { | 2167 void LCodeGen::DoDebugBreak(LDebugBreak* instr) { |
| 2161 __ stop("LBreak"); | 2168 __ stop("LBreak"); |
| 2162 } | 2169 } |
| 2163 | 2170 |
| 2164 | 2171 |
| 2165 void LCodeGen::DoIsNumberAndBranch(LIsNumberAndBranch* instr) { | 2172 void LCodeGen::DoIsNumberAndBranch(LIsNumberAndBranch* instr) { |
| 2166 Representation r = instr->hydrogen()->value()->representation(); | 2173 Representation r = instr->hydrogen()->value()->representation(); |
| 2167 if (r.IsSmiOrInteger32() || r.IsDouble()) { | 2174 if (r.IsSmiOrInteger32() || r.IsDouble()) { |
| 2168 EmitBranch(instr, al); | 2175 EmitBranch(instr, al); |
| 2169 } else { | 2176 } else { |
| (...skipping 235 matching lines...) |
| 2405 | 2412 |
| 2406 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) { | 2413 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) { |
| 2407 Register left = ToRegister(instr->left()); | 2414 Register left = ToRegister(instr->left()); |
| 2408 Register right = ToRegister(instr->right()); | 2415 Register right = ToRegister(instr->right()); |
| 2409 | 2416 |
| 2410 __ cmp(left, Operand(right)); | 2417 __ cmp(left, Operand(right)); |
| 2411 EmitBranch(instr, eq); | 2418 EmitBranch(instr, eq); |
| 2412 } | 2419 } |
| 2413 | 2420 |
| 2414 | 2421 |
| 2422 void LCodeGen::DoCmpHoleAndBranch(LCmpHoleAndBranch* instr) { |
| 2423 if (instr->hydrogen()->representation().IsTagged()) { |
| 2424 Register input_reg = ToRegister(instr->object()); |
| 2425 __ mov(ip, Operand(factory()->the_hole_value())); |
| 2426 __ cmp(input_reg, ip); |
| 2427 EmitBranch(instr, eq); |
| 2428 return; |
| 2429 } |
| 2430 |
| 2431 DwVfpRegister input_reg = ToDoubleRegister(instr->object()); |
| 2432 __ VFPCompareAndSetFlags(input_reg, input_reg); |
| 2433 EmitFalseBranch(instr, vc); |
| 2434 |
| 2435 Register scratch = scratch0(); |
| 2436 __ VmovHigh(scratch, input_reg); |
| 2437 __ cmp(scratch, Operand(kHoleNanUpper32)); |
| 2438 EmitBranch(instr, eq); |
| 2439 } |
| 2440 |
| 2441 |
| 2415 Condition LCodeGen::EmitIsObject(Register input, | 2442 Condition LCodeGen::EmitIsObject(Register input, |
| 2416 Register temp1, | 2443 Register temp1, |
| 2417 Label* is_not_object, | 2444 Label* is_not_object, |
| 2418 Label* is_object) { | 2445 Label* is_object) { |
| 2419 Register temp2 = scratch0(); | 2446 Register temp2 = scratch0(); |
| 2420 __ JumpIfSmi(input, is_not_object); | 2447 __ JumpIfSmi(input, is_not_object); |
| 2421 | 2448 |
| 2422 __ LoadRoot(temp2, Heap::kNullValueRootIndex); | 2449 __ LoadRoot(temp2, Heap::kNullValueRootIndex); |
| 2423 __ cmp(input, temp2); | 2450 __ cmp(input, temp2); |
| 2424 __ b(eq, is_object); | 2451 __ b(eq, is_object); |
| (...skipping 1863 matching lines...) |
| 4288 | 4315 |
| 4289 // Name is always in r2. | 4316 // Name is always in r2. |
| 4290 __ mov(r2, Operand(instr->name())); | 4317 __ mov(r2, Operand(instr->name())); |
| 4291 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) | 4318 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) |
| 4292 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 4319 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 4293 : isolate()->builtins()->StoreIC_Initialize(); | 4320 : isolate()->builtins()->StoreIC_Initialize(); |
| 4294 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); | 4321 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); |
| 4295 } | 4322 } |
| 4296 | 4323 |
| 4297 | 4324 |
| 4298 void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) { | 4325 void LCodeGen::ApplyCheckIf(Condition condition, LBoundsCheck* check) { |
| 4299 if (FLAG_debug_code && check->hydrogen()->skip_check()) { | 4326 if (FLAG_debug_code && check->hydrogen()->skip_check()) { |
| 4300 Label done; | 4327 Label done; |
| 4301 __ b(NegateCondition(cc), &done); | 4328 __ b(NegateCondition(condition), &done); |
| 4302 __ stop("eliminated bounds check failed"); | 4329 __ stop("eliminated bounds check failed"); |
| 4303 __ bind(&done); | 4330 __ bind(&done); |
| 4304 } else { | 4331 } else { |
| 4305 DeoptimizeIf(cc, check->environment()); | 4332 DeoptimizeIf(condition, check->environment()); |
| 4306 } | 4333 } |
| 4307 } | 4334 } |
| 4308 | 4335 |
| 4309 | 4336 |
| 4310 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { | 4337 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { |
| 4311 if (instr->hydrogen()->skip_check()) return; | 4338 if (instr->hydrogen()->skip_check()) return; |
| 4312 | 4339 |
| 4313 if (instr->index()->IsConstantOperand()) { | 4340 if (instr->index()->IsConstantOperand()) { |
| 4314 int constant_index = | 4341 int constant_index = |
| 4315 ToInteger32(LConstantOperand::cast(instr->index())); | 4342 ToInteger32(LConstantOperand::cast(instr->index())); |
| (...skipping 503 matching lines...) |
| 4819 private: | 4846 private: |
| 4820 LNumberTagD* instr_; | 4847 LNumberTagD* instr_; |
| 4821 }; | 4848 }; |
| 4822 | 4849 |
| 4823 DwVfpRegister input_reg = ToDoubleRegister(instr->value()); | 4850 DwVfpRegister input_reg = ToDoubleRegister(instr->value()); |
| 4824 Register scratch = scratch0(); | 4851 Register scratch = scratch0(); |
| 4825 Register reg = ToRegister(instr->result()); | 4852 Register reg = ToRegister(instr->result()); |
| 4826 Register temp1 = ToRegister(instr->temp()); | 4853 Register temp1 = ToRegister(instr->temp()); |
| 4827 Register temp2 = ToRegister(instr->temp2()); | 4854 Register temp2 = ToRegister(instr->temp2()); |
| 4828 | 4855 |
| 4829 bool convert_hole = false; | |
| 4830 HValue* change_input = instr->hydrogen()->value(); | |
| 4831 if (change_input->IsLoadKeyed()) { | |
| 4832 HLoadKeyed* load = HLoadKeyed::cast(change_input); | |
| 4833 convert_hole = load->UsesMustHandleHole(); | |
| 4834 } | |
| 4835 | |
| 4836 Label no_special_nan_handling; | |
| 4837 Label done; | |
| 4838 if (convert_hole) { | |
| 4839 DwVfpRegister input_reg = ToDoubleRegister(instr->value()); | |
| 4840 __ VFPCompareAndSetFlags(input_reg, input_reg); | |
| 4841 __ b(vc, &no_special_nan_handling); | |
| 4842 __ VmovHigh(scratch, input_reg); | |
| 4843 __ cmp(scratch, Operand(kHoleNanUpper32)); | |
| 4844 // If not the hole NaN, force the NaN to be canonical. | |
| 4845 __ VFPCanonicalizeNaN(input_reg, ne); | |
| 4846 __ b(ne, &no_special_nan_handling); | |
| 4847 __ Move(reg, factory()->the_hole_value()); | |
| 4848 __ b(&done); | |
| 4849 } | |
| 4850 | |
| 4851 __ bind(&no_special_nan_handling); | |
| 4852 DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr); | 4856 DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr); |
| 4853 if (FLAG_inline_new) { | 4857 if (FLAG_inline_new) { |
| 4854 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex); | 4858 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex); |
| 4855 // We want the untagged address first for performance | 4859 // We want the untagged address first for performance |
| 4856 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry(), | 4860 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry(), |
| 4857 DONT_TAG_RESULT); | 4861 DONT_TAG_RESULT); |
| 4858 } else { | 4862 } else { |
| 4859 __ jmp(deferred->entry()); | 4863 __ jmp(deferred->entry()); |
| 4860 } | 4864 } |
| 4861 __ bind(deferred->exit()); | 4865 __ bind(deferred->exit()); |
| 4862 __ vstr(input_reg, reg, HeapNumber::kValueOffset); | 4866 __ vstr(input_reg, reg, HeapNumber::kValueOffset); |
| 4863 // Now that we have finished with the object's real address tag it | 4867 // Now that we have finished with the object's real address tag it |
| 4864 __ add(reg, reg, Operand(kHeapObjectTag)); | 4868 __ add(reg, reg, Operand(kHeapObjectTag)); |
| 4865 __ bind(&done); | |
| 4866 } | 4869 } |
| 4867 | 4870 |
| 4868 | 4871 |
| 4869 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { | 4872 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { |
| 4870 // TODO(3095996): Get rid of this. For now, we need to make the | 4873 // TODO(3095996): Get rid of this. For now, we need to make the |
| 4871 // result register contain a valid pointer because it is already | 4874 // result register contain a valid pointer because it is already |
| 4872 // contained in the register pointer map. | 4875 // contained in the register pointer map. |
| 4873 Register reg = ToRegister(instr->result()); | 4876 Register reg = ToRegister(instr->result()); |
| 4874 __ mov(reg, Operand::Zero()); | 4877 __ mov(reg, Operand::Zero()); |
| 4875 | 4878 |
| (...skipping 19 matching lines...) |
| 4895 __ SmiUntag(result, input, SetCC); | 4898 __ SmiUntag(result, input, SetCC); |
| 4896 DeoptimizeIf(cs, instr->environment()); | 4899 DeoptimizeIf(cs, instr->environment()); |
| 4897 } else { | 4900 } else { |
| 4898 __ SmiUntag(result, input); | 4901 __ SmiUntag(result, input); |
| 4899 } | 4902 } |
| 4900 } | 4903 } |
| 4901 | 4904 |
| 4902 | 4905 |
| 4903 void LCodeGen::EmitNumberUntagD(Register input_reg, | 4906 void LCodeGen::EmitNumberUntagD(Register input_reg, |
| 4904 DwVfpRegister result_reg, | 4907 DwVfpRegister result_reg, |
| 4905 bool allow_undefined_as_nan, | 4908 bool can_convert_undefined_to_nan, |
| 4906 bool deoptimize_on_minus_zero, | 4909 bool deoptimize_on_minus_zero, |
| 4907 LEnvironment* env, | 4910 LEnvironment* env, |
| 4908 NumberUntagDMode mode) { | 4911 NumberUntagDMode mode) { |
| 4909 Register scratch = scratch0(); | 4912 Register scratch = scratch0(); |
| 4910 SwVfpRegister flt_scratch = double_scratch0().low(); | 4913 SwVfpRegister flt_scratch = double_scratch0().low(); |
| 4911 ASSERT(!result_reg.is(double_scratch0())); | 4914 ASSERT(!result_reg.is(double_scratch0())); |
| 4912 | 4915 |
| 4913 Label load_smi, heap_number, done; | 4916 Label load_smi, heap_number, done; |
| 4914 | 4917 |
| 4915 STATIC_ASSERT(NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE > | 4918 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { |
| 4916 NUMBER_CANDIDATE_IS_ANY_TAGGED); | |
| 4917 if (mode >= NUMBER_CANDIDATE_IS_ANY_TAGGED) { | |
| 4918 // Smi check. | 4919 // Smi check. |
| 4919 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi); | 4920 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi); |
| 4920 | 4921 |
| 4921 // Heap number map check. | 4922 // Heap number map check. |
| 4922 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); | 4923 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); |
| 4923 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | 4924 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); |
| 4924 __ cmp(scratch, Operand(ip)); | 4925 __ cmp(scratch, Operand(ip)); |
| 4925 if (!allow_undefined_as_nan) { | 4926 if (!can_convert_undefined_to_nan) { |
| 4926 DeoptimizeIf(ne, env); | 4927 DeoptimizeIf(ne, env); |
| 4927 } else { | 4928 } else { |
| 4928 Label heap_number, convert; | 4929 Label heap_number, convert; |
| 4929 __ b(eq, &heap_number); | 4930 __ b(eq, &heap_number); |
| 4930 | 4931 |
| 4931 // Convert undefined (and hole) to NaN. | 4932 // Convert undefined (and hole) to NaN. |
| 4932 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 4933 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 4933 __ cmp(input_reg, Operand(ip)); | 4934 __ cmp(input_reg, Operand(ip)); |
| 4934 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE) { | |
| 4935 __ b(eq, &convert); | |
| 4936 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | |
| 4937 __ cmp(input_reg, Operand(ip)); | |
| 4938 } | |
| 4939 DeoptimizeIf(ne, env); | 4935 DeoptimizeIf(ne, env); |
| 4940 | 4936 |
| 4941 __ bind(&convert); | 4937 __ bind(&convert); |
| 4942 __ LoadRoot(scratch, Heap::kNanValueRootIndex); | 4938 __ LoadRoot(scratch, Heap::kNanValueRootIndex); |
| 4943 __ vldr(result_reg, scratch, HeapNumber::kValueOffset - kHeapObjectTag); | 4939 __ vldr(result_reg, scratch, HeapNumber::kValueOffset - kHeapObjectTag); |
| 4944 __ jmp(&done); | 4940 __ jmp(&done); |
| 4945 | 4941 |
| 4946 __ bind(&heap_number); | 4942 __ bind(&heap_number); |
| 4947 } | 4943 } |
| 4948 // Heap number to double register conversion. | 4944 // Heap number to double register conversion. |
| (...skipping 120 matching lines...) |
| 5069 | 5065 |
| 5070 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { | 5066 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { |
| 5071 LOperand* input = instr->value(); | 5067 LOperand* input = instr->value(); |
| 5072 ASSERT(input->IsRegister()); | 5068 ASSERT(input->IsRegister()); |
| 5073 LOperand* result = instr->result(); | 5069 LOperand* result = instr->result(); |
| 5074 ASSERT(result->IsDoubleRegister()); | 5070 ASSERT(result->IsDoubleRegister()); |
| 5075 | 5071 |
| 5076 Register input_reg = ToRegister(input); | 5072 Register input_reg = ToRegister(input); |
| 5077 DwVfpRegister result_reg = ToDoubleRegister(result); | 5073 DwVfpRegister result_reg = ToDoubleRegister(result); |
| 5078 | 5074 |
| 5079 NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED; | |
| 5080 HValue* value = instr->hydrogen()->value(); | 5075 HValue* value = instr->hydrogen()->value(); |
| 5081 if (value->type().IsSmi()) { | 5076 NumberUntagDMode mode = value->representation().IsSmi() |
| 5082 mode = NUMBER_CANDIDATE_IS_SMI; | 5077 ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED; |
| 5083 } else if (value->IsLoadKeyed()) { | |
| 5084 HLoadKeyed* load = HLoadKeyed::cast(value); | |
| 5085 if (load->UsesMustHandleHole()) { | |
| 5086 if (load->hole_mode() == ALLOW_RETURN_HOLE) { | |
| 5087 mode = NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE; | |
| 5088 } | |
| 5089 } | |
| 5090 } | |
| 5091 | 5078 |
| 5092 EmitNumberUntagD(input_reg, result_reg, | 5079 EmitNumberUntagD(input_reg, result_reg, |
| 5093 instr->hydrogen()->allow_undefined_as_nan(), | 5080 instr->hydrogen()->can_convert_undefined_to_nan(), |
| 5094 instr->hydrogen()->deoptimize_on_minus_zero(), | 5081 instr->hydrogen()->deoptimize_on_minus_zero(), |
| 5095 instr->environment(), | 5082 instr->environment(), |
| 5096 mode); | 5083 mode); |
| 5097 } | 5084 } |
| 5098 | 5085 |
| 5099 | 5086 |
| 5100 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { | 5087 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { |
| 5101 Register result_reg = ToRegister(instr->result()); | 5088 Register result_reg = ToRegister(instr->result()); |
| 5102 Register scratch1 = scratch0(); | 5089 Register scratch1 = scratch0(); |
| 5103 Register scratch2 = ToRegister(instr->temp()); | 5090 Register scratch2 = ToRegister(instr->temp()); |
| (...skipping 752 matching lines...) |
| 5856 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5843 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); |
| 5857 __ ldr(result, FieldMemOperand(scratch, | 5844 __ ldr(result, FieldMemOperand(scratch, |
| 5858 FixedArray::kHeaderSize - kPointerSize)); | 5845 FixedArray::kHeaderSize - kPointerSize)); |
| 5859 __ bind(&done); | 5846 __ bind(&done); |
| 5860 } | 5847 } |
| 5861 | 5848 |
| 5862 | 5849 |
| 5863 #undef __ | 5850 #undef __ |
| 5864 | 5851 |
| 5865 } } // namespace v8::internal | 5852 } } // namespace v8::internal |