| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 69 matching lines...) |
| 80 GenerateDeferredCode() && | 80 GenerateDeferredCode() && |
| 81 GenerateJumpTable() && | 81 GenerateJumpTable() && |
| 82 GenerateSafepointTable(); | 82 GenerateSafepointTable(); |
| 83 } | 83 } |
| 84 | 84 |
| 85 | 85 |
| 86 void LCodeGen::FinishCode(Handle<Code> code) { | 86 void LCodeGen::FinishCode(Handle<Code> code) { |
| 87 ASSERT(is_done()); | 87 ASSERT(is_done()); |
| 88 code->set_stack_slots(GetStackSlotCount()); | 88 code->set_stack_slots(GetStackSlotCount()); |
| 89 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 89 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
| 90 if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code); | 90 RegisterDependentCodeForEmbeddedMaps(code); |
| 91 PopulateDeoptimizationData(code); | 91 PopulateDeoptimizationData(code); |
| 92 info()->CommitDependencies(code); | 92 info()->CommitDependencies(code); |
| 93 } | 93 } |
| 94 | 94 |
| 95 | 95 |
| 96 void LChunkBuilder::Abort(BailoutReason reason) { | 96 void LChunkBuilder::Abort(BailoutReason reason) { |
| 97 info()->set_bailout_reason(reason); | 97 info()->set_bailout_reason(reason); |
| 98 status_ = ABORTED; | 98 status_ = ABORTED; |
| 99 } | 99 } |
| 100 | 100 |
| (...skipping 220 matching lines...) |
| 321 | 321 |
| 322 | 322 |
| 323 bool LCodeGen::GenerateDeferredCode() { | 323 bool LCodeGen::GenerateDeferredCode() { |
| 324 ASSERT(is_generating()); | 324 ASSERT(is_generating()); |
| 325 if (deferred_.length() > 0) { | 325 if (deferred_.length() > 0) { |
| 326 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 326 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
| 327 LDeferredCode* code = deferred_[i]; | 327 LDeferredCode* code = deferred_[i]; |
| 328 | 328 |
| 329 HValue* value = | 329 HValue* value = |
| 330 instructions_->at(code->instruction_index())->hydrogen_value(); | 330 instructions_->at(code->instruction_index())->hydrogen_value(); |
| 331 RecordAndWritePosition( | 331 RecordAndWritePosition(value->position()); |
| 332 chunk()->graph()->SourcePositionToScriptPosition(value->position())); | |
| 333 | 332 |
| 334 Comment(";;; <@%d,#%d> " | 333 Comment(";;; <@%d,#%d> " |
| 335 "-------------------- Deferred %s --------------------", | 334 "-------------------- Deferred %s --------------------", |
| 336 code->instruction_index(), | 335 code->instruction_index(), |
| 337 code->instr()->hydrogen_value()->id(), | 336 code->instr()->hydrogen_value()->id(), |
| 338 code->instr()->Mnemonic()); | 337 code->instr()->Mnemonic()); |
| 339 __ bind(code->entry()); | 338 __ bind(code->entry()); |
| 340 if (NeedsDeferredFrame()) { | 339 if (NeedsDeferredFrame()) { |
| 341 Comment(";;; Build frame"); | 340 Comment(";;; Build frame"); |
| 342 ASSERT(!frame_is_built_); | 341 ASSERT(!frame_is_built_); |
| (...skipping 442 matching lines...) |
| 785 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 784 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { |
| 786 int length = deoptimizations_.length(); | 785 int length = deoptimizations_.length(); |
| 787 if (length == 0) return; | 786 if (length == 0) return; |
| 788 Handle<DeoptimizationInputData> data = | 787 Handle<DeoptimizationInputData> data = |
| 789 factory()->NewDeoptimizationInputData(length, TENURED); | 788 factory()->NewDeoptimizationInputData(length, TENURED); |
| 790 | 789 |
| 791 Handle<ByteArray> translations = | 790 Handle<ByteArray> translations = |
| 792 translations_.CreateByteArray(isolate()->factory()); | 791 translations_.CreateByteArray(isolate()->factory()); |
| 793 data->SetTranslationByteArray(*translations); | 792 data->SetTranslationByteArray(*translations); |
| 794 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); | 793 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); |
| 795 data->SetOptimizationId(Smi::FromInt(info_->optimization_id())); | |
| 796 | 794 |
| 797 Handle<FixedArray> literals = | 795 Handle<FixedArray> literals = |
| 798 factory()->NewFixedArray(deoptimization_literals_.length(), TENURED); | 796 factory()->NewFixedArray(deoptimization_literals_.length(), TENURED); |
| 799 { AllowDeferredHandleDereference copy_handles; | 797 { AllowDeferredHandleDereference copy_handles; |
| 800 for (int i = 0; i < deoptimization_literals_.length(); i++) { | 798 for (int i = 0; i < deoptimization_literals_.length(); i++) { |
| 801 literals->set(i, *deoptimization_literals_[i]); | 799 literals->set(i, *deoptimization_literals_[i]); |
| 802 } | 800 } |
| 803 data->SetLiteralArray(*literals); | 801 data->SetLiteralArray(*literals); |
| 804 } | 802 } |
| 805 | 803 |
| (...skipping 339 matching lines...) |
| 1145 | 1143 |
| 1146 __ addq(reg2, Immediate(1 << 30)); | 1144 __ addq(reg2, Immediate(1 << 30)); |
| 1147 __ sar(reg2, Immediate(shift)); | 1145 __ sar(reg2, Immediate(shift)); |
| 1148 } | 1146 } |
| 1149 } | 1147 } |
| 1150 | 1148 |
| 1151 | 1149 |
| 1152 void LCodeGen::DoDivI(LDivI* instr) { | 1150 void LCodeGen::DoDivI(LDivI* instr) { |
| 1153 if (!instr->is_flooring() && instr->hydrogen()->RightIsPowerOf2()) { | 1151 if (!instr->is_flooring() && instr->hydrogen()->RightIsPowerOf2()) { |
| 1154 Register dividend = ToRegister(instr->left()); | 1152 Register dividend = ToRegister(instr->left()); |
| 1155 HDiv* hdiv = instr->hydrogen(); | 1153 int32_t divisor = |
| 1156 int32_t divisor = hdiv->right()->GetInteger32Constant(); | 1154 HConstant::cast(instr->hydrogen()->right())->Integer32Value(); |
| 1157 Register result = ToRegister(instr->result()); | 1155 int32_t test_value = 0; |
| 1158 ASSERT(!result.is(dividend)); | 1156 int32_t power = 0; |
| 1159 | 1157 |
| 1160 // Check for (0 / -x) that will produce negative zero. | 1158 if (divisor > 0) { |
| 1161 if (hdiv->left()->RangeCanInclude(0) && divisor < 0 && | 1159 test_value = divisor - 1; |
| 1162 hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1160 power = WhichPowerOf2(divisor); |
| 1163 __ testl(dividend, dividend); | 1161 } else { |
| 1164 DeoptimizeIf(zero, instr->environment()); | 1162 // Check for (0 / -x) that will produce negative zero. |
| 1163 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1164 __ testl(dividend, dividend); |
| 1165 DeoptimizeIf(zero, instr->environment()); |
| 1166 } |
| 1167 // Check for (kMinInt / -1). |
| 1168 if (divisor == -1 && instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 1169 __ cmpl(dividend, Immediate(kMinInt)); |
| 1170 DeoptimizeIf(zero, instr->environment()); |
| 1171 } |
| 1172 test_value = - divisor - 1; |
| 1173 power = WhichPowerOf2(-divisor); |
| 1165 } | 1174 } |
| 1166 // Check for (kMinInt / -1). | 1175 |
| 1167 if (hdiv->left()->RangeCanInclude(kMinInt) && divisor == -1 && | 1176 if (test_value != 0) { |
| 1168 hdiv->CheckFlag(HValue::kCanOverflow)) { | 1177 if (instr->hydrogen()->CheckFlag( |
| 1169 __ cmpl(dividend, Immediate(kMinInt)); | 1178 HInstruction::kAllUsesTruncatingToInt32)) { |
| 1170 DeoptimizeIf(zero, instr->environment()); | 1179 Label done, negative; |
| 1180 __ cmpl(dividend, Immediate(0)); |
| 1181 __ j(less, &negative, Label::kNear); |
| 1182 __ sarl(dividend, Immediate(power)); |
| 1183 if (divisor < 0) __ negl(dividend); |
| 1184 __ jmp(&done, Label::kNear); |
| 1185 |
| 1186 __ bind(&negative); |
| 1187 __ negl(dividend); |
| 1188 __ sarl(dividend, Immediate(power)); |
| 1189 if (divisor > 0) __ negl(dividend); |
| 1190 __ bind(&done); |
| 1191 return; // Don't fall through to "__ neg" below. |
| 1192 } else { |
| 1193 // Deoptimize if remainder is not 0. |
| 1194 __ testl(dividend, Immediate(test_value)); |
| 1195 DeoptimizeIf(not_zero, instr->environment()); |
| 1196 __ sarl(dividend, Immediate(power)); |
| 1197 } |
| 1171 } | 1198 } |
| 1172 // Deoptimize if remainder will not be 0. | 1199 |
| 1173 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { | 1200 if (divisor < 0) __ negl(dividend); |
| 1174 __ testl(dividend, Immediate(Abs(divisor) - 1)); | 1201 |
| 1175 DeoptimizeIf(not_zero, instr->environment()); | |
| 1176 } | |
| 1177 __ Move(result, dividend); | |
| 1178 int32_t shift = WhichPowerOf2(Abs(divisor)); | |
| 1179 if (shift > 0) { | |
| 1180 // The arithmetic shift is always OK, the 'if' is an optimization only. | |
| 1181 if (shift > 1) __ sarl(result, Immediate(31)); | |
| 1182 __ shrl(result, Immediate(32 - shift)); | |
| 1183 __ addl(result, dividend); | |
| 1184 __ sarl(result, Immediate(shift)); | |
| 1185 } | |
| 1186 if (divisor < 0) __ negl(result); | |
| 1187 return; | 1202 return; |
| 1188 } | 1203 } |
| 1189 | 1204 |
| 1190 LOperand* right = instr->right(); | 1205 LOperand* right = instr->right(); |
| 1191 ASSERT(ToRegister(instr->result()).is(rax)); | 1206 ASSERT(ToRegister(instr->result()).is(rax)); |
| 1192 ASSERT(ToRegister(instr->left()).is(rax)); | 1207 ASSERT(ToRegister(instr->left()).is(rax)); |
| 1193 ASSERT(!ToRegister(instr->right()).is(rax)); | 1208 ASSERT(!ToRegister(instr->right()).is(rax)); |
| 1194 ASSERT(!ToRegister(instr->right()).is(rdx)); | 1209 ASSERT(!ToRegister(instr->right()).is(rdx)); |
| 1195 | 1210 |
| 1196 Register left_reg = rax; | 1211 Register left_reg = rax; |
| (...skipping 1560 matching lines...) |
| 2757 | 2772 |
| 2758 Register result = ToRegister(instr->result()); | 2773 Register result = ToRegister(instr->result()); |
| 2759 if (!access.IsInobject()) { | 2774 if (!access.IsInobject()) { |
| 2760 __ movp(result, FieldOperand(object, JSObject::kPropertiesOffset)); | 2775 __ movp(result, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 2761 object = result; | 2776 object = result; |
| 2762 } | 2777 } |
| 2763 | 2778 |
| 2764 Representation representation = access.representation(); | 2779 Representation representation = access.representation(); |
| 2765 if (representation.IsSmi() && | 2780 if (representation.IsSmi() && |
| 2766 instr->hydrogen()->representation().IsInteger32()) { | 2781 instr->hydrogen()->representation().IsInteger32()) { |
| 2767 #ifdef DEBUG | |
| 2768 Register scratch = kScratchRegister; | |
| 2769 __ Load(scratch, FieldOperand(object, offset), representation); | |
| 2770 __ AssertSmi(scratch); | |
| 2771 #endif | |
| 2772 | |
| 2773 // Read int value directly from upper half of the smi. | 2782 // Read int value directly from upper half of the smi. |
| 2774 STATIC_ASSERT(kSmiTag == 0); | 2783 STATIC_ASSERT(kSmiTag == 0); |
| 2775 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); | 2784 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); |
| 2776 offset += kPointerSize / 2; | 2785 offset += kPointerSize / 2; |
| 2777 representation = Representation::Integer32(); | 2786 representation = Representation::Integer32(); |
| 2778 } | 2787 } |
| 2779 __ Load(result, FieldOperand(object, offset), representation); | 2788 __ Load(result, FieldOperand(object, offset), representation); |
| 2780 } | 2789 } |
| 2781 | 2790 |
| 2782 | 2791 |
| (...skipping 225 matching lines...) |
| 3008 } | 3017 } |
| 3009 } | 3018 } |
| 3010 | 3019 |
| 3011 bool requires_hole_check = hinstr->RequiresHoleCheck(); | 3020 bool requires_hole_check = hinstr->RequiresHoleCheck(); |
| 3012 int offset = FixedArray::kHeaderSize - kHeapObjectTag; | 3021 int offset = FixedArray::kHeaderSize - kHeapObjectTag; |
| 3013 Representation representation = hinstr->representation(); | 3022 Representation representation = hinstr->representation(); |
| 3014 | 3023 |
| 3015 if (representation.IsInteger32() && | 3024 if (representation.IsInteger32() && |
| 3016 hinstr->elements_kind() == FAST_SMI_ELEMENTS) { | 3025 hinstr->elements_kind() == FAST_SMI_ELEMENTS) { |
| 3017 ASSERT(!requires_hole_check); | 3026 ASSERT(!requires_hole_check); |
| 3018 #ifdef DEBUG | |
| 3019 Register scratch = kScratchRegister; | |
| 3020 __ Load(scratch, | |
| 3021 BuildFastArrayOperand(instr->elements(), | |
| 3022 key, | |
| 3023 FAST_ELEMENTS, | |
| 3024 offset, | |
| 3025 instr->additional_index()), | |
| 3026 Representation::Smi()); | |
| 3027 __ AssertSmi(scratch); | |
| 3028 #endif | |
| 3029 // Read int value directly from upper half of the smi. | 3027 // Read int value directly from upper half of the smi. |
| 3030 STATIC_ASSERT(kSmiTag == 0); | 3028 STATIC_ASSERT(kSmiTag == 0); |
| 3031 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); | 3029 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); |
| 3032 offset += kPointerSize / 2; | 3030 offset += kPointerSize / 2; |
| 3033 } | 3031 } |
| 3034 | 3032 |
| 3035 __ Load(result, | 3033 __ Load(result, |
| 3036 BuildFastArrayOperand(instr->elements(), | 3034 BuildFastArrayOperand(instr->elements(), |
| 3037 key, | 3035 key, |
| 3038 FAST_ELEMENTS, | 3036 FAST_ELEMENTS, |
| (...skipping 268 matching lines...) |
| 3307 // Set rax to arguments count if adaption is not needed. Assumes that rax | 3305 // Set rax to arguments count if adaption is not needed. Assumes that rax |
| 3308 // is available to write to at this point. | 3306 // is available to write to at this point. |
| 3309 if (dont_adapt_arguments) { | 3307 if (dont_adapt_arguments) { |
| 3310 __ Set(rax, arity); | 3308 __ Set(rax, arity); |
| 3311 } | 3309 } |
| 3312 | 3310 |
| 3313 // Invoke function. | 3311 // Invoke function. |
| 3314 if (function.is_identical_to(info()->closure())) { | 3312 if (function.is_identical_to(info()->closure())) { |
| 3315 __ CallSelf(); | 3313 __ CallSelf(); |
| 3316 } else { | 3314 } else { |
| 3317 __ Call(FieldOperand(rdi, JSFunction::kCodeEntryOffset)); | 3315 __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset)); |
| 3318 } | 3316 } |
| 3319 | 3317 |
| 3320 // Set up deoptimization. | 3318 // Set up deoptimization. |
| 3321 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); | 3319 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); |
| 3322 } else { | 3320 } else { |
| 3323 // We need to adapt arguments. | 3321 // We need to adapt arguments. |
| 3324 SafepointGenerator generator( | 3322 SafepointGenerator generator( |
| 3325 this, pointers, Safepoint::kLazyDeopt); | 3323 this, pointers, Safepoint::kLazyDeopt); |
| 3326 ParameterCount count(arity); | 3324 ParameterCount count(arity); |
| 3327 ParameterCount expected(formal_parameter_count); | 3325 ParameterCount expected(formal_parameter_count); |
| (...skipping 44 matching lines...) |
| 3372 HConstant* fun_const = HConstant::cast(instr->hydrogen()->function()); | 3370 HConstant* fun_const = HConstant::cast(instr->hydrogen()->function()); |
| 3373 jsfun = Handle<JSFunction>::cast(fun_const->handle(isolate())); | 3371 jsfun = Handle<JSFunction>::cast(fun_const->handle(isolate())); |
| 3374 is_self_call = jsfun.is_identical_to(info()->closure()); | 3372 is_self_call = jsfun.is_identical_to(info()->closure()); |
| 3375 } | 3373 } |
| 3376 | 3374 |
| 3377 if (is_self_call) { | 3375 if (is_self_call) { |
| 3378 __ CallSelf(); | 3376 __ CallSelf(); |
| 3379 } else { | 3377 } else { |
| 3380 Operand target = FieldOperand(rdi, JSFunction::kCodeEntryOffset); | 3378 Operand target = FieldOperand(rdi, JSFunction::kCodeEntryOffset); |
| 3381 generator.BeforeCall(__ CallSize(target)); | 3379 generator.BeforeCall(__ CallSize(target)); |
| 3382 __ Call(target); | 3380 __ call(target); |
| 3383 } | 3381 } |
| 3384 generator.AfterCall(); | 3382 generator.AfterCall(); |
| 3385 } | 3383 } |
| 3386 | 3384 |
| 3387 | 3385 |
| 3388 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { | 3386 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { |
| 3389 Register input_reg = ToRegister(instr->value()); | 3387 Register input_reg = ToRegister(instr->value()); |
| 3390 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), | 3388 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 3391 Heap::kHeapNumberMapRootIndex); | 3389 Heap::kHeapNumberMapRootIndex); |
| 3392 DeoptimizeIf(not_equal, instr->environment()); | 3390 DeoptimizeIf(not_equal, instr->environment()); |
| (...skipping 329 matching lines...) |
| 3722 __ movsd(Operand(rsp, 0), input_reg); | 3720 __ movsd(Operand(rsp, 0), input_reg); |
| 3723 __ fld_d(Operand(rsp, 0)); | 3721 __ fld_d(Operand(rsp, 0)); |
| 3724 __ fyl2x(); | 3722 __ fyl2x(); |
| 3725 __ fstp_d(Operand(rsp, 0)); | 3723 __ fstp_d(Operand(rsp, 0)); |
| 3726 __ movsd(input_reg, Operand(rsp, 0)); | 3724 __ movsd(input_reg, Operand(rsp, 0)); |
| 3727 __ addq(rsp, Immediate(kDoubleSize)); | 3725 __ addq(rsp, Immediate(kDoubleSize)); |
| 3728 __ bind(&done); | 3726 __ bind(&done); |
| 3729 } | 3727 } |
| 3730 | 3728 |
| 3731 | 3729 |
| 3732 void LCodeGen::DoMathClz32(LMathClz32* instr) { | |
| 3733 Register input = ToRegister(instr->value()); | |
| 3734 Register result = ToRegister(instr->result()); | |
| 3735 Label not_zero_input; | |
| 3736 __ bsrl(result, input); | |
| 3737 | |
| 3738 __ j(not_zero, ¬_zero_input); | |
| 3739 __ Set(result, 63); // 63^31 == 32 | |
| 3740 | |
| 3741 __ bind(¬_zero_input); | |
| 3742 __ xorl(result, Immediate(31)); // for x in [0..31], 31^x == 31-x. | |
| 3743 } | |
| 3744 | |
| 3745 | |
| 3746 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 3730 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { |
| 3747 ASSERT(ToRegister(instr->context()).is(rsi)); | 3731 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3748 ASSERT(ToRegister(instr->function()).is(rdi)); | 3732 ASSERT(ToRegister(instr->function()).is(rdi)); |
| 3749 ASSERT(instr->HasPointerMap()); | 3733 ASSERT(instr->HasPointerMap()); |
| 3750 | 3734 |
| 3751 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); | 3735 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); |
| 3752 if (known_function.is_null()) { | 3736 if (known_function.is_null()) { |
| 3753 LPointerMap* pointers = instr->pointer_map(); | 3737 LPointerMap* pointers = instr->pointer_map(); |
| 3754 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); | 3738 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
| 3755 ParameterCount count(instr->arity()); | 3739 ParameterCount count(instr->arity()); |
| (...skipping 108 matching lines...) |
| 3864 Representation representation = instr->representation(); | 3848 Representation representation = instr->representation(); |
| 3865 | 3849 |
| 3866 HObjectAccess access = hinstr->access(); | 3850 HObjectAccess access = hinstr->access(); |
| 3867 int offset = access.offset(); | 3851 int offset = access.offset(); |
| 3868 | 3852 |
| 3869 if (access.IsExternalMemory()) { | 3853 if (access.IsExternalMemory()) { |
| 3870 ASSERT(!hinstr->NeedsWriteBarrier()); | 3854 ASSERT(!hinstr->NeedsWriteBarrier()); |
| 3871 Register value = ToRegister(instr->value()); | 3855 Register value = ToRegister(instr->value()); |
| 3872 if (instr->object()->IsConstantOperand()) { | 3856 if (instr->object()->IsConstantOperand()) { |
| 3873 ASSERT(value.is(rax)); | 3857 ASSERT(value.is(rax)); |
| 3858 ASSERT(!access.representation().IsSpecialization()); |
| 3874 LConstantOperand* object = LConstantOperand::cast(instr->object()); | 3859 LConstantOperand* object = LConstantOperand::cast(instr->object()); |
| 3875 __ store_rax(ToExternalReference(object)); | 3860 __ store_rax(ToExternalReference(object)); |
| 3876 } else { | 3861 } else { |
| 3877 Register object = ToRegister(instr->object()); | 3862 Register object = ToRegister(instr->object()); |
| 3878 __ Store(MemOperand(object, offset), value, representation); | 3863 __ Store(MemOperand(object, offset), value, representation); |
| 3879 } | 3864 } |
| 3880 return; | 3865 return; |
| 3881 } | 3866 } |
| 3882 | 3867 |
| 3883 Register object = ToRegister(instr->object()); | 3868 Register object = ToRegister(instr->object()); |
| 3884 Handle<Map> transition = instr->transition(); | 3869 Handle<Map> transition = instr->transition(); |
| 3885 SmiCheck check_needed = hinstr->value()->IsHeapObject() | |
| 3886 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | |
| 3887 | 3870 |
| 3888 if (FLAG_track_fields && representation.IsSmi()) { | 3871 if (FLAG_track_fields && representation.IsSmi()) { |
| 3889 if (instr->value()->IsConstantOperand()) { | 3872 if (instr->value()->IsConstantOperand()) { |
| 3890 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); | 3873 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
| 3891 if (!IsInteger32Constant(operand_value) && | 3874 if (!IsInteger32Constant(operand_value) && |
| 3892 !IsSmiConstant(operand_value)) { | 3875 !IsSmiConstant(operand_value)) { |
| 3893 DeoptimizeIf(no_condition, instr->environment()); | 3876 DeoptimizeIf(no_condition, instr->environment()); |
| 3894 } | 3877 } |
| 3895 } | 3878 } |
| 3896 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { | 3879 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { |
| 3897 if (instr->value()->IsConstantOperand()) { | 3880 if (instr->value()->IsConstantOperand()) { |
| 3898 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); | 3881 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
| 3899 if (IsInteger32Constant(operand_value)) { | 3882 if (IsInteger32Constant(operand_value)) { |
| 3900 DeoptimizeIf(no_condition, instr->environment()); | 3883 DeoptimizeIf(no_condition, instr->environment()); |
| 3901 } | 3884 } |
| 3902 } else { | 3885 } else { |
| 3903 if (!hinstr->value()->type().IsHeapObject()) { | 3886 if (!hinstr->value()->type().IsHeapObject()) { |
| 3904 Register value = ToRegister(instr->value()); | 3887 Register value = ToRegister(instr->value()); |
| 3905 Condition cc = masm()->CheckSmi(value); | 3888 Condition cc = masm()->CheckSmi(value); |
| 3906 DeoptimizeIf(cc, instr->environment()); | 3889 DeoptimizeIf(cc, instr->environment()); |
| 3907 | |
| 3908 // We know that value is a smi now, so we can omit the check below. | |
| 3909 check_needed = OMIT_SMI_CHECK; | |
| 3910 } | 3890 } |
| 3911 } | 3891 } |
| 3912 } else if (representation.IsDouble()) { | 3892 } else if (representation.IsDouble()) { |
| 3913 ASSERT(transition.is_null()); | 3893 ASSERT(transition.is_null()); |
| 3914 ASSERT(access.IsInobject()); | 3894 ASSERT(access.IsInobject()); |
| 3915 ASSERT(!hinstr->NeedsWriteBarrier()); | 3895 ASSERT(!hinstr->NeedsWriteBarrier()); |
| 3916 XMMRegister value = ToDoubleRegister(instr->value()); | 3896 XMMRegister value = ToDoubleRegister(instr->value()); |
| 3917 __ movsd(FieldOperand(object, offset), value); | 3897 __ movsd(FieldOperand(object, offset), value); |
| 3918 return; | 3898 return; |
| 3919 } | 3899 } |
| (...skipping 10 matching lines...) |
| 3930 HeapObject::kMapOffset, | 3910 HeapObject::kMapOffset, |
| 3931 kScratchRegister, | 3911 kScratchRegister, |
| 3932 temp, | 3912 temp, |
| 3933 kSaveFPRegs, | 3913 kSaveFPRegs, |
| 3934 OMIT_REMEMBERED_SET, | 3914 OMIT_REMEMBERED_SET, |
| 3935 OMIT_SMI_CHECK); | 3915 OMIT_SMI_CHECK); |
| 3936 } | 3916 } |
| 3937 } | 3917 } |
| 3938 | 3918 |
| 3939 // Do the store. | 3919 // Do the store. |
| 3920 SmiCheck check_needed = hinstr->value()->IsHeapObject() |
| 3921 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 3922 |
| 3940 Register write_register = object; | 3923 Register write_register = object; |
| 3941 if (!access.IsInobject()) { | 3924 if (!access.IsInobject()) { |
| 3942 write_register = ToRegister(instr->temp()); | 3925 write_register = ToRegister(instr->temp()); |
| 3943 __ movp(write_register, FieldOperand(object, JSObject::kPropertiesOffset)); | 3926 __ movp(write_register, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 3944 } | 3927 } |
| 3945 | 3928 |
| 3946 if (representation.IsSmi() && | 3929 if (representation.IsSmi() && |
| 3947 hinstr->value()->representation().IsInteger32()) { | 3930 hinstr->value()->representation().IsInteger32()) { |
| 3948 ASSERT(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY); | 3931 ASSERT(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY); |
| 3949 #ifdef DEBUG | |
| 3950 Register scratch = kScratchRegister; | |
| 3951 __ Load(scratch, FieldOperand(write_register, offset), representation); | |
| 3952 __ AssertSmi(scratch); | |
| 3953 #endif | |
| 3954 // Store int value directly to upper half of the smi. | 3932 // Store int value directly to upper half of the smi. |
| 3955 STATIC_ASSERT(kSmiTag == 0); | 3933 STATIC_ASSERT(kSmiTag == 0); |
| 3956 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); | 3934 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); |
| 3957 offset += kPointerSize / 2; | 3935 offset += kPointerSize / 2; |
| 3958 representation = Representation::Integer32(); | 3936 representation = Representation::Integer32(); |
| 3959 } | 3937 } |
| 3960 | 3938 |
| 3961 Operand operand = FieldOperand(write_register, offset); | 3939 Operand operand = FieldOperand(write_register, offset); |
| 3962 | 3940 |
| 3963 if (instr->value()->IsRegister()) { | 3941 if (instr->value()->IsRegister()) { |
| (...skipping 233 matching lines...) |
| 4197 __ movsxlq(key_reg, key_reg); | 4175 __ movsxlq(key_reg, key_reg); |
| 4198 } | 4176 } |
| 4199 } | 4177 } |
| 4200 | 4178 |
| 4201 int offset = FixedArray::kHeaderSize - kHeapObjectTag; | 4179 int offset = FixedArray::kHeaderSize - kHeapObjectTag; |
| 4202 Representation representation = hinstr->value()->representation(); | 4180 Representation representation = hinstr->value()->representation(); |
| 4203 | 4181 |
| 4204 if (representation.IsInteger32()) { | 4182 if (representation.IsInteger32()) { |
| 4205 ASSERT(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY); | 4183 ASSERT(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY); |
| 4206 ASSERT(hinstr->elements_kind() == FAST_SMI_ELEMENTS); | 4184 ASSERT(hinstr->elements_kind() == FAST_SMI_ELEMENTS); |
| 4207 #ifdef DEBUG | |
| 4208 Register scratch = kScratchRegister; | |
| 4209 __ Load(scratch, | |
| 4210 BuildFastArrayOperand(instr->elements(), | |
| 4211 key, | |
| 4212 FAST_ELEMENTS, | |
| 4213 offset, | |
| 4214 instr->additional_index()), | |
| 4215 Representation::Smi()); | |
| 4216 __ AssertSmi(scratch); | |
| 4217 #endif | |
| 4218 // Store int value directly to upper half of the smi. | 4185 // Store int value directly to upper half of the smi. |
| 4219 STATIC_ASSERT(kSmiTag == 0); | 4186 STATIC_ASSERT(kSmiTag == 0); |
| 4220 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); | 4187 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); |
| 4221 offset += kPointerSize / 2; | 4188 offset += kPointerSize / 2; |
| 4222 } | 4189 } |
| 4223 | 4190 |
| 4224 Operand operand = | 4191 Operand operand = |
| 4225 BuildFastArrayOperand(instr->elements(), | 4192 BuildFastArrayOperand(instr->elements(), |
| 4226 key, | 4193 key, |
| 4227 FAST_ELEMENTS, | 4194 FAST_ELEMENTS, |
| (...skipping 1350 matching lines...) |
| 5578 FixedArray::kHeaderSize - kPointerSize)); | 5545 FixedArray::kHeaderSize - kPointerSize)); |
| 5579 __ bind(&done); | 5546 __ bind(&done); |
| 5580 } | 5547 } |
| 5581 | 5548 |
| 5582 | 5549 |
| 5583 #undef __ | 5550 #undef __ |
| 5584 | 5551 |
| 5585 } } // namespace v8::internal | 5552 } } // namespace v8::internal |
| 5586 | 5553 |
| 5587 #endif // V8_TARGET_ARCH_X64 | 5554 #endif // V8_TARGET_ARCH_X64 |
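
The largest hunk above rewrites the power-of-two fast path in LCodeGen::DoDivI. As a reading aid, here is a minimal standalone sketch (not the V8 code; helper names are made up) of the two strategies: the bias-and-shift sequence in the left-hand column, and the branch-on-sign sequence the right-hand column uses when HInstruction::kAllUsesTruncatingToInt32 holds. It assumes arithmetic right shift of negative int32_t values, which matches the x64 target of this file.

```cpp
// Illustrative sketch, not V8 code: the two power-of-two signed-division
// strategies visible in the DoDivI hunk.
#include <cassert>
#include <cstdint>

// Left-hand (old) column: bias a negative dividend by (2^power - 1) so the
// arithmetic shift truncates toward zero instead of rounding toward
// negative infinity.
int32_t DivByPowerOf2Biased(int32_t dividend, int power, bool divisor_is_negative) {
  int32_t bias = (dividend >> 31) & ((1 << power) - 1);  // 0 if dividend >= 0
  int32_t result = (dividend + bias) >> power;
  return divisor_is_negative ? -result : result;
}

// Right-hand (new) column, kAllUsesTruncatingToInt32 path: branch on the
// sign, negate, shift, then restore the sign afterwards.
int32_t DivByPowerOf2Branching(int32_t dividend, int power, bool divisor_is_negative) {
  if (dividend >= 0) {
    int32_t result = dividend >> power;
    return divisor_is_negative ? -result : result;
  }
  int32_t result = (-dividend) >> power;  // sketch assumes dividend != INT32_MIN
  return divisor_is_negative ? result : -result;
}

int main() {
  for (int32_t d : {7, -7, 8, -8, 0, 123456, -123456}) {
    assert(DivByPowerOf2Biased(d, 3, false) == d / 8);
    assert(DivByPowerOf2Branching(d, 3, true) == d / -8);
  }
  return 0;
}
```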
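Several of the load/store hunks (DoLoadNamedField, DoLoadKeyed, DoStoreNamedField, DoStoreKeyed) keep the trick of accessing a smi's 32-bit payload directly by adding kPointerSize / 2 to the field offset; only the surrounding #ifdef DEBUG AssertSmi scaffolding differs between the two columns. A minimal sketch of the layout assumption, written for a little-endian x64 host (names are illustrative):

```cpp
// Illustrative sketch, not V8 code: on x64 a smi keeps its 32-bit payload
// in the upper half of the 64-bit word (kSmiTagSize + kSmiShiftSize == 32),
// so on a little-endian host the integer can be read with a plain 32-bit
// load at byte offset +4, which is what "offset += kPointerSize / 2" does.
#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  const int kSmiShift = 32;  // stands in for kSmiTagSize + kSmiShiftSize
  int32_t payload = -42;
  uint64_t smi =
      static_cast<uint64_t>(static_cast<int64_t>(payload)) << kSmiShift;

  int32_t loaded;
  std::memcpy(&loaded, reinterpret_cast<const char*>(&smi) + 4, sizeof(loaded));
  assert(loaded == payload);
  return 0;
}
```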
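DoMathClz32 appears only in the left-hand column of this diff. Its bsr-then-xor sequence is compact enough that a portable sketch (not the V8 code) makes the arithmetic explicit:

```cpp
// Illustrative sketch, not V8 code: bsr yields the index of the highest set
// bit, and for x in [0..31], 31 ^ x == 31 - x, so clz32(n) == bsr(n) ^ 31;
// a zero input is patched to 63 so that 63 ^ 31 == 32.
#include <cassert>
#include <cstdint>

uint32_t Clz32(uint32_t n) {
  uint32_t bit = 63;                   // mirrors "__ Set(result, 63)" for n == 0
  if (n != 0) {
    bit = 31;
    while (!(n & (1u << bit))) --bit;  // portable stand-in for bsrl
  }
  return bit ^ 31;
}

int main() {
  assert(Clz32(0) == 32);
  assert(Clz32(1) == 31);
  assert(Clz32(0x80000000u) == 0);
  assert(Clz32(0x00F00000u) == 8);
  return 0;
}
```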