| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "v8.h" | 5 #include "v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
| 8 | 8 |
| 9 #include "x64/lithium-codegen-x64.h" | 9 #include "x64/lithium-codegen-x64.h" |
| 10 #include "code-stubs.h" | 10 #include "code-stubs.h" |
| (...skipping 51 matching lines...) |
| 62 | 62 |
| 63 void LCodeGen::FinishCode(Handle<Code> code) { | 63 void LCodeGen::FinishCode(Handle<Code> code) { |
| 64 ASSERT(is_done()); | 64 ASSERT(is_done()); |
| 65 code->set_stack_slots(GetStackSlotCount()); | 65 code->set_stack_slots(GetStackSlotCount()); |
| 66 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 66 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
| 67 if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code); | 67 if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code); |
| 68 PopulateDeoptimizationData(code); | 68 PopulateDeoptimizationData(code); |
| 69 } | 69 } |
| 70 | 70 |
| 71 | 71 |
| 72 void LChunkBuilder::Abort(BailoutReason reason) { | |
| 73 info()->set_bailout_reason(reason); | |
| 74 status_ = ABORTED; | |
| 75 } | |
| 76 | |
| 77 | |
| 78 #ifdef _MSC_VER | 72 #ifdef _MSC_VER |
| 79 void LCodeGen::MakeSureStackPagesMapped(int offset) { | 73 void LCodeGen::MakeSureStackPagesMapped(int offset) { |
| 80 const int kPageSize = 4 * KB; | 74 const int kPageSize = 4 * KB; |
| 81 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { | 75 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { |
| 82 __ movp(Operand(rsp, offset), rax); | 76 __ movp(Operand(rsp, offset), rax); |
| 83 } | 77 } |
| 84 } | 78 } |
| 85 #endif | 79 #endif |
| 86 | 80 |
| 87 | 81 |
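A note on the `MakeSureStackPagesMapped` hunk above: on Windows the OS commits stack memory one guard page at a time, so generated code whose frame is larger than 4 KB must touch its pages in page-sized steps, starting next to already-committed memory, before storing at arbitrary offsets. That is all the `movp` loop emits. A minimal standalone C++ analogue of the same probing idea, assuming 4 KB pages (the function and buffer names here are invented for illustration, not V8 code):

```cpp
#include <cstdint>

// Sketch of the stack-probing technique behind MakeSureStackPagesMapped.
// Touching one byte per page, stepping one page at a time away from memory
// that is already committed, keeps every access within guard-page reach.
void ProbeStackPages(volatile std::uint8_t* base, int frame_size) {
  const int kPageSize = 4 * 1024;
  // Mirror of the generated loop: start one page below the committed end of
  // the frame and walk downward, one page per iteration.
  for (int offset = frame_size - kPageSize; offset > 0; offset -= kPageSize) {
    base[offset] = 0;  // Analogue of: __ movp(Operand(rsp, offset), rax)
  }
}

int main() {
  // A stand-in "frame" several pages large; probe before arbitrary stores.
  static volatile std::uint8_t frame[16 * 4096] = {};
  ProbeStackPages(frame, static_cast<int>(sizeof(frame)));
  return 0;
}
```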
| (...skipping 3901 matching lines...) |
| 3989 LConstantOperand* object = LConstantOperand::cast(instr->object()); | 3983 LConstantOperand* object = LConstantOperand::cast(instr->object()); |
| 3990 __ store_rax(ToExternalReference(object)); | 3984 __ store_rax(ToExternalReference(object)); |
| 3991 } else { | 3985 } else { |
| 3992 Register object = ToRegister(instr->object()); | 3986 Register object = ToRegister(instr->object()); |
| 3993 __ Store(MemOperand(object, offset), value, representation); | 3987 __ Store(MemOperand(object, offset), value, representation); |
| 3994 } | 3988 } |
| 3995 return; | 3989 return; |
| 3996 } | 3990 } |
| 3997 | 3991 |
| 3998 Register object = ToRegister(instr->object()); | 3992 Register object = ToRegister(instr->object()); |
| 3999 Handle<Map> transition = instr->transition(); | |
| 4000 SmiCheck check_needed = hinstr->value()->IsHeapObject() | 3993 SmiCheck check_needed = hinstr->value()->IsHeapObject() |
| 4001 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 3994 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 4002 | 3995 |
| 4003 ASSERT(!(representation.IsSmi() && | 3996 ASSERT(!(representation.IsSmi() && |
| 4004 instr->value()->IsConstantOperand() && | 3997 instr->value()->IsConstantOperand() && |
| 4005 !IsInteger32Constant(LConstantOperand::cast(instr->value())))); | 3998 !IsInteger32Constant(LConstantOperand::cast(instr->value())))); |
| 4006 if (representation.IsHeapObject()) { | 3999 if (representation.IsHeapObject()) { |
| 4007 if (instr->value()->IsConstantOperand()) { | 4000 if (instr->value()->IsConstantOperand()) { |
| 4008 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); | 4001 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
| 4009 if (chunk_->LookupConstant(operand_value)->HasSmiValue()) { | 4002 if (chunk_->LookupConstant(operand_value)->HasSmiValue()) { |
| 4010 DeoptimizeIf(no_condition, instr->environment()); | 4003 DeoptimizeIf(no_condition, instr->environment()); |
| 4011 } | 4004 } |
| 4012 } else { | 4005 } else { |
| 4013 if (!hinstr->value()->type().IsHeapObject()) { | 4006 if (!hinstr->value()->type().IsHeapObject()) { |
| 4014 Register value = ToRegister(instr->value()); | 4007 Register value = ToRegister(instr->value()); |
| 4015 Condition cc = masm()->CheckSmi(value); | 4008 Condition cc = masm()->CheckSmi(value); |
| 4016 DeoptimizeIf(cc, instr->environment()); | 4009 DeoptimizeIf(cc, instr->environment()); |
| 4017 | 4010 |
| 4018 // We know now that value is not a smi, so we can omit the check below. | 4011 // We know now that value is not a smi, so we can omit the check below. |
| 4019 check_needed = OMIT_SMI_CHECK; | 4012 check_needed = OMIT_SMI_CHECK; |
| 4020 } | 4013 } |
| 4021 } | 4014 } |
| 4022 } else if (representation.IsDouble()) { | 4015 } else if (representation.IsDouble()) { |
| 4023 ASSERT(transition.is_null()); | |
| 4024 ASSERT(access.IsInobject()); | 4016 ASSERT(access.IsInobject()); |
| 4017 ASSERT(!hinstr->has_transition()); |
| 4025 ASSERT(!hinstr->NeedsWriteBarrier()); | 4018 ASSERT(!hinstr->NeedsWriteBarrier()); |
| 4026 XMMRegister value = ToDoubleRegister(instr->value()); | 4019 XMMRegister value = ToDoubleRegister(instr->value()); |
| 4027 __ movsd(FieldOperand(object, offset), value); | 4020 __ movsd(FieldOperand(object, offset), value); |
| 4028 return; | 4021 return; |
| 4029 } | 4022 } |
| 4030 | 4023 |
| 4031 if (!transition.is_null()) { | 4024 if (hinstr->has_transition()) { |
| 4025 Handle<Map> transition = hinstr->transition_map(); |
| 4026 AddDeprecationDependency(transition); |
| 4032 if (!hinstr->NeedsWriteBarrierForMap()) { | 4027 if (!hinstr->NeedsWriteBarrierForMap()) { |
| 4033 __ Move(FieldOperand(object, HeapObject::kMapOffset), transition); | 4028 __ Move(FieldOperand(object, HeapObject::kMapOffset), transition); |
| 4034 } else { | 4029 } else { |
| 4035 Register temp = ToRegister(instr->temp()); | 4030 Register temp = ToRegister(instr->temp()); |
| 4036 __ Move(kScratchRegister, transition); | 4031 __ Move(kScratchRegister, transition); |
| 4037 __ movp(FieldOperand(object, HeapObject::kMapOffset), kScratchRegister); | 4032 __ movp(FieldOperand(object, HeapObject::kMapOffset), kScratchRegister); |
| 4038 // Update the write barrier for the map field. | 4033 // Update the write barrier for the map field. |
| 4039 __ RecordWriteField(object, | 4034 __ RecordWriteField(object, |
| 4040 HeapObject::kMapOffset, | 4035 HeapObject::kMapOffset, |
| 4041 kScratchRegister, | 4036 kScratchRegister, |
| (...skipping 1661 matching lines...) |
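A note on the transition hunk above: the change drops the lithium-level `instr->transition()` handle in favor of querying the hydrogen instruction directly (`has_transition()` / `transition_map()`), and the new `AddDeprecationDependency(transition)` call records a dependency so the optimized code is discarded if that map is later deprecated. The store itself follows the usual shape: write the new map pointer into the object's map slot, then record a write barrier unless the compiler proved it unnecessary (`NeedsWriteBarrierForMap()`). A toy C++ sketch of that store-plus-barrier shape; every type and name below is invented for illustration, none of it is V8's actual implementation:

```cpp
#include <unordered_set>

struct Map;  // Stand-in for a hidden class / map.

struct HeapObject {
  Map* map;  // Analogue of the field at HeapObject::kMapOffset.
};

// Toy remembered set standing in for RecordWriteField's bookkeeping: a moving
// collector later walks these slots to find and update recorded pointers.
std::unordered_set<Map**> remembered_slots;

void StoreMapWithBarrier(HeapObject* object, Map* transition,
                         bool needs_barrier) {
  // Analogue of: __ movp(FieldOperand(object, HeapObject::kMapOffset), ...)
  object->map = transition;
  if (needs_barrier) {
    // Analogue of: __ RecordWriteField(object, HeapObject::kMapOffset, ...)
    remembered_slots.insert(&object->map);
  }
}
```

When the map is allocated somewhere the collector never moves it, the barrier can be skipped entirely, which is the fast path taken by the `__ Move(FieldOperand(...), transition)` branch above.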
| 5703 __ bind(deferred->exit()); | 5698 __ bind(deferred->exit()); |
| 5704 __ bind(&done); | 5699 __ bind(&done); |
| 5705 } | 5700 } |
| 5706 | 5701 |
| 5707 | 5702 |
| 5708 #undef __ | 5703 #undef __ |
| 5709 | 5704 |
| 5710 } } // namespace v8::internal | 5705 } } // namespace v8::internal |
| 5711 | 5706 |
| 5712 #endif // V8_TARGET_ARCH_X64 | 5707 #endif // V8_TARGET_ARCH_X64 |