| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "v8.h" | 5 #include "v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_IA32 | 7 #if V8_TARGET_ARCH_IA32 |
| 8 | 8 |
| 9 #include "ia32/lithium-codegen-ia32.h" | 9 #include "ia32/lithium-codegen-ia32.h" |
| 10 #include "ic.h" | 10 #include "ic.h" |
| (...skipping 71 matching lines...) | |
| 82 code->set_stack_slots(GetStackSlotCount()); | 82 code->set_stack_slots(GetStackSlotCount()); |
| 83 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 83 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
| 84 if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code); | 84 if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code); |
| 85 PopulateDeoptimizationData(code); | 85 PopulateDeoptimizationData(code); |
| 86 if (!info()->IsStub()) { | 86 if (!info()->IsStub()) { |
| 87 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); | 87 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); |
| 88 } | 88 } |
| 89 } | 89 } |
| 90 | 90 |
| 91 | 91 |
| 92 void LCodeGen::Abort(BailoutReason reason) { | |
| 93 info()->set_bailout_reason(reason); | |
| 94 status_ = ABORTED; | |
| 95 } | |
| 96 | |
| 97 | |
| 98 #ifdef _MSC_VER | 92 #ifdef _MSC_VER |
| 99 void LCodeGen::MakeSureStackPagesMapped(int offset) { | 93 void LCodeGen::MakeSureStackPagesMapped(int offset) { |
| 100 const int kPageSize = 4 * KB; | 94 const int kPageSize = 4 * KB; |
| 101 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { | 95 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { |
| 102 __ mov(Operand(esp, offset), eax); | 96 __ mov(Operand(esp, offset), eax); |
| 103 } | 97 } |
| 104 } | 98 } |
| 105 #endif | 99 #endif |
| 106 | 100 |
| 107 | 101 |
| (...skipping 4290 matching lines...) | |
| 4398 __ movsd(FieldOperand(object, offset), value); | 4392 __ movsd(FieldOperand(object, offset), value); |
| 4399 } else { | 4393 } else { |
| 4400 X87Register value = ToX87Register(instr->value()); | 4394 X87Register value = ToX87Register(instr->value()); |
| 4401 X87Mov(FieldOperand(object, offset), value); | 4395 X87Mov(FieldOperand(object, offset), value); |
| 4402 } | 4396 } |
| 4403 return; | 4397 return; |
| 4404 } | 4398 } |
| 4405 | 4399 |
| 4406 if (instr->hydrogen()->has_transition()) { | 4400 if (instr->hydrogen()->has_transition()) { |
| 4407 Handle<Map> transition = instr->hydrogen()->transition_map(); | 4401 Handle<Map> transition = instr->hydrogen()->transition_map(); |
| | 4402 AddDeprecationDependency(transition); |
| 4408 if (!instr->hydrogen()->NeedsWriteBarrierForMap()) { | 4403 if (!instr->hydrogen()->NeedsWriteBarrierForMap()) { |
| 4409 __ mov(FieldOperand(object, HeapObject::kMapOffset), transition); | 4404 __ mov(FieldOperand(object, HeapObject::kMapOffset), transition); |
| 4410 } else { | 4405 } else { |
| 4411 Register temp = ToRegister(instr->temp()); | 4406 Register temp = ToRegister(instr->temp()); |
| 4412 Register temp_map = ToRegister(instr->temp_map()); | 4407 Register temp_map = ToRegister(instr->temp_map()); |
| 4413 __ mov(temp_map, transition); | 4408 __ mov(temp_map, transition); |
| 4414 __ mov(FieldOperand(object, HeapObject::kMapOffset), temp_map); | 4409 __ mov(FieldOperand(object, HeapObject::kMapOffset), temp_map); |
| 4415 // Update the write barrier for the map field. | 4410 // Update the write barrier for the map field. |
| 4416 __ RecordWriteField(object, | 4411 __ RecordWriteField(object, |
| 4417 HeapObject::kMapOffset, | 4412 HeapObject::kMapOffset, |
| (...skipping 2005 matching lines...) | |
| 6423 __ bind(deferred->exit()); | 6418 __ bind(deferred->exit()); |
| 6424 __ bind(&done); | 6419 __ bind(&done); |
| 6425 } | 6420 } |
| 6426 | 6421 |
| 6427 | 6422 |
| 6428 #undef __ | 6423 #undef __ |
| 6429 | 6424 |
| 6430 } } // namespace v8::internal | 6425 } } // namespace v8::internal |
| 6431 | 6426 |
| 6432 #endif // V8_TARGET_ARCH_IA32 | 6427 #endif // V8_TARGET_ARCH_IA32 |