OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 985 matching lines...) |
996 | 996 |
997 void LCodeGen::DeoptimizeIf(Condition cc, | 997 void LCodeGen::DeoptimizeIf(Condition cc, |
998 LEnvironment* environment) { | 998 LEnvironment* environment) { |
999 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 999 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
1000 ? Deoptimizer::LAZY | 1000 ? Deoptimizer::LAZY |
1001 : Deoptimizer::EAGER; | 1001 : Deoptimizer::EAGER; |
1002 DeoptimizeIf(cc, environment, bailout_type); | 1002 DeoptimizeIf(cc, environment, bailout_type); |
1003 } | 1003 } |
1004 | 1004 |
1005 | 1005 |
1006 void LCodeGen::SoftDeoptimize(LEnvironment* environment) { | |
1007 ASSERT(!info()->IsStub()); | |
1008 DeoptimizeIf(no_condition, environment, Deoptimizer::SOFT); | |
1009 } | |
1010 | |
1011 | |
1012 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { | 1006 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { |
1013 ZoneList<Handle<Map> > maps(1, zone()); | 1007 ZoneList<Handle<Map> > maps(1, zone()); |
1014 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); | 1008 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
1015 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { | 1009 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { |
1016 RelocInfo::Mode mode = it.rinfo()->rmode(); | 1010 RelocInfo::Mode mode = it.rinfo()->rmode(); |
1017 if (mode == RelocInfo::EMBEDDED_OBJECT && | 1011 if (mode == RelocInfo::EMBEDDED_OBJECT && |
1018 it.rinfo()->target_object()->IsMap()) { | 1012 it.rinfo()->target_object()->IsMap()) { |
1019 Handle<Map> map(Map::cast(it.rinfo()->target_object())); | 1013 Handle<Map> map(Map::cast(it.rinfo()->target_object())); |
1020 if (map->CanTransition()) { | 1014 if (map->CanTransition()) { |
1021 maps.Add(map, zone()); | 1015 maps.Add(map, zone()); |
(...skipping 5294 matching lines...) |
6316 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 6310 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
6317 EnsureSpaceForLazyDeopt(); | 6311 EnsureSpaceForLazyDeopt(); |
6318 ASSERT(instr->HasEnvironment()); | 6312 ASSERT(instr->HasEnvironment()); |
6319 LEnvironment* env = instr->environment(); | 6313 LEnvironment* env = instr->environment(); |
6320 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 6314 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
6321 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 6315 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
6322 } | 6316 } |
6323 | 6317 |
6324 | 6318 |
6325 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 6319 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
6326 if (instr->hydrogen_value()->IsSoftDeoptimize()) { | 6320 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
6327 SoftDeoptimize(instr->environment()); | 6321 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
6328 } else { | 6322 // needed return address), even though the implementation of LAZY and EAGER is |
6329 DeoptimizeIf(no_condition, instr->environment()); | 6323 // now identical. When LAZY is eventually completely folded into EAGER, remove |
| 6324 // the special case below. |
| 6325 if (info()->IsStub() && type == Deoptimizer::EAGER) { |
| 6326 type = Deoptimizer::LAZY; |
6330 } | 6327 } |
| 6328 DeoptimizeIf(no_condition, instr->environment(), type); |
6331 } | 6329 } |
6332 | 6330 |
6333 | 6331 |
6334 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 6332 void LCodeGen::DoDummyUse(LDummyUse* instr) { |
6335 // Nothing to see here, move on! | 6333 // Nothing to see here, move on! |
6336 } | 6334 } |
6337 | 6335 |
6338 | 6336 |
6339 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 6337 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
6340 PushSafepointRegistersScope scope(this); | 6338 PushSafepointRegistersScope scope(this); |
(...skipping 160 matching lines...) |
6501 FixedArray::kHeaderSize - kPointerSize)); | 6499 FixedArray::kHeaderSize - kPointerSize)); |
6502 __ bind(&done); | 6500 __ bind(&done); |
6503 } | 6501 } |
6504 | 6502 |
6505 | 6503 |
6506 #undef __ | 6504 #undef __ |
6507 | 6505 |
6508 } } // namespace v8::internal | 6506 } } // namespace v8::internal |
6509 | 6507 |
6510 #endif // V8_TARGET_ARCH_IA32 | 6508 #endif // V8_TARGET_ARCH_IA32 |