| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 929 matching lines...) |
| 940 | 940 |
| 941 void LCodeGen::DeoptimizeIf(Condition cc, | 941 void LCodeGen::DeoptimizeIf(Condition cc, |
| 942 LEnvironment* environment) { | 942 LEnvironment* environment) { |
| 943 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 943 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
| 944 ? Deoptimizer::LAZY | 944 ? Deoptimizer::LAZY |
| 945 : Deoptimizer::EAGER; | 945 : Deoptimizer::EAGER; |
| 946 DeoptimizeIf(cc, environment, bailout_type); | 946 DeoptimizeIf(cc, environment, bailout_type); |
| 947 } | 947 } |
| 948 | 948 |
| 949 | 949 |
| 950 void LCodeGen::SoftDeoptimize(LEnvironment* environment) { | |
| 951 ASSERT(!info()->IsStub()); | |
| 952 DeoptimizeIf(no_condition, environment, Deoptimizer::SOFT); | |
| 953 } | |
| 954 | |
| 955 | |
| 956 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { | 950 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { |
| 957 ZoneList<Handle<Map> > maps(1, zone()); | 951 ZoneList<Handle<Map> > maps(1, zone()); |
| 958 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); | 952 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
| 959 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { | 953 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { |
| 960 RelocInfo::Mode mode = it.rinfo()->rmode(); | 954 RelocInfo::Mode mode = it.rinfo()->rmode(); |
| 961 if (mode == RelocInfo::EMBEDDED_OBJECT && | 955 if (mode == RelocInfo::EMBEDDED_OBJECT && |
| 962 it.rinfo()->target_object()->IsMap()) { | 956 it.rinfo()->target_object()->IsMap()) { |
| 963 Handle<Map> map(Map::cast(it.rinfo()->target_object())); | 957 Handle<Map> map(Map::cast(it.rinfo()->target_object())); |
| 964 if (map->CanTransition()) { | 958 if (map->CanTransition()) { |
| 965 maps.Add(map, zone()); | 959 maps.Add(map, zone()); |
| (...skipping 5322 matching lines...) |
| 6288 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 6282 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 6289 EnsureSpaceForLazyDeopt(); | 6283 EnsureSpaceForLazyDeopt(); |
| 6290 ASSERT(instr->HasEnvironment()); | 6284 ASSERT(instr->HasEnvironment()); |
| 6291 LEnvironment* env = instr->environment(); | 6285 LEnvironment* env = instr->environment(); |
| 6292 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 6286 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 6293 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 6287 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 6294 } | 6288 } |
| 6295 | 6289 |
| 6296 | 6290 |
| 6297 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 6291 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 6298 if (instr->hydrogen_value()->IsSoftDeoptimize()) { | 6292 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
| 6299 SoftDeoptimize(instr->environment()); | 6293 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
| 6300 } else { | 6294 // needed return address), even though the implementation of LAZY and EAGER is |
| 6301 DeoptimizeIf(no_condition, instr->environment()); | 6295 // now identical. When LAZY is eventually completely folded into EAGER, remove |
| 6296 // the special case below. |
| 6297 if (info()->IsStub() && type == Deoptimizer::EAGER) { |
| 6298 type = Deoptimizer::LAZY; |
| 6302 } | 6299 } |
| 6300 DeoptimizeIf(no_condition, instr->environment(), type); |
| 6303 } | 6301 } |
| 6304 | 6302 |
| 6305 | 6303 |
| 6306 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 6304 void LCodeGen::DoDummyUse(LDummyUse* instr) { |
| 6307 // Nothing to see here, move on! | 6305 // Nothing to see here, move on! |
| 6308 } | 6306 } |
| 6309 | 6307 |
| 6310 | 6308 |
| 6311 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { | 6309 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { |
| 6312 LOperand* obj = instr->object(); | 6310 LOperand* obj = instr->object(); |
| (...skipping 192 matching lines...) |
| 6505 FixedArray::kHeaderSize - kPointerSize)); | 6503 FixedArray::kHeaderSize - kPointerSize)); |
| 6506 __ bind(&done); | 6504 __ bind(&done); |
| 6507 } | 6505 } |
| 6508 | 6506 |
| 6509 | 6507 |
| 6510 #undef __ | 6508 #undef __ |
| 6511 | 6509 |
| 6512 } } // namespace v8::internal | 6510 } } // namespace v8::internal |
| 6513 | 6511 |
| 6514 #endif // V8_TARGET_ARCH_IA32 | 6512 #endif // V8_TARGET_ARCH_IA32 |
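
Reviewer note on the DoDeoptimize hunk above: the patch removes the dedicated SoftDeoptimize() helper and instead reads the bailout type straight from the hydrogen instruction, folding an EAGER request into LAZY when compiling a stub. Below is a minimal standalone sketch of that selection logic, assuming plain C++ outside of V8; SelectBailoutType and the BailoutType enum are invented stand-ins for illustration, only the decision mirrors the new DoDeoptimize/DeoptimizeIf paths in the diff.

```cpp
// Standalone sketch (not V8 source) modeling the bailout-type selection that
// this patch moves into DoDeoptimize.
#include <cassert>
#include <iostream>

enum class BailoutType { EAGER, LAZY, SOFT };  // stands in for Deoptimizer::BailoutType

// Hypothetical stand-in for instr->hydrogen()->type(): the hydrogen instruction
// now carries the requested bailout type directly, so the old IsSoftDeoptimize()
// check and the separate SoftDeoptimize() helper are no longer needed.
BailoutType SelectBailoutType(BailoutType requested, bool is_stub) {
  // Stubs expect all deopts to be lazy (they need the return address), so an
  // EAGER request is folded into LAZY, as in the patched DoDeoptimize.
  if (is_stub && requested == BailoutType::EAGER) {
    return BailoutType::LAZY;
  }
  return requested;
}

int main() {
  // Ordinary optimized code keeps the requested type unchanged.
  assert(SelectBailoutType(BailoutType::SOFT, /*is_stub=*/false) == BailoutType::SOFT);
  assert(SelectBailoutType(BailoutType::EAGER, /*is_stub=*/false) == BailoutType::EAGER);
  // Stub code turns an eager request into a lazy bailout.
  assert(SelectBailoutType(BailoutType::EAGER, /*is_stub=*/true) == BailoutType::LAZY);
  std::cout << "bailout-type selection matches the patched DoDeoptimize\n";
  return 0;
}
```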