OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5662 matching lines...)
5673 | 5673 |
5674 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5674 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
5675 Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 5675 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
5676 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 5676 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
5677 // needed return address), even though the implementation of LAZY and EAGER is | 5677 // needed return address), even though the implementation of LAZY and EAGER is |
5678 // now identical. When LAZY is eventually completely folded into EAGER, remove | 5678 // now identical. When LAZY is eventually completely folded into EAGER, remove |
5679 // the special case below. | 5679 // the special case below. |
5680 if (info()->IsStub() && type == Deoptimizer::EAGER) { | 5680 if (info()->IsStub() && type == Deoptimizer::EAGER) { |
5681 type = Deoptimizer::LAZY; | 5681 type = Deoptimizer::LAZY; |
5682 } | 5682 } |
| 5683 |
| 5684 Comment(";;; deoptimize: %s", instr->hydrogen()->reason()); |
5683 DeoptimizeIf(al, instr->environment(), type); | 5685 DeoptimizeIf(al, instr->environment(), type); |
5684 } | 5686 } |
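[Note] The only functional change in this hunk is the new Comment(";;; deoptimize: %s", instr->hydrogen()->reason()) call, which tags the unconditional deopt with its reason string in the generated code comments. Below is a minimal, self-contained sketch of how a printf-style comment helper of this shape can work; FakeAssembler, code_comments_enabled, and this standalone Comment() are illustration-only stand-ins and not the actual V8 LCodeGen/MacroAssembler code.

// Illustrative sketch only: format a message and attach it to the emitted
// code stream, in the spirit of an assembler-level RecordComment hook.
#include <cstdarg>
#include <cstdio>
#include <string>
#include <vector>

class FakeAssembler {
 public:
  // Stores the comment so a disassembler could later print it inline.
  void RecordComment(const std::string& text) { comments_.push_back(text); }
  const std::vector<std::string>& comments() const { return comments_; }
 private:
  std::vector<std::string> comments_;
};

static bool code_comments_enabled = true;  // stand-in for a --code-comments flag

// Formats the message and hands it to the assembler, mirroring the call shape
// Comment(";;; deoptimize: %s", reason) used in the patch above.
void Comment(FakeAssembler* masm, const char* format, ...) {
  if (!code_comments_enabled) return;
  char buffer[256];
  va_list args;
  va_start(args, format);
  vsnprintf(buffer, sizeof(buffer), format, args);
  va_end(args);
  masm->RecordComment(buffer);
}

int main() {
  FakeAssembler masm;
  Comment(&masm, ";;; deoptimize: %s", "unexpected object type");
  std::printf("%s\n", masm.comments()[0].c_str());  // prints the annotation
  return 0;
}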
5685 | 5687 |
5686 | 5688 |
5687 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 5689 void LCodeGen::DoDummyUse(LDummyUse* instr) { |
5688 // Nothing to see here, move on! | 5690 // Nothing to see here, move on! |
5689 } | 5691 } |
5690 | 5692 |
5691 | 5693 |
5692 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 5694 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
(...skipping 163 matching lines...)
5856 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5858 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); |
5857 __ ldr(result, FieldMemOperand(scratch, | 5859 __ ldr(result, FieldMemOperand(scratch, |
5858 FixedArray::kHeaderSize - kPointerSize)); | 5860 FixedArray::kHeaderSize - kPointerSize)); |
5859 __ bind(&done); | 5861 __ bind(&done); |
5860 } | 5862 } |
5861 | 5863 |
5862 | 5864 |
5863 #undef __ | 5865 #undef __ |
5864 | 5866 |
5865 } } // namespace v8::internal | 5867 } } // namespace v8::internal |