| OLD | NEW |
| 1 | 1 |
| 2 // Copyright 2011 the V8 project authors. All rights reserved. | 2 // Copyright 2011 the V8 project authors. All rights reserved. |
| 3 // Redistribution and use in source and binary forms, with or without | 3 // Redistribution and use in source and binary forms, with or without |
| 4 // modification, are permitted provided that the following conditions are | 4 // modification, are permitted provided that the following conditions are |
| 5 // met: | 5 // met: |
| 6 // | 6 // |
| 7 // * Redistributions of source code must retain the above copyright | 7 // * Redistributions of source code must retain the above copyright |
| 8 // notice, this list of conditions and the following disclaimer. | 8 // notice, this list of conditions and the following disclaimer. |
| 9 // * Redistributions in binary form must reproduce the above | 9 // * Redistributions in binary form must reproduce the above |
| 10 // copyright notice, this list of conditions and the following | 10 // copyright notice, this list of conditions and the following |
| (...skipping 131 matching lines...) |
| 142 if (Assembler::IsAddImmediate( | 142 if (Assembler::IsAddImmediate( |
| 143 Assembler::instr_at(pc_after - 6 * kInstrSize))) { | 143 Assembler::instr_at(pc_after - 6 * kInstrSize))) { |
| 144 Code* osr_builtin = | 144 Code* osr_builtin = |
| 145 isolate->builtins()->builtin(Builtins::kOnStackReplacement); | 145 isolate->builtins()->builtin(Builtins::kOnStackReplacement); |
| 146 ASSERT(reinterpret_cast<uint32_t>( | 146 ASSERT(reinterpret_cast<uint32_t>( |
| 147 Assembler::target_address_at(pc_after - 4 * kInstrSize)) == | 147 Assembler::target_address_at(pc_after - 4 * kInstrSize)) == |
| 148 reinterpret_cast<uint32_t>(osr_builtin->entry())); | 148 reinterpret_cast<uint32_t>(osr_builtin->entry())); |
| 149 return PATCHED_FOR_OSR; | 149 return PATCHED_FOR_OSR; |
| 150 } else { | 150 } else { |
| 151 // Get the interrupt stub code object to match against from cache. | 151 // Get the interrupt stub code object to match against from cache. |
| 152 Code* interrupt_code = NULL; | 152 Code* interrupt_builtin = |
| 153 InterruptStub stub; | 153 isolate->builtins()->builtin(Builtins::kInterruptCheck); |
| 154 if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE(); | |
| 155 ASSERT(reinterpret_cast<uint32_t>( | 154 ASSERT(reinterpret_cast<uint32_t>( |
| 156 Assembler::target_address_at(pc_after - 4 * kInstrSize)) == | 155 Assembler::target_address_at(pc_after - 4 * kInstrSize)) == |
| 157 reinterpret_cast<uint32_t>(interrupt_code->entry())); | 156 reinterpret_cast<uint32_t>(interrupt_builtin->entry())); |
| 158 return NOT_PATCHED; | 157 return NOT_PATCHED; |
| 159 } | 158 } |
| 160 } | 159 } |
| 161 #endif // DEBUG | 160 #endif // DEBUG |
| 162 | 161 |
| 163 | 162 |
| 164 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { | 163 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { |
| 165 ByteArray* translations = data->TranslationByteArray(); | 164 ByteArray* translations = data->TranslationByteArray(); |
| 166 int length = data->DeoptCount(); | 165 int length = data->DeoptCount(); |
| 167 for (int i = 0; i < length; i++) { | 166 for (int i = 0; i < length; i++) { |
| (...skipping 446 matching lines...) |
| 614 | 613 |
| 615 void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { | 614 void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { |
| 616 SetFrameSlot(offset, value); | 615 SetFrameSlot(offset, value); |
| 617 } | 616 } |
| 618 | 617 |
| 619 | 618 |
| 620 #undef __ | 619 #undef __ |
| 621 | 620 |
| 622 | 621 |
| 623 } } // namespace v8::internal | 622 } } // namespace v8::internal |
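
For context, the functional change in this hunk is that the DEBUG-only back-edge check now compares the call target embedded at pc_after - 4 * kInstrSize against the Builtins::kInterruptCheck builtin entry, instead of looking the InterruptStub up in the code cache. Below is a minimal, self-contained sketch (not V8 code) of the idea behind that check; every name in it (PatchState, kOsrEntry, kInterruptEntry, TargetAt, GetPatchState) is a hypothetical stand-in for the internals used in the patch above.

// Sketch only: models the DEBUG check's control flow, not V8's actual API.
#include <cassert>
#include <cstdint>
#include <iostream>

enum class PatchState { kNotPatched, kPatchedForOsr };

// Stand-ins for the two builtin entry points the real code compares against
// (OnStackReplacement vs. InterruptCheck).
static const uintptr_t kOsrEntry = 0x1000;
static const uintptr_t kInterruptEntry = 0x2000;

// Stand-in for Assembler::target_address_at(): reads the call target that
// the code generator embedded at a fixed offset before the return address.
static uintptr_t TargetAt(const uintptr_t* site) { return *site; }

// Mirrors the check above: the embedded target must be one of the two known
// entries, and which one it is determines whether the site was patched.
static PatchState GetPatchState(const uintptr_t* site) {
  uintptr_t target = TargetAt(site);
  if (target == kOsrEntry) return PatchState::kPatchedForOsr;
  assert(target == kInterruptEntry);
  return PatchState::kNotPatched;
}

int main() {
  uintptr_t unpatched_site = kInterruptEntry;
  uintptr_t patched_site = kOsrEntry;
  // Both comparisons print 1 (true).
  std::cout << (GetPatchState(&unpatched_site) == PatchState::kNotPatched) << "\n";
  std::cout << (GetPatchState(&patched_site) == PatchState::kPatchedForOsr) << "\n";
  return 0;
}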