OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 75 matching lines...) | |
86 } | 86 } |
87 #endif | 87 #endif |
88 last_pc_offset = pc_offset; | 88 last_pc_offset = pc_offset; |
89 if (deoptimization_index != Safepoint::kNoDeoptimizationIndex) { | 89 if (deoptimization_index != Safepoint::kNoDeoptimizationIndex) { |
90 last_pc_offset += gap_code_size; | 90 last_pc_offset += gap_code_size; |
91 CodePatcher patcher(code->instruction_start() + last_pc_offset, | 91 CodePatcher patcher(code->instruction_start() + last_pc_offset, |
92 call_size_in_words); | 92 call_size_in_words); |
93 Address deoptimization_entry = Deoptimizer::GetDeoptimizationEntry( | 93 Address deoptimization_entry = Deoptimizer::GetDeoptimizationEntry( |
94 deoptimization_index, Deoptimizer::LAZY); | 94 deoptimization_index, Deoptimizer::LAZY); |
95 patcher.masm()->Call(deoptimization_entry, RelocInfo::NONE); | 95 patcher.masm()->Call(deoptimization_entry, RelocInfo::NONE); |
96 #ifdef DEBUG | |
Kevin Millikin (Chromium) 2011/08/05 15:10:16
There must be a better fix than this. The patcher …
Michael Starzinger 2011/08/08 09:31:25
I think making patch_size() giving the correct ans…
| |
97 while (patcher.masm()->pc_offset() < patch_size()) { | |
98 patcher.masm()->bkpt(0); | |
99 } | |
100 ASSERT(patcher.masm()->pc_offset() == patch_size()); | |
101 #endif | |
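
The review thread above concerns the DEBUG-only block that the new side drops: the old loop padded the patched region with `bkpt` instructions until `pc_offset()` reached `patch_size()`, and the reviewers suggest instead making `patch_size()` report exactly the size of the emitted call. A minimal sketch of that idea follows; it assumes the V8-internal `Assembler::kInstrSize` and `CodePatcher` seen in this file, and uses a hypothetical `kCallSizeInWords` constant (the real constant name and value are not shown in this diff), so it is an illustration of the discussion, not the actual patch.

```cpp
// Sketch only, under the assumptions stated above: if patch_size() is
// defined as exactly the number of bytes occupied by the patched call
// sequence, the bkpt padding loop removed above becomes unnecessary.
static const int kCallSizeInWords = 3;  // hypothetical value, for illustration

static int patch_size() {
  // One word per ARM instruction; Assembler::kInstrSize is 4 bytes on ARM.
  return kCallSizeInWords * Assembler::kInstrSize;
}

// After emitting the call through the CodePatcher, a single check such as
//   ASSERT(patcher.masm()->pc_offset() == patch_size());
// would then hold without any padding instructions.
```

With `patch_size()` exact, `last_pc_offset += patch_size()` below also advances past precisely the patched bytes, which is what the unchanged line 96/102 relies on.
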
96 last_pc_offset += patch_size(); | 102 last_pc_offset += patch_size(); |
97 } | 103 } |
98 } | 104 } |
99 | 105 |
100 | 106 |
101 #ifdef DEBUG | 107 #ifdef DEBUG |
102 // Destroy the code which is not supposed to be run again. | 108 // Destroy the code which is not supposed to be run again. |
103 int instructions = | 109 int instructions = |
104 (code->safepoint_table_offset() - last_pc_offset) / Assembler::kInstrSize; | 110 (code->safepoint_table_offset() - last_pc_offset) / Assembler::kInstrSize; |
105 CodePatcher destroyer(code->instruction_start() + last_pc_offset, | 111 CodePatcher destroyer(code->instruction_start() + last_pc_offset, |
(...skipping 656 matching lines...) | |
762 __ push(ip); | 768 __ push(ip); |
763 __ b(&done); | 769 __ b(&done); |
764 ASSERT(masm()->pc_offset() - start == table_entry_size_); | 770 ASSERT(masm()->pc_offset() - start == table_entry_size_); |
765 } | 771 } |
766 __ bind(&done); | 772 __ bind(&done); |
767 } | 773 } |
768 | 774 |
769 #undef __ | 775 #undef __ |
770 | 776 |
771 } } // namespace v8::internal | 777 } } // namespace v8::internal |