OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 234 matching lines...)
245 // call <on-stack replacement> | 245 // call <on-stack replacement> |
246 // test eax, <loop nesting depth> | 246 // test eax, <loop nesting depth> |
247 // ok: | 247 // ok: |
248 ASSERT(*(call_target_address - 3) == 0x73 && // jae | 248 ASSERT(*(call_target_address - 3) == 0x73 && // jae |
249 *(call_target_address - 2) == 0x07 && // offset | 249 *(call_target_address - 2) == 0x07 && // offset |
250 *(call_target_address - 1) == 0xe8); // call | 250 *(call_target_address - 1) == 0xe8); // call |
251 *(call_target_address - 3) = 0x90; // nop | 251 *(call_target_address - 3) = 0x90; // nop |
252 *(call_target_address - 2) = 0x90; // nop | 252 *(call_target_address - 2) = 0x90; // nop |
253 Assembler::set_target_address_at(call_target_address, | 253 Assembler::set_target_address_at(call_target_address, |
254 replacement_code->entry()); | 254 replacement_code->entry()); |
255 // TODO(gc) ISOLATES MERGE | 255 |
256 HEAP->incremental_marking()->RecordWrite(unoptimized_code, replacement_code); | 256 // TODO(gc) we are not compacting code space. |
| 257 unoptimized_code->GetHeap()->incremental_marking()->RecordWrite( |
| 258 unoptimized_code, NULL, replacement_code); |
257 } | 259 } |
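
Note: the hunk above patches the two-byte "jae ok" guard into nops so the on-stack-replacement call is taken unconditionally, then records the new code reference with the incremental marker. Below is a minimal, self-contained sketch of just the byte-patching step; the buffer layout, the PatchStackCheck name, and the main() driver are hypothetical stand-ins for V8's generated code and Assembler helpers, not the actual implementation.

// Sketch: verify the "jae +7; call rel32" byte pattern that precedes the
// call's 32-bit displacement, then overwrite the jump with two nops.
#include <cassert>
#include <cstdint>
#include <cstdio>

static void PatchStackCheck(uint8_t* call_target_address) {
  // call_target_address points at the call's 4-byte displacement, as in the
  // V8 code above, so the opcode bytes sit just before it.
  assert(call_target_address[-3] == 0x73 &&  // jae
         call_target_address[-2] == 0x07 &&  // offset
         call_target_address[-1] == 0xe8);   // call
  call_target_address[-3] = 0x90;  // nop
  call_target_address[-2] = 0x90;  // nop
  // In V8 the call target would also be redirected via
  // Assembler::set_target_address_at, and the heap's incremental marker
  // notified of the new code-object reference (RecordWrite above).
}

int main() {
  // Fake instruction stream: jae +7; call rel32 (displacement = 0).
  uint8_t code[] = {0x73, 0x07, 0xe8, 0x00, 0x00, 0x00, 0x00};
  PatchStackCheck(code + 3);  // +3: address of the call's displacement
  std::printf("%02x %02x %02x\n", code[0], code[1], code[2]);  // 90 90 e8
  return 0;
}
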
258 | 260 |
259 | 261 |
260 void Deoptimizer::RevertStackCheckCodeAt(Address pc_after, | 262 void Deoptimizer::RevertStackCheckCodeAt(Address pc_after, |
261 Code* check_code, | 263 Code* check_code, |
262 Code* replacement_code) { | 264 Code* replacement_code) { |
263 Address call_target_address = pc_after - kIntSize; | 265 Address call_target_address = pc_after - kIntSize; |
264 ASSERT(replacement_code->entry() == | 266 ASSERT(replacement_code->entry() == |
265 Assembler::target_address_at(call_target_address)); | 267 Assembler::target_address_at(call_target_address)); |
266 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to | 268 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to |
(...skipping 504 matching lines...)
771 } | 773 } |
772 __ bind(&done); | 774 __ bind(&done); |
773 } | 775 } |
774 | 776 |
775 #undef __ | 777 #undef __ |
776 | 778 |
777 | 779 |
778 } } // namespace v8::internal | 780 } } // namespace v8::internal |
779 | 781 |
780 #endif // V8_TARGET_ARCH_IA32 | 782 #endif // V8_TARGET_ARCH_IA32 |