OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 33 matching lines...)
44 int Deoptimizer::patch_size() { | 44 int Deoptimizer::patch_size() { |
45 return Assembler::kCallSequenceLength; | 45 return Assembler::kCallSequenceLength; |
46 } | 46 } |
47 | 47 |
48 | 48 |
49 void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) { | 49 void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) { |
50 // Invalidate the relocation information, as it will become invalid by the | 50 // Invalidate the relocation information, as it will become invalid by the |
51 // code patching below, and is not needed any more. | 51 // code patching below, and is not needed any more. |
52 code->InvalidateRelocation(); | 52 code->InvalidateRelocation(); |
53 | 53 |
| 54 if (FLAG_zap_code_space) { |
| 55 // Fail hard and early if we enter this code object again. |
| 56 byte* pointer = code->FindCodeAgeSequence(); |
| 57 if (pointer != NULL) { |
| 58 pointer += kNoCodeAgeSequenceLength; |
| 59 } else { |
| 60 pointer = code->instruction_start(); |
| 61 } |
| 62 CodePatcher patcher(pointer, 1); |
| 63 patcher.masm()->int3(); |
| 64 |
| 65 DeoptimizationInputData* data = |
| 66 DeoptimizationInputData::cast(code->deoptimization_data()); |
| 67 int osr_offset = data->OsrPcOffset()->value(); |
| 68 if (osr_offset > 0) { |
| 69 CodePatcher osr_patcher(code->instruction_start() + osr_offset, 1); |
| 70 osr_patcher.masm()->int3(); |
| 71 } |
| 72 } |
| 73 |
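A quick standalone sketch of what the new --zap_code_space block above does: it plants an int3 (0xCC on x64) right after the code age sequence, so any stray re-entry into the deoptimized code object traps immediately. The snippet below is illustrative only (plain C++ with made-up constants, not V8 API):

    // zap_sketch.cc: emulate "zapping" an entry point with int3 (0xCC),
    // the byte that patcher.masm()->int3() emits on x64.
    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    int main() {
      // Stand-in for the instruction stream of a deoptimized code object.
      uint8_t code[16];
      std::memset(code, 0x90, sizeof(code));                  // 0x90 = NOP filler

      const size_t kFakeCodeAgeSequenceLength = 5;            // assumed length, for illustration
      uint8_t* pointer = code + kFakeCodeAgeSequenceLength;   // keep the age sequence intact

      *pointer = 0xCC;  // int3: executing past the age sequence now traps

      std::printf("byte at patched entry: 0x%02X\n", *pointer);
      return 0;
    }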
54 // For each LLazyBailout instruction insert an absolute call to the | 74 // For each LLazyBailout instruction insert an absolute call to the |
55 // corresponding deoptimization entry, or a short call to an absolute | 75 // corresponding deoptimization entry, or a short call to an absolute |
56 // jump if space is short. The absolute jumps are put in a table just | 76 // jump if space is short. The absolute jumps are put in a table just |
57 // before the safepoint table (space was allocated there when the Code | 77 // before the safepoint table (space was allocated there when the Code |
58 // object was created, if necessary). | 78 // object was created, if necessary). |
59 | 79 |
60 Address instruction_start = code->instruction_start(); | 80 Address instruction_start = code->instruction_start(); |
61 #ifdef DEBUG | 81 #ifdef DEBUG |
62 Address prev_call_address = NULL; | 82 Address prev_call_address = NULL; |
63 #endif | 83 #endif |
(...skipping 270 matching lines...)
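The comment in the hunk above describes the patching scheme for the LLazyBailout sites (the patch loop itself sits in the skipped lines). As a rough illustration of the "short call vs. absolute call" trade-off it mentions, here is a plain C++ sketch; the byte counts are assumptions for illustration, not V8 constants:

    // call_patch_sketch.cc: the space check behind "a short call to an
    // absolute jump if space is short" (illustrative numbers, not V8's).
    #include <cstdio>

    constexpr int kShortCallLength = 5;      // E8 + rel32 on x64
    constexpr int kAbsoluteCallLength = 13;  // e.g. movq reg, imm64 + call reg (assumed)

    // If the full absolute call does not fit at the patch site, a short call
    // into a table of absolute jumps (placed just before the safepoint table)
    // is used instead.
    bool UseJumpTableEntry(int bytes_available_at_site) {
      return bytes_available_at_site < kAbsoluteCallLength;
    }

    int main() {
      std::printf("8 bytes free  -> jump table? %d\n", UseJumpTableEntry(8));
      std::printf("16 bytes free -> jump table? %d\n", UseJumpTableEntry(16));
      return 0;
    }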
334 SetFrameSlot(offset, value); | 354 SetFrameSlot(offset, value); |
335 } | 355 } |
336 | 356 |
337 | 357 |
338 #undef __ | 358 #undef __ |
339 | 359 |
340 | 360 |
341 } } // namespace v8::internal | 361 } } // namespace v8::internal |
342 | 362 |
343 #endif // V8_TARGET_ARCH_X64 | 363 #endif // V8_TARGET_ARCH_X64 |