| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 26 matching lines...) |
| 37 namespace internal { | 37 namespace internal { |
| 38 | 38 |
| 39 | 39 |
| 40 int Deoptimizer::patch_size() { | 40 int Deoptimizer::patch_size() { |
| 41 // Size of the code used to patch lazy bailout points. | 41 // Size of the code used to patch lazy bailout points. |
| 42 // Patching is done by Deoptimizer::DeoptimizeFunction. | 42 // Patching is done by Deoptimizer::DeoptimizeFunction. |
| 43 return 4 * kInstructionSize; | 43 return 4 * kInstructionSize; |
| 44 } | 44 } |
| 45 | 45 |
| 46 | 46 |
| 47 void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList( | |
| 48 JSFunction* function) { | |
| 49 Isolate* isolate = function->GetIsolate(); | |
| 50 HandleScope scope(isolate); | |
| 51 DisallowHeapAllocation no_allocation; | |
| 52 | 47 |
| 53 ASSERT(function->IsOptimized()); | 48 void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) { |
| 54 ASSERT(function->FunctionsInFunctionListShareSameCode()); | |
| 55 | |
| 56 // Get the optimized code. | |
| 57 Code* code = function->code(); | |
| 58 | |
| 59 // The optimized code is going to be patched, so we cannot use it any more. | |
| 60 function->shared()->EvictFromOptimizedCodeMap(code, "deoptimized function"); | |
| 61 | |
| 62 // Invalidate the relocation information, as it will become invalid by the | 49 // Invalidate the relocation information, as it will become invalid by the |
| 63 // code patching below, and is not needed any more. | 50 // code patching below, and is not needed any more. |
| 64 code->InvalidateRelocation(); | 51 code->InvalidateRelocation(); |
| 65 | 52 |
| 66 // For each LLazyBailout instruction insert a call to the corresponding | 53 // For each LLazyBailout instruction insert a call to the corresponding |
| 67 // deoptimization entry. | 54 // deoptimization entry. |
| 68 DeoptimizationInputData* deopt_data = | 55 DeoptimizationInputData* deopt_data = |
| 69 DeoptimizationInputData::cast(code->deoptimization_data()); | 56 DeoptimizationInputData::cast(code->deoptimization_data()); |
| 70 Address code_start_address = code->instruction_start(); | 57 Address code_start_address = code->instruction_start(); |
| 71 #ifdef DEBUG | 58 #ifdef DEBUG |
| (...skipping 11 matching lines...) |
| 83 patcher.blr(ip0); | 70 patcher.blr(ip0); |
| 84 patcher.dc64(reinterpret_cast<intptr_t>(deopt_entry)); | 71 patcher.dc64(reinterpret_cast<intptr_t>(deopt_entry)); |
| 85 | 72 |
| 86 ASSERT((prev_call_address == NULL) || | 73 ASSERT((prev_call_address == NULL) || |
| 87 (call_address >= prev_call_address + patch_size())); | 74 (call_address >= prev_call_address + patch_size())); |
| 88 ASSERT(call_address + patch_size() <= code->instruction_end()); | 75 ASSERT(call_address + patch_size() <= code->instruction_end()); |
| 89 #ifdef DEBUG | 76 #ifdef DEBUG |
| 90 prev_call_address = call_address; | 77 prev_call_address = call_address; |
| 91 #endif | 78 #endif |
| 92 } | 79 } |
| 93 | |
| 94 // Add the deoptimizing code to the list. | |
| 95 DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code); | |
| 96 DeoptimizerData* data = isolate->deoptimizer_data(); | |
| 97 node->set_next(data->deoptimizing_code_list_); | |
| 98 data->deoptimizing_code_list_ = node; | |
| 99 | |
| 100 // We might be in the middle of incremental marking with compaction. | |
| 101 // Tell collector to treat this code object in a special way and | |
| 102 // ignore all slots that might have been recorded on it. | |
| 103 isolate->heap()->mark_compact_collector()->InvalidateCode(code); | |
| 104 | |
| 105 ReplaceCodeForRelatedFunctions(function, code); | |
| 106 | |
| 107 if (FLAG_trace_deopt) { | |
| 108 PrintF("[forced deoptimization: "); | |
| 109 function->PrintName(); | |
| 110 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); | |
| 111 } | |
| 112 } | 80 } |
| 113 | 81 |
| 114 | 82 |
| 115 // The back edge bookkeeping code matches the pattern: | 83 // The back edge bookkeeping code matches the pattern: |
| 116 // | 84 // |
| 117 // <decrement profiling counter> | 85 // <decrement profiling counter> |
| 118 // .. .. .. .. b.pl ok | 86 // .. .. .. .. b.pl ok |
| 119 // .. .. .. .. ldr x16, pc+<interrupt stub address> | 87 // .. .. .. .. ldr x16, pc+<interrupt stub address> |
| 120 // .. .. .. .. blr x16 | 88 // .. .. .. .. blr x16 |
| 121 // ok-label | 89 // ok-label |
| (...skipping 490 matching lines...) |
| 612 __ b(&done); | 580 __ b(&done); |
| 613 ASSERT(masm()->pc_offset() - start == table_entry_size_); | 581 ASSERT(masm()->pc_offset() - start == table_entry_size_); |
| 614 } | 582 } |
| 615 } | 583 } |
| 616 __ Bind(&done); | 584 __ Bind(&done); |
| 617 // TODO(all): We need to add some kind of assertion to verify that Tmp0() | 585 // TODO(all): We need to add some kind of assertion to verify that Tmp0() |
| 618 // is not clobbered by Push. | 586 // is not clobbered by Push. |
| 619 __ Push(masm()->Tmp0()); | 587 __ Push(masm()->Tmp0()); |
| 620 } | 588 } |
| 621 | 589 |
| 590 |
| 591 void FrameDescription::SetCallerPc(unsigned offset, intptr_t value) { |
| 592 SetFrameSlot(offset, value); |
| 593 } |
| 594 |
| 595 |
| 596 void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { |
| 597 SetFrameSlot(offset, value); |
| 598 } |
| 599 |
| 600 |
| 622 #undef __ | 601 #undef __ |
| 623 | 602 |
| 624 } } // namespace v8::internal | 603 } } // namespace v8::internal |
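
For reference, `patch_size()` above reserves `4 * kInstructionSize` = 16 bytes per lazy bailout site, which `PatchCodeForDeoptimization` fills with a pc-relative load, an indirect call through ip0 (x16), and a 64-bit literal holding the deoptimization entry address (the `patcher.blr(ip0)` / `patcher.dc64(...)` lines in the diff). Here is a standalone sketch of that layout. The raw A64 encodings and the `PatchLazyBailoutSite` helper are hand-written illustrations, not V8 API; the real code emits these through V8's patching assembler.

```cpp
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstring>

// Stand-ins mirroring V8's constants (A64 instructions are 4 bytes wide).
constexpr int kInstructionSize = 4;
constexpr int kPatchSize = 4 * kInstructionSize;  // == patch_size()

// Hypothetical helper: writes the lazy-bailout patch into `site`:
//   ldr x16, pc+8       ; load the deopt entry address from the literal
//   blr x16             ; call it (x16 is ip0 in V8's A64 register usage)
//   .quad deopt_entry   ; 64-bit literal, occupying two instruction slots
void PatchLazyBailoutSite(uint8_t* site, uint64_t deopt_entry) {
  const uint32_t ldr_x16_pc8 = 0x58000050;  // LDR (literal), Rt=x16, imm=+8
  const uint32_t blr_x16     = 0xD63F0200;  // BLR x16
  std::memcpy(site, &ldr_x16_pc8, kInstructionSize);
  std::memcpy(site + kInstructionSize, &blr_x16, kInstructionSize);
  std::memcpy(site + 2 * kInstructionSize, &deopt_entry, sizeof(deopt_entry));
}

int main() {
  uint8_t site[kPatchSize] = {0};
  PatchLazyBailoutSite(site, 0x0000004000001234ULL);  // fake entry address
  // 1 ldr + 1 blr + 2 literal slots account for all four instruction sizes.
  static_assert(kPatchSize == 16, "patch is four A64 instruction slots");
  std::printf("patched %d bytes at a lazy bailout site\n", kPatchSize);
}
```

This layout is also why the loop asserts that consecutive call addresses are at least `patch_size()` apart and that the last patch does not run past `code->instruction_end()`: each patch must fit entirely within its own bailout site.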
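
The back-edge bookkeeping pattern quoted above (`b.pl ok` / `ldr x16, pc+<interrupt stub address>` / `blr x16`) is what the interrupt-patching code recognizes and toggles. Below is a minimal sketch of classifying a back-edge site by decoding its branch word; it assumes, as on V8's other ports, that patching for on-stack replacement replaces the `b.pl` with a nop so the stub call always executes. The `ClassifyBackEdge` helper and enum are illustrative, not V8's API.

```cpp
#include <cstdint>
#include <cstdio>

// Illustrative only: classify a back-edge site from its branch instruction.
enum class BackEdgeState { kInterruptCheck, kPatchedForOsr, kUnknown };

BackEdgeState ClassifyBackEdge(uint32_t branch_word) {
  // B.cond: top byte 0x54, bit 4 clear, condition in bits 3..0 (PL == 0b0101).
  const bool is_b_cond = (branch_word & 0xFF000010) == 0x54000000;
  if (is_b_cond && (branch_word & 0xF) == 0x5) {
    return BackEdgeState::kInterruptCheck;  // "b.pl ok" is still in place
  }
  if (branch_word == 0xD503201F) {  // NOP: branch disabled, stub always runs
    return BackEdgeState::kPatchedForOsr;
  }
  return BackEdgeState::kUnknown;
}

int main() {
  // b.pl with a +16 byte offset: imm19 = 16/4, condition PL in the low bits.
  const uint32_t b_pl_plus_16 = 0x54000000 | ((16 / 4) << 5) | 0x5;
  std::printf("b.pl site: %d\n", static_cast<int>(ClassifyBackEdge(b_pl_plus_16)));
  std::printf("nop site:  %d\n", static_cast<int>(ClassifyBackEdge(0xD503201F)));
}
```

Decoding the instruction word rather than tracking state elsewhere matches how the ports verify patch state in debug builds: the code itself is the source of truth for whether a back edge currently checks the profiling counter or unconditionally calls the replacement stub.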