| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 116 matching lines...) |
| 127 function_ = Handle<Object>::null(); | 127 function_ = Handle<Object>::null(); |
| 128 } | 128 } |
| 129 | 129 |
| 130 | 130 |
| 131 void PendingListNode::WeakCallback(v8::Persistent<v8::Value>, void* data) { | 131 void PendingListNode::WeakCallback(v8::Persistent<v8::Value>, void* data) { |
| 132 reinterpret_cast<PendingListNode*>(data)->Destroy(); | 132 reinterpret_cast<PendingListNode*>(data)->Destroy(); |
| 133 } | 133 } |
| 134 | 134 |
| 135 | 135 |
| 136 static bool IsOptimizable(JSFunction* function) { | 136 static bool IsOptimizable(JSFunction* function) { |
| 137 if (Heap::InNewSpace(function)) return false; |
| 137 Code* code = function->code(); | 138 Code* code = function->code(); |
| 138 return code->kind() == Code::FUNCTION && code->optimizable(); | 139 return code->kind() == Code::FUNCTION && code->optimizable(); |
| 139 } | 140 } |
| 140 | 141 |
| 141 | 142 |
| 142 static void Optimize(JSFunction* function, bool eager, int delay) { | 143 static void Optimize(JSFunction* function, bool eager, int delay) { |
| 143 ASSERT(IsOptimizable(function)); | 144 ASSERT(IsOptimizable(function)); |
| 144 if (FLAG_trace_opt) { | 145 if (FLAG_trace_opt) { |
| 145 PrintF("[marking (%s) ", eager ? "eagerly" : "lazily"); | 146 PrintF("[marking (%s) ", eager ? "eagerly" : "lazily"); |
| 146 function->PrintName(); | 147 function->PrintName(); |
| (...skipping 11 matching lines...) |
| 158 | 159 |
| 159 static void AttemptOnStackReplacement(JSFunction* function) { | 160 static void AttemptOnStackReplacement(JSFunction* function) { |
| 160 // See AlwaysFullCompiler (in compiler.cc) comment on why we need | 161 // See AlwaysFullCompiler (in compiler.cc) comment on why we need |
| 161 // Debug::has_break_points(). | 162 // Debug::has_break_points(). |
| 162 ASSERT(function->IsMarkedForLazyRecompilation()); | 163 ASSERT(function->IsMarkedForLazyRecompilation()); |
| 163 if (!FLAG_use_osr || Debug::has_break_points() || function->IsBuiltin()) { | 164 if (!FLAG_use_osr || Debug::has_break_points() || function->IsBuiltin()) { |
| 164 return; | 165 return; |
| 165 } | 166 } |
| 166 | 167 |
| 167 SharedFunctionInfo* shared = function->shared(); | 168 SharedFunctionInfo* shared = function->shared(); |
| 168 // If the code is not optimizable, don't try OSR. | 169 // If the code is not optimizable or references context slots, don't try OSR. |
| 169 if (!shared->code()->optimizable()) return; | 170 if (!shared->code()->optimizable() || !shared->allows_lazy_compilation()) { |
| 171 return; |
| 172 } |
| 170 | 173 |
| 171 // We are not prepared to do OSR for a function that already has an | 174 // We are not prepared to do OSR for a function that already has an |
| 172 // allocated arguments object. The optimized code would bypass it for | 175 // allocated arguments object. The optimized code would bypass it for |
| 173 // arguments accesses, which is unsound. Don't try OSR. | 176 // arguments accesses, which is unsound. Don't try OSR. |
| 174 if (shared->scope_info()->HasArgumentsShadow()) return; | 177 if (shared->scope_info()->HasArgumentsShadow()) return; |
| 175 | 178 |
| 176 // We're using on-stack replacement: patch the unoptimized code so that | 179 // We're using on-stack replacement: patch the unoptimized code so that |
| 177 // any back edge in any unoptimized frame will trigger on-stack | 180 // any back edge in any unoptimized frame will trigger on-stack |
| 178 // replacement for that frame. | 181 // replacement for that frame. |
| 179 if (FLAG_trace_osr) { | 182 if (FLAG_trace_osr) { |
| 180 PrintF("[patching stack checks in "); | 183 PrintF("[patching stack checks in "); |
| 181 function->PrintName(); | 184 function->PrintName(); |
| 182 PrintF(" for on-stack replacement]\n"); | 185 PrintF(" for on-stack replacement]\n"); |
| 183 } | 186 } |
| 184 | 187 |
| 185 // Get the stack check stub code object to match against. We aren't | 188 // Get the stack check stub code object to match against. We aren't |
| 186 // prepared to generate it, but we don't expect to have to. | 189 // prepared to generate it, but we don't expect to have to. |
| 187 StackCheckStub check_stub; | 190 StackCheckStub check_stub; |
| 188 Object* check_code; | 191 Object* check_code; |
| 189 MaybeObject* maybe_check_code = check_stub.TryGetCode(); | 192 MaybeObject* maybe_check_code = check_stub.TryGetCode(); |
| 190 if (maybe_check_code->ToObject(&check_code)) { | 193 if (maybe_check_code->ToObject(&check_code)) { |
| 191 Code* replacement_code = Builtins::builtin(Builtins::OnStackReplacement); | 194 Code* replacement_code = Builtins::builtin(Builtins::OnStackReplacement); |
| 192 Code* unoptimized_code = shared->code(); | 195 Code* unoptimized_code = shared->code(); |
| 193 // Iterate the unoptimized code and patch every stack check except at | 196 Deoptimizer::PatchStackCheckCode(unoptimized_code, |
| 194 // the function entry. This code assumes the function entry stack | 197 Code::cast(check_code), |
| 195 // check appears first i.e., is not deferred or otherwise reordered. | 198 replacement_code); |
| 196 bool first = true; | |
| 197 for (RelocIterator it(unoptimized_code, RelocInfo::kCodeTargetMask); | |
| 198 !it.done(); | |
| 199 it.next()) { | |
| 200 RelocInfo* rinfo = it.rinfo(); | |
| 201 if (rinfo->target_address() == Code::cast(check_code)->entry()) { | |
| 202 if (first) { | |
| 203 first = false; | |
| 204 } else { | |
| 205 Deoptimizer::PatchStackCheckCode(rinfo, replacement_code); | |
| 206 } | |
| 207 } | |
| 208 } | |
| 209 } | 199 } |
| 210 } | 200 } |
| 211 | 201 |
| 212 | 202 |
| 213 static void ClearSampleBuffer() { | 203 static void ClearSampleBuffer() { |
| 214 for (int i = 0; i < kSamplerWindowSize; i++) { | 204 for (int i = 0; i < kSamplerWindowSize; i++) { |
| 215 sampler_window[i] = NULL; | 205 sampler_window[i] = NULL; |
| 216 sampler_window_weight[i] = 0; | 206 sampler_window_weight[i] = 0; |
| 217 } | 207 } |
| 218 } | 208 } |
| (...skipping 230 matching lines...) |
| 449 if (Top::WaitForJSState()) return true; | 439 if (Top::WaitForJSState()) return true; |
| 450 } | 440 } |
| 451 } | 441 } |
| 452 } | 442 } |
| 453 #endif | 443 #endif |
| 454 return false; | 444 return false; |
| 455 } | 445 } |
| 456 | 446 |
| 457 | 447 |
| 458 } } // namespace v8::internal | 448 } } // namespace v8::internal |
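
Illustrative note (not part of the CL): the loop removed on the OLD side patched every stack-check call site in the unoptimized code except the first one, which the comment identifies as the function-entry check, so that only loop back edges trigger on-stack replacement; the NEW side delegates that same work to Deoptimizer::PatchStackCheckCode. The sketch below is a standalone, hypothetical C++ rendering of that "skip the first match, patch the rest" pattern. RelocEntry, PatchStackChecks, and the target/slot fields are invented stand-ins, not V8 types; only the skip-first behavior and the names check_code/replacement_code come from the diff above.

// Standalone sketch, assuming a flat list of call-site records stands in
// for V8's RelocIterator walk. All types here are hypothetical.
#include <cstdio>
#include <vector>

struct RelocEntry {
  const void* target;   // call target recorded at this code position
  const void** slot;    // location rewritten when the site is patched
};

// Patch every stack-check call site except the first (the function-entry
// check), mirroring the behavior of the loop removed in this CL.
static void PatchStackChecks(std::vector<RelocEntry>* entries,
                             const void* check_code,
                             const void* replacement_code) {
  bool first = true;
  for (RelocEntry& entry : *entries) {
    if (entry.target != check_code) continue;  // not a stack-check call
    if (first) {
      first = false;                           // leave the entry check alone
    } else {
      *entry.slot = replacement_code;          // redirect back-edge checks to OSR
    }
  }
}

int main() {
  const void* check = reinterpret_cast<const void*>(0x1);
  const void* osr = reinterpret_cast<const void*>(0x2);
  const void* targets[3] = {check, check, check};
  std::vector<RelocEntry> entries = {
      {targets[0], &targets[0]},   // function-entry stack check: untouched
      {targets[1], &targets[1]},   // loop back-edge stack check: patched
      {targets[2], &targets[2]}};  // another back-edge stack check: patched
  PatchStackChecks(&entries, check, osr);
  std::printf("entry=%p back1=%p back2=%p\n", targets[0], targets[1], targets[2]);
  return 0;
}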