Index: src/runtime-profiler.cc
===================================================================
--- src/runtime-profiler.cc	(revision 6800)
+++ src/runtime-profiler.cc	(working copy)
@@ -134,6 +134,7 @@
 
 
 static bool IsOptimizable(JSFunction* function) {
+  if (Heap::InNewSpace(function)) return false;
   Code* code = function->code();
   return code->kind() == Code::FUNCTION && code->optimizable();
 }
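
The new guard skips functions whose JSFunction object still lives in the young
generation. As a rough illustration of the kind of test Heap::InNewSpace
presumably performs — a sketch with hypothetical bounds, not the V8 source — a
semispace membership check looks like this:

    #include <cstdint>

    // Hypothetical new-space bounds; in V8 these are owned by the heap.
    static const uintptr_t kNewSpaceStart = 0x10000000;
    static const uintptr_t kNewSpaceEnd   = 0x14000000;

    // Returns true when the object's address falls inside the new space.
    static bool InNewSpaceSketch(const void* object) {
      uintptr_t address = reinterpret_cast<uintptr_t>(object);
      return address >= kNewSpaceStart && address < kNewSpaceEnd;
    }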
@@ -165,8 +166,10 @@
   }
 
   SharedFunctionInfo* shared = function->shared();
-  // If the code is not optimizable, don't try OSR.
-  if (!shared->code()->optimizable()) return;
+  // If the code is not optimizable or references context slots, don't try OSR.
+  if (!shared->code()->optimizable() || !shared->allows_lazy_compilation()) {
+    return;
+  }
 
   // We are not prepared to do OSR for a function that already has an
   // allocated arguments object. The optimized code would bypass it for
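
The OSR guard now also bails out when the function does not allow lazy
compilation. Pulled out as a standalone predicate — a minimal sketch that
mirrors the patched condition, not a helper that exists in V8 — the check
reads:

    // Sketch: both conditions from the patch must hold before OSR is tried.
    static bool CanAttemptOsr(SharedFunctionInfo* shared) {
      return shared->code()->optimizable() &&
             shared->allows_lazy_compilation();
    }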
@@ -190,22 +193,9 @@
   if (maybe_check_code->ToObject(&check_code)) {
     Code* replacement_code = Builtins::builtin(Builtins::OnStackReplacement);
     Code* unoptimized_code = shared->code();
-    // Iterate the unoptimized code and patch every stack check except at
-    // the function entry. This code assumes the function entry stack
-    // check appears first i.e., is not deferred or otherwise reordered.
-    bool first = true;
-    for (RelocIterator it(unoptimized_code, RelocInfo::kCodeTargetMask);
-         !it.done();
-         it.next()) {
-      RelocInfo* rinfo = it.rinfo();
-      if (rinfo->target_address() == Code::cast(check_code)->entry()) {
-        if (first) {
-          first = false;
-        } else {
-          Deoptimizer::PatchStackCheckCode(rinfo, replacement_code);
-        }
-      }
-    }
+    Deoptimizer::PatchStackCheckCode(unoptimized_code,
+                                     Code::cast(check_code),
+                                     replacement_code);
   }
 }
 
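
The reloc-info walk deleted above is not lost; the call site now hands the
unoptimized code, the stack-check stub, and the replacement builtin to a
three-argument Deoptimizer::PatchStackCheckCode, which presumably absorbs the
loop. A hedged reconstruction, assuming a hypothetical per-site helper
PatchStackCheckCodeAt for the single-site patch the old two-argument call
performed — the actual V8 body may differ:

    // Sketch of the presumed new overload, not the V8 source.
    void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code,
                                          Code* check_code,
                                          Code* replacement_code) {
      // Patch every stack check except the first one, which is assumed
      // to be the function-entry check and must stay in place.
      bool first = true;
      for (RelocIterator it(unoptimized_code, RelocInfo::kCodeTargetMask);
           !it.done();
           it.next()) {
        RelocInfo* rinfo = it.rinfo();
        if (rinfo->target_address() == check_code->entry()) {
          if (first) {
            first = false;
          } else {
            PatchStackCheckCodeAt(rinfo, replacement_code);  // hypothetical
          }
        }
      }
    }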