| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 89 matching lines...) |
| 100 function_ = Handle<Object>::null(); | 100 function_ = Handle<Object>::null(); |
| 101 } | 101 } |
| 102 | 102 |
| 103 | 103 |
| 104 void PendingListNode::WeakCallback(v8::Persistent<v8::Value>, void* data) { | 104 void PendingListNode::WeakCallback(v8::Persistent<v8::Value>, void* data) { |
| 105 reinterpret_cast<PendingListNode*>(data)->Destroy(); | 105 reinterpret_cast<PendingListNode*>(data)->Destroy(); |
| 106 } | 106 } |
| 107 | 107 |
| 108 | 108 |
| 109 static bool IsOptimizable(JSFunction* function) { | 109 static bool IsOptimizable(JSFunction* function) { |
| | 110 if (function->GetHeap()->InNewSpace(function)) return false; |
| 110 Code* code = function->code(); | 111 Code* code = function->code(); |
| 111 return code->kind() == Code::FUNCTION && code->optimizable(); | 112 return code->kind() == Code::FUNCTION && code->optimizable(); |
| 112 } | 113 } |
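Not part of the patch: a minimal sketch of where this predicate would sit in the sampling path. The function name, optimize_soon_list_, and set_next() are assumptions based on the PendingListNode machinery shown earlier in the file, not something this change introduces.

    // Sketch only (hypothetical caller): reject functions that fail the
    // predicate above before queueing them on the pending list.
    void RuntimeProfiler::OptimizeSoon(JSFunction* function) {
      if (!IsOptimizable(function)) return;   // rejects new-space and
                                              // non-full-codegen functions
      PendingListNode* node = new PendingListNode(function);
      node->set_next(optimize_soon_list_);    // assumed existing list head
      optimize_soon_list_ = node;
    }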
| 113 | 114 |
| 114 | 115 |
| 115 Atomic32 RuntimeProfiler::state_ = 0; | 116 Atomic32 RuntimeProfiler::state_ = 0; |
| 116 // TODO(isolates): Create the semaphore lazily and clean it up when no | 117 // TODO(isolates): Create the semaphore lazily and clean it up when no |
| 117 // longer required. | 118 // longer required. |
| 118 #ifdef ENABLE_LOGGING_AND_PROFILING | 119 #ifdef ENABLE_LOGGING_AND_PROFILING |
| 119 Semaphore* RuntimeProfiler::semaphore_ = OS::CreateSemaphore(0); | 120 Semaphore* RuntimeProfiler::semaphore_ = OS::CreateSemaphore(0); |
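One possible shape for the TODO above, sketched only; GetSemaphore() is a hypothetical accessor, not part of this change, and a real version would need to make first-use initialization thread-safe.

    // Hypothetical lazy accessor for the semaphore mentioned in the TODO.
    // Not thread-safe as written: racing callers would need a lock or an
    // atomic compare-and-swap around the creation.
    Semaphore* RuntimeProfiler::GetSemaphore() {
      if (semaphore_ == NULL) semaphore_ = OS::CreateSemaphore(0);
      return semaphore_;
    }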
| (...skipping 43 matching lines...) |
| 163 // See AlwaysFullCompiler (in compiler.cc) comment on why we need | 164 // See AlwaysFullCompiler (in compiler.cc) comment on why we need |
| 164 // Debug::has_break_points(). | 165 // Debug::has_break_points(). |
| 165 ASSERT(function->IsMarkedForLazyRecompilation()); | 166 ASSERT(function->IsMarkedForLazyRecompilation()); |
| 166 if (!FLAG_use_osr || | 167 if (!FLAG_use_osr || |
| 167 isolate_->debug()->has_break_points() || | 168 isolate_->debug()->has_break_points() || |
| 168 function->IsBuiltin()) { | 169 function->IsBuiltin()) { |
| 169 return; | 170 return; |
| 170 } | 171 } |
| 171 | 172 |
| 172 SharedFunctionInfo* shared = function->shared(); | 173 SharedFunctionInfo* shared = function->shared(); |
| 173 // If the code is not optimizable, don't try OSR. | 174 // If the code is not optimizable or references context slots, don't try OSR. |
| 174 if (!shared->code()->optimizable()) return; | 175 if (!shared->code()->optimizable() || !shared->allows_lazy_compilation()) { |
| | 176 return; |
| | 177 } |
| 175 | 178 |
| 176 // We are not prepared to do OSR for a function that already has an | 179 // We are not prepared to do OSR for a function that already has an |
| 177 // allocated arguments object. The optimized code would bypass it for | 180 // allocated arguments object. The optimized code would bypass it for |
| 178 // arguments accesses, which is unsound. Don't try OSR. | 181 // arguments accesses, which is unsound. Don't try OSR. |
| 179 if (shared->scope_info()->HasArgumentsShadow()) return; | 182 if (shared->scope_info()->HasArgumentsShadow()) return; |
| 180 | 183 |
| 181 // We're using on-stack replacement: patch the unoptimized code so that | 184 // We're using on-stack replacement: patch the unoptimized code so that |
| 182 // any back edge in any unoptimized frame will trigger on-stack | 185 // any back edge in any unoptimized frame will trigger on-stack |
| 183 // replacement for that frame. | 186 // replacement for that frame. |
| 184 if (FLAG_trace_osr) { | 187 if (FLAG_trace_osr) { |
| 185 PrintF("[patching stack checks in "); | 188 PrintF("[patching stack checks in "); |
| 186 function->PrintName(); | 189 function->PrintName(); |
| 187 PrintF(" for on-stack replacement]\n"); | 190 PrintF(" for on-stack replacement]\n"); |
| 188 } | 191 } |
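For reference (not in the patch): the three PrintF calls above emit a single trace line when the VM runs with --trace-osr. For a function named f it would read roughly:

    [patching stack checks in f for on-stack replacement]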
| 189 | 192 |
| 190 // Get the stack check stub code object to match against. We aren't | 193 // Get the stack check stub code object to match against. We aren't |
| 191 // prepared to generate it, but we don't expect to have to. | 194 // prepared to generate it, but we don't expect to have to. |
| 192 StackCheckStub check_stub; | 195 StackCheckStub check_stub; |
| 193 Object* check_code; | 196 Object* check_code; |
| 194 MaybeObject* maybe_check_code = check_stub.TryGetCode(); | 197 MaybeObject* maybe_check_code = check_stub.TryGetCode(); |
| 195 if (maybe_check_code->ToObject(&check_code)) { | 198 if (maybe_check_code->ToObject(&check_code)) { |
| 196 Code* replacement_code = | 199 Code* replacement_code = |
| 197 isolate_->builtins()->builtin(Builtins::OnStackReplacement); | 200 isolate_->builtins()->builtin(Builtins::OnStackReplacement); |
| 198 Code* unoptimized_code = shared->code(); | 201 Code* unoptimized_code = shared->code(); |
| 199 // Iterate the unoptimized code and patch every stack check except at | 202 Deoptimizer::PatchStackCheckCode(unoptimized_code, |
| 200 // the function entry. This code assumes the function entry stack | 203 Code::cast(check_code), |
| 201 // check appears first i.e., is not deferred or otherwise reordered. | 204 replacement_code); |
| 202 bool first = true; | |
| 203 for (RelocIterator it(unoptimized_code, RelocInfo::kCodeTargetMask); | |
| 204 !it.done(); | |
| 205 it.next()) { | |
| 206 RelocInfo* rinfo = it.rinfo(); | |
| 207 if (rinfo->target_address() == Code::cast(check_code)->entry()) { | |
| 208 if (first) { | |
| 209 first = false; | |
| 210 } else { | |
| 211 Deoptimizer::PatchStackCheckCode(rinfo, replacement_code); | |
| 212 } | |
| 213 } | |
| 214 } | |
| 215 } | 205 } |
| 216 } | 206 } |
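The per-site patching loop deleted on the left has evidently been folded into the new three-argument Deoptimizer::PatchStackCheckCode overload. Below is a minimal sketch of what that helper would look like if it absorbed the deleted code unchanged; this is an assumption, since the helper's real body is outside the shown hunks, and the inner per-site patch is written as the old two-argument call on the further assumption that that overload survives the refactor.

    // Sketch only: the deleted loop, relocated into the Deoptimizer helper.
    // Skips the function-entry stack check, assumed to be the first
    // stack-check call site in the reloc info, and patches every other one.
    void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code,
                                          Code* check_code,
                                          Code* replacement_code) {
      bool first = true;
      for (RelocIterator it(unoptimized_code, RelocInfo::kCodeTargetMask);
           !it.done();
           it.next()) {
        RelocInfo* rinfo = it.rinfo();
        if (rinfo->target_address() == check_code->entry()) {
          if (first) {
            first = false;  // leave the function-entry check untouched
          } else {
            PatchStackCheckCode(rinfo, replacement_code);  // old per-site patch
          }
        }
      }
    }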
| 217 | 207 |
| 218 | 208 |
| 219 void RuntimeProfiler::ClearSampleBuffer() { | 209 void RuntimeProfiler::ClearSampleBuffer() { |
| 220 memset(sampler_window_, 0, sizeof(sampler_window_)); | 210 memset(sampler_window_, 0, sizeof(sampler_window_)); |
| 221 memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_)); | 211 memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_)); |
| 222 } | 212 } |
| 223 | 213 |
| 224 | 214 |
| (...skipping 249 matching lines...) |
| 474 } else { | 464 } else { |
| 475 return RuntimeProfiler::WaitForSomeIsolateToEnterJS(); | 465 return RuntimeProfiler::WaitForSomeIsolateToEnterJS(); |
| 476 } | 466 } |
| 477 } | 467 } |
| 478 #endif | 468 #endif |
| 479 return false; | 469 return false; |
| 480 } | 470 } |
| 481 | 471 |
| 482 | 472 |
| 483 } } // namespace v8::internal | 473 } } // namespace v8::internal |