| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 131 matching lines...) |
| 142 | 142 |
| 143 if (FLAG_concurrent_recompilation && !isolate_->bootstrapper()->IsActive()) { | 143 if (FLAG_concurrent_recompilation && !isolate_->bootstrapper()->IsActive()) { |
| 144 if (FLAG_concurrent_osr && | 144 if (FLAG_concurrent_osr && |
| 145 isolate_->optimizing_compiler_thread()->IsQueuedForOSR(function)) { | 145 isolate_->optimizing_compiler_thread()->IsQueuedForOSR(function)) { |
| 146 // Do not attempt regular recompilation if we already queued this for OSR. | 146 // Do not attempt regular recompilation if we already queued this for OSR. |
| 147 // TODO(yangguo): This is necessary so that we don't install optimized | 147 // TODO(yangguo): This is necessary so that we don't install optimized |
| 148 // code on a function that is already optimized, since OSR and regular | 148 // code on a function that is already optimized, since OSR and regular |
| 149 // recompilation race. This goes away as soon as OSR becomes one-shot. | 149 // recompilation race. This goes away as soon as OSR becomes one-shot. |
| 150 return; | 150 return; |
| 151 } | 151 } |
| 152 ASSERT(!function->IsMarkedForInstallingRecompiledCode()); | |
| 153 ASSERT(!function->IsInRecompileQueue()); | 152 ASSERT(!function->IsInRecompileQueue()); |
| 154 function->MarkForConcurrentRecompilation(); | 153 function->MarkForConcurrentRecompilation(); |
| 155 } else { | 154 } else { |
| 156 // The next call to the function will trigger optimization. | 155 // The next call to the function will trigger optimization. |
| 157 function->MarkForLazyRecompilation(); | 156 function->MarkForLazyRecompilation(); |
| 158 } | 157 } |
| 159 } | 158 } |
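
For readers skimming the hunk above: the patched path either hands the function to the background recompilation thread or falls back to lazy marking, skipping the concurrent route if the function is already queued for OSR (since OSR and regular recompilation race). A minimal standalone sketch of that branch, using simplified stand-in types rather than V8's real JSFunction/Isolate; the flag and method names only mirror the diff and are not the real API:

    // Hedged sketch of the marking policy in the hunk above. Function and
    // Profiler are simplified stand-ins; only the branch structure is mirrored.
    #include <cassert>
    #include <cstdio>

    struct Function {
      bool queued_for_osr = false;      // already in the OSR queue?
      bool in_recompile_queue = false;  // already queued for recompilation?
      bool marked_concurrent = false;
      bool marked_lazy = false;
    };

    struct Profiler {
      bool concurrent_recompilation;  // stands in for FLAG_concurrent_recompilation
      bool concurrent_osr;            // stands in for FLAG_concurrent_osr
      bool bootstrapping;             // stands in for bootstrapper()->IsActive()

      void Optimize(Function* f) {
        if (concurrent_recompilation && !bootstrapping) {
          // OSR and regular recompilation race; bail out if already queued.
          if (concurrent_osr && f->queued_for_osr) return;
          assert(!f->in_recompile_queue);
          f->marked_concurrent = true;  // background thread picks it up
        } else {
          f->marked_lazy = true;        // next call triggers optimization
        }
      }
    };

    int main() {
      Profiler p{true, true, false};
      Function f;
      p.Optimize(&f);
      std::printf("concurrent=%d lazy=%d\n", f.marked_concurrent, f.marked_lazy);
    }
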
| 160 | 159 |
| 161 | 160 |
| 162 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) { | 161 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) { |
| (...skipping 57 matching lines...) |
| 220 sampler_window_position_ = (sampler_window_position_ + 1) & | 219 sampler_window_position_ = (sampler_window_position_ + 1) & |
| 221 (kSamplerWindowSize - 1); | 220 (kSamplerWindowSize - 1); |
| 222 } | 221 } |
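
The wrap-around above relies on kSamplerWindowSize being a power of two, so the bitmask is equivalent to a modulo with no division. A tiny self-check, with an illustrative window size (the real constant is defined elsewhere in runtime-profiler.cc):

    // For power-of-two N, (i + 1) & (N - 1) == (i + 1) % N.
    #include <cassert>
    #include <cstdio>

    int main() {
      const int kSamplerWindowSize = 16;  // illustrative; must be a power of two
      for (int i = 0; i < 64; ++i) {
        assert(((i + 1) & (kSamplerWindowSize - 1)) ==
               (i + 1) % kSamplerWindowSize);
      }
      std::printf("bitmask wrap-around matches modulo\n");
    }
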
| 223 | 222 |
| 224 | 223 |
| 225 void RuntimeProfiler::OptimizeNow() { | 224 void RuntimeProfiler::OptimizeNow() { |
| 226 HandleScope scope(isolate_); | 225 HandleScope scope(isolate_); |
| 227 | 226 |
| 228 if (isolate_->DebuggerHasBreakPoints()) return; | 227 if (isolate_->DebuggerHasBreakPoints()) return; |
| 229 | 228 |
| 230 if (FLAG_concurrent_recompilation) { | |
| 231 // Take this as opportunity to process the optimizing compiler thread's | |
| 232 // output queue so that it does not unnecessarily keep objects alive. | |
| 233 isolate_->optimizing_compiler_thread()->InstallOptimizedFunctions(); | |
| 234 } | |
| 235 | |
| 236 DisallowHeapAllocation no_gc; | 229 DisallowHeapAllocation no_gc; |
| 237 | 230 |
| 238 // Run through the JavaScript frames and collect them. If we already | 231 // Run through the JavaScript frames and collect them. If we already |
| 239 // have a sample of the function, we mark it for optimizations | 232 // have a sample of the function, we mark it for optimizations |
| 240 // (eagerly or lazily). | 233 // (eagerly or lazily). |
| 241 JSFunction* samples[kSamplerFrameCount]; | 234 JSFunction* samples[kSamplerFrameCount]; |
| 242 int sample_count = 0; | 235 int sample_count = 0; |
| 243 int frame_count = 0; | 236 int frame_count = 0; |
| 244 int frame_count_limit = FLAG_watch_ic_patching ? FLAG_frame_count | 237 int frame_count_limit = FLAG_watch_ic_patching ? FLAG_frame_count |
| 245 : kSamplerFrameCount; | 238 : kSamplerFrameCount; |
| (...skipping 190 matching lines...) |
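
The declarations above set up the sampling loop that runs in the skipped region: walk the JavaScript stack and record up to frame_count_limit functions for later marking. A rough sketch of that shape only, with FrameWalker as a hypothetical stand-in for V8's JavaScriptFrameIterator (the real loop also checks optimization state per frame):

    #include <cstdio>
    #include <vector>

    struct Function { const char* name; };

    // Hypothetical stand-in for V8's JavaScriptFrameIterator.
    struct FrameWalker {
      std::vector<Function*> frames;
      size_t pos = 0;
      bool done() const { return pos >= frames.size(); }
      Function* function() const { return frames[pos]; }
      void Advance() { ++pos; }
    };

    // Mirrors only the counting/limit shape of OptimizeNow's sampling loop.
    int CollectSamples(FrameWalker* it, Function** samples, int limit) {
      int sample_count = 0;
      for (int frame_count = 0; frame_count < limit && !it->done();
           frame_count++, it->Advance()) {
        samples[sample_count++] = it->function();  // recorded for later marking
      }
      return sample_count;
    }

    int main() {
      Function a{"outer"}, b{"inner"};
      FrameWalker walker{{&a, &b}};
      Function* samples[2];
      std::printf("sampled %d frames\n", CollectSamples(&walker, samples, 2));
    }
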
| 436 | 429 |
| 437 | 430 |
| 438 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { | 431 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { |
| 439 for (int i = 0; i < kSamplerWindowSize; i++) { | 432 for (int i = 0; i < kSamplerWindowSize; i++) { |
| 440 visitor->VisitPointer(&sampler_window_[i]); | 433 visitor->VisitPointer(&sampler_window_[i]); |
| 441 } | 434 } |
| 442 } | 435 } |
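
UpdateSamplesAfterCompact exists so a compacting GC can fix up sampler-window slots that point at moved functions. A schematic version with a hypothetical visitor interface, just to show the fixup pattern; Object and Visitor here are stand-ins for V8's heap objects and ObjectVisitor:

    #include <cstdio>

    struct Object {};

    // Hypothetical stand-in for V8's ObjectVisitor interface.
    struct Visitor {
      virtual void VisitPointer(Object** p) = 0;
      virtual ~Visitor() = default;
    };

    const int kSamplerWindowSize = 4;  // illustrative; real size differs
    Object* sampler_window[kSamplerWindowSize];

    // Hand every window slot to the visitor so pointers to moved objects
    // can be rewritten in place, the same loop shape as the hunk above.
    void UpdateSamplesAfterCompact(Visitor* visitor) {
      for (int i = 0; i < kSamplerWindowSize; i++) {
        visitor->VisitPointer(&sampler_window[i]);
      }
    }

    struct CountingVisitor : Visitor {
      int visited = 0;
      void VisitPointer(Object**) override { ++visited; }
    };

    int main() {
      CountingVisitor v;
      UpdateSamplesAfterCompact(&v);
      std::printf("visited %d slots\n", v.visited);
    }
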
| 443 | 436 |
| 444 | 437 |
| 445 } } // namespace v8::internal | 438 } } // namespace v8::internal |