Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(55)

Side by Side Diff: src/optimizing-compile-dispatcher.cc

Issue 1773593002: [compiler] Remove support for concurrent OSR. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix release builds. Created 4 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/optimizing-compile-dispatcher.h ('k') | src/ppc/builtins-ppc.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/optimizing-compile-dispatcher.h" 5 #include "src/optimizing-compile-dispatcher.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/full-codegen/full-codegen.h" 8 #include "src/full-codegen/full-codegen.h"
9 #include "src/isolate.h" 9 #include "src/isolate.h"
10 #include "src/tracing/trace-event.h" 10 #include "src/tracing/trace-event.h"
11 #include "src/v8.h" 11 #include "src/v8.h"
12 12
13 namespace v8 { 13 namespace v8 {
14 namespace internal { 14 namespace internal {
15 15
16 namespace { 16 namespace {
17 17
18 void DisposeOptimizedCompileJob(OptimizedCompileJob* job, 18 void DisposeOptimizedCompileJob(OptimizedCompileJob* job,
19 bool restore_function_code) { 19 bool restore_function_code) {
20 // The recompile job is allocated in the CompilationInfo's zone. 20 // The recompile job is allocated in the CompilationInfo's zone.
21 CompilationInfo* info = job->info(); 21 CompilationInfo* info = job->info();
22 if (restore_function_code) { 22 if (restore_function_code) {
23 if (info->is_osr()) { 23 Handle<JSFunction> function = info->closure();
24 if (!job->IsWaitingForInstall()) { 24 function->ReplaceCode(function->shared()->code());
25 // Remove stack check that guards OSR entry on original code.
26 Handle<Code> code = info->unoptimized_code();
27 uint32_t offset = code->TranslateAstIdToPcOffset(info->osr_ast_id());
28 BackEdgeTable::RemoveStackCheck(code, offset);
29 }
30 } else {
31 Handle<JSFunction> function = info->closure();
32 function->ReplaceCode(function->shared()->code());
33 }
34 } 25 }
35 delete info; 26 delete info;
36 } 27 }
37 28
38 } // namespace 29 } // namespace
39 30
40 31
41 class OptimizingCompileDispatcher::CompileTask : public v8::Task { 32 class OptimizingCompileDispatcher::CompileTask : public v8::Task {
42 public: 33 public:
43 explicit CompileTask(Isolate* isolate) : isolate_(isolate) { 34 explicit CompileTask(Isolate* isolate) : isolate_(isolate) {
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after
85 76
86 OptimizingCompileDispatcher::~OptimizingCompileDispatcher() { 77 OptimizingCompileDispatcher::~OptimizingCompileDispatcher() {
87 #ifdef DEBUG 78 #ifdef DEBUG
88 { 79 {
89 base::LockGuard<base::Mutex> lock_guard(&ref_count_mutex_); 80 base::LockGuard<base::Mutex> lock_guard(&ref_count_mutex_);
90 DCHECK_EQ(0, ref_count_); 81 DCHECK_EQ(0, ref_count_);
91 } 82 }
92 #endif 83 #endif
93 DCHECK_EQ(0, input_queue_length_); 84 DCHECK_EQ(0, input_queue_length_);
94 DeleteArray(input_queue_); 85 DeleteArray(input_queue_);
95 if (FLAG_concurrent_osr) {
96 #ifdef DEBUG
97 for (int i = 0; i < osr_buffer_capacity_; i++) {
98 CHECK_NULL(osr_buffer_[i]);
99 }
100 #endif
101 DeleteArray(osr_buffer_);
102 }
103 } 86 }
104 87
105 88
106 OptimizedCompileJob* OptimizingCompileDispatcher::NextInput( 89 OptimizedCompileJob* OptimizingCompileDispatcher::NextInput(
107 bool check_if_flushing) { 90 bool check_if_flushing) {
108 base::LockGuard<base::Mutex> access_input_queue_(&input_queue_mutex_); 91 base::LockGuard<base::Mutex> access_input_queue_(&input_queue_mutex_);
109 if (input_queue_length_ == 0) return NULL; 92 if (input_queue_length_ == 0) return NULL;
110 OptimizedCompileJob* job = input_queue_[InputQueueIndex(0)]; 93 OptimizedCompileJob* job = input_queue_[InputQueueIndex(0)];
111 DCHECK_NOT_NULL(job); 94 DCHECK_NOT_NULL(job);
112 input_queue_shift_ = InputQueueIndex(1); 95 input_queue_shift_ = InputQueueIndex(1);
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after
152 } 135 }
153 136
154 // OSR jobs are dealt with separately. 137 // OSR jobs are dealt with separately.
155 if (!job->info()->is_osr()) { 138 if (!job->info()->is_osr()) {
156 DisposeOptimizedCompileJob(job, restore_function_code); 139 DisposeOptimizedCompileJob(job, restore_function_code);
157 } 140 }
158 } 141 }
159 } 142 }
160 143
161 144
162 void OptimizingCompileDispatcher::FlushOsrBuffer(bool restore_function_code) {
163 for (int i = 0; i < osr_buffer_capacity_; i++) {
164 if (osr_buffer_[i] != NULL) {
165 DisposeOptimizedCompileJob(osr_buffer_[i], restore_function_code);
166 osr_buffer_[i] = NULL;
167 }
168 }
169 }
170
171
172 void OptimizingCompileDispatcher::Flush() { 145 void OptimizingCompileDispatcher::Flush() {
173 base::Release_Store(&mode_, static_cast<base::AtomicWord>(FLUSH)); 146 base::Release_Store(&mode_, static_cast<base::AtomicWord>(FLUSH));
174 if (FLAG_block_concurrent_recompilation) Unblock(); 147 if (FLAG_block_concurrent_recompilation) Unblock();
175 { 148 {
176 base::LockGuard<base::Mutex> lock_guard(&ref_count_mutex_); 149 base::LockGuard<base::Mutex> lock_guard(&ref_count_mutex_);
177 while (ref_count_ > 0) ref_count_zero_.Wait(&ref_count_mutex_); 150 while (ref_count_ > 0) ref_count_zero_.Wait(&ref_count_mutex_);
178 base::Release_Store(&mode_, static_cast<base::AtomicWord>(COMPILE)); 151 base::Release_Store(&mode_, static_cast<base::AtomicWord>(COMPILE));
179 } 152 }
180 FlushOutputQueue(true); 153 FlushOutputQueue(true);
181 if (FLAG_concurrent_osr) FlushOsrBuffer(true);
182 if (FLAG_trace_concurrent_recompilation) { 154 if (FLAG_trace_concurrent_recompilation) {
183 PrintF(" ** Flushed concurrent recompilation queues.\n"); 155 PrintF(" ** Flushed concurrent recompilation queues.\n");
184 } 156 }
185 } 157 }
186 158
187 159
188 void OptimizingCompileDispatcher::Stop() { 160 void OptimizingCompileDispatcher::Stop() {
189 base::Release_Store(&mode_, static_cast<base::AtomicWord>(FLUSH)); 161 base::Release_Store(&mode_, static_cast<base::AtomicWord>(FLUSH));
190 if (FLAG_block_concurrent_recompilation) Unblock(); 162 if (FLAG_block_concurrent_recompilation) Unblock();
191 { 163 {
192 base::LockGuard<base::Mutex> lock_guard(&ref_count_mutex_); 164 base::LockGuard<base::Mutex> lock_guard(&ref_count_mutex_);
193 while (ref_count_ > 0) ref_count_zero_.Wait(&ref_count_mutex_); 165 while (ref_count_ > 0) ref_count_zero_.Wait(&ref_count_mutex_);
194 base::Release_Store(&mode_, static_cast<base::AtomicWord>(COMPILE)); 166 base::Release_Store(&mode_, static_cast<base::AtomicWord>(COMPILE));
195 } 167 }
196 168
197 if (recompilation_delay_ != 0) { 169 if (recompilation_delay_ != 0) {
198 // At this point the optimizing compiler thread's event loop has stopped. 170 // At this point the optimizing compiler thread's event loop has stopped.
199 // There is no need for a mutex when reading input_queue_length_. 171 // There is no need for a mutex when reading input_queue_length_.
200 while (input_queue_length_ > 0) CompileNext(NextInput()); 172 while (input_queue_length_ > 0) CompileNext(NextInput());
201 InstallOptimizedFunctions(); 173 InstallOptimizedFunctions();
202 } else { 174 } else {
203 FlushOutputQueue(false); 175 FlushOutputQueue(false);
204 } 176 }
205
206 if (FLAG_concurrent_osr) FlushOsrBuffer(false);
207
208 if ((FLAG_trace_osr || FLAG_trace_concurrent_recompilation) &&
209 FLAG_concurrent_osr) {
210 PrintF("[COSR hit rate %d / %d]\n", osr_hits_, osr_attempts_);
211 }
212 } 177 }
213 178
214 179
215 void OptimizingCompileDispatcher::InstallOptimizedFunctions() { 180 void OptimizingCompileDispatcher::InstallOptimizedFunctions() {
216 HandleScope handle_scope(isolate_); 181 HandleScope handle_scope(isolate_);
217 182
218 for (;;) { 183 for (;;) {
219 OptimizedCompileJob* job = NULL; 184 OptimizedCompileJob* job = NULL;
220 { 185 {
221 base::LockGuard<base::Mutex> access_output_queue_(&output_queue_mutex_); 186 base::LockGuard<base::Mutex> access_output_queue_(&output_queue_mutex_);
222 if (output_queue_.empty()) return; 187 if (output_queue_.empty()) return;
223 job = output_queue_.front(); 188 job = output_queue_.front();
224 output_queue_.pop(); 189 output_queue_.pop();
225 } 190 }
226 CompilationInfo* info = job->info(); 191 CompilationInfo* info = job->info();
227 Handle<JSFunction> function(*info->closure()); 192 Handle<JSFunction> function(*info->closure());
228 if (info->is_osr()) { 193 if (function->IsOptimized()) {
229 if (FLAG_trace_osr) { 194 if (FLAG_trace_concurrent_recompilation) {
230 PrintF("[COSR - "); 195 PrintF(" ** Aborting compilation for ");
231 function->ShortPrint(); 196 function->ShortPrint();
232 PrintF(" is ready for install and entry at AST id %d]\n", 197 PrintF(" as it has already been optimized.\n");
233 info->osr_ast_id().ToInt());
234 } 198 }
235 job->WaitForInstall(); 199 DisposeOptimizedCompileJob(job, false);
236 // Remove stack check that guards OSR entry on original code.
237 Handle<Code> code = info->unoptimized_code();
238 uint32_t offset = code->TranslateAstIdToPcOffset(info->osr_ast_id());
239 BackEdgeTable::RemoveStackCheck(code, offset);
240 } else { 200 } else {
241 if (function->IsOptimized()) { 201 MaybeHandle<Code> code = Compiler::GetConcurrentlyOptimizedCode(job);
242 if (FLAG_trace_concurrent_recompilation) { 202 function->ReplaceCode(code.is_null() ? function->shared()->code()
243 PrintF(" ** Aborting compilation for "); 203 : *code.ToHandleChecked());
244 function->ShortPrint();
245 PrintF(" as it has already been optimized.\n");
246 }
247 DisposeOptimizedCompileJob(job, false);
248 } else {
249 MaybeHandle<Code> code = Compiler::GetConcurrentlyOptimizedCode(job);
250 function->ReplaceCode(code.is_null() ? function->shared()->code()
251 : *code.ToHandleChecked());
252 }
253 } 204 }
254 } 205 }
255 } 206 }
256 207
257 208
258 void OptimizingCompileDispatcher::QueueForOptimization( 209 void OptimizingCompileDispatcher::QueueForOptimization(
259 OptimizedCompileJob* job) { 210 OptimizedCompileJob* job) {
260 DCHECK(IsQueueAvailable()); 211 DCHECK(IsQueueAvailable());
261 CompilationInfo* info = job->info(); 212 {
262 if (info->is_osr()) {
263 osr_attempts_++;
264 AddToOsrBuffer(job);
265 // Add job to the front of the input queue.
266 base::LockGuard<base::Mutex> access_input_queue(&input_queue_mutex_);
267 DCHECK_LT(input_queue_length_, input_queue_capacity_);
268 // Move shift_ back by one.
269 input_queue_shift_ = InputQueueIndex(input_queue_capacity_ - 1);
270 input_queue_[InputQueueIndex(0)] = job;
271 input_queue_length_++;
272 } else {
273 // Add job to the back of the input queue. 213 // Add job to the back of the input queue.
274 base::LockGuard<base::Mutex> access_input_queue(&input_queue_mutex_); 214 base::LockGuard<base::Mutex> access_input_queue(&input_queue_mutex_);
275 DCHECK_LT(input_queue_length_, input_queue_capacity_); 215 DCHECK_LT(input_queue_length_, input_queue_capacity_);
276 input_queue_[InputQueueIndex(input_queue_length_)] = job; 216 input_queue_[InputQueueIndex(input_queue_length_)] = job;
277 input_queue_length_++; 217 input_queue_length_++;
278 } 218 }
279 if (FLAG_block_concurrent_recompilation) { 219 if (FLAG_block_concurrent_recompilation) {
280 blocked_jobs_++; 220 blocked_jobs_++;
281 } else { 221 } else {
282 V8::GetCurrentPlatform()->CallOnBackgroundThread( 222 V8::GetCurrentPlatform()->CallOnBackgroundThread(
283 new CompileTask(isolate_), v8::Platform::kShortRunningTask); 223 new CompileTask(isolate_), v8::Platform::kShortRunningTask);
284 } 224 }
285 } 225 }
286 226
287 227
288 void OptimizingCompileDispatcher::Unblock() { 228 void OptimizingCompileDispatcher::Unblock() {
289 while (blocked_jobs_ > 0) { 229 while (blocked_jobs_ > 0) {
290 V8::GetCurrentPlatform()->CallOnBackgroundThread( 230 V8::GetCurrentPlatform()->CallOnBackgroundThread(
291 new CompileTask(isolate_), v8::Platform::kShortRunningTask); 231 new CompileTask(isolate_), v8::Platform::kShortRunningTask);
292 blocked_jobs_--; 232 blocked_jobs_--;
293 } 233 }
294 } 234 }
295 235
296 236
297 OptimizedCompileJob* OptimizingCompileDispatcher::FindReadyOSRCandidate(
298 Handle<JSFunction> function, BailoutId osr_ast_id) {
299 for (int i = 0; i < osr_buffer_capacity_; i++) {
300 OptimizedCompileJob* current = osr_buffer_[i];
301 if (current != NULL && current->IsWaitingForInstall() &&
302 current->info()->HasSameOsrEntry(function, osr_ast_id)) {
303 osr_hits_++;
304 osr_buffer_[i] = NULL;
305 return current;
306 }
307 }
308 return NULL;
309 }
310
311
312 bool OptimizingCompileDispatcher::IsQueuedForOSR(Handle<JSFunction> function,
313 BailoutId osr_ast_id) {
314 for (int i = 0; i < osr_buffer_capacity_; i++) {
315 OptimizedCompileJob* current = osr_buffer_[i];
316 if (current != NULL &&
317 current->info()->HasSameOsrEntry(function, osr_ast_id)) {
318 return !current->IsWaitingForInstall();
319 }
320 }
321 return false;
322 }
323
324
325 bool OptimizingCompileDispatcher::IsQueuedForOSR(JSFunction* function) {
326 for (int i = 0; i < osr_buffer_capacity_; i++) {
327 OptimizedCompileJob* current = osr_buffer_[i];
328 if (current != NULL && *current->info()->closure() == function) {
329 return !current->IsWaitingForInstall();
330 }
331 }
332 return false;
333 }
334
335
336 void OptimizingCompileDispatcher::AddToOsrBuffer(OptimizedCompileJob* job) {
337 // Find the next slot that is empty or has a stale job.
338 OptimizedCompileJob* stale = NULL;
339 while (true) {
340 stale = osr_buffer_[osr_buffer_cursor_];
341 if (stale == NULL || stale->IsWaitingForInstall()) break;
342 osr_buffer_cursor_ = (osr_buffer_cursor_ + 1) % osr_buffer_capacity_;
343 }
344
345 // Add to found slot and dispose the evicted job.
346 if (stale != NULL) {
347 DCHECK(stale->IsWaitingForInstall());
348 CompilationInfo* info = stale->info();
349 if (FLAG_trace_osr) {
350 PrintF("[COSR - Discarded ");
351 info->closure()->PrintName();
352 PrintF(", AST id %d]\n", info->osr_ast_id().ToInt());
353 }
354 DisposeOptimizedCompileJob(stale, false);
355 }
356 osr_buffer_[osr_buffer_cursor_] = job;
357 osr_buffer_cursor_ = (osr_buffer_cursor_ + 1) % osr_buffer_capacity_;
358 }
359 } // namespace internal 237 } // namespace internal
360 } // namespace v8 238 } // namespace v8
OLD | NEW
« no previous file with comments | « src/optimizing-compile-dispatcher.h ('k') | src/ppc/builtins-ppc.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698