Unified Diff: src/optimizing-compiler-thread.cc

Issue 1063383004: Revert of "Remove support for thread-based recompilation" (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: manual revert (created 5 years, 8 months ago)
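
For orientation before the diff: the restored (right-hand, "+") code runs recompilation on a dedicated background thread that consumes jobs from an input queue, with one semaphore signal per queued job and an atomic stop/flush flag that the consumer loop re-checks on every wake-up. The sketch below is a minimal, self-contained illustration of that consumer-loop shape only; it is not V8 code, all names (RecompileQueue, Job, StopFlag) are invented for the example, and V8's FLUSH handling, OSR buffer, and job-based mode are left out.

// Hypothetical illustration of the thread-plus-wakeup pattern; not V8 code.
#include <atomic>
#include <condition_variable>
#include <functional>
#include <mutex>
#include <queue>
#include <thread>

using Job = std::function<void()>;
enum class StopFlag { CONTINUE, STOP };  // The real code also has a FLUSH state.

class RecompileQueue {
 public:
  RecompileQueue() : worker_([this] { Run(); }) {}
  ~RecompileQueue() { Stop(); }

  // Producer side (the main thread in V8): queue a job, then wake the worker.
  void Enqueue(Job job) {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      jobs_.push(std::move(job));
    }
    cv_.notify_one();  // One wake-up per queued job, like input_queue_semaphore_.
  }

  // Ask the worker to exit and wait for it, roughly like Stop() in the patch.
  void Stop() {
    flag_.store(StopFlag::STOP);
    cv_.notify_one();
    if (worker_.joinable()) worker_.join();
  }

 private:
  // Consumer loop: the analogue of OptimizingCompilerThread::Run().
  void Run() {
    while (true) {
      std::unique_lock<std::mutex> lock(mutex_);
      cv_.wait(lock, [this] {
        return flag_.load() != StopFlag::CONTINUE || !jobs_.empty();
      });
      if (flag_.load() == StopFlag::STOP) return;  // Pending jobs dropped here.
      Job job = std::move(jobs_.front());
      jobs_.pop();
      lock.unlock();
      job();  // Stand-in for CompileNext(NextInput()).
    }
  }

  std::mutex mutex_;
  std::condition_variable cv_;
  std::queue<Job> jobs_;
  std::atomic<StopFlag> flag_{StopFlag::CONTINUE};
  std::thread worker_;  // Started last, after the members above are initialized.
};

Signalling once per job is what lets the flush path in the patch drain the input queue without blocking: every Wait() on the input queue semaphore is matched by a Signal() that has already happened, which is what the comment in FlushInputQueue relies on.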
--- src/optimizing-compiler-thread.cc (base)
+++ src/optimizing-compiler-thread.cc (manual revert)
@@ -1,351 +1,463 @@
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include "src/optimizing-compile-dispatcher.h"
+#include "src/optimizing-compiler-thread.h"
 
 #include "src/v8.h"
 
 #include "src/base/atomicops.h"
 #include "src/full-codegen.h"
 #include "src/hydrogen.h"
 #include "src/isolate.h"
+#include "src/v8threads.h"
 
 namespace v8 {
 namespace internal {
 
 namespace {
 
 void DisposeOptimizedCompileJob(OptimizedCompileJob* job,
                                 bool restore_function_code) {
   // The recompile job is allocated in the CompilationInfo's zone.
   CompilationInfo* info = job->info();
   if (restore_function_code) {
     if (info->is_osr()) {
       if (!job->IsWaitingForInstall()) {
         // Remove stack check that guards OSR entry on original code.
         Handle<Code> code = info->unoptimized_code();
         uint32_t offset = code->TranslateAstIdToPcOffset(info->osr_ast_id());
         BackEdgeTable::RemoveStackCheck(code, offset);
       }
     } else {
       Handle<JSFunction> function = info->closure();
       function->ReplaceCode(function->shared()->code());
     }
   }
   delete info;
 }
 
 }  // namespace
 
 
-class OptimizingCompileDispatcher::CompileTask : public v8::Task {
+class OptimizingCompilerThread::CompileTask : public v8::Task {
  public:
   explicit CompileTask(Isolate* isolate) : isolate_(isolate) {
-    OptimizingCompileDispatcher* dispatcher =
-        isolate_->optimizing_compile_dispatcher();
-    base::LockGuard<base::Mutex> lock_guard(&dispatcher->ref_count_mutex_);
-    ++dispatcher->ref_count_;
+    OptimizingCompilerThread* thread = isolate_->optimizing_compiler_thread();
+    base::LockGuard<base::Mutex> lock_guard(&thread->ref_count_mutex_);
+    ++thread->ref_count_;
   }
 
   virtual ~CompileTask() {}
 
  private:
  // v8::Task overrides.
   void Run() OVERRIDE {
     DisallowHeapAllocation no_allocation;
     DisallowHandleAllocation no_handles;
     DisallowHandleDereference no_deref;
 
-    OptimizingCompileDispatcher* dispatcher =
-        isolate_->optimizing_compile_dispatcher();
+    OptimizingCompilerThread* thread = isolate_->optimizing_compiler_thread();
     {
       TimerEventScope<TimerEventRecompileConcurrent> timer(isolate_);
 
-      if (dispatcher->recompilation_delay_ != 0) {
-        base::OS::Sleep(dispatcher->recompilation_delay_);
+      if (thread->recompilation_delay_ != 0) {
+        base::OS::Sleep(thread->recompilation_delay_);
       }
 
-      dispatcher->CompileNext(dispatcher->NextInput(true));
+      thread->CompileNext(thread->NextInput(true));
     }
     {
-      base::LockGuard<base::Mutex> lock_guard(&dispatcher->ref_count_mutex_);
-      if (--dispatcher->ref_count_ == 0) {
-        dispatcher->ref_count_zero_.NotifyOne();
+      base::LockGuard<base::Mutex> lock_guard(&thread->ref_count_mutex_);
+      if (--thread->ref_count_ == 0) {
+        thread->ref_count_zero_.NotifyOne();
       }
     }
   }
 
   Isolate* isolate_;
 
   DISALLOW_COPY_AND_ASSIGN(CompileTask);
 };
 
 
-OptimizingCompileDispatcher::~OptimizingCompileDispatcher() {
+OptimizingCompilerThread::~OptimizingCompilerThread() {
 #ifdef DEBUG
   {
     base::LockGuard<base::Mutex> lock_guard(&ref_count_mutex_);
     DCHECK_EQ(0, ref_count_);
   }
 #endif
   DCHECK_EQ(0, input_queue_length_);
   DeleteArray(input_queue_);
   if (FLAG_concurrent_osr) {
 #ifdef DEBUG
     for (int i = 0; i < osr_buffer_capacity_; i++) {
       CHECK_NULL(osr_buffer_[i]);
     }
 #endif
     DeleteArray(osr_buffer_);
   }
 }
 
 
-OptimizedCompileJob* OptimizingCompileDispatcher::NextInput(
+void OptimizingCompilerThread::Run() {
+#ifdef DEBUG
+  { base::LockGuard<base::Mutex> lock_guard(&thread_id_mutex_);
+    thread_id_ = ThreadId::Current().ToInteger();
+  }
+#endif
+  DisallowHeapAllocation no_allocation;
+  DisallowHandleAllocation no_handles;
+  DisallowHandleDereference no_deref;
+
+  if (job_based_recompilation_) {
+    return;
+  }
+
+  base::ElapsedTimer total_timer;
+  if (tracing_enabled_) total_timer.Start();
+
+  while (true) {
+    input_queue_semaphore_.Wait();
+    TimerEventScope<TimerEventRecompileConcurrent> timer(isolate_);
+
+    if (recompilation_delay_ != 0) {
+      base::OS::Sleep(recompilation_delay_);
+    }
+
+    switch (static_cast<StopFlag>(base::Acquire_Load(&stop_thread_))) {
+      case CONTINUE:
+        break;
+      case STOP:
+        if (tracing_enabled_) {
+          time_spent_total_ = total_timer.Elapsed();
+        }
+        stop_semaphore_.Signal();
+        return;
+      case FLUSH:
+        // The main thread is blocked, waiting for the stop semaphore.
+        { AllowHandleDereference allow_handle_dereference;
+          FlushInputQueue(true);
+        }
+        base::Release_Store(&stop_thread_,
+                            static_cast<base::AtomicWord>(CONTINUE));
+        stop_semaphore_.Signal();
+        // Return to start of consumer loop.
+        continue;
+    }
+
+    base::ElapsedTimer compiling_timer;
+    if (tracing_enabled_) compiling_timer.Start();
+
+    CompileNext(NextInput());
+
+    if (tracing_enabled_) {
+      time_spent_compiling_ += compiling_timer.Elapsed();
+    }
+  }
+}
+
+
+OptimizedCompileJob* OptimizingCompilerThread::NextInput(
     bool check_if_flushing) {
   base::LockGuard<base::Mutex> access_input_queue_(&input_queue_mutex_);
   if (input_queue_length_ == 0) return NULL;
   OptimizedCompileJob* job = input_queue_[InputQueueIndex(0)];
   DCHECK_NOT_NULL(job);
   input_queue_shift_ = InputQueueIndex(1);
   input_queue_length_--;
   if (check_if_flushing) {
-    if (static_cast<ModeFlag>(base::Acquire_Load(&mode_)) == FLUSH) {
+    if (static_cast<StopFlag>(base::Acquire_Load(&stop_thread_)) != CONTINUE) {
       if (!job->info()->is_osr()) {
         AllowHandleDereference allow_handle_dereference;
         DisposeOptimizedCompileJob(job, true);
       }
       return NULL;
     }
   }
   return job;
 }
 
 
-void OptimizingCompileDispatcher::CompileNext(OptimizedCompileJob* job) {
+void OptimizingCompilerThread::CompileNext(OptimizedCompileJob* job) {
   if (!job) return;
 
   // The function may have already been optimized by OSR. Simply continue.
   OptimizedCompileJob::Status status = job->OptimizeGraph();
   USE(status);  // Prevent an unused-variable error in release mode.
   DCHECK(status != OptimizedCompileJob::FAILED);
 
   // The function may have already been optimized by OSR. Simply continue.
   // Use a mutex to make sure that functions marked for install
   // are always also queued.
-  base::LockGuard<base::Mutex> access_output_queue_(&output_queue_mutex_);
-  output_queue_.push(job);
+  if (job_based_recompilation_) output_queue_mutex_.Lock();
+  output_queue_.Enqueue(job);
+  if (job_based_recompilation_) output_queue_mutex_.Unlock();
   isolate_->stack_guard()->RequestInstallCode();
 }
 
 
-void OptimizingCompileDispatcher::FlushOutputQueue(bool restore_function_code) {
-  base::LockGuard<base::Mutex> access_output_queue_(&output_queue_mutex_);
-  while (!output_queue_.empty()) {
-    OptimizedCompileJob* job = output_queue_.front();
-    output_queue_.pop();
-
+void OptimizingCompilerThread::FlushInputQueue(bool restore_function_code) {
+  OptimizedCompileJob* job;
+  while ((job = NextInput())) {
+    DCHECK(!job_based_recompilation_);
+    // This should not block, since we have one signal on the input queue
+    // semaphore corresponding to each element in the input queue.
+    input_queue_semaphore_.Wait();
     // OSR jobs are dealt with separately.
     if (!job->info()->is_osr()) {
       DisposeOptimizedCompileJob(job, restore_function_code);
+    }
+  }
+}
+
+
+void OptimizingCompilerThread::FlushOutputQueue(bool restore_function_code) {
+  OptimizedCompileJob* job;
+  while (output_queue_.Dequeue(&job)) {
+    // OSR jobs are dealt with separately.
+    if (!job->info()->is_osr()) {
+      DisposeOptimizedCompileJob(job, restore_function_code);
     }
   }
 }
 
 
-void OptimizingCompileDispatcher::FlushOsrBuffer(bool restore_function_code) {
+void OptimizingCompilerThread::FlushOsrBuffer(bool restore_function_code) {
   for (int i = 0; i < osr_buffer_capacity_; i++) {
     if (osr_buffer_[i] != NULL) {
       DisposeOptimizedCompileJob(osr_buffer_[i], restore_function_code);
       osr_buffer_[i] = NULL;
     }
   }
 }
 
 
-void OptimizingCompileDispatcher::Flush() {
-  base::Release_Store(&mode_, static_cast<base::AtomicWord>(FLUSH));
+void OptimizingCompilerThread::Flush() {
+  DCHECK(!IsOptimizerThread());
+  base::Release_Store(&stop_thread_, static_cast<base::AtomicWord>(FLUSH));
   if (FLAG_block_concurrent_recompilation) Unblock();
-  {
+  if (!job_based_recompilation_) {
+    input_queue_semaphore_.Signal();
+    stop_semaphore_.Wait();
+  } else {
     base::LockGuard<base::Mutex> lock_guard(&ref_count_mutex_);
     while (ref_count_ > 0) ref_count_zero_.Wait(&ref_count_mutex_);
-    base::Release_Store(&mode_, static_cast<base::AtomicWord>(COMPILE));
+    base::Release_Store(&stop_thread_, static_cast<base::AtomicWord>(CONTINUE));
   }
   FlushOutputQueue(true);
   if (FLAG_concurrent_osr) FlushOsrBuffer(true);
-  if (FLAG_trace_concurrent_recompilation) {
+  if (tracing_enabled_) {
     PrintF(" ** Flushed concurrent recompilation queues.\n");
   }
 }
 
 
-void OptimizingCompileDispatcher::Stop() {
-  base::Release_Store(&mode_, static_cast<base::AtomicWord>(FLUSH));
+void OptimizingCompilerThread::Stop() {
+  DCHECK(!IsOptimizerThread());
+  base::Release_Store(&stop_thread_, static_cast<base::AtomicWord>(STOP));
   if (FLAG_block_concurrent_recompilation) Unblock();
-  {
+  if (!job_based_recompilation_) {
+    input_queue_semaphore_.Signal();
+    stop_semaphore_.Wait();
+  } else {
     base::LockGuard<base::Mutex> lock_guard(&ref_count_mutex_);
     while (ref_count_ > 0) ref_count_zero_.Wait(&ref_count_mutex_);
-    base::Release_Store(&mode_, static_cast<base::AtomicWord>(COMPILE));
+    base::Release_Store(&stop_thread_, static_cast<base::AtomicWord>(CONTINUE));
   }
 
   if (recompilation_delay_ != 0) {
     // At this point the optimizing compiler thread's event loop has stopped.
     // There is no need for a mutex when reading input_queue_length_.
    while (input_queue_length_ > 0) CompileNext(NextInput());
     InstallOptimizedFunctions();
   } else {
+    FlushInputQueue(false);
     FlushOutputQueue(false);
   }
 
   if (FLAG_concurrent_osr) FlushOsrBuffer(false);
 
-  if ((FLAG_trace_osr || FLAG_trace_concurrent_recompilation) &&
-      FLAG_concurrent_osr) {
+  if (tracing_enabled_) {
+    double percentage = time_spent_compiling_.PercentOf(time_spent_total_);
+    if (job_based_recompilation_) percentage = 100.0;
+    PrintF(" ** Compiler thread did %.2f%% useful work\n", percentage);
+  }
+
+  if ((FLAG_trace_osr || tracing_enabled_) && FLAG_concurrent_osr) {
     PrintF("[COSR hit rate %d / %d]\n", osr_hits_, osr_attempts_);
   }
+
+  Join();
 }
 
 
-void OptimizingCompileDispatcher::InstallOptimizedFunctions() {
+void OptimizingCompilerThread::InstallOptimizedFunctions() {
+  DCHECK(!IsOptimizerThread());
   HandleScope handle_scope(isolate_);
 
-  base::LockGuard<base::Mutex> access_output_queue_(&output_queue_mutex_);
-  while (!output_queue_.empty()) {
-    OptimizedCompileJob* job = output_queue_.front();
-    output_queue_.pop();
+  OptimizedCompileJob* job;
+  while (output_queue_.Dequeue(&job)) {
     CompilationInfo* info = job->info();
     Handle<JSFunction> function(*info->closure());
     if (info->is_osr()) {
      if (FLAG_trace_osr) {
         PrintF("[COSR - ");
         function->ShortPrint();
         PrintF(" is ready for install and entry at AST id %d]\n",
               info->osr_ast_id().ToInt());
       }
       job->WaitForInstall();
       // Remove stack check that guards OSR entry on original code.
       Handle<Code> code = info->unoptimized_code();
       uint32_t offset = code->TranslateAstIdToPcOffset(info->osr_ast_id());
       BackEdgeTable::RemoveStackCheck(code, offset);
     } else {
       if (function->IsOptimized()) {
-        if (FLAG_trace_concurrent_recompilation) {
+        if (tracing_enabled_) {
          PrintF(" ** Aborting compilation for ");
           function->ShortPrint();
           PrintF(" as it has already been optimized.\n");
         }
         DisposeOptimizedCompileJob(job, false);
       } else {
         Handle<Code> code = Compiler::GetConcurrentlyOptimizedCode(job);
-        function->ReplaceCode(code.is_null() ? function->shared()->code()
-                                             : *code);
+        function->ReplaceCode(
+            code.is_null() ? function->shared()->code() : *code);
       }
     }
   }
 }
 
 
-void OptimizingCompileDispatcher::QueueForOptimization(
-    OptimizedCompileJob* job) {
+void OptimizingCompilerThread::QueueForOptimization(OptimizedCompileJob* job) {
   DCHECK(IsQueueAvailable());
+  DCHECK(!IsOptimizerThread());
   CompilationInfo* info = job->info();
   if (info->is_osr()) {
     osr_attempts_++;
     AddToOsrBuffer(job);
     // Add job to the front of the input queue.
     base::LockGuard<base::Mutex> access_input_queue(&input_queue_mutex_);
     DCHECK_LT(input_queue_length_, input_queue_capacity_);
     // Move shift_ back by one.
     input_queue_shift_ = InputQueueIndex(input_queue_capacity_ - 1);
     input_queue_[InputQueueIndex(0)] = job;
     input_queue_length_++;
   } else {
     // Add job to the back of the input queue.
     base::LockGuard<base::Mutex> access_input_queue(&input_queue_mutex_);
     DCHECK_LT(input_queue_length_, input_queue_capacity_);
     input_queue_[InputQueueIndex(input_queue_length_)] = job;
     input_queue_length_++;
   }
   if (FLAG_block_concurrent_recompilation) {
     blocked_jobs_++;
-  } else {
+  } else if (job_based_recompilation_) {
     V8::GetCurrentPlatform()->CallOnBackgroundThread(
         new CompileTask(isolate_), v8::Platform::kShortRunningTask);
+  } else {
+    input_queue_semaphore_.Signal();
   }
 }
 
 
-void OptimizingCompileDispatcher::Unblock() {
+void OptimizingCompilerThread::Unblock() {
+  DCHECK(!IsOptimizerThread());
   while (blocked_jobs_ > 0) {
-    V8::GetCurrentPlatform()->CallOnBackgroundThread(
-        new CompileTask(isolate_), v8::Platform::kShortRunningTask);
+    if (job_based_recompilation_) {
+      V8::GetCurrentPlatform()->CallOnBackgroundThread(
+          new CompileTask(isolate_), v8::Platform::kShortRunningTask);
+    } else {
+      input_queue_semaphore_.Signal();
+    }
     blocked_jobs_--;
   }
 }
 
 
-OptimizedCompileJob* OptimizingCompileDispatcher::FindReadyOSRCandidate(
+OptimizedCompileJob* OptimizingCompilerThread::FindReadyOSRCandidate(
     Handle<JSFunction> function, BailoutId osr_ast_id) {
+  DCHECK(!IsOptimizerThread());
   for (int i = 0; i < osr_buffer_capacity_; i++) {
     OptimizedCompileJob* current = osr_buffer_[i];
-    if (current != NULL && current->IsWaitingForInstall() &&
+    if (current != NULL &&
+        current->IsWaitingForInstall() &&
         current->info()->HasSameOsrEntry(function, osr_ast_id)) {
       osr_hits_++;
       osr_buffer_[i] = NULL;
       return current;
     }
   }
   return NULL;
 }
 
 
-bool OptimizingCompileDispatcher::IsQueuedForOSR(Handle<JSFunction> function,
-                                                 BailoutId osr_ast_id) {
+bool OptimizingCompilerThread::IsQueuedForOSR(Handle<JSFunction> function,
+                                              BailoutId osr_ast_id) {
+  DCHECK(!IsOptimizerThread());
   for (int i = 0; i < osr_buffer_capacity_; i++) {
     OptimizedCompileJob* current = osr_buffer_[i];
     if (current != NULL &&
         current->info()->HasSameOsrEntry(function, osr_ast_id)) {
       return !current->IsWaitingForInstall();
     }
   }
   return false;
 }
 
 
-bool OptimizingCompileDispatcher::IsQueuedForOSR(JSFunction* function) {
+bool OptimizingCompilerThread::IsQueuedForOSR(JSFunction* function) {
+  DCHECK(!IsOptimizerThread());
   for (int i = 0; i < osr_buffer_capacity_; i++) {
     OptimizedCompileJob* current = osr_buffer_[i];
     if (current != NULL && *current->info()->closure() == function) {
       return !current->IsWaitingForInstall();
     }
   }
   return false;
 }
 
 
-void OptimizingCompileDispatcher::AddToOsrBuffer(OptimizedCompileJob* job) {
+void OptimizingCompilerThread::AddToOsrBuffer(OptimizedCompileJob* job) {
+  DCHECK(!IsOptimizerThread());
   // Find the next slot that is empty or has a stale job.
   OptimizedCompileJob* stale = NULL;
   while (true) {
     stale = osr_buffer_[osr_buffer_cursor_];
     if (stale == NULL || stale->IsWaitingForInstall()) break;
     osr_buffer_cursor_ = (osr_buffer_cursor_ + 1) % osr_buffer_capacity_;
   }
 
   // Add to found slot and dispose the evicted job.
   if (stale != NULL) {
     DCHECK(stale->IsWaitingForInstall());
     CompilationInfo* info = stale->info();
     if (FLAG_trace_osr) {
       PrintF("[COSR - Discarded ");
       info->closure()->PrintName();
       PrintF(", AST id %d]\n", info->osr_ast_id().ToInt());
     }
     DisposeOptimizedCompileJob(stale, false);
   }
   osr_buffer_[osr_buffer_cursor_] = job;
   osr_buffer_cursor_ = (osr_buffer_cursor_ + 1) % osr_buffer_capacity_;
 }
+
+
+#ifdef DEBUG
+bool OptimizingCompilerThread::IsOptimizerThread(Isolate* isolate) {
+  return isolate->concurrent_recompilation_enabled() &&
+         isolate->optimizing_compiler_thread()->IsOptimizerThread();
 }
-}  // namespace v8::internal
+
+
+bool OptimizingCompilerThread::IsOptimizerThread() {
+  base::LockGuard<base::Mutex> lock_guard(&thread_id_mutex_);
+  return ThreadId::Current().ToInteger() == thread_id_;
+}
+#endif
+
+
+} }  // namespace v8::internal