Chromium Code Reviews

Unified Diff: src/optimizing-compiler-thread.cc

Issue 24543002: Rename "OptimizingCompiler" to the more suitable "RecompileJob". (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 2 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 76 matching lines...)
     CompileNext();

     if (FLAG_trace_concurrent_recompilation) {
       time_spent_compiling_ += compiling_timer.Elapsed();
     }
   }
 }


 void OptimizingCompilerThread::CompileNext() {
-  OptimizingCompiler* optimizing_compiler = NULL;
-  bool result = input_queue_.Dequeue(&optimizing_compiler);
+  RecompileJob* job = NULL;
+  bool result = input_queue_.Dequeue(&job);
   USE(result);
   ASSERT(result);
   Barrier_AtomicIncrement(&queue_length_, static_cast<Atomic32>(-1));

   // The function may have already been optimized by OSR. Simply continue.
-  OptimizingCompiler::Status status = optimizing_compiler->OptimizeGraph();
+  RecompileJob::Status status = job->OptimizeGraph();
   USE(status);  // Prevent an unused-variable error in release mode.
-  ASSERT(status != OptimizingCompiler::FAILED);
+  ASSERT(status != RecompileJob::FAILED);

   // The function may have already been optimized by OSR. Simply continue.
   // Use a mutex to make sure that functions marked for install
   // are always also queued.
   LockGuard<Mutex> access_queue(&queue_mutex_);
-  output_queue_.Enqueue(optimizing_compiler);
+  output_queue_.Enqueue(job);
   isolate_->stack_guard()->RequestInstallCode();
 }


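Note: the "mutex to make sure that functions marked for install are always also queued" comment describes a publish-then-notify hand-off: the worker enqueues the finished job and raises the install request under the same mutex the main thread uses to drain the output queue. Below is a minimal standalone sketch of that ordering using C++ standard-library primitives; Handoff, Publish, InstallAll and install_requested_ are illustrative names, not V8 API, and the real code uses the isolate's stack guard rather than an atomic flag.

    // Standalone sketch, not V8 code: the worker publishes a finished job and
    // raises the install request while holding the queue mutex, so an observed
    // request always has a queued job behind it.
    #include <atomic>
    #include <deque>
    #include <mutex>

    struct Job {};  // stand-in for RecompileJob

    class Handoff {
     public:
      // Worker thread: hand a finished job to the main thread.
      void Publish(Job* job) {
        std::lock_guard<std::mutex> lock(queue_mutex_);
        output_queue_.push_back(job);
        install_requested_.store(true, std::memory_order_release);  // ~RequestInstallCode()
      }

      // Main thread: run when it notices the install request.
      void InstallAll() {
        while (true) {
          Job* job = nullptr;
          {
            std::lock_guard<std::mutex> lock(queue_mutex_);
            if (output_queue_.empty()) break;
            job = output_queue_.front();
            output_queue_.pop_front();
          }
          Install(job);
        }
        install_requested_.store(false, std::memory_order_release);
      }

     private:
      static void Install(Job* /*job*/) { /* install the optimized code */ }

      std::mutex queue_mutex_;
      std::deque<Job*> output_queue_;
      std::atomic<bool> install_requested_{false};
    };
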
-static void DisposeOptimizingCompiler(OptimizingCompiler* compiler,
-                                      bool restore_function_code) {
+static void DisposeRecompileJob(RecompileJob* compiler,
+                                bool restore_function_code) {
+  // The recompile job is allocated in the CompilationInfo's zone.
   CompilationInfo* info = compiler->info();
   if (restore_function_code) {
     Handle<JSFunction> function = info->closure();
     function->ReplaceCode(function->shared()->code());
   }
   delete info;
 }


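Note: the new comment makes the ownership explicit: the RecompileJob is allocated in the CompilationInfo's zone, so deleting the info releases the job's storage as well, which is why the helper never deletes the job directly. A rough standalone analogue of that arena-style ownership follows; Arena, Info and Job are illustrative stand-ins, not V8's Zone, CompilationInfo and RecompileJob.

    // Standalone sketch, not V8 code: the job lives in memory owned by the info,
    // so disposing the info also frees the job. Assumes Job is trivially
    // destructible, since the placement-new'd object gets no destructor call.
    #include <cstddef>
    #include <memory>
    #include <new>
    #include <vector>

    class Arena {  // stand-in for V8's Zone
     public:
      void* Allocate(std::size_t size) {
        blocks_.push_back(std::make_unique<char[]>(size));
        return blocks_.back().get();
      }
     private:
      std::vector<std::unique_ptr<char[]>> blocks_;
    };

    struct Job { int status = 0; };  // stand-in for RecompileJob

    struct Info {                    // stand-in for CompilationInfo
      Arena arena;
      Job* job = nullptr;
    };

    int main() {
      Info* info = new Info();
      info->job = new (info->arena.Allocate(sizeof(Job))) Job();
      // ... the job is used by the compiler thread ...
      delete info;  // frees the arena blocks and, with them, the job's storage
    }
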
 void OptimizingCompilerThread::FlushInputQueue(bool restore_function_code) {
-  OptimizingCompiler* optimizing_compiler;
-  // The optimizing compiler is allocated in the CompilationInfo's zone.
-  while (input_queue_.Dequeue(&optimizing_compiler)) {
+  RecompileJob* job;
+  while (input_queue_.Dequeue(&job)) {
     // This should not block, since we have one signal on the input queue
     // semaphore corresponding to each element in the input queue.
     input_queue_semaphore_.Wait();
-    if (optimizing_compiler->info()->osr_ast_id().IsNone()) {
+    if (job->info()->osr_ast_id().IsNone()) {
       // OSR jobs are dealt with separately.
-      DisposeOptimizingCompiler(optimizing_compiler, restore_function_code);
+      DisposeRecompileJob(job, restore_function_code);
     }
   }
   Release_Store(&queue_length_, static_cast<AtomicWord>(0));
 }


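Note: the "should not block" comment rests on an invariant: every element enqueued on the input queue is paired with exactly one Signal() on the input-queue semaphore, so dequeuing an element first and then waiting consumes a signal that is already available. A minimal standalone sketch of that invariant with C++20 std::counting_semaphore; SignalledQueue and its methods are illustrative, and V8's input queue is a lock-free UnboundQueue rather than a mutex-guarded deque.

    // Standalone sketch, not V8 code: one semaphore signal per queued element,
    // so "pop the element, then consume its signal" can never block.
    #include <deque>
    #include <mutex>
    #include <optional>
    #include <semaphore>

    template <typename T>
    class SignalledQueue {
     public:
      // Producer: enqueue, then release exactly one signal for this element.
      void Enqueue(T value) {
        {
          std::lock_guard<std::mutex> lock(mutex_);
          queue_.push_back(std::move(value));
        }
        semaphore_.release();
      }

      // Consumer loop: block until an element exists, then take it.
      T WaitAndDequeue() {
        semaphore_.acquire();
        std::lock_guard<std::mutex> lock(mutex_);
        T value = std::move(queue_.front());
        queue_.pop_front();
        return value;
      }

      // Flush path (mirrors FlushInputQueue): dequeue first, then consume the
      // matching signal. The element was already queued, so its signal has
      // already been released and acquire() returns immediately.
      std::optional<T> TryDequeueAndConsumeSignal() {
        std::optional<T> value;
        {
          std::lock_guard<std::mutex> lock(mutex_);
          if (queue_.empty()) return std::nullopt;
          value = std::move(queue_.front());
          queue_.pop_front();
        }
        semaphore_.acquire();
        return value;
      }

     private:
      std::mutex mutex_;
      std::deque<T> queue_;
      std::counting_semaphore<> semaphore_{0};
    };
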
 void OptimizingCompilerThread::FlushOutputQueue(bool restore_function_code) {
-  OptimizingCompiler* optimizing_compiler;
-  // The optimizing compiler is allocated in the CompilationInfo's zone.
+  RecompileJob* job;
   while (true) {
     { LockGuard<Mutex> access_queue(&queue_mutex_);
-      if (!output_queue_.Dequeue(&optimizing_compiler)) break;
+      if (!output_queue_.Dequeue(&job)) break;
     }
-    if (optimizing_compiler->info()->osr_ast_id().IsNone()) {
+    if (job->info()->osr_ast_id().IsNone()) {
       // OSR jobs are dealt with separately.
-      DisposeOptimizingCompiler(optimizing_compiler, restore_function_code);
+      DisposeRecompileJob(job, restore_function_code);
     }
   }
 }


 void OptimizingCompilerThread::FlushOsrBuffer(bool restore_function_code) {
-  OptimizingCompiler* optimizing_compiler;
+  RecompileJob* job;
   for (int i = 0; i < osr_buffer_size_; i++) {
-    optimizing_compiler = osr_buffer_[i];
-    if (optimizing_compiler != NULL) {
-      DisposeOptimizingCompiler(optimizing_compiler, restore_function_code);
-    }
+    job = osr_buffer_[i];
+    if (job != NULL) DisposeRecompileJob(job, restore_function_code);
   }
   osr_cursor_ = 0;
 }


 void OptimizingCompilerThread::Flush() {
   ASSERT(!IsOptimizerThread());
   Release_Store(&stop_thread_, static_cast<AtomicWord>(FLUSH));
   input_queue_semaphore_.Signal();
   stop_semaphore_.Wait();
(...skipping 32 matching lines...)
   }

   Join();
 }


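Note: Flush() above is one half of a main-thread/worker rendezvous: store a command flag, wake the worker through the input-queue semaphore, then wait on the stop semaphore until the worker acknowledges. Below is a rough standalone sketch with C++20 semaphores; the worker side is an assumption about the branch handled in the lines elided above, and RequestFlush/WorkerIteration are illustrative names, not V8 API.

    // Standalone sketch, not V8 code: a flag plus two semaphores implementing
    // the "request, wake, wait for acknowledgment" rendezvous used by Flush().
    #include <atomic>
    #include <semaphore>

    enum StopFlag { CONTINUE, STOP, FLUSH };

    std::atomic<StopFlag> stop_thread{CONTINUE};
    std::counting_semaphore<> input_queue_semaphore{0};
    std::binary_semaphore stop_semaphore{0};

    // Main thread: ask the worker to discard queued jobs and wait until done.
    void RequestFlush() {
      stop_thread.store(FLUSH, std::memory_order_release);
      input_queue_semaphore.release();  // make sure the worker wakes up
      stop_semaphore.acquire();         // block until the worker acknowledges
    }

    // Worker loop body (sketch of the relevant branch only).
    void WorkerIteration() {
      input_queue_semaphore.acquire();
      if (stop_thread.load(std::memory_order_acquire) == FLUSH) {
        // ... dispose everything still sitting in the input queue ...
        stop_thread.store(CONTINUE, std::memory_order_release);
        stop_semaphore.release();       // unblock the main thread
        return;
      }
      // ... otherwise compile the next job ...
    }
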
 void OptimizingCompilerThread::InstallOptimizedFunctions() {
   ASSERT(!IsOptimizerThread());
   HandleScope handle_scope(isolate_);

-  OptimizingCompiler* compiler;
+  RecompileJob* job;
   while (true) {
     { LockGuard<Mutex> access_queue(&queue_mutex_);
-      if (!output_queue_.Dequeue(&compiler)) break;
+      if (!output_queue_.Dequeue(&job)) break;
     }
-    CompilationInfo* info = compiler->info();
+    CompilationInfo* info = job->info();
     if (info->osr_ast_id().IsNone()) {
-      Compiler::InstallOptimizedCode(compiler);
+      Compiler::InstallOptimizedCode(job);
     } else {
       if (FLAG_trace_osr) {
         PrintF("[COSR - ");
         info->closure()->PrintName();
         PrintF(" is ready for install and entry at AST id %d]\n",
                info->osr_ast_id().ToInt());
       }
-      compiler->WaitForInstall();
+      job->WaitForInstall();
       BackEdgeTable::RemoveStackCheck(info);
     }
   }
 }


-void OptimizingCompilerThread::QueueForOptimization(
-    OptimizingCompiler* optimizing_compiler) {
+void OptimizingCompilerThread::QueueForOptimization(RecompileJob* job) {
   ASSERT(IsQueueAvailable());
   ASSERT(!IsOptimizerThread());
   Barrier_AtomicIncrement(&queue_length_, static_cast<Atomic32>(1));
-  CompilationInfo* info = optimizing_compiler->info();
+  CompilationInfo* info = job->info();
   if (info->osr_ast_id().IsNone()) {
     info->closure()->MarkInRecompileQueue();
   } else {
     if (FLAG_trace_concurrent_recompilation) {
       PrintF("  ** Queueing ");
       info->closure()->PrintName();
       PrintF(" for concurrent on-stack replacement.\n");
     }
-    AddToOsrBuffer(optimizing_compiler);
+    AddToOsrBuffer(job);
     osr_attempts_++;
     BackEdgeTable::AddStackCheck(info);
   }
-  input_queue_.Enqueue(optimizing_compiler);
+  input_queue_.Enqueue(job);
   input_queue_semaphore_.Signal();
 }


-OptimizingCompiler* OptimizingCompilerThread::FindReadyOSRCandidate(
+RecompileJob* OptimizingCompilerThread::FindReadyOSRCandidate(
     Handle<JSFunction> function, uint32_t osr_pc_offset) {
   ASSERT(!IsOptimizerThread());
-  OptimizingCompiler* result = NULL;
+  RecompileJob* result = NULL;
   for (int i = 0; i < osr_buffer_size_; i++) {
     result = osr_buffer_[i];
     if (result == NULL) continue;
     if (result->IsWaitingForInstall() &&
         result->info()->HasSameOsrEntry(function, osr_pc_offset)) {
       osr_hits_++;
       osr_buffer_[i] = NULL;
       return result;
     }
   }
(...skipping 19 matching lines...)
   for (int i = 0; i < osr_buffer_size_; i++) {
     if (osr_buffer_[i] != NULL &&
         *osr_buffer_[i]->info()->closure() == function) {
       return !osr_buffer_[i]->IsWaitingForInstall();
     }
   }
   return false;
 }


-void OptimizingCompilerThread::AddToOsrBuffer(OptimizingCompiler* compiler) {
+void OptimizingCompilerThread::AddToOsrBuffer(RecompileJob* job) {
   ASSERT(!IsOptimizerThread());
   // Store into next empty slot or replace next stale OSR job that's waiting
   // in vain. Dispose in the latter case.
-  OptimizingCompiler* stale;
+  RecompileJob* stale;
   while (true) {
     stale = osr_buffer_[osr_cursor_];
     if (stale == NULL) break;
     if (stale->IsWaitingForInstall()) {
       CompilationInfo* info = stale->info();
       if (FLAG_trace_osr) {
         PrintF("[COSR - Discarded ");
         info->closure()->PrintName();
         PrintF(", AST id %d]\n", info->osr_ast_id().ToInt());
       }
       BackEdgeTable::RemoveStackCheck(info);
-      DisposeOptimizingCompiler(stale, false);
+      DisposeRecompileJob(stale, false);
       break;
     }
     AdvanceOsrCursor();
   }

-  osr_buffer_[osr_cursor_] = compiler;
+  osr_buffer_[osr_cursor_] = job;
   AdvanceOsrCursor();
 }


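Note: AddToOsrBuffer treats the OSR buffer as a small ring: it advances the cursor past slots whose jobs are still compiling and stops at the first empty slot or at a stale job that finished but was never entered, which gets discarded and replaced. A minimal standalone sketch of that eviction policy follows; OsrBuffer and OsrJob are illustrative, the real code also removes the stale job's back-edge stack check (omitted here), and the precondition is assumed to hold as the caller guarantees in V8.

    // Standalone sketch, not V8 code: cyclic insert that reuses the next empty
    // slot or evicts the next "stale" job (finished but never entered via OSR).
    #include <array>
    #include <cstddef>

    struct OsrJob {
      bool waiting_for_install = false;  // true once compilation has finished
    };

    class OsrBuffer {
     public:
      // Precondition (guaranteed by the caller in the real code): at least one
      // slot is empty or holds a job that is already waiting for install.
      void Add(OsrJob* job) {
        while (buffer_[cursor_] != nullptr &&
               !buffer_[cursor_]->waiting_for_install) {
          Advance();  // skip jobs that are still compiling
        }
        if (buffer_[cursor_] != nullptr) {
          delete buffer_[cursor_];  // evict the stale job
        }
        buffer_[cursor_] = job;
        Advance();
      }

     private:
      void Advance() { cursor_ = (cursor_ + 1) % buffer_.size(); }

      std::array<OsrJob*, 4> buffer_{};  // illustrative fixed capacity
      std::size_t cursor_ = 0;
    };
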
 #ifdef DEBUG
 bool OptimizingCompilerThread::IsOptimizerThread() {
   if (!FLAG_concurrent_recompilation) return false;
   LockGuard<Mutex> lock_guard(&thread_id_mutex_);
   return ThreadId::Current().ToInteger() == thread_id_;
 }
 #endif

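Note: this debug-only check backs the ASSERT(IsOptimizerThread()) / ASSERT(!IsOptimizerThread()) guards used throughout the file. A rough standalone equivalent with std::thread::id; WorkerThreadChecker is an illustrative name, not V8 API.

    // Standalone sketch, not V8 code: record the worker's thread id once, then
    // let assertions ask whether the current thread is that worker.
    #include <cassert>
    #include <mutex>
    #include <thread>

    class WorkerThreadChecker {
     public:
      // Called once from the worker thread when it starts running.
      void BindToCurrentThread() {
        std::lock_guard<std::mutex> lock(mutex_);
        worker_id_ = std::this_thread::get_id();
        bound_ = true;
      }

      bool IsWorkerThread() const {
        std::lock_guard<std::mutex> lock(mutex_);
        return bound_ && std::this_thread::get_id() == worker_id_;
      }

     private:
      mutable std::mutex mutex_;
      std::thread::id worker_id_;
      bool bound_ = false;
    };

    // Usage in a main-thread-only method, mirroring ASSERT(!IsOptimizerThread()):
    //   assert(!checker.IsWorkerThread());
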

 } } // namespace v8::internal