Chromium Code Reviews

Side by Side Diff: src/optimizing-compiler-thread.cc

Issue 24543002: Rename "OptimizingCompiler" to the more suitable "RecompileJob". (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 2 months ago
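For orientation, here is a minimal standalone sketch (not V8 code) of the producer/consumer flow this file implements around the renamed RecompileJob type: the main thread queues jobs, the optimizing-compiler thread dequeues and compiles them, and finished jobs land on a mutex-guarded output queue until the main thread installs them. Standard-library primitives stand in for V8's UnboundQueue, Semaphore, and LockGuard, and all names (RecompileJobSketch, RecompileQueueSketch, NextFinishedJob) are illustrative, not part of the patch.

// Sketch only: std:: primitives replace V8's queue/semaphore types.
#include <condition_variable>
#include <mutex>
#include <queue>

struct RecompileJobSketch {
  void OptimizeGraph() { /* graph optimization would run here */ }
};

class RecompileQueueSketch {
 public:
  // Main thread, analogous to QueueForOptimization(): enqueue and signal.
  void QueueForOptimization(RecompileJobSketch* job) {
    std::lock_guard<std::mutex> lock(input_mutex_);
    input_.push(job);
    input_cv_.notify_one();  // plays the role of input_queue_semaphore_.Signal()
  }

  // Compiler thread, analogous to CompileNext(): dequeue, compile, publish.
  void CompileNext() {
    RecompileJobSketch* job = nullptr;
    {
      std::unique_lock<std::mutex> lock(input_mutex_);
      input_cv_.wait(lock, [this] { return !input_.empty(); });
      job = input_.front();
      input_.pop();
    }
    job->OptimizeGraph();
    std::lock_guard<std::mutex> lock(output_mutex_);  // mirrors queue_mutex_
    output_.push(job);
  }

  // Main thread, analogous to InstallOptimizedFunctions(): drain the output.
  RecompileJobSketch* NextFinishedJob() {
    std::lock_guard<std::mutex> lock(output_mutex_);
    if (output_.empty()) return nullptr;
    RecompileJobSketch* job = output_.front();
    output_.pop();
    return job;
  }

 private:
  std::queue<RecompileJobSketch*> input_;
  std::queue<RecompileJobSketch*> output_;
  std::mutex input_mutex_;
  std::mutex output_mutex_;
  std::condition_variable input_cv_;
};
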
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 76 matching lines...)
87 CompileNext(); 87 CompileNext();
88 88
89 if (FLAG_trace_concurrent_recompilation) { 89 if (FLAG_trace_concurrent_recompilation) {
90 time_spent_compiling_ += compiling_timer.Elapsed(); 90 time_spent_compiling_ += compiling_timer.Elapsed();
91 } 91 }
92 } 92 }
93 } 93 }
94 94
95 95
96 void OptimizingCompilerThread::CompileNext() { 96 void OptimizingCompilerThread::CompileNext() {
97 OptimizingCompiler* optimizing_compiler = NULL; 97 RecompileJob* job = NULL;
98 bool result = input_queue_.Dequeue(&optimizing_compiler); 98 bool result = input_queue_.Dequeue(&job);
99 USE(result); 99 USE(result);
100 ASSERT(result); 100 ASSERT(result);
101 Barrier_AtomicIncrement(&queue_length_, static_cast<Atomic32>(-1)); 101 Barrier_AtomicIncrement(&queue_length_, static_cast<Atomic32>(-1));
102 102
103 // The function may have already been optimized by OSR. Simply continue. 103 // The function may have already been optimized by OSR. Simply continue.
104 OptimizingCompiler::Status status = optimizing_compiler->OptimizeGraph(); 104 RecompileJob::Status status = job->OptimizeGraph();
105 USE(status); // Prevent an unused-variable error in release mode. 105 USE(status); // Prevent an unused-variable error in release mode.
106 ASSERT(status != OptimizingCompiler::FAILED); 106 ASSERT(status != RecompileJob::FAILED);
107 107
108 // The function may have already been optimized by OSR. Simply continue. 108 // The function may have already been optimized by OSR. Simply continue.
109 // Use a mutex to make sure that functions marked for install 109 // Use a mutex to make sure that functions marked for install
110 // are always also queued. 110 // are always also queued.
111 LockGuard<Mutex> access_queue(&queue_mutex_); 111 LockGuard<Mutex> access_queue(&queue_mutex_);
112 output_queue_.Enqueue(optimizing_compiler); 112 output_queue_.Enqueue(job);
113 isolate_->stack_guard()->RequestInstallCode(); 113 isolate_->stack_guard()->RequestInstallCode();
114 } 114 }
115 115
116 116
117 static void DisposeOptimizingCompiler(OptimizingCompiler* compiler, 117 static void DisposeRecompileJob(RecompileJob* compiler,
118 bool restore_function_code) { 118 bool restore_function_code) {
119 CompilationInfo* info = compiler->info(); 119 CompilationInfo* info = compiler->info();
120 if (restore_function_code) { 120 if (restore_function_code) {
121 Handle<JSFunction> function = info->closure(); 121 Handle<JSFunction> function = info->closure();
122 function->ReplaceCode(function->shared()->code()); 122 function->ReplaceCode(function->shared()->code());
123 } 123 }
124 delete info; 124 delete info;
125 } 125 }
126 126
127 127
128 void OptimizingCompilerThread::FlushInputQueue(bool restore_function_code) { 128 void OptimizingCompilerThread::FlushInputQueue(bool restore_function_code) {
129 OptimizingCompiler* optimizing_compiler; 129 RecompileJob* job;
130 // The optimizing compiler is allocated in the CompilationInfo's zone. 130 // The optimizing compiler is allocated in the CompilationInfo's zone.
mvstanton 2013/09/25 09:22:25 Update this comment to: // "The recompile job"
131 while (input_queue_.Dequeue(&optimizing_compiler)) { 131 while (input_queue_.Dequeue(&job)) {
132 // This should not block, since we have one signal on the input queue 132 // This should not block, since we have one signal on the input queue
133 // semaphore corresponding to each element in the input queue. 133 // semaphore corresponding to each element in the input queue.
134 input_queue_semaphore_.Wait(); 134 input_queue_semaphore_.Wait();
135 if (optimizing_compiler->info()->osr_ast_id().IsNone()) { 135 if (job->info()->osr_ast_id().IsNone()) {
136 // OSR jobs are dealt with separately. 136 // OSR jobs are dealt with separately.
137 DisposeOptimizingCompiler(optimizing_compiler, restore_function_code); 137 DisposeRecompileJob(job, restore_function_code);
138 } 138 }
139 } 139 }
140 Release_Store(&queue_length_, static_cast<AtomicWord>(0)); 140 Release_Store(&queue_length_, static_cast<AtomicWord>(0));
141 } 141 }
142 142
143 143
144 void OptimizingCompilerThread::FlushOutputQueue(bool restore_function_code) { 144 void OptimizingCompilerThread::FlushOutputQueue(bool restore_function_code) {
145 OptimizingCompiler* optimizing_compiler; 145 RecompileJob* job;
146 // The optimizing compiler is allocated in the CompilationInfo's zone. 146 // The optimizing compiler is allocated in the CompilationInfo's zone.
mvstanton 2013/09/25 09:22:25 Address this comment too.
147 while (true) { 147 while (true) {
148 { LockGuard<Mutex> access_queue(&queue_mutex_); 148 { LockGuard<Mutex> access_queue(&queue_mutex_);
149 if (!output_queue_.Dequeue(&optimizing_compiler)) break; 149 if (!output_queue_.Dequeue(&job)) break;
150 } 150 }
151 if (optimizing_compiler->info()->osr_ast_id().IsNone()) { 151 if (job->info()->osr_ast_id().IsNone()) {
152 // OSR jobs are dealt with separately. 152 // OSR jobs are dealt with separately.
153 DisposeOptimizingCompiler(optimizing_compiler, restore_function_code); 153 DisposeRecompileJob(job, restore_function_code);
154 } 154 }
155 } 155 }
156 } 156 }
157 157
158 158
159 void OptimizingCompilerThread::FlushOsrBuffer(bool restore_function_code) { 159 void OptimizingCompilerThread::FlushOsrBuffer(bool restore_function_code) {
160 OptimizingCompiler* optimizing_compiler; 160 RecompileJob* job;
161 for (int i = 0; i < osr_buffer_size_; i++) { 161 for (int i = 0; i < osr_buffer_size_; i++) {
162 optimizing_compiler = osr_buffer_[i]; 162 job = osr_buffer_[i];
163 if (optimizing_compiler != NULL) { 163 if (job != NULL) DisposeRecompileJob(job, restore_function_code);
164 DisposeOptimizingCompiler(optimizing_compiler, restore_function_code);
165 }
166 } 164 }
167 osr_cursor_ = 0; 165 osr_cursor_ = 0;
168 } 166 }
169 167
170 168
171 void OptimizingCompilerThread::Flush() { 169 void OptimizingCompilerThread::Flush() {
172 ASSERT(!IsOptimizerThread()); 170 ASSERT(!IsOptimizerThread());
173 Release_Store(&stop_thread_, static_cast<AtomicWord>(FLUSH)); 171 Release_Store(&stop_thread_, static_cast<AtomicWord>(FLUSH));
174 input_queue_semaphore_.Signal(); 172 input_queue_semaphore_.Signal();
175 stop_semaphore_.Wait(); 173 stop_semaphore_.Wait();
(...skipping 32 matching lines...)
208 } 206 }
209 207
210 Join(); 208 Join();
211 } 209 }
212 210
213 211
214 void OptimizingCompilerThread::InstallOptimizedFunctions() { 212 void OptimizingCompilerThread::InstallOptimizedFunctions() {
215 ASSERT(!IsOptimizerThread()); 213 ASSERT(!IsOptimizerThread());
216 HandleScope handle_scope(isolate_); 214 HandleScope handle_scope(isolate_);
217 215
218 OptimizingCompiler* compiler; 216 RecompileJob* job;
219 while (true) { 217 while (true) {
220 { LockGuard<Mutex> access_queue(&queue_mutex_); 218 { LockGuard<Mutex> access_queue(&queue_mutex_);
221 if (!output_queue_.Dequeue(&compiler)) break; 219 if (!output_queue_.Dequeue(&job)) break;
222 } 220 }
223 CompilationInfo* info = compiler->info(); 221 CompilationInfo* info = job->info();
224 if (info->osr_ast_id().IsNone()) { 222 if (info->osr_ast_id().IsNone()) {
225 Compiler::InstallOptimizedCode(compiler); 223 Compiler::InstallOptimizedCode(job);
226 } else { 224 } else {
227 if (FLAG_trace_osr) { 225 if (FLAG_trace_osr) {
228 PrintF("[COSR - "); 226 PrintF("[COSR - ");
229 info->closure()->PrintName(); 227 info->closure()->PrintName();
230 PrintF(" is ready for install and entry at AST id %d]\n", 228 PrintF(" is ready for install and entry at AST id %d]\n",
231 info->osr_ast_id().ToInt()); 229 info->osr_ast_id().ToInt());
232 } 230 }
233 compiler->WaitForInstall(); 231 job->WaitForInstall();
234 BackEdgeTable::RemoveStackCheck(info); 232 BackEdgeTable::RemoveStackCheck(info);
235 } 233 }
236 } 234 }
237 } 235 }
238 236
239 237
240 void OptimizingCompilerThread::QueueForOptimization( 238 void OptimizingCompilerThread::QueueForOptimization(RecompileJob* job) {
241 OptimizingCompiler* optimizing_compiler) {
242 ASSERT(IsQueueAvailable()); 239 ASSERT(IsQueueAvailable());
243 ASSERT(!IsOptimizerThread()); 240 ASSERT(!IsOptimizerThread());
244 Barrier_AtomicIncrement(&queue_length_, static_cast<Atomic32>(1)); 241 Barrier_AtomicIncrement(&queue_length_, static_cast<Atomic32>(1));
245 CompilationInfo* info = optimizing_compiler->info(); 242 CompilationInfo* info = job->info();
246 if (info->osr_ast_id().IsNone()) { 243 if (info->osr_ast_id().IsNone()) {
247 info->closure()->MarkInRecompileQueue(); 244 info->closure()->MarkInRecompileQueue();
248 } else { 245 } else {
249 if (FLAG_trace_concurrent_recompilation) { 246 if (FLAG_trace_concurrent_recompilation) {
250 PrintF(" ** Queueing "); 247 PrintF(" ** Queueing ");
251 info->closure()->PrintName(); 248 info->closure()->PrintName();
252 PrintF(" for concurrent on-stack replacement.\n"); 249 PrintF(" for concurrent on-stack replacement.\n");
253 } 250 }
254 AddToOsrBuffer(optimizing_compiler); 251 AddToOsrBuffer(job);
255 osr_attempts_++; 252 osr_attempts_++;
256 BackEdgeTable::AddStackCheck(info); 253 BackEdgeTable::AddStackCheck(info);
257 } 254 }
258 input_queue_.Enqueue(optimizing_compiler); 255 input_queue_.Enqueue(job);
259 input_queue_semaphore_.Signal(); 256 input_queue_semaphore_.Signal();
260 } 257 }
261 258
262 259
263 OptimizingCompiler* OptimizingCompilerThread::FindReadyOSRCandidate( 260 RecompileJob* OptimizingCompilerThread::FindReadyOSRCandidate(
264 Handle<JSFunction> function, uint32_t osr_pc_offset) { 261 Handle<JSFunction> function, uint32_t osr_pc_offset) {
265 ASSERT(!IsOptimizerThread()); 262 ASSERT(!IsOptimizerThread());
266 OptimizingCompiler* result = NULL; 263 RecompileJob* result = NULL;
267 for (int i = 0; i < osr_buffer_size_; i++) { 264 for (int i = 0; i < osr_buffer_size_; i++) {
268 result = osr_buffer_[i]; 265 result = osr_buffer_[i];
269 if (result == NULL) continue; 266 if (result == NULL) continue;
270 if (result->IsWaitingForInstall() && 267 if (result->IsWaitingForInstall() &&
271 result->info()->HasSameOsrEntry(function, osr_pc_offset)) { 268 result->info()->HasSameOsrEntry(function, osr_pc_offset)) {
272 osr_hits_++; 269 osr_hits_++;
273 osr_buffer_[i] = NULL; 270 osr_buffer_[i] = NULL;
274 return result; 271 return result;
275 } 272 }
276 } 273 }
(...skipping 19 matching lines...)
296 for (int i = 0; i < osr_buffer_size_; i++) { 293 for (int i = 0; i < osr_buffer_size_; i++) {
297 if (osr_buffer_[i] != NULL && 294 if (osr_buffer_[i] != NULL &&
298 *osr_buffer_[i]->info()->closure() == function) { 295 *osr_buffer_[i]->info()->closure() == function) {
299 return !osr_buffer_[i]->IsWaitingForInstall(); 296 return !osr_buffer_[i]->IsWaitingForInstall();
300 } 297 }
301 } 298 }
302 return false; 299 return false;
303 } 300 }
304 301
305 302
306 void OptimizingCompilerThread::AddToOsrBuffer(OptimizingCompiler* compiler) { 303 void OptimizingCompilerThread::AddToOsrBuffer(RecompileJob* job) {
307 ASSERT(!IsOptimizerThread()); 304 ASSERT(!IsOptimizerThread());
308 // Store into next empty slot or replace next stale OSR job that's waiting 305 // Store into next empty slot or replace next stale OSR job that's waiting
309 // in vain. Dispose in the latter case. 306 // in vain. Dispose in the latter case.
310 OptimizingCompiler* stale; 307 RecompileJob* stale;
311 while (true) { 308 while (true) {
312 stale = osr_buffer_[osr_cursor_]; 309 stale = osr_buffer_[osr_cursor_];
313 if (stale == NULL) break; 310 if (stale == NULL) break;
314 if (stale->IsWaitingForInstall()) { 311 if (stale->IsWaitingForInstall()) {
315 CompilationInfo* info = stale->info(); 312 CompilationInfo* info = stale->info();
316 if (FLAG_trace_osr) { 313 if (FLAG_trace_osr) {
317 PrintF("[COSR - Discarded "); 314 PrintF("[COSR - Discarded ");
318 info->closure()->PrintName(); 315 info->closure()->PrintName();
319 PrintF(", AST id %d]\n", info->osr_ast_id().ToInt()); 316 PrintF(", AST id %d]\n", info->osr_ast_id().ToInt());
320 } 317 }
321 BackEdgeTable::RemoveStackCheck(info); 318 BackEdgeTable::RemoveStackCheck(info);
322 DisposeOptimizingCompiler(stale, false); 319 DisposeRecompileJob(stale, false);
323 break; 320 break;
324 } 321 }
325 AdvanceOsrCursor(); 322 AdvanceOsrCursor();
326 } 323 }
327 324
328 osr_buffer_[osr_cursor_] = compiler; 325 osr_buffer_[osr_cursor_] = job;
329 AdvanceOsrCursor(); 326 AdvanceOsrCursor();
330 } 327 }
331 328
332 329
333 #ifdef DEBUG 330 #ifdef DEBUG
334 bool OptimizingCompilerThread::IsOptimizerThread() { 331 bool OptimizingCompilerThread::IsOptimizerThread() {
335 if (!FLAG_concurrent_recompilation) return false; 332 if (!FLAG_concurrent_recompilation) return false;
336 LockGuard<Mutex> lock_guard(&thread_id_mutex_); 333 LockGuard<Mutex> lock_guard(&thread_id_mutex_);
337 return ThreadId::Current().ToInteger() == thread_id_; 334 return ThreadId::Current().ToInteger() == thread_id_;
338 } 335 }
339 #endif 336 #endif
340 337
341 338
342 } } // namespace v8::internal 339 } } // namespace v8::internal
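The OSR handling in this patch (AddToOsrBuffer / FindReadyOSRCandidate) amounts to a small circular buffer: a new OSR job takes the next empty slot or evicts the next stale job that is merely waiting for install, and the main thread later pulls out a finished job that matches the requesting function. A standalone sketch under those assumptions follows; OsrJobSketch, OsrBufferSketch, the fixed buffer size, and the integer function_id (standing in for the closure/osr_pc_offset match) are all illustrative, not V8 code.

// Sketch only: illustrative stand-in for the osr_buffer_ ring above.
#include <array>
#include <cstddef>

struct OsrJobSketch {
  int function_id;           // stands in for the closure/osr_pc_offset match
  bool waiting_for_install;  // stands in for IsWaitingForInstall()
};

class OsrBufferSketch {
 public:
  // Like AddToOsrBuffer(): take the next empty slot, or evict the next stale
  // job that is only waiting for install (the real code disposes it first).
  void Add(OsrJobSketch* job) {
    // The real code keeps scanning; here the scan is bounded to one pass.
    for (std::size_t i = 0; i < buffer_.size(); ++i) {
      OsrJobSketch* slot = buffer_[cursor_];
      if (slot == nullptr || slot->waiting_for_install) break;
      Advance();  // skip jobs that are still compiling
    }
    buffer_[cursor_] = job;
    Advance();
  }

  // Like FindReadyOSRCandidate(): return a finished, matching job and clear
  // its slot; nullptr if nothing is ready.
  OsrJobSketch* FindReady(int function_id) {
    for (std::size_t i = 0; i < buffer_.size(); ++i) {
      OsrJobSketch* job = buffer_[i];
      if (job != nullptr && job->waiting_for_install &&
          job->function_id == function_id) {
        buffer_[i] = nullptr;
        return job;
      }
    }
    return nullptr;
  }

 private:
  void Advance() { cursor_ = (cursor_ + 1) % buffer_.size(); }

  std::array<OsrJobSketch*, 4> buffer_ = {};  // size is arbitrary here
  std::size_t cursor_ = 0;
};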