OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 42 matching lines...) | |
53 | 53 |
54 while (true) { | 54 while (true) { |
55 input_queue_semaphore_->Wait(); | 55 input_queue_semaphore_->Wait(); |
56 Logger::TimerEventScope timer( | 56 Logger::TimerEventScope timer( |
57 isolate_, Logger::TimerEventScope::v8_recompile_parallel); | 57 isolate_, Logger::TimerEventScope::v8_recompile_parallel); |
58 | 58 |
59 if (FLAG_parallel_recompilation_delay != 0) { | 59 if (FLAG_parallel_recompilation_delay != 0) { |
60 OS::Sleep(FLAG_parallel_recompilation_delay); | 60 OS::Sleep(FLAG_parallel_recompilation_delay); |
61 } | 61 } |
62 | 62 |
63 if (Acquire_Load(&stop_thread_)) { | 63 switch (static_cast<StopFlag>(Acquire_Load(&stop_thread_))) { |
64 stop_semaphore_->Signal(); | 64 case CONTINUE: |
65 if (FLAG_trace_parallel_recompilation) { | 65 break; |
66 time_spent_total_ = OS::Ticks() - epoch; | 66 case STOP: |
67 } | 67 if (FLAG_trace_parallel_recompilation) { |
68 return; | 68 time_spent_total_ = OS::Ticks() - epoch; |
69 } | |
70 stop_semaphore_->Signal(); | |
71 return; | |
72 case FLUSH: | |
73 // The main thread is blocked, waiting for the stop semaphore. | |
74 { AllowHandleDereference allow_handle_dereference; | |
75 FlushInputQueue(true); | |
76 FlushOutputQueue(true); | |
Hannes Payer (out of office)
2013/08/07 09:02:11
You could flush the output queue in the main thread.
Yang
2013/08/07 09:07:38
Done.
| |
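A rough sketch of the suggestion above, not the follow-up patch itself: once the compiler thread has acknowledged the FLUSH handshake it has drained the input queue and will produce no further results, so the main thread could drain the output queue itself (and the FLUSH case on the compiler thread would drop its FlushOutputQueue(true) call):

void OptimizingCompilerThread::Flush() {
  ASSERT(!IsOptimizerThread());
  Release_Store(&stop_thread_, static_cast<AtomicWord>(FLUSH));
  input_queue_semaphore_->Signal();
  // Wait until the compiler thread has emptied the input queue and reset
  // stop_thread_ back to CONTINUE.
  stop_semaphore_->Wait();
  // Hypothetical: drain the output queue on the main thread instead of on
  // the compiler thread.
  FlushOutputQueue(true);
}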
77 } | |
78 NoBarrier_Store(&queue_length_, static_cast<AtomicWord>(0)); | |
79 NoBarrier_Store(&stop_thread_, static_cast<AtomicWord>(CONTINUE)); | |
80 stop_semaphore_->Signal(); | |
81 // Return to start of consumer loop. | |
82 continue; | |
69 } | 83 } |
70 | 84 |
71 int64_t compiling_start = 0; | 85 int64_t compiling_start = 0; |
72 if (FLAG_trace_parallel_recompilation) compiling_start = OS::Ticks(); | 86 if (FLAG_trace_parallel_recompilation) compiling_start = OS::Ticks(); |
73 | 87 |
74 CompileNext(); | 88 CompileNext(); |
75 | 89 |
76 if (FLAG_trace_parallel_recompilation) { | 90 if (FLAG_trace_parallel_recompilation) { |
77 time_spent_compiling_ += OS::Ticks() - compiling_start; | 91 time_spent_compiling_ += OS::Ticks() - compiling_start; |
78 } | 92 } |
79 } | 93 } |
80 } | 94 } |
81 | 95 |
82 | 96 |
83 void OptimizingCompilerThread::CompileNext() { | 97 void OptimizingCompilerThread::CompileNext() { |
84 OptimizingCompiler* optimizing_compiler = NULL; | 98 OptimizingCompiler* optimizing_compiler = NULL; |
85 input_queue_.Dequeue(&optimizing_compiler); | 99 bool result = input_queue_.Dequeue(&optimizing_compiler); |
100 USE(result); | |
101 ASSERT(result); | |
86 Barrier_AtomicIncrement(&queue_length_, static_cast<Atomic32>(-1)); | 102 Barrier_AtomicIncrement(&queue_length_, static_cast<Atomic32>(-1)); |
87 | 103 |
88 // The function may have already been optimized by OSR. Simply continue. | 104 // The function may have already been optimized by OSR. Simply continue. |
89 OptimizingCompiler::Status status = optimizing_compiler->OptimizeGraph(); | 105 OptimizingCompiler::Status status = optimizing_compiler->OptimizeGraph(); |
90 USE(status); // Prevent an unused-variable error in release mode. | 106 USE(status); // Prevent an unused-variable error in release mode. |
91 ASSERT(status != OptimizingCompiler::FAILED); | 107 ASSERT(status != OptimizingCompiler::FAILED); |
92 | 108 |
93 // The function may have already been optimized by OSR. Simply continue. | 109 // The function may have already been optimized by OSR. Simply continue. |
94 // Use a mutex to make sure that functions marked for install | 110 // Use a mutex to make sure that functions marked for install |
95 // are always also queued. | 111 // are always also queued. |
96 ScopedLock mark_and_queue(install_mutex_); | 112 ScopedLock mark_and_queue(install_mutex_); |
97 { Heap::RelocationLock relocation_lock(isolate_->heap()); | 113 { Heap::RelocationLock relocation_lock(isolate_->heap()); |
98 AllowHandleDereference ahd; | 114 AllowHandleDereference ahd; |
99 optimizing_compiler->info()->closure()->MarkForInstallingRecompiledCode(); | 115 optimizing_compiler->info()->closure()->MarkForInstallingRecompiledCode(); |
100 } | 116 } |
101 output_queue_.Enqueue(optimizing_compiler); | 117 output_queue_.Enqueue(optimizing_compiler); |
102 } | 118 } |
103 | 119 |
104 | 120 |
121 void OptimizingCompilerThread::FlushInputQueue(bool restore_function_code) { | |
122 OptimizingCompiler* optimizing_compiler; | |
123 // The optimizing compiler is allocated in the CompilationInfo's zone. | |
124 while (input_queue_.Dequeue(&optimizing_compiler)) { | |
125 // This should not block, since we have one signal on the input queue | |
126 // semaphore corresponding to each element in the input queue. | |
127 input_queue_semaphore_->Wait(); | |
128 CompilationInfo* info = optimizing_compiler->info(); | |
129 if (restore_function_code) { | |
130 Handle<JSFunction> function = info->closure(); | |
131 function->ReplaceCode(function->shared()->code()); | |
132 } | |
133 delete info; | |
134 } | |
135 } | |
136 | |
137 | |
138 void OptimizingCompilerThread::FlushOutputQueue(bool restore_function_code) { | |
139 OptimizingCompiler* optimizing_compiler; | |
140 // The optimizing compiler is allocated in the CompilationInfo's zone. | |
141 while (output_queue_.Dequeue(&optimizing_compiler)) { | |
142 CompilationInfo* info = optimizing_compiler->info(); | |
143 if (restore_function_code) { | |
144 AllowHandleDereference allow_handle_dereference; | |
145 Handle<JSFunction> function = info->closure(); | |
146 function->ReplaceCode(function->shared()->code()); | |
147 } | |
148 delete info; | |
149 } | |
150 } | |
151 | |
152 | |
153 void OptimizingCompilerThread::Flush() { | |
154 ASSERT(!IsOptimizerThread()); | |
155 Release_Store(&stop_thread_, static_cast<AtomicWord>(FLUSH)); | |
156 input_queue_semaphore_->Signal(); | |
157 stop_semaphore_->Wait(); | |
158 } | |
159 | |
160 | |
105 void OptimizingCompilerThread::Stop() { | 161 void OptimizingCompilerThread::Stop() { |
106 ASSERT(!IsOptimizerThread()); | 162 ASSERT(!IsOptimizerThread()); |
107 Release_Store(&stop_thread_, static_cast<AtomicWord>(true)); | 163 Release_Store(&stop_thread_, static_cast<AtomicWord>(STOP)); |
108 input_queue_semaphore_->Signal(); | 164 input_queue_semaphore_->Signal(); |
109 stop_semaphore_->Wait(); | 165 stop_semaphore_->Wait(); |
110 | 166 |
111 if (FLAG_parallel_recompilation_delay != 0) { | 167 if (FLAG_parallel_recompilation_delay != 0) { |
112 // Barrier when loading queue length is not necessary since the write | 168 // Barrier when loading queue length is not necessary since the write |
113 // happens in CompileNext on the same thread. | 169 // happens in CompileNext on the same thread. |
Hannes Payer (out of office)
2013/08/07 09:02:11
Can we add a comment that we need that code just f…
Yang
2013/08/07 09:07:38
Done.
| |
114 while (NoBarrier_Load(&queue_length_) > 0) CompileNext(); | 170 while (NoBarrier_Load(&queue_length_) > 0) CompileNext(); |
115 InstallOptimizedFunctions(); | 171 InstallOptimizedFunctions(); |
116 } else { | 172 } else { |
117 OptimizingCompiler* optimizing_compiler; | 173 FlushInputQueue(false); |
118 // The optimizing compiler is allocated in the CompilationInfo's zone. | 174 FlushOutputQueue(false); |
Hannes Payer (out of office)
2013/08/07 09:02:11
Can we call flush in the main thread before calling…
Yang
2013/08/07 09:07:38
Stop() is only called when the isolate is being torn down.
| |
119 while (input_queue_.Dequeue(&optimizing_compiler)) { | |
120 delete optimizing_compiler->info(); | |
121 } | |
122 while (output_queue_.Dequeue(&optimizing_compiler)) { | |
123 delete optimizing_compiler->info(); | |
124 } | |
125 } | 175 } |
126 | 176 |
127 if (FLAG_trace_parallel_recompilation) { | 177 if (FLAG_trace_parallel_recompilation) { |
128 double compile_time = static_cast<double>(time_spent_compiling_); | 178 double compile_time = static_cast<double>(time_spent_compiling_); |
129 double total_time = static_cast<double>(time_spent_total_); | 179 double total_time = static_cast<double>(time_spent_total_); |
130 double percentage = (compile_time * 100) / total_time; | 180 double percentage = (compile_time * 100) / total_time; |
131 PrintF(" ** Compiler thread did %.2f%% useful work\n", percentage); | 181 PrintF(" ** Compiler thread did %.2f%% useful work\n", percentage); |
132 } | 182 } |
133 | 183 |
134 Join(); | 184 Join(); |
(...skipping 28 matching lines...) | |
163 #ifdef DEBUG | 213 #ifdef DEBUG |
164 bool OptimizingCompilerThread::IsOptimizerThread() { | 214 bool OptimizingCompilerThread::IsOptimizerThread() { |
165 if (!FLAG_parallel_recompilation) return false; | 215 if (!FLAG_parallel_recompilation) return false; |
166 ScopedLock lock(thread_id_mutex_); | 216 ScopedLock lock(thread_id_mutex_); |
167 return ThreadId::Current().ToInteger() == thread_id_; | 217 return ThreadId::Current().ToInteger() == thread_id_; |
168 } | 218 } |
169 #endif | 219 #endif |
170 | 220 |
171 | 221 |
172 } } // namespace v8::internal | 222 } } // namespace v8::internal |
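The switch in the compiler thread's loop casts stop_thread_ to a StopFlag, but the matching header change is outside this hunk. Presumably the declaration in optimizing-compiler-thread.h looks roughly like this (an assumption; the header is not shown here):

// Presumed addition to optimizing-compiler-thread.h (not part of this hunk):
// tri-state flag inspected by the compiler thread once per loop iteration.
enum StopFlag { CONTINUE, STOP, FLUSH };

Flush() and Stop() then share the same handshake: store the flag with Release_Store, signal input_queue_semaphore_ so the compiler thread wakes up even when the input queue is empty, and block on stop_semaphore_ until the compiler thread acknowledges by signaling it.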