Chromium Code Reviews

Diff: src/runtime-profiler.cc

Issue 8700008: New approach to Crankshaft decision-making (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: fix function self-optimization; address first comment Created 9 years ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 28 matching lines...)
 #include "mark-compact.h"
 #include "platform.h"
 #include "scopeinfo.h"

 namespace v8 {
 namespace internal {


 // Optimization sampler constants.
 static const int kSamplerFrameCount = 2;
-static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };

-static const int kSamplerTicksBetweenThresholdAdjustment = 32;
+static const int kProfilerTicksBeforeOptimization = 2;
-
-static const int kSamplerThresholdInit = 3;
-static const int kSamplerThresholdMin = 1;
-static const int kSamplerThresholdDelta = 1;
-
-static const int kSamplerThresholdSizeFactorInit = 3;
-
-static const int kSizeLimit = 1500;
fschneider 2011/12/12 11:31:21 The reason for looking at the size in some form wa…
Jakob Kummerow 2011/12/14 08:42:31 I see the reasoning and agree in principle, but wi…
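The thread above (truncated in this capture) debates whether function size should still factor into the optimization decision. For illustration only, a minimal sketch of a size-aware gate: the helper name and the named constant below are hypothetical, while the 500-instruction bound mirrors the hard-coded check the patch adds in OptimizeNow() further down.

```cpp
// Sketch only: gate optimistic optimization on generated code size.
// kMaxSizeForOptimisticOpt and the helper are hypothetical, not part of
// this patch; the patch itself writes instruction_size() < 500 inline.
static const int kMaxSizeForOptimisticOpt = 500;

static bool SmallEnoughForOptimisticOpt(JSFunction* function) {
  // Small functions are cheap to recompile and gain little from waiting
  // for more type feedback, so optimizing them early is a reasonable bet.
  return function->shared()->code()->instruction_size() <
         kMaxSizeForOptimisticOpt;
}
```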


 Atomic32 RuntimeProfiler::state_ = 0;
 // TODO(isolates): Create the semaphore lazily and clean it up when no
 // longer required.
 Semaphore* RuntimeProfiler::semaphore_ = OS::CreateSemaphore(0);

 #ifdef DEBUG
 bool RuntimeProfiler::has_been_globally_setup_ = false;
 #endif
 bool RuntimeProfiler::enabled_ = false;


-RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
-    : isolate_(isolate),
-      sampler_threshold_(kSamplerThresholdInit),
-      sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit),
-      sampler_ticks_until_threshold_adjustment_(
-          kSamplerTicksBetweenThresholdAdjustment),
-      sampler_window_position_(0) {
-  ClearSampleBuffer();
-}
+RuntimeProfiler::RuntimeProfiler(Isolate* isolate) : isolate_(isolate) {}


 void RuntimeProfiler::GlobalSetup() {
   ASSERT(!has_been_globally_setup_);
   enabled_ = V8::UseCrankshaft() && FLAG_opt;
 #ifdef DEBUG
   has_been_globally_setup_ = true;
 #endif
 }


-void RuntimeProfiler::Optimize(JSFunction* function) {
+void RuntimeProfiler::Optimize(JSFunction* function, int reason) {
   ASSERT(function->IsOptimizable());
   if (FLAG_trace_opt) {
     PrintF("[marking ");
     function->PrintName();
     PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address()));
-    PrintF(" for recompilation");
+    PrintF(" for recompilation, reason: %d", reason);
fschneider 2011/12/13 11:53:01 Print reason in English instead of just a number.
Jakob Kummerow 2011/12/14 08:42:31 Done.
     PrintF("]\n");
   }

   // The next call to the function will trigger optimization.
   function->MarkForLazyRecompilation();
 }


 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
   // See AlwaysFullCompiler (in compiler.cc) comment on why we need
(...skipping 31 matching lines...)
     Code* replacement_code =
         isolate_->builtins()->builtin(Builtins::kOnStackReplacement);
     Code* unoptimized_code = shared->code();
     Deoptimizer::PatchStackCheckCode(unoptimized_code,
                                      stack_check_code,
                                      replacement_code);
   }
 }


-void RuntimeProfiler::ClearSampleBuffer() {
-  memset(sampler_window_, 0, sizeof(sampler_window_));
-  memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_));
-}
-
-
-int RuntimeProfiler::LookupSample(JSFunction* function) {
-  int weight = 0;
-  for (int i = 0; i < kSamplerWindowSize; i++) {
-    Object* sample = sampler_window_[i];
-    if (sample != NULL) {
-      if (function == sample) {
-        weight += sampler_window_weight_[i];
-      }
-    }
-  }
-  return weight;
-}
-
-
-void RuntimeProfiler::AddSample(JSFunction* function, int weight) {
-  ASSERT(IsPowerOf2(kSamplerWindowSize));
-  sampler_window_[sampler_window_position_] = function;
-  sampler_window_weight_[sampler_window_position_] = weight;
-  sampler_window_position_ = (sampler_window_position_ + 1) &
-      (kSamplerWindowSize - 1);
-}
-
-
 void RuntimeProfiler::OptimizeNow() {
   HandleScope scope(isolate_);

   // Run through the JavaScript frames and collect them. If we already
   // have a sample of the function, we mark it for optimizations
   // (eagerly or lazily).
-  JSFunction* samples[kSamplerFrameCount];
-  int sample_count = 0;
   int frame_count = 0;
   for (JavaScriptFrameIterator it(isolate_);
        frame_count++ < kSamplerFrameCount && !it.done();
        it.Advance()) {
     JavaScriptFrame* frame = it.frame();
     JSFunction* function = JSFunction::cast(frame->function());

-    // Adjust threshold each time we have processed
-    // a certain number of ticks.
-    if (sampler_ticks_until_threshold_adjustment_ > 0) {
-      sampler_ticks_until_threshold_adjustment_--;
-      if (sampler_ticks_until_threshold_adjustment_ <= 0) {
-        // If the threshold is not already at the minimum
-        // modify and reset the ticks until next adjustment.
-        if (sampler_threshold_ > kSamplerThresholdMin) {
-          sampler_threshold_ -= kSamplerThresholdDelta;
-          sampler_ticks_until_threshold_adjustment_ =
-              kSamplerTicksBetweenThresholdAdjustment;
-        }
-      }
-    }
-
     if (function->IsMarkedForLazyRecompilation()) {
       Code* unoptimized = function->shared()->code();
       int nesting = unoptimized->allow_osr_at_loop_nesting_level();
       if (nesting == 0) AttemptOnStackReplacement(function);
       int new_nesting = Min(nesting + 1, Code::kMaxLoopNestingMarker);
       unoptimized->set_allow_osr_at_loop_nesting_level(new_nesting);
     }

     // Do not record non-optimizable functions.
     if (!function->IsOptimizable()) continue;
-    samples[sample_count++] = function;

-    int function_size = function->shared()->SourceSize();
-    int threshold_size_factor = (function_size > kSizeLimit)
-        ? sampler_threshold_size_factor_
-        : 1;
+    int ticks = function->profiler_ticks();

-    int threshold = sampler_threshold_ * threshold_size_factor;
-
-    if (LookupSample(function) >= threshold) {
-      Optimize(function);
+    if (ticks >= kProfilerTicksBeforeOptimization - 1) {
+      // If this particular function hasn't had any ICs patched for enough
+      // ticks, optimize it now.
+      Optimize(function, 1);
fschneider 2011/12/13 11:53:01 Name instead of an integer constant?
Jakob Kummerow 2011/12/14 08:42:31 Done.
+    } else if (!any_ic_changed_ &&
+               function->shared()->code()->instruction_size() < 500) {
+      // If no IC was patched since the last tick and this function is very
+      // small, optimistically optimize it now.
+      Optimize(function, 2);
+    } else if (!code_generated_ &&
+               !any_ic_changed_ &&
+               total_code_generated_ > 0 &&
+               total_code_generated_ < 2000) {
+      // If no code was generated and no IC was patched since the last tick,
+      // but a little code has already been generated since last Reset(),
+      // then type info might already be stable and we can optimize now.
+      Optimize(function, 3);
+    } else {
+      function->set_profiler_ticks(ticks + 1);
     }
   }
-
-  // Add the collected functions as samples. It's important not to do
-  // this as part of collecting them because this will interfere with
-  // the sample lookup in case of recursive functions.
-  for (int i = 0; i < sample_count; i++) {
-    AddSample(samples[i], kSamplerFrameWeight[i]);
-  }
+  any_ic_changed_ = false;
+  code_generated_ = false;
 }
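The two review comments above ask for the optimization reason to be a named constant and to be printed in English rather than as a bare integer; both are marked "Done.", so a later patch set (not shown here) addresses them. A minimal sketch of one way to do it, with names that are illustrative assumptions rather than the actual follow-up code:

```cpp
// Sketch only: named optimization reasons and an English trace message.
// The enum values and helper below are illustrative, not from the patch.
enum OptimizationReason {
  kHotAndStable,         // enough profiler ticks without any IC changes
  kSmallFunction,        // tiny function, optimize optimistically
  kLittleCodeGenerated   // little codegen since Reset(), types likely stable
};

static const char* ReasonToString(OptimizationReason reason) {
  switch (reason) {
    case kHotAndStable:        return "hot and stable";
    case kSmallFunction:       return "small function";
    case kLittleCodeGenerated: return "little code generated";
  }
  UNREACHABLE();
  return NULL;
}

// Optimize() could then trace the decision as:
//   PrintF(" for recompilation, reason: %s", ReasonToString(reason));
```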


 void RuntimeProfiler::NotifyTick() {
   isolate_->stack_guard()->RequestRuntimeProfilerTick();
 }


 void RuntimeProfiler::Setup() {
   ASSERT(has_been_globally_setup_);
-  ClearSampleBuffer();
   // If the ticker hasn't already started, make sure to do so to get
   // the ticks for the runtime profiler.
   if (IsEnabled()) isolate_->logger()->EnsureTickerStarted();
 }


 void RuntimeProfiler::Reset() {
-  sampler_threshold_ = kSamplerThresholdInit;
-  sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
-  sampler_ticks_until_threshold_adjustment_ =
-      kSamplerTicksBetweenThresholdAdjustment;
+  isolate_->logger()->ResetTickerInterval();
+  total_code_generated_ = 0;
 }


 void RuntimeProfiler::TearDown() {
   // Nothing to do.
 }


-int RuntimeProfiler::SamplerWindowSize() {
-  return kSamplerWindowSize;
-}
-
-
-// Update the pointers in the sampler window after a GC.
-void RuntimeProfiler::UpdateSamplesAfterScavenge() {
-  for (int i = 0; i < kSamplerWindowSize; i++) {
-    Object* function = sampler_window_[i];
-    if (function != NULL && isolate_->heap()->InNewSpace(function)) {
-      MapWord map_word = HeapObject::cast(function)->map_word();
-      if (map_word.IsForwardingAddress()) {
-        sampler_window_[i] = map_word.ToForwardingAddress();
-      } else {
-        sampler_window_[i] = NULL;
-      }
-    }
-  }
-}
-
-
 void RuntimeProfiler::HandleWakeUp(Isolate* isolate) {
   // The profiler thread must still be waiting.
   ASSERT(NoBarrier_Load(&state_) >= 0);
   // In IsolateEnteredJS we have already incremented the counter and
   // undid the decrement done by the profiler thread. Increment again
   // to get the right count of active isolates.
   NoBarrier_AtomicIncrement(&state_, 1);
   semaphore_->Signal();
 }

(...skipping 27 matching lines...)
   }
   thread->Join();
   // The profiler thread is now stopped. Undo the increment in case it
   // was not waiting.
   if (new_state != 0) {
     NoBarrier_AtomicIncrement(&state_, -1);
   }
 }


-void RuntimeProfiler::RemoveDeadSamples() {
-  for (int i = 0; i < kSamplerWindowSize; i++) {
-    Object* function = sampler_window_[i];
-    if (function != NULL &&
-        !Marking::MarkBitFrom(HeapObject::cast(function)).Get()) {
-      sampler_window_[i] = NULL;
-    }
-  }
-}
-
-
-void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) {
-  for (int i = 0; i < kSamplerWindowSize; i++) {
-    visitor->VisitPointer(&sampler_window_[i]);
-  }
-}
-
-
 bool RuntimeProfilerRateLimiter::SuspendIfNecessary() {
   if (!RuntimeProfiler::IsSomeIsolateInJS()) {
     return RuntimeProfiler::WaitForSomeIsolateToEnterJS();
   }
   return false;
 }


 } }  // namespace v8::internal
