Chromium Code Reviews

Unified Diff: src/runtime-profiler.cc

Issue 8700008: New approach to Crankshaft decision-making (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: address comments (created 9 years ago)
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 28 matching lines...)
 #include "mark-compact.h"
 #include "platform.h"
 #include "scopeinfo.h"
 
 namespace v8 {
 namespace internal {
 
 
 // Optimization sampler constants.
 static const int kSamplerFrameCount = 2;
-static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };
 
-static const int kSamplerTicksBetweenThresholdAdjustment = 32;
+// Number of times a function has to be seen on the stack before it is
+// optimized.
+static const int kProfilerTicksBeforeOptimization = 2;
 
-static const int kSamplerThresholdInit = 3;
-static const int kSamplerThresholdMin = 1;
-static const int kSamplerThresholdDelta = 1;
-
-static const int kSamplerThresholdSizeFactorInit = 3;
-
-static const int kSizeLimit = 1500;
+// Maximum size in bytes of generated code for a function to be optimized
+// the very first time it is seen on the stack.
+static const int kSizeLimitEarlyOpt = 500;
 
 
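One subtlety in the numbers: OptimizeNow() below tests ticks >= kProfilerTicksBeforeOptimization - 1, and profiler_ticks starts at zero, so the default of 2 marks a function on the second tick at which it is found on the stack (unless one of the optimistic branches fires first). A worked trace of the counter, assuming the function is too large for the early-opt branch:

// With kProfilerTicksBeforeOptimization == 2:
//   tick 1: profiler_ticks == 0; 0 >= 1 fails  -> set_profiler_ticks(1)
//   tick 2: profiler_ticks == 1; 1 >= 1 holds  -> Optimize("hot and stable")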
 Atomic32 RuntimeProfiler::state_ = 0;
 // TODO(isolates): Create the semaphore lazily and clean it up when no
 // longer required.
 Semaphore* RuntimeProfiler::semaphore_ = OS::CreateSemaphore(0);
 
 #ifdef DEBUG
 bool RuntimeProfiler::has_been_globally_setup_ = false;
 #endif
 bool RuntimeProfiler::enabled_ = false;
 
 
-RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
-    : isolate_(isolate),
-      sampler_threshold_(kSamplerThresholdInit),
-      sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit),
-      sampler_ticks_until_threshold_adjustment_(
-          kSamplerTicksBetweenThresholdAdjustment),
-      sampler_window_position_(0) {
-  ClearSampleBuffer();
-}
+RuntimeProfiler::RuntimeProfiler(Isolate* isolate) : isolate_(isolate) {}
 
 
 void RuntimeProfiler::GlobalSetup() {
   ASSERT(!has_been_globally_setup_);
   enabled_ = V8::UseCrankshaft() && FLAG_opt;
 #ifdef DEBUG
   has_been_globally_setup_ = true;
 #endif
 }
 
 
-void RuntimeProfiler::Optimize(JSFunction* function) {
+void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
   ASSERT(function->IsOptimizable());
   if (FLAG_trace_opt) {
     PrintF("[marking ");
     function->PrintName();
     PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address()));
-    PrintF(" for recompilation");
+    PrintF(" for recompilation, reason: %s", reason);
     PrintF("]\n");
   }
 
   // The next call to the function will trigger optimization.
   function->MarkForLazyRecompilation();
 }
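For reference, with the added reason argument a --trace-opt line emitted by this function looks like the following (the function name and address here are made up, not from the patch):

[marking foo 0x2b8d9e40 for recompilation, reason: small function]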
 
 
 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
   // See AlwaysFullCompiler (in compiler.cc) comment on why we need
(...skipping 31 matching lines...)
     Code* replacement_code =
         isolate_->builtins()->builtin(Builtins::kOnStackReplacement);
     Code* unoptimized_code = shared->code();
     Deoptimizer::PatchStackCheckCode(unoptimized_code,
                                      stack_check_code,
                                      replacement_code);
   }
 }
 
 
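The OSR half of OptimizeNow() below is unchanged in substance by this patch: each tick that finds a function already marked for recompilation widens the band of loops whose stack checks may trigger on-stack replacement. A standalone sketch of that escalation; the helper name and the concrete value 6 for Code::kMaxLoopNestingMarker are assumptions, not part of this patch:

// Standalone sketch of the per-tick OSR escalation; in V8 this state lives
// on the unoptimized Code object.
static const int kMaxLoopNestingMarker = 6;  // assumed value of the V8 constant

struct UnoptimizedCode {
  int allow_osr_at_loop_nesting_level;  // 0 means "no OSR allowed yet"
};

// Called once per profiler tick for a function already marked for lazy
// recompilation. Returns true when an immediate OSR attempt is made.
bool TickOsrEscalation(UnoptimizedCode* code) {
  int nesting = code->allow_osr_at_loop_nesting_level;
  bool attempt_now = (nesting == 0);
  // Min(nesting + 1, kMaxLoopNestingMarker), as in OptimizeNow().
  int new_nesting =
      nesting + 1 < kMaxLoopNestingMarker ? nesting + 1 : kMaxLoopNestingMarker;
  code->allow_osr_at_loop_nesting_level = new_nesting;
  return attempt_now;
}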
-void RuntimeProfiler::ClearSampleBuffer() {
-  memset(sampler_window_, 0, sizeof(sampler_window_));
-  memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_));
-}
-
-
-int RuntimeProfiler::LookupSample(JSFunction* function) {
-  int weight = 0;
-  for (int i = 0; i < kSamplerWindowSize; i++) {
-    Object* sample = sampler_window_[i];
-    if (sample != NULL) {
-      if (function == sample) {
-        weight += sampler_window_weight_[i];
-      }
-    }
-  }
-  return weight;
-}
-
-
-void RuntimeProfiler::AddSample(JSFunction* function, int weight) {
-  ASSERT(IsPowerOf2(kSamplerWindowSize));
-  sampler_window_[sampler_window_position_] = function;
-  sampler_window_weight_[sampler_window_position_] = weight;
-  sampler_window_position_ = (sampler_window_position_ + 1) &
-                             (kSamplerWindowSize - 1);
-}
-
-
 void RuntimeProfiler::OptimizeNow() {
   HandleScope scope(isolate_);
 
   // Run through the JavaScript frames and collect them. If we already
   // have a sample of the function, we mark it for optimizations
   // (eagerly or lazily).
-  JSFunction* samples[kSamplerFrameCount];
-  int sample_count = 0;
   int frame_count = 0;
   for (JavaScriptFrameIterator it(isolate_);
        frame_count++ < kSamplerFrameCount && !it.done();
        it.Advance()) {
     JavaScriptFrame* frame = it.frame();
     JSFunction* function = JSFunction::cast(frame->function());
 
-    // Adjust threshold each time we have processed
-    // a certain number of ticks.
-    if (sampler_ticks_until_threshold_adjustment_ > 0) {
-      sampler_ticks_until_threshold_adjustment_--;
-      if (sampler_ticks_until_threshold_adjustment_ <= 0) {
-        // If the threshold is not already at the minimum
-        // modify and reset the ticks until next adjustment.
-        if (sampler_threshold_ > kSamplerThresholdMin) {
-          sampler_threshold_ -= kSamplerThresholdDelta;
-          sampler_ticks_until_threshold_adjustment_ =
-              kSamplerTicksBetweenThresholdAdjustment;
-        }
-      }
-    }
-
     if (function->IsMarkedForLazyRecompilation()) {
       Code* unoptimized = function->shared()->code();
       int nesting = unoptimized->allow_osr_at_loop_nesting_level();
       if (nesting == 0) AttemptOnStackReplacement(function);
       int new_nesting = Min(nesting + 1, Code::kMaxLoopNestingMarker);
       unoptimized->set_allow_osr_at_loop_nesting_level(new_nesting);
     }
 
     // Do not record non-optimizable functions.
     if (!function->IsOptimizable()) continue;
-    samples[sample_count++] = function;
 
-    int function_size = function->shared()->SourceSize();
-    int threshold_size_factor = (function_size > kSizeLimit)
-        ? sampler_threshold_size_factor_
-        : 1;
+    int ticks = function->shared()->profiler_ticks();
 
-    int threshold = sampler_threshold_ * threshold_size_factor;
-
-    if (LookupSample(function) >= threshold) {
-      Optimize(function);
+    if (ticks >= kProfilerTicksBeforeOptimization - 1) {
+      // If this particular function hasn't had any ICs patched for enough
+      // ticks, optimize it now.
+      Optimize(function, "hot and stable");
+    } else if (!any_ic_changed_ &&
+        function->shared()->code()->instruction_size() < kSizeLimitEarlyOpt) {
+      // If no IC was patched since the last tick and this function is very
+      // small, optimistically optimize it now.
+      Optimize(function, "small function");
+    } else if (!code_generated_ &&
+        !any_ic_changed_ &&
+        total_code_generated_ > 0 &&
+        total_code_generated_ < 2000) {
+      // If no code was generated and no IC was patched since the last tick,
+      // but a little code has already been generated since last Reset(),
+      // then type info might already be stable and we can optimize now.
+      Optimize(function, "stable on startup");
+    } else {
+      function->shared()->set_profiler_ticks(ticks + 1);
     }
   }
-
-  // Add the collected functions as samples. It's important not to do
-  // this as part of collecting them because this will interfere with
-  // the sample lookup in case of recursive functions.
-  for (int i = 0; i < sample_count; i++) {
-    AddSample(samples[i], kSamplerFrameWeight[i]);
-  }
+  any_ic_changed_ = false;
+  code_generated_ = false;
 }
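Putting the pieces together, the new decision can be distilled into a small standalone program. The FunctionState struct and Decide() helper are hypothetical stand-ins for the SharedFunctionInfo fields and RuntimeProfiler flags used above (and the per-tick reset of those flags is omitted); this is a sketch, not V8 code:

#include <cstdio>

static const int kProfilerTicksBeforeOptimization = 2;
static const int kSizeLimitEarlyOpt = 500;

// Hypothetical stand-ins for the SharedFunctionInfo fields used above.
struct FunctionState {
  int profiler_ticks;     // ticks survived without being optimized
  int instruction_size;   // size of the unoptimized code, in bytes
};

// Mirrors the three-way decision in RuntimeProfiler::OptimizeNow();
// returns the trace reason, or NULL if the function only accrues a tick.
const char* Decide(FunctionState* f,
                   bool any_ic_changed,
                   bool code_generated,
                   int total_code_generated) {
  if (f->profiler_ticks >= kProfilerTicksBeforeOptimization - 1) {
    return "hot and stable";
  }
  if (!any_ic_changed && f->instruction_size < kSizeLimitEarlyOpt) {
    return "small function";
  }
  if (!code_generated && !any_ic_changed &&
      total_code_generated > 0 && total_code_generated < 2000) {
    return "stable on startup";
  }
  f->profiler_ticks++;
  return NULL;
}

int main() {
  FunctionState big = { 0, 4000 };   // large function: must wait for ticks
  FunctionState tiny = { 0, 120 };   // small enough for optimistic early opt

  const char* r1 = Decide(&tiny, false, true, 5000);
  printf("tiny: %s\n", r1 ? r1 : "tick");   // -> small function

  const char* r2 = Decide(&big, false, true, 5000);
  printf("big:  %s\n", r2 ? r2 : "tick");   // -> tick (profiler_ticks is now 1)

  const char* r3 = Decide(&big, false, true, 5000);
  printf("big:  %s\n", r3 ? r3 : "tick");   // -> hot and stable
  return 0;
}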
 
 
 void RuntimeProfiler::NotifyTick() {
   isolate_->stack_guard()->RequestRuntimeProfilerTick();
 }
 
 
 void RuntimeProfiler::Setup() {
   ASSERT(has_been_globally_setup_);
-  ClearSampleBuffer();
   // If the ticker hasn't already started, make sure to do so to get
   // the ticks for the runtime profiler.
   if (IsEnabled()) isolate_->logger()->EnsureTickerStarted();
 }
 
 
 void RuntimeProfiler::Reset() {
-  sampler_threshold_ = kSamplerThresholdInit;
-  sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
-  sampler_ticks_until_threshold_adjustment_ =
-      kSamplerTicksBetweenThresholdAdjustment;
+  isolate_->logger()->ResetTickerInterval();
+  total_code_generated_ = 0;
 }
 
 
 void RuntimeProfiler::TearDown() {
   // Nothing to do.
 }
 
 
-int RuntimeProfiler::SamplerWindowSize() {
-  return kSamplerWindowSize;
-}
-
-
-// Update the pointers in the sampler window after a GC.
-void RuntimeProfiler::UpdateSamplesAfterScavenge() {
-  for (int i = 0; i < kSamplerWindowSize; i++) {
-    Object* function = sampler_window_[i];
-    if (function != NULL && isolate_->heap()->InNewSpace(function)) {
-      MapWord map_word = HeapObject::cast(function)->map_word();
-      if (map_word.IsForwardingAddress()) {
-        sampler_window_[i] = map_word.ToForwardingAddress();
-      } else {
-        sampler_window_[i] = NULL;
-      }
-    }
-  }
-}
-
-
 void RuntimeProfiler::HandleWakeUp(Isolate* isolate) {
   // The profiler thread must still be waiting.
   ASSERT(NoBarrier_Load(&state_) >= 0);
   // In IsolateEnteredJS we have already incremented the counter and
   // undid the decrement done by the profiler thread. Increment again
   // to get the right count of active isolates.
   NoBarrier_AtomicIncrement(&state_, 1);
   semaphore_->Signal();
 }
 
(...skipping 27 matching lines...)
   }
   thread->Join();
   // The profiler thread is now stopped. Undo the increment in case it
   // was not waiting.
   if (new_state != 0) {
     NoBarrier_AtomicIncrement(&state_, -1);
   }
 }
 
 
-void RuntimeProfiler::RemoveDeadSamples() {
-  for (int i = 0; i < kSamplerWindowSize; i++) {
-    Object* function = sampler_window_[i];
-    if (function != NULL &&
-        !Marking::MarkBitFrom(HeapObject::cast(function)).Get()) {
-      sampler_window_[i] = NULL;
-    }
-  }
-}
-
-
-void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) {
-  for (int i = 0; i < kSamplerWindowSize; i++) {
-    visitor->VisitPointer(&sampler_window_[i]);
-  }
-}
-
-
 bool RuntimeProfilerRateLimiter::SuspendIfNecessary() {
   if (!RuntimeProfiler::IsSomeIsolateInJS()) {
     return RuntimeProfiler::WaitForSomeIsolateToEnterJS();
   }
   return false;
 }
 
 
 } }  // namespace v8::internal
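The rate-limiter code left untouched by this patch (state_, semaphore_, SuspendIfNecessary) implements "sleep while no isolate runs JS". A loose standalone analogue using std::atomic and a condition variable; this is a simplification for readers, not V8's exact negative-counter handshake:

#include <atomic>
#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

// Loose model: count the isolates currently executing JS; the profiler
// thread parks itself whenever that count is zero.
static std::atomic<int> isolates_in_js(0);
static std::mutex mu;
static std::condition_variable cv;

void IsolateEnteredJS() {
  {
    std::lock_guard<std::mutex> lock(mu);  // guard against a missed wakeup
    isolates_in_js.fetch_add(1);
  }
  cv.notify_one();                         // wake a parked profiler thread
}

void IsolateExitedJS() { isolates_in_js.fetch_sub(1); }

// Analogue of RuntimeProfilerRateLimiter::SuspendIfNecessary().
void SuspendIfNecessary() {
  std::unique_lock<std::mutex> lock(mu);
  cv.wait(lock, [] { return isolates_in_js.load() > 0; });
}

int main() {
  std::thread profiler([] {
    SuspendIfNecessary();                  // parks until some isolate runs JS
    std::puts("profiler woke up");
  });
  IsolateEnteredJS();                      // unblocks the profiler thread
  profiler.join();
  IsolateExitedJS();
  return 0;
}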
