Chromium Code Reviews

Side by Side Diff: src/runtime-profiler.cc

Issue 6880010: Merge (7265, 7271] from bleeding_edge to experimental/gc branch.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: '' Created 9 years, 8 months ago
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 18 matching lines...)
29 29
30 #include "runtime-profiler.h" 30 #include "runtime-profiler.h"
31 31
32 #include "assembler.h" 32 #include "assembler.h"
33 #include "code-stubs.h" 33 #include "code-stubs.h"
34 #include "compilation-cache.h" 34 #include "compilation-cache.h"
35 #include "deoptimizer.h" 35 #include "deoptimizer.h"
36 #include "execution.h" 36 #include "execution.h"
37 #include "global-handles.h" 37 #include "global-handles.h"
38 #include "mark-compact.h" 38 #include "mark-compact.h"
39 #include "platform.h"
39 #include "scopeinfo.h" 40 #include "scopeinfo.h"
40 #include "top.h"
41 41
42 namespace v8 { 42 namespace v8 {
43 namespace internal { 43 namespace internal {
44 44
45 45
46 class PendingListNode : public Malloced { 46 class PendingListNode : public Malloced {
47 public: 47 public:
48 explicit PendingListNode(JSFunction* function); 48 explicit PendingListNode(JSFunction* function);
49 ~PendingListNode() { Destroy(); } 49 ~PendingListNode() { Destroy(); }
50 50
(...skipping 11 matching lines...)
62 private: 62 private:
63 void Destroy(); 63 void Destroy();
64 static void WeakCallback(v8::Persistent<v8::Value> object, void* data); 64 static void WeakCallback(v8::Persistent<v8::Value> object, void* data);
65 65
66 PendingListNode* next_; 66 PendingListNode* next_;
67 Handle<Object> function_; // Weak handle. 67 Handle<Object> function_; // Weak handle.
68 int64_t start_; 68 int64_t start_;
69 }; 69 };
70 70
71 71
72 enum SamplerState {
73 IN_NON_JS_STATE = 0,
74 IN_JS_STATE = 1
75 };
76
77
78 // Optimization sampler constants. 72 // Optimization sampler constants.
79 static const int kSamplerFrameCount = 2; 73 static const int kSamplerFrameCount = 2;
80 static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 }; 74 static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };
81 static const int kSamplerWindowSize = 16;
82 75
83 static const int kSamplerTicksBetweenThresholdAdjustment = 32; 76 static const int kSamplerTicksBetweenThresholdAdjustment = 32;
84 77
85 static const int kSamplerThresholdInit = 3; 78 static const int kSamplerThresholdInit = 3;
86 static const int kSamplerThresholdMin = 1; 79 static const int kSamplerThresholdMin = 1;
87 static const int kSamplerThresholdDelta = 1; 80 static const int kSamplerThresholdDelta = 1;
88 81
89 static const int kSamplerThresholdSizeFactorInit = 3; 82 static const int kSamplerThresholdSizeFactorInit = 3;
90 static const int kSamplerThresholdSizeFactorMin = 1; 83 static const int kSamplerThresholdSizeFactorMin = 1;
91 static const int kSamplerThresholdSizeFactorDelta = 1; 84 static const int kSamplerThresholdSizeFactorDelta = 1;
92 85
93 static const int kSizeLimit = 1500; 86 static const int kSizeLimit = 1500;
94 87
95 static int sampler_threshold = kSamplerThresholdInit;
96 static int sampler_threshold_size_factor = kSamplerThresholdSizeFactorInit;
97
98 static int sampler_ticks_until_threshold_adjustment =
99 kSamplerTicksBetweenThresholdAdjustment;
100
101 // The ratio of ticks spent in JS code in percent.
102 static Atomic32 js_ratio;
103
104 static Object* sampler_window[kSamplerWindowSize] = { NULL, };
105 static int sampler_window_position = 0;
106 static int sampler_window_weight[kSamplerWindowSize] = { 0, };
107
108
109 // Support for pending 'optimize soon' requests.
110 static PendingListNode* optimize_soon_list = NULL;
111
112 88
113 PendingListNode::PendingListNode(JSFunction* function) : next_(NULL) { 89 PendingListNode::PendingListNode(JSFunction* function) : next_(NULL) {
114 function_ = GlobalHandles::Create(function); 90 GlobalHandles* global_handles = Isolate::Current()->global_handles();
91 function_ = global_handles->Create(function);
115 start_ = OS::Ticks(); 92 start_ = OS::Ticks();
116 GlobalHandles::MakeWeak(function_.location(), this, &WeakCallback); 93 global_handles->MakeWeak(function_.location(), this, &WeakCallback);
117 } 94 }
118 95
119 96
120 void PendingListNode::Destroy() { 97 void PendingListNode::Destroy() {
121 if (!IsValid()) return; 98 if (!IsValid()) return;
122 GlobalHandles::Destroy(function_.location()); 99 GlobalHandles* global_handles = Isolate::Current()->global_handles();
100 global_handles->Destroy(function_.location());
123 function_= Handle<Object>::null(); 101 function_= Handle<Object>::null();
124 } 102 }
125 103
126 104
127 void PendingListNode::WeakCallback(v8::Persistent<v8::Value>, void* data) { 105 void PendingListNode::WeakCallback(v8::Persistent<v8::Value>, void* data) {
128 reinterpret_cast<PendingListNode*>(data)->Destroy(); 106 reinterpret_cast<PendingListNode*>(data)->Destroy();
129 } 107 }
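The pending list ties each 'optimize soon' request to its JSFunction through a weak global handle, so a function that gets garbage collected simply invalidates its node instead of being kept alive by the profiler. A minimal stand-in sketch of that pattern using std::weak_ptr in place of V8 global handles (PendingRequest and Function are hypothetical names, not V8 types):

#include <chrono>
#include <memory>

struct Function {};  // stand-in for a heap-allocated JSFunction

// Hypothetical analogue of PendingListNode: holds the target weakly so the
// request never extends the function's lifetime, and records when it was made.
class PendingRequest {
 public:
  explicit PendingRequest(const std::shared_ptr<Function>& fn)
      : function_(fn), start_(std::chrono::steady_clock::now()) {}

  bool IsValid() const { return !function_.expired(); }

  // Microseconds elapsed since the request was queued (mirrors Delay()).
  long long DelayUs() const {
    return std::chrono::duration_cast<std::chrono::microseconds>(
               std::chrono::steady_clock::now() - start_).count();
  }

  PendingRequest* next = nullptr;  // intrusive singly-linked list

 private:
  std::weak_ptr<Function> function_;  // weak: collection invalidates the node
  std::chrono::steady_clock::time_point start_;
};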
130 108
131 109
132 static bool IsOptimizable(JSFunction* function) { 110 static bool IsOptimizable(JSFunction* function) {
133 Code* code = function->code(); 111 Code* code = function->code();
134 return code->kind() == Code::FUNCTION && code->optimizable(); 112 return code->kind() == Code::FUNCTION && code->optimizable();
135 } 113 }
136 114
137 115
138 static void Optimize(JSFunction* function, bool eager, int delay) { 116 Atomic32 RuntimeProfiler::state_ = 0;
117 // TODO(isolates): Create the semaphore lazily and clean it up when no
118 // longer required.
119 #ifdef ENABLE_LOGGING_AND_PROFILING
120 Semaphore* RuntimeProfiler::semaphore_ = OS::CreateSemaphore(0);
121 #endif
122
123
124 RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
125 : isolate_(isolate),
126 sampler_threshold_(kSamplerThresholdInit),
127 sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit),
128 sampler_ticks_until_threshold_adjustment_(
129 kSamplerTicksBetweenThresholdAdjustment),
130 js_ratio_(0),
131 sampler_window_position_(0),
132 optimize_soon_list_(NULL),
133 state_window_position_(0) {
134 state_counts_[0] = kStateWindowSize;
135 state_counts_[1] = 0;
136 memset(state_window_, 0, sizeof(state_window_));
137 ClearSampleBuffer();
138 }
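The constructor is the heart of this patch: state that previously lived in file-level statics (sampler window, thresholds, js_ratio, the optimize-soon list) becomes per-RuntimeProfiler, i.e. per-isolate, state initialized from the same constants. A reduced sketch of the shape of that refactoring, assuming a simplified ProfilerState class rather than the real V8 one:

#include <vector>

// Before: one shared profiler state for the whole process, e.g.
//   static int sampler_threshold = kSamplerThresholdInit;
//   static Object* sampler_window[kSamplerWindowSize];
// After: each isolate owns its own copy, so multiple isolates can profile
// independently without racing on the same globals.
class ProfilerState {
 public:
  ProfilerState(int threshold_init, int window_size)
      : sampler_threshold_(threshold_init),
        sampler_window_(window_size, nullptr) {}

 private:
  int sampler_threshold_;
  std::vector<void*> sampler_window_;  // one slot per sampled frame
};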
139
140
141 bool RuntimeProfiler::IsEnabled() {
142 return V8::UseCrankshaft() && FLAG_opt;
143 }
144
145
146 void RuntimeProfiler::Optimize(JSFunction* function, bool eager, int delay) {
139 ASSERT(IsOptimizable(function)); 147 ASSERT(IsOptimizable(function));
140 if (FLAG_trace_opt) { 148 if (FLAG_trace_opt) {
141 PrintF("[marking (%s) ", eager ? "eagerly" : "lazily"); 149 PrintF("[marking (%s) ", eager ? "eagerly" : "lazily");
142 function->PrintName(); 150 function->PrintName();
143 PrintF(" for recompilation"); 151 PrintF(" for recompilation");
144 if (delay > 0) { 152 if (delay > 0) {
145 PrintF(" (delayed %0.3f ms)", static_cast<double>(delay) / 1000); 153 PrintF(" (delayed %0.3f ms)", static_cast<double>(delay) / 1000);
146 } 154 }
147 PrintF("]\n"); 155 PrintF("]\n");
148 } 156 }
149 157
150 // The next call to the function will trigger optimization. 158 // The next call to the function will trigger optimization.
151 function->MarkForLazyRecompilation(); 159 function->MarkForLazyRecompilation();
152 } 160 }
153 161
154 162
155 static void AttemptOnStackReplacement(JSFunction* function) { 163 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
156 // See AlwaysFullCompiler (in compiler.cc) comment on why we need 164 // See AlwaysFullCompiler (in compiler.cc) comment on why we need
157 // Debug::has_break_points(). 165 // Debug::has_break_points().
158 ASSERT(function->IsMarkedForLazyRecompilation()); 166 ASSERT(function->IsMarkedForLazyRecompilation());
159 if (!FLAG_use_osr || Debug::has_break_points() || function->IsBuiltin()) { 167 if (!FLAG_use_osr ||
168 isolate_->debug()->has_break_points() ||
169 function->IsBuiltin()) {
160 return; 170 return;
161 } 171 }
162 172
163 SharedFunctionInfo* shared = function->shared(); 173 SharedFunctionInfo* shared = function->shared();
164 // If the code is not optimizable or references context slots, don't try OSR. 174 // If the code is not optimizable or references context slots, don't try OSR.
165 if (!shared->code()->optimizable() || !shared->allows_lazy_compilation()) { 175 if (!shared->code()->optimizable() || !shared->allows_lazy_compilation()) {
166 return; 176 return;
167 } 177 }
168 178
169 // We are not prepared to do OSR for a function that already has an 179 // We are not prepared to do OSR for a function that already has an
170 // allocated arguments object. The optimized code would bypass it for 180 // allocated arguments object. The optimized code would bypass it for
171 // arguments accesses, which is unsound. Don't try OSR. 181 // arguments accesses, which is unsound. Don't try OSR.
172 if (shared->scope_info()->HasArgumentsShadow()) return; 182 if (shared->scope_info()->HasArgumentsShadow()) return;
173 183
174 // We're using on-stack replacement: patch the unoptimized code so that 184 // We're using on-stack replacement: patch the unoptimized code so that
175 // any back edge in any unoptimized frame will trigger on-stack 185 // any back edge in any unoptimized frame will trigger on-stack
176 // replacement for that frame. 186 // replacement for that frame.
177 if (FLAG_trace_osr) { 187 if (FLAG_trace_osr) {
178 PrintF("[patching stack checks in "); 188 PrintF("[patching stack checks in ");
179 function->PrintName(); 189 function->PrintName();
180 PrintF(" for on-stack replacement]\n"); 190 PrintF(" for on-stack replacement]\n");
181 } 191 }
182 192
183 // Get the stack check stub code object to match against. We aren't 193 // Get the stack check stub code object to match against. We aren't
184 // prepared to generate it, but we don't expect to have to. 194 // prepared to generate it, but we don't expect to have to.
185 StackCheckStub check_stub; 195 StackCheckStub check_stub;
186 Object* check_code; 196 Object* check_code;
187 MaybeObject* maybe_check_code = check_stub.TryGetCode(); 197 MaybeObject* maybe_check_code = check_stub.TryGetCode();
188 if (maybe_check_code->ToObject(&check_code)) { 198 if (maybe_check_code->ToObject(&check_code)) {
189 Code* replacement_code = Builtins::builtin(Builtins::OnStackReplacement); 199 Code* replacement_code =
200 isolate_->builtins()->builtin(Builtins::OnStackReplacement);
190 Code* unoptimized_code = shared->code(); 201 Code* unoptimized_code = shared->code();
191 Deoptimizer::PatchStackCheckCode(unoptimized_code, 202 Deoptimizer::PatchStackCheckCode(unoptimized_code,
192 Code::cast(check_code), 203 Code::cast(check_code),
193 replacement_code); 204 replacement_code);
194 } 205 }
195 } 206 }
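The on-stack replacement path works by patching the stack-check call at loop back edges in the unoptimized code so that the next back edge of a hot loop enters the OnStackReplacement builtin instead of a plain check. A purely conceptual toy (not V8's mechanism) showing "patching a back-edge check" via a function pointer swapped at runtime:

#include <cstdio>

// The hook every loop back edge calls. Normally a cheap no-op stack check;
// "patching" swaps it for a handler that would enter on-stack replacement.
static void StackCheck() {}
static void OnStackReplacementHook() { std::puts("back edge hit: enter OSR"); }
static void (*back_edge_hook)() = &StackCheck;

int RunLoop(int n) {
  int sum = 0;
  for (int i = 0; i < n; ++i) {
    sum += i;
    back_edge_hook();  // the patchable check at the loop back edge
  }
  return sum;
}

int main() {
  RunLoop(3);                                // unpatched: plain stack checks
  back_edge_hook = &OnStackReplacementHook;  // "patch" the back-edge check
  RunLoop(3);                                // now every back edge reports
}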
196 207
197 208
198 static void ClearSampleBuffer() { 209 void RuntimeProfiler::ClearSampleBuffer() {
199 for (int i = 0; i < kSamplerWindowSize; i++) { 210 memset(sampler_window_, 0, sizeof(sampler_window_));
200 sampler_window[i] = NULL; 211 memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_));
201 sampler_window_weight[i] = 0;
202 }
203 } 212 }
204 213
205 214
206 static int LookupSample(JSFunction* function) { 215 int RuntimeProfiler::LookupSample(JSFunction* function) {
207 int weight = 0; 216 int weight = 0;
208 for (int i = 0; i < kSamplerWindowSize; i++) { 217 for (int i = 0; i < kSamplerWindowSize; i++) {
209 Object* sample = sampler_window[i]; 218 Object* sample = sampler_window_[i];
210 if (sample != NULL) { 219 if (sample != NULL) {
211 if (function == sample) { 220 if (function == sample) {
212 weight += sampler_window_weight[i]; 221 weight += sampler_window_weight_[i];
213 } 222 }
214 } 223 }
215 } 224 }
216 return weight; 225 return weight;
217 } 226 }
218 227
219 228
220 static void AddSample(JSFunction* function, int weight) { 229 void RuntimeProfiler::AddSample(JSFunction* function, int weight) {
221 ASSERT(IsPowerOf2(kSamplerWindowSize)); 230 ASSERT(IsPowerOf2(kSamplerWindowSize));
222 sampler_window[sampler_window_position] = function; 231 sampler_window_[sampler_window_position_] = function;
223 sampler_window_weight[sampler_window_position] = weight; 232 sampler_window_weight_[sampler_window_position_] = weight;
224 sampler_window_position = (sampler_window_position + 1) & 233 sampler_window_position_ = (sampler_window_position_ + 1) &
225 (kSamplerWindowSize - 1); 234 (kSamplerWindowSize - 1);
226 } 235 }
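LookupSample and AddSample implement a small fixed-size circular window; the ASSERT(IsPowerOf2(kSamplerWindowSize)) matters because wrap-around uses a bit mask rather than a modulo. A standalone sketch of that indexing trick, assuming opaque function pointers as keys:

#include <cstddef>

constexpr int kWindowSize = 16;  // must be a power of two for the mask trick

struct SampleWindow {
  const void* slots[kWindowSize] = {nullptr};
  int weights[kWindowSize] = {0};
  int position = 0;

  void Add(const void* fn, int weight) {
    static_assert((kWindowSize & (kWindowSize - 1)) == 0,
                  "window size must be a power of two");
    slots[position] = fn;
    weights[position] = weight;
    // (pos + 1) & (size - 1) equals (pos + 1) % size for power-of-two sizes.
    position = (position + 1) & (kWindowSize - 1);
  }

  int Lookup(const void* fn) const {
    int weight = 0;
    for (int i = 0; i < kWindowSize; ++i) {
      if (slots[i] == fn) weight += weights[i];  // sum weights of all hits
    }
    return weight;
  }
};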
227 236
228 237
229 void RuntimeProfiler::OptimizeNow() { 238 void RuntimeProfiler::OptimizeNow() {
230 HandleScope scope; 239 HandleScope scope(isolate_);
231 PendingListNode* current = optimize_soon_list; 240 PendingListNode* current = optimize_soon_list_;
232 while (current != NULL) { 241 while (current != NULL) {
233 PendingListNode* next = current->next(); 242 PendingListNode* next = current->next();
234 if (current->IsValid()) { 243 if (current->IsValid()) {
235 Handle<JSFunction> function = current->function(); 244 Handle<JSFunction> function = current->function();
236 int delay = current->Delay(); 245 int delay = current->Delay();
237 if (IsOptimizable(*function)) { 246 if (IsOptimizable(*function)) {
238 Optimize(*function, true, delay); 247 Optimize(*function, true, delay);
239 } 248 }
240 } 249 }
241 delete current; 250 delete current;
242 current = next; 251 current = next;
243 } 252 }
244 optimize_soon_list = NULL; 253 optimize_soon_list_ = NULL;
245 254
246 // Run through the JavaScript frames and collect them. If we already 255 // Run through the JavaScript frames and collect them. If we already
247 // have a sample of the function, we mark it for optimizations 256 // have a sample of the function, we mark it for optimizations
248 // (eagerly or lazily). 257 // (eagerly or lazily).
249 JSFunction* samples[kSamplerFrameCount]; 258 JSFunction* samples[kSamplerFrameCount];
250 int sample_count = 0; 259 int sample_count = 0;
251 int frame_count = 0; 260 int frame_count = 0;
252 for (JavaScriptFrameIterator it; 261 for (JavaScriptFrameIterator it;
253 frame_count++ < kSamplerFrameCount && !it.done(); 262 frame_count++ < kSamplerFrameCount && !it.done();
254 it.Advance()) { 263 it.Advance()) {
255 JavaScriptFrame* frame = it.frame(); 264 JavaScriptFrame* frame = it.frame();
256 JSFunction* function = JSFunction::cast(frame->function()); 265 JSFunction* function = JSFunction::cast(frame->function());
257 266
258 // Adjust threshold each time we have processed 267 // Adjust threshold each time we have processed
259 // a certain number of ticks. 268 // a certain number of ticks.
260 if (sampler_ticks_until_threshold_adjustment > 0) { 269 if (sampler_ticks_until_threshold_adjustment_ > 0) {
261 sampler_ticks_until_threshold_adjustment--; 270 sampler_ticks_until_threshold_adjustment_--;
262 if (sampler_ticks_until_threshold_adjustment <= 0) { 271 if (sampler_ticks_until_threshold_adjustment_ <= 0) {
263 // If the threshold is not already at the minimum 272 // If the threshold is not already at the minimum
264 // modify and reset the ticks until next adjustment. 273 // modify and reset the ticks until next adjustment.
265 if (sampler_threshold > kSamplerThresholdMin) { 274 if (sampler_threshold_ > kSamplerThresholdMin) {
266 sampler_threshold -= kSamplerThresholdDelta; 275 sampler_threshold_ -= kSamplerThresholdDelta;
267 sampler_ticks_until_threshold_adjustment = 276 sampler_ticks_until_threshold_adjustment_ =
268 kSamplerTicksBetweenThresholdAdjustment; 277 kSamplerTicksBetweenThresholdAdjustment;
269 } 278 }
270 } 279 }
271 } 280 }
272 281
273 if (function->IsMarkedForLazyRecompilation()) { 282 if (function->IsMarkedForLazyRecompilation()) {
274 Code* unoptimized = function->shared()->code(); 283 Code* unoptimized = function->shared()->code();
275 int nesting = unoptimized->allow_osr_at_loop_nesting_level(); 284 int nesting = unoptimized->allow_osr_at_loop_nesting_level();
276 if (nesting == 0) AttemptOnStackReplacement(function); 285 if (nesting == 0) AttemptOnStackReplacement(function);
277 int new_nesting = Min(nesting + 1, Code::kMaxLoopNestingMarker); 286 int new_nesting = Min(nesting + 1, Code::kMaxLoopNestingMarker);
278 unoptimized->set_allow_osr_at_loop_nesting_level(new_nesting); 287 unoptimized->set_allow_osr_at_loop_nesting_level(new_nesting);
279 } 288 }
280 289
281 // Do not record non-optimizable functions. 290 // Do not record non-optimizable functions.
282 if (!IsOptimizable(function)) continue; 291 if (!IsOptimizable(function)) continue;
283 samples[sample_count++] = function; 292 samples[sample_count++] = function;
284 293
285 int function_size = function->shared()->SourceSize(); 294 int function_size = function->shared()->SourceSize();
286 int threshold_size_factor = (function_size > kSizeLimit) 295 int threshold_size_factor = (function_size > kSizeLimit)
287 ? sampler_threshold_size_factor 296 ? sampler_threshold_size_factor_
288 : 1; 297 : 1;
289 298
290 int threshold = sampler_threshold * threshold_size_factor; 299 int threshold = sampler_threshold_ * threshold_size_factor;
291 int current_js_ratio = NoBarrier_Load(&js_ratio); 300 int current_js_ratio = NoBarrier_Load(&js_ratio_);
292 301
293 // Adjust threshold depending on the ratio of time spent 302 // Adjust threshold depending on the ratio of time spent
294 // in JS code. 303 // in JS code.
295 if (current_js_ratio < 20) { 304 if (current_js_ratio < 20) {
296 // If we spend less than 20% of the time in JS code, 305 // If we spend less than 20% of the time in JS code,
297 // do not optimize. 306 // do not optimize.
298 continue; 307 continue;
299 } else if (current_js_ratio < 75) { 308 } else if (current_js_ratio < 75) {
300 // Below 75% of time spent in JS code, only optimize very 309 // Below 75% of time spent in JS code, only optimize very
301 // frequently used functions. 310 // frequently used functions.
302 threshold *= 3; 311 threshold *= 3;
303 } 312 }
304 313
305 if (LookupSample(function) >= threshold) { 314 if (LookupSample(function) >= threshold) {
306 Optimize(function, false, 0); 315 Optimize(function, false, 0);
307 CompilationCache::MarkForEagerOptimizing(Handle<JSFunction>(function)); 316 isolate_->compilation_cache()->MarkForEagerOptimizing(
317 Handle<JSFunction>(function));
308 } 318 }
309 } 319 }
310 320
311 // Add the collected functions as samples. It's important not to do 321 // Add the collected functions as samples. It's important not to do
312 // this as part of collecting them because this will interfere with 322 // this as part of collecting them because this will interfere with
313 // the sample lookup in case of recursive functions. 323 // the sample lookup in case of recursive functions.
314 for (int i = 0; i < sample_count; i++) { 324 for (int i = 0; i < sample_count; i++) {
315 AddSample(samples[i], kSamplerFrameWeight[i]); 325 AddSample(samples[i], kSamplerFrameWeight[i]);
316 } 326 }
317 } 327 }
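The decision in OptimizeNow reduces to an effective threshold: start from sampler_threshold_, multiply by the size factor for functions larger than kSizeLimit, skip optimization entirely below 20% time in JS, and triple the threshold below 75%. A compact sketch of just that arithmetic, with the constants copied from above and names local to the sketch:

// Returns the sample weight a function must reach before being marked for
// optimization, or -1 if optimization should be skipped on this tick.
int EffectiveThreshold(int sampler_threshold, int size_factor,
                       int function_size, int js_ratio_percent) {
  const int kSizeLimit = 1500;
  int threshold = sampler_threshold *
                  (function_size > kSizeLimit ? size_factor : 1);
  if (js_ratio_percent < 20) return -1;       // mostly non-JS: do not optimize
  if (js_ratio_percent < 75) threshold *= 3;  // only very hot functions
  return threshold;
}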
318 328
319 329
320 void RuntimeProfiler::OptimizeSoon(JSFunction* function) { 330 void RuntimeProfiler::OptimizeSoon(JSFunction* function) {
321 if (!IsOptimizable(function)) return; 331 if (!IsOptimizable(function)) return;
322 PendingListNode* node = new PendingListNode(function); 332 PendingListNode* node = new PendingListNode(function);
323 node->set_next(optimize_soon_list); 333 node->set_next(optimize_soon_list_);
324 optimize_soon_list = node; 334 optimize_soon_list_ = node;
325 } 335 }
326 336
327 337
328 #ifdef ENABLE_LOGGING_AND_PROFILING 338 #ifdef ENABLE_LOGGING_AND_PROFILING
329 static void UpdateStateRatio(SamplerState current_state) { 339 void RuntimeProfiler::UpdateStateRatio(SamplerState current_state) {
330 static const int kStateWindowSize = 128; 340 SamplerState old_state = state_window_[state_window_position_];
331 static SamplerState state_window[kStateWindowSize]; 341 state_counts_[old_state]--;
332 static int state_window_position = 0; 342 state_window_[state_window_position_] = current_state;
333 static int state_counts[2] = { kStateWindowSize, 0 }; 343 state_counts_[current_state]++;
334
335 SamplerState old_state = state_window[state_window_position];
336 state_counts[old_state]--;
337 state_window[state_window_position] = current_state;
338 state_counts[current_state]++;
339 ASSERT(IsPowerOf2(kStateWindowSize)); 344 ASSERT(IsPowerOf2(kStateWindowSize));
340 state_window_position = (state_window_position + 1) & 345 state_window_position_ = (state_window_position_ + 1) &
341 (kStateWindowSize - 1); 346 (kStateWindowSize - 1);
342 NoBarrier_Store(&js_ratio, state_counts[IN_JS_STATE] * 100 / 347 NoBarrier_Store(&js_ratio_, state_counts_[IN_JS_STATE] * 100 /
343 kStateWindowSize); 348 kStateWindowSize);
344 } 349 }
345 #endif 350 #endif
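UpdateStateRatio keeps a 128-entry circular window of IN_JS/IN_NON_JS samples plus running counts, so the JS ratio is maintained in O(1) per tick instead of rescanning the window. A self-contained sketch of that bookkeeping (class name is hypothetical):

#include <atomic>

constexpr int kStateWindowSize = 128;  // power of two, masked like the sampler

class JsRatioTracker {
 public:
  void RecordTick(bool in_js) {
    // Evict the oldest sample, then account for the new one.
    counts_[window_[pos_] ? 1 : 0]--;
    window_[pos_] = in_js;
    counts_[in_js ? 1 : 0]++;
    pos_ = (pos_ + 1) & (kStateWindowSize - 1);
    js_ratio_.store(counts_[1] * 100 / kStateWindowSize,
                    std::memory_order_relaxed);
  }

  int js_ratio_percent() const {
    return js_ratio_.load(std::memory_order_relaxed);
  }

 private:
  bool window_[kStateWindowSize] = {};     // false == non-JS
  int counts_[2] = {kStateWindowSize, 0};  // window starts as all non-JS
  int pos_ = 0;
  std::atomic<int> js_ratio_{0};           // read from another thread
};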
346 351
347 352
348 void RuntimeProfiler::NotifyTick() { 353 void RuntimeProfiler::NotifyTick() {
349 #ifdef ENABLE_LOGGING_AND_PROFILING 354 #ifdef ENABLE_LOGGING_AND_PROFILING
350 // Record state sample. 355 // Record state sample.
351 SamplerState state = Top::IsInJSState() 356 SamplerState state = IsSomeIsolateInJS()
352 ? IN_JS_STATE 357 ? IN_JS_STATE
353 : IN_NON_JS_STATE; 358 : IN_NON_JS_STATE;
354 UpdateStateRatio(state); 359 UpdateStateRatio(state);
355 StackGuard::RequestRuntimeProfilerTick(); 360 isolate_->stack_guard()->RequestRuntimeProfilerTick();
356 #endif 361 #endif
357 } 362 }
358 363
359 364
360 void RuntimeProfiler::Setup() { 365 void RuntimeProfiler::Setup() {
361 ClearSampleBuffer(); 366 ClearSampleBuffer();
362 // If the ticker hasn't already started, make sure to do so to get 367 // If the ticker hasn't already started, make sure to do so to get
363 // the ticks for the runtime profiler. 368 // the ticks for the runtime profiler.
364 if (IsEnabled()) Logger::EnsureTickerStarted(); 369 if (IsEnabled()) isolate_->logger()->EnsureTickerStarted();
365 } 370 }
366 371
367 372
368 void RuntimeProfiler::Reset() { 373 void RuntimeProfiler::Reset() {
369 sampler_threshold = kSamplerThresholdInit; 374 sampler_threshold_ = kSamplerThresholdInit;
370 sampler_ticks_until_threshold_adjustment = 375 sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
376 sampler_ticks_until_threshold_adjustment_ =
371 kSamplerTicksBetweenThresholdAdjustment; 377 kSamplerTicksBetweenThresholdAdjustment;
372 sampler_threshold_size_factor = kSamplerThresholdSizeFactorInit;
373 } 378 }
374 379
375 380
376 void RuntimeProfiler::TearDown() { 381 void RuntimeProfiler::TearDown() {
377 // Nothing to do. 382 // Nothing to do.
378 } 383 }
379 384
380 385
381 int RuntimeProfiler::SamplerWindowSize() { 386 int RuntimeProfiler::SamplerWindowSize() {
382 return kSamplerWindowSize; 387 return kSamplerWindowSize;
383 } 388 }
384 389
385 390
386 // Update the pointers in the sampler window after a GC. 391 // Update the pointers in the sampler window after a GC.
387 void RuntimeProfiler::UpdateSamplesAfterScavenge() { 392 void RuntimeProfiler::UpdateSamplesAfterScavenge() {
388 for (int i = 0; i < kSamplerWindowSize; i++) { 393 for (int i = 0; i < kSamplerWindowSize; i++) {
389 Object* function = sampler_window[i]; 394 Object* function = sampler_window_[i];
390 if (function != NULL && Heap::InNewSpace(function)) { 395 if (function != NULL && isolate_->heap()->InNewSpace(function)) {
391 MapWord map_word = HeapObject::cast(function)->map_word(); 396 MapWord map_word = HeapObject::cast(function)->map_word();
392 if (map_word.IsForwardingAddress()) { 397 if (map_word.IsForwardingAddress()) {
393 sampler_window[i] = map_word.ToForwardingAddress(); 398 sampler_window_[i] = map_word.ToForwardingAddress();
394 } else { 399 } else {
395 sampler_window[i] = NULL; 400 sampler_window_[i] = NULL;
396 } 401 }
397 } 402 }
398 } 403 }
399 } 404 }
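After a scavenge, sampler window entries pointing into new space are either redirected through the object's forwarding address or dropped if the object did not survive. A toy version using an explicit forwarding table in place of map words (ForwardingTable and the in_new_space predicate are stand-ins, not V8 APIs):

#include <unordered_map>
#include <vector>

// Maps an object's old address to its copied address; no entry means the
// object did not survive the scavenge.
using ForwardingTable = std::unordered_map<void*, void*>;

void UpdateSamplesAfterScavengeSketch(std::vector<void*>& window,
                                      const ForwardingTable& forwarded,
                                      bool (*in_new_space)(void*)) {
  for (void*& slot : window) {
    if (slot == nullptr || !in_new_space(slot)) continue;  // old space: keep
    auto it = forwarded.find(slot);
    // Survivors follow their forwarding address; dead objects are cleared.
    slot = (it != forwarded.end()) ? it->second : nullptr;
  }
}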
400 405
401 406
407 void RuntimeProfiler::HandleWakeUp(Isolate* isolate) {
408 #ifdef ENABLE_LOGGING_AND_PROFILING
409 // The profiler thread must still be waiting.
410 ASSERT(NoBarrier_Load(&state_) >= 0);
411 // In IsolateEnteredJS we have already incremented the counter and
412 // undid the decrement done by the profiler thread. Increment again
413 // to get the right count of active isolates.
414 NoBarrier_AtomicIncrement(&state_, 1);
415 semaphore_->Signal();
416 isolate->ResetEagerOptimizingData();
417 #endif
418 }
419
420
421 bool RuntimeProfiler::IsSomeIsolateInJS() {
422 return NoBarrier_Load(&state_) > 0;
423 }
424
425
426 bool RuntimeProfiler::WaitForSomeIsolateToEnterJS() {
427 #ifdef ENABLE_LOGGING_AND_PROFILING
428 Atomic32 old_state = NoBarrier_CompareAndSwap(&state_, 0, -1);
429 ASSERT(old_state >= -1);
430 if (old_state != 0) return false;
431 semaphore_->Wait();
432 #endif
433 return true;
434 }
435
436
437 void RuntimeProfiler::WakeUpRuntimeProfilerThreadBeforeShutdown() {
438 #ifdef ENABLE_LOGGING_AND_PROFILING
439 semaphore_->Signal();
440 #endif
441 }
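The new state_/semaphore_ pair is a small handshake: state_ counts isolates currently executing JS; the profiler thread compare-and-swaps 0 -> -1 to park itself on the semaphore, and the first isolate to enter JS undoes that -1 (see HandleWakeUp's comment) and signals it awake. A self-contained sketch of the same protocol, assuming C++20 std::binary_semaphore and std::atomic rather than V8's OS semaphore:

#include <atomic>
#include <semaphore>  // C++20

std::atomic<int> active_isolates{0};       // analogous to RuntimeProfiler::state_
std::binary_semaphore wake_signal{0};      // analogous to semaphore_

// Profiler thread: park only if no isolate is in JS (counter is exactly 0).
// CAS 0 -> -1 marks "profiler is waiting"; any other value means keep running.
bool WaitForSomeIsolateToEnterJS() {
  int expected = 0;
  if (!active_isolates.compare_exchange_strong(expected, -1)) return false;
  wake_signal.acquire();  // sleep until an isolate enters JS
  return true;
}

// Isolate entering JS: increment the counter; if it was -1 the profiler is
// parked, so count ourselves on top of undoing the -1, then signal it awake.
void IsolateEnteredJS() {
  int old = active_isolates.fetch_add(1);
  if (old == -1) {
    active_isolates.fetch_add(1);  // -1 -> 1: waiting flag replaced by a count
    wake_signal.release();
  }
}

// Isolate leaving JS simply removes its contribution.
void IsolateExitedJS() { active_isolates.fetch_sub(1); }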
442
443
402 void RuntimeProfiler::RemoveDeadSamples() { 444 void RuntimeProfiler::RemoveDeadSamples() {
403 for (int i = 0; i < kSamplerWindowSize; i++) { 445 for (int i = 0; i < kSamplerWindowSize; i++) {
404 Object* function = sampler_window[i]; 446 Object* function = sampler_window_[i];
447 // TODO(gc) ISOLATES MERGE
405 if (function != NULL && 448 if (function != NULL &&
406 !Marking::MarkBitFrom(HeapObject::cast(function)).Get()) { 449 !HEAP->marking()->MarkBitFrom(HeapObject::cast(function)).Get()) {
407 sampler_window[i] = NULL; 450 sampler_window_[i] = NULL;
408 } 451 }
409 } 452 }
410 } 453 }
411 454
412 455
413 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { 456 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) {
414 for (int i = 0; i < kSamplerWindowSize; i++) { 457 for (int i = 0; i < kSamplerWindowSize; i++) {
415 visitor->VisitPointer(&sampler_window[i]); 458 visitor->VisitPointer(&sampler_window_[i]);
416 } 459 }
417 } 460 }
418 461
419 462
420 bool RuntimeProfilerRateLimiter::SuspendIfNecessary() { 463 bool RuntimeProfilerRateLimiter::SuspendIfNecessary() {
421 #ifdef ENABLE_LOGGING_AND_PROFILING 464 #ifdef ENABLE_LOGGING_AND_PROFILING
422 static const int kNonJSTicksThreshold = 100; 465 static const int kNonJSTicksThreshold = 100;
423 // We suspend the runtime profiler thread when not running 466 if (RuntimeProfiler::IsSomeIsolateInJS()) {
424 // JavaScript. If the CPU profiler is active we must not do this 467 non_js_ticks_ = 0;
425 // because it samples both JavaScript and C++ code. 468 } else {
426 if (RuntimeProfiler::IsEnabled() && 469 if (non_js_ticks_ < kNonJSTicksThreshold) {
427 !CpuProfiler::is_profiling() && 470 ++non_js_ticks_;
428 !(FLAG_prof && FLAG_prof_auto)) {
429 if (Top::IsInJSState()) {
430 non_js_ticks_ = 0;
431 } else { 471 } else {
432 if (non_js_ticks_ < kNonJSTicksThreshold) { 472 return RuntimeProfiler::WaitForSomeIsolateToEnterJS();
433 ++non_js_ticks_;
434 } else {
435 if (Top::WaitForJSState()) return true;
436 }
437 } 473 }
438 } 474 }
439 #endif 475 #endif
440 return false; 476 return false;
441 } 477 }
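SuspendIfNecessary turns the per-tick decision into a simple back-off: reset the non-JS tick counter whenever some isolate is in JS, otherwise count up to a threshold and then block until JS runs again. A free-standing sketch of that logic, reusing WaitForSomeIsolateToEnterJS from the handshake sketch above:

bool WaitForSomeIsolateToEnterJS();  // from the handshake sketch above

class RateLimiter {
 public:
  // Returns true when the profiler thread was suspended (and has now been
  // woken), false when it should just take the next tick as usual.
  bool SuspendIfNecessary(bool some_isolate_in_js) {
    constexpr int kNonJSTicksThreshold = 100;
    if (some_isolate_in_js) {
      non_js_ticks_ = 0;             // activity seen: keep sampling
      return false;
    }
    if (non_js_ticks_ < kNonJSTicksThreshold) {
      ++non_js_ticks_;               // idle, but not long enough to park
      return false;
    }
    return WaitForSomeIsolateToEnterJS();  // park until JS runs again
  }

 private:
  int non_js_ticks_ = 0;
};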
442 478
443 479
444 } } // namespace v8::internal 480 } } // namespace v8::internal