| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 27 matching lines...) |
| 38 #include "global-handles.h" | 38 #include "global-handles.h" |
| 39 #include "isolate-inl.h" | 39 #include "isolate-inl.h" |
| 40 #include "mark-compact.h" | 40 #include "mark-compact.h" |
| 41 #include "platform.h" | 41 #include "platform.h" |
| 42 #include "scopeinfo.h" | 42 #include "scopeinfo.h" |
| 43 | 43 |
| 44 namespace v8 { | 44 namespace v8 { |
| 45 namespace internal { | 45 namespace internal { |
| 46 | 46 |
| 47 | 47 |
| 48 // Optimization sampler constants. | |
| 49 static const int kSamplerFrameCount = 2; | |
| 50 | |
| 51 // Constants for statistical profiler. | |
| 52 static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 }; | |
| 53 | |
| 54 static const int kSamplerTicksBetweenThresholdAdjustment = 32; | |
| 55 | |
| 56 static const int kSamplerThresholdInit = 3; | |
| 57 static const int kSamplerThresholdMin = 1; | |
| 58 static const int kSamplerThresholdDelta = 1; | |
| 59 | |
| 60 static const int kSamplerThresholdSizeFactorInit = 3; | |
| 61 | |
| 62 static const int kSizeLimit = 1500; | |
| 63 | |
| 64 // Constants for counter based profiler. | |
| 65 | |
| 66 // Number of times a function has to be seen on the stack before it is | 48 // Number of times a function has to be seen on the stack before it is |
| 67 // optimized. | 49 // optimized. |
| 68 static const int kProfilerTicksBeforeOptimization = 2; | 50 static const int kProfilerTicksBeforeOptimization = 2; |
| 69 // If the function optimization was disabled due to high deoptimization count, | 51 // If the function optimization was disabled due to high deoptimization count, |
| 70 // but the function is hot and has been seen on the stack this number of times, | 52 // but the function is hot and has been seen on the stack this number of times, |
| 71 // then we try to reenable optimization for this function. | 53 // then we try to reenable optimization for this function. |
| 72 static const int kProfilerTicksBeforeReenablingOptimization = 250; | 54 static const int kProfilerTicksBeforeReenablingOptimization = 250; |
| 73 // If a function does not have enough type info (according to | 55 // If a function does not have enough type info (according to |
| 74 // FLAG_type_info_threshold), but has seen a huge number of ticks, | 56 // FLAG_type_info_threshold), but has seen a huge number of ticks, |
| 75 // optimize it as it is. | 57 // optimize it as it is. |
| (...skipping 11 matching lines...) |
| 87 3 * FullCodeGenerator::kCodeSizeMultiplier; | 69 3 * FullCodeGenerator::kCodeSizeMultiplier; |
| 88 | 70 |
| 89 // Maximum size in bytes of generated code for a function to be optimized | 71 // Maximum size in bytes of generated code for a function to be optimized |
| 90 // the very first time it is seen on the stack. | 72 // the very first time it is seen on the stack. |
| 91 static const int kMaxSizeEarlyOpt = | 73 static const int kMaxSizeEarlyOpt = |
| 92 5 * FullCodeGenerator::kCodeSizeMultiplier; | 74 5 * FullCodeGenerator::kCodeSizeMultiplier; |
| 93 | 75 |
| 94 | 76 |
| 95 RuntimeProfiler::RuntimeProfiler(Isolate* isolate) | 77 RuntimeProfiler::RuntimeProfiler(Isolate* isolate) |
| 96 : isolate_(isolate), | 78 : isolate_(isolate), |
| 97 sampler_threshold_(kSamplerThresholdInit), | |
| 98 sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit), | |
| 99 sampler_ticks_until_threshold_adjustment_( | |
| 100 kSamplerTicksBetweenThresholdAdjustment), | |
| 101 sampler_window_position_(0), | |
| 102 any_ic_changed_(false), | 79 any_ic_changed_(false), |
| 103 code_generated_(false) { | 80 code_generated_(false) { |
| 104 ClearSampleBuffer(); | |
| 105 } | 81 } |
| 106 | 82 |
| 107 | 83 |
| 108 static void GetICCounts(Code* shared_code, | 84 static void GetICCounts(Code* shared_code, |
| 109 int* ic_with_type_info_count, | 85 int* ic_with_type_info_count, |
| 110 int* ic_total_count, | 86 int* ic_total_count, |
| 111 int* percentage) { | 87 int* percentage) { |
| 112 *ic_total_count = 0; | 88 *ic_total_count = 0; |
| 113 *ic_with_type_info_count = 0; | 89 *ic_with_type_info_count = 0; |
| 114 Object* raw_info = shared_code->type_feedback_info(); | 90 Object* raw_info = shared_code->type_feedback_info(); |
| (...skipping 67 matching lines...) |
| 182 if (FLAG_trace_osr) { | 158 if (FLAG_trace_osr) { |
| 183 PrintF("[OSR - patching back edges in "); | 159 PrintF("[OSR - patching back edges in "); |
| 184 function->PrintName(); | 160 function->PrintName(); |
| 185 PrintF("]\n"); | 161 PrintF("]\n"); |
| 186 } | 162 } |
| 187 | 163 |
| 188 BackEdgeTable::Patch(isolate_, shared->code()); | 164 BackEdgeTable::Patch(isolate_, shared->code()); |
| 189 } | 165 } |
| 190 | 166 |
| 191 | 167 |
| 192 void RuntimeProfiler::ClearSampleBuffer() { | |
| 193 memset(sampler_window_, 0, sizeof(sampler_window_)); | |
| 194 memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_)); | |
| 195 } | |
| 196 | |
| 197 | |
| 198 int RuntimeProfiler::LookupSample(JSFunction* function) { | |
| 199 int weight = 0; | |
| 200 for (int i = 0; i < kSamplerWindowSize; i++) { | |
| 201 Object* sample = sampler_window_[i]; | |
| 202 if (sample != NULL) { | |
| 203 bool fits = FLAG_lookup_sample_by_shared | |
| 204 ? (function->shared() == JSFunction::cast(sample)->shared()) | |
| 205 : (function == JSFunction::cast(sample)); | |
| 206 if (fits) { | |
| 207 weight += sampler_window_weight_[i]; | |
| 208 } | |
| 209 } | |
| 210 } | |
| 211 return weight; | |
| 212 } | |
| 213 | |
| 214 | |
| 215 void RuntimeProfiler::AddSample(JSFunction* function, int weight) { | |
| 216 ASSERT(IsPowerOf2(kSamplerWindowSize)); | |
| 217 sampler_window_[sampler_window_position_] = function; | |
| 218 sampler_window_weight_[sampler_window_position_] = weight; | |
| 219 sampler_window_position_ = (sampler_window_position_ + 1) & | |
| 220 (kSamplerWindowSize - 1); | |
| 221 } | |
| 222 | |
| 223 | |
| 224 void RuntimeProfiler::OptimizeNow() { | 168 void RuntimeProfiler::OptimizeNow() { |
| 225 HandleScope scope(isolate_); | 169 HandleScope scope(isolate_); |
| 226 | 170 |
| 227 if (isolate_->DebuggerHasBreakPoints()) return; | 171 if (isolate_->DebuggerHasBreakPoints()) return; |
| 228 | 172 |
| 229 DisallowHeapAllocation no_gc; | 173 DisallowHeapAllocation no_gc; |
| 230 | 174 |
| 231 // Run through the JavaScript frames and collect them. If we already | 175 // Run through the JavaScript frames and collect them. If we already |
| 232 // have a sample of the function, we mark it for optimizations | 176 // have a sample of the function, we mark it for optimizations |
| 233 // (eagerly or lazily). | 177 // (eagerly or lazily). |
| 234 JSFunction* samples[kSamplerFrameCount]; | |
| 235 int sample_count = 0; | |
| 236 int frame_count = 0; | 178 int frame_count = 0; |
| 237 int frame_count_limit = FLAG_watch_ic_patching ? FLAG_frame_count | 179 int frame_count_limit = FLAG_frame_count; |
| 238 : kSamplerFrameCount; | |
| 239 for (JavaScriptFrameIterator it(isolate_); | 180 for (JavaScriptFrameIterator it(isolate_); |
| 240 frame_count++ < frame_count_limit && !it.done(); | 181 frame_count++ < frame_count_limit && !it.done(); |
| 241 it.Advance()) { | 182 it.Advance()) { |
| 242 JavaScriptFrame* frame = it.frame(); | 183 JavaScriptFrame* frame = it.frame(); |
| 243 JSFunction* function = frame->function(); | 184 JSFunction* function = frame->function(); |
| 244 | 185 |
| 245 if (!FLAG_watch_ic_patching) { | |
| 246 // Adjust threshold each time we have processed | |
| 247 // a certain number of ticks. | |
| 248 if (sampler_ticks_until_threshold_adjustment_ > 0) { | |
| 249 sampler_ticks_until_threshold_adjustment_--; | |
| 250 if (sampler_ticks_until_threshold_adjustment_ <= 0) { | |
| 251 // If the threshold is not already at the minimum | |
| 252 // modify and reset the ticks until next adjustment. | |
| 253 if (sampler_threshold_ > kSamplerThresholdMin) { | |
| 254 sampler_threshold_ -= kSamplerThresholdDelta; | |
| 255 sampler_ticks_until_threshold_adjustment_ = | |
| 256 kSamplerTicksBetweenThresholdAdjustment; | |
| 257 } | |
| 258 } | |
| 259 } | |
| 260 } | |
| 261 | |
| 262 SharedFunctionInfo* shared = function->shared(); | 186 SharedFunctionInfo* shared = function->shared(); |
| 263 Code* shared_code = shared->code(); | 187 Code* shared_code = shared->code(); |
| 264 | 188 |
| 265 if (shared_code->kind() != Code::FUNCTION) continue; | 189 if (shared_code->kind() != Code::FUNCTION) continue; |
| 266 if (function->IsInRecompileQueue()) continue; | 190 if (function->IsInRecompileQueue()) continue; |
| 267 | 191 |
| 268 if (FLAG_always_osr && | 192 if (FLAG_always_osr && |
| 269 shared_code->allow_osr_at_loop_nesting_level() == 0) { | 193 shared_code->allow_osr_at_loop_nesting_level() == 0) { |
| 270 // Testing mode: always try an OSR compile for every function. | 194 // Testing mode: always try an OSR compile for every function. |
| 271 for (int i = 0; i < Code::kMaxLoopNestingMarker; i++) { | 195 for (int i = 0; i < Code::kMaxLoopNestingMarker; i++) { |
| (...skipping 43 matching lines...) |
| 315 shared_code->set_profiler_ticks(0); | 239 shared_code->set_profiler_ticks(0); |
| 316 shared->TryReenableOptimization(); | 240 shared->TryReenableOptimization(); |
| 317 } else { | 241 } else { |
| 318 shared_code->set_profiler_ticks(ticks + 1); | 242 shared_code->set_profiler_ticks(ticks + 1); |
| 319 } | 243 } |
| 320 } | 244 } |
| 321 continue; | 245 continue; |
| 322 } | 246 } |
| 323 if (!function->IsOptimizable()) continue; | 247 if (!function->IsOptimizable()) continue; |
| 324 | 248 |
| 325 if (FLAG_watch_ic_patching) { | 249 int ticks = shared_code->profiler_ticks(); |
| 326 int ticks = shared_code->profiler_ticks(); | |
| 327 | 250 |
| 328 if (ticks >= kProfilerTicksBeforeOptimization) { | 251 if (ticks >= kProfilerTicksBeforeOptimization) { |
| 329 int typeinfo, total, percentage; | 252 int typeinfo, total, percentage; |
| 330 GetICCounts(shared_code, &typeinfo, &total, &percentage); | 253 GetICCounts(shared_code, &typeinfo, &total, &percentage); |
| 331 if (percentage >= FLAG_type_info_threshold) { | 254 if (percentage >= FLAG_type_info_threshold) { |
| 332 // If this particular function hasn't had any ICs patched for enough | 255 // If this particular function hasn't had any ICs patched for enough |
| 333 // ticks, optimize it now. | 256 // ticks, optimize it now. |
| 334 Optimize(function, "hot and stable"); | 257 Optimize(function, "hot and stable"); |
| 335 } else if (ticks >= kTicksWhenNotEnoughTypeInfo) { | 258 } else if (ticks >= kTicksWhenNotEnoughTypeInfo) { |
| 336 Optimize(function, "not much type info but very hot"); | 259 Optimize(function, "not much type info but very hot"); |
| 337 } else { | |
| 338 shared_code->set_profiler_ticks(ticks + 1); | |
| 339 if (FLAG_trace_opt_verbose) { | |
| 340 PrintF("[not yet optimizing "); | |
| 341 function->PrintName(); | |
| 342 PrintF(", not enough type info: %d/%d (%d%%)]\n", | |
| 343 typeinfo, total, percentage); | |
| 344 } | |
| 345 } | |
| 346 } else if (!any_ic_changed_ && | |
| 347 shared_code->instruction_size() < kMaxSizeEarlyOpt) { | |
| 348 // If no IC was patched since the last tick and this function is very | |
| 349 // small, optimistically optimize it now. | |
| 350 Optimize(function, "small function"); | |
| 351 } else { | 260 } else { |
| 352 shared_code->set_profiler_ticks(ticks + 1); | 261 shared_code->set_profiler_ticks(ticks + 1); |
| 262 if (FLAG_trace_opt_verbose) { |
| 263 PrintF("[not yet optimizing "); |
| 264 function->PrintName(); |
| 265 PrintF(", not enough type info: %d/%d (%d%%)]\n", |
| 266 typeinfo, total, percentage); |
| 267 } |
| 353 } | 268 } |
| 354 } else { // !FLAG_watch_ic_patching | 269 } else if (!any_ic_changed_ && |
| 355 samples[sample_count++] = function; | 270 shared_code->instruction_size() < kMaxSizeEarlyOpt) { |
| 356 | 271 // If no IC was patched since the last tick and this function is very |
| 357 int function_size = function->shared()->SourceSize(); | 272 // small, optimistically optimize it now. |
| 358 int threshold_size_factor = (function_size > kSizeLimit) | 273 Optimize(function, "small function"); |
| 359 ? sampler_threshold_size_factor_ | 274 } else { |
| 360 : 1; | 275 shared_code->set_profiler_ticks(ticks + 1); |
| 361 | |
| 362 int threshold = sampler_threshold_ * threshold_size_factor; | |
| 363 | |
| 364 if (LookupSample(function) >= threshold) { | |
| 365 Optimize(function, "sampler window lookup"); | |
| 366 } | |
| 367 } | 276 } |
| 368 } | 277 } |
| 369 if (FLAG_watch_ic_patching) { | 278 any_ic_changed_ = false; |
| 370 any_ic_changed_ = false; | |
| 371 } else { // !FLAG_watch_ic_patching | |
| 372 // Add the collected functions as samples. It's important not to do | |
| 373 // this as part of collecting them because this will interfere with | |
| 374 // the sample lookup in case of recursive functions. | |
| 375 for (int i = 0; i < sample_count; i++) { | |
| 376 AddSample(samples[i], kSamplerFrameWeight[i]); | |
| 377 } | |
| 378 } | |
| 379 } | 279 } |
| 380 | 280 |
| 381 | 281 |
| 382 void RuntimeProfiler::SetUp() { | 282 void RuntimeProfiler::SetUp() { |
| 383 if (!FLAG_watch_ic_patching) { | 283 // Nothing to do. |
| 384 ClearSampleBuffer(); | |
| 385 } | |
| 386 } | 284 } |
| 387 | 285 |
| 388 | 286 |
| 389 void RuntimeProfiler::Reset() { | 287 void RuntimeProfiler::Reset() { |
| 390 if (!FLAG_watch_ic_patching) { | 288 // Nothing to do. |
| 391 sampler_threshold_ = kSamplerThresholdInit; | |
| 392 sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit; | |
| 393 sampler_ticks_until_threshold_adjustment_ = | |
| 394 kSamplerTicksBetweenThresholdAdjustment; | |
| 395 } | |
| 396 } | 289 } |
| 397 | 290 |
| 398 | 291 |
| 399 void RuntimeProfiler::TearDown() { | 292 void RuntimeProfiler::TearDown() { |
| 400 // Nothing to do. | 293 // Nothing to do. |
| 401 } | 294 } |
| 402 | 295 |
| 403 | 296 |
| 404 // Update the pointers in the sampler window after a GC. | |
| 405 void RuntimeProfiler::UpdateSamplesAfterScavenge() { | |
| 406 for (int i = 0; i < kSamplerWindowSize; i++) { | |
| 407 Object* function = sampler_window_[i]; | |
| 408 if (function != NULL && isolate_->heap()->InNewSpace(function)) { | |
| 409 MapWord map_word = HeapObject::cast(function)->map_word(); | |
| 410 if (map_word.IsForwardingAddress()) { | |
| 411 sampler_window_[i] = map_word.ToForwardingAddress(); | |
| 412 } else { | |
| 413 sampler_window_[i] = NULL; | |
| 414 } | |
| 415 } | |
| 416 } | |
| 417 } | |
| 418 | |
| 419 | |
| 420 void RuntimeProfiler::RemoveDeadSamples() { | |
| 421 for (int i = 0; i < kSamplerWindowSize; i++) { | |
| 422 Object* function = sampler_window_[i]; | |
| 423 if (function != NULL && | |
| 424 !Marking::MarkBitFrom(HeapObject::cast(function)).Get()) { | |
| 425 sampler_window_[i] = NULL; | |
| 426 } | |
| 427 } | |
| 428 } | |
| 429 | |
| 430 | |
| 431 void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) { | |
| 432 for (int i = 0; i < kSamplerWindowSize; i++) { | |
| 433 visitor->VisitPointer(&sampler_window_[i]); | |
| 434 } | |
| 435 } | |
| 436 | |
| 437 | |
| 438 } } // namespace v8::internal | 297 } } // namespace v8::internal |
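
Note for reviewers: with the sampler window gone, the per-frame decision in the new OptimizeNow() reduces to a handful of threshold checks. The C++ sketch below is a simplified, self-contained model of that decision, not the actual V8 code: FrameSample, DecideOptimization(), and the concrete values used for FLAG_type_info_threshold, kTicksWhenNotEnoughTypeInfo, and kCodeSizeMultiplier are illustrative stand-ins (the real values live in flags and in hunks skipped above).

// Simplified model of the counter-based heuristic that remains after this CL.
// All names and constants here are illustrative stand-ins, not V8 internals.
#include <cstdio>
#include <string>

// Hypothetical per-frame snapshot of the values the real loop reads from
// SharedFunctionInfo/Code and from RuntimeProfiler state.
struct FrameSample {
  int profiler_ticks;        // shared_code->profiler_ticks()
  int type_info_percentage;  // percentage computed by GetICCounts()
  int instruction_size;      // shared_code->instruction_size()
  bool any_ic_changed;       // RuntimeProfiler::any_ic_changed_
};

// Thresholds mirroring the constants in the diff; the "Assumed" values are
// placeholders because the real ones come from flags or hunks not shown above.
static const int kProfilerTicksBeforeOptimization = 2;
static const int kAssumedTypeInfoThreshold = 25;        // stand-in for FLAG_type_info_threshold
static const int kAssumedTicksWhenNotEnoughTypeInfo = 100;
static const int kAssumedMaxSizeEarlyOpt = 5 * 144;     // stand-in for 5 * kCodeSizeMultiplier

// Returns the optimization reason, or "" when the function only gets another tick.
std::string DecideOptimization(const FrameSample& s) {
  if (s.profiler_ticks >= kProfilerTicksBeforeOptimization) {
    if (s.type_info_percentage >= kAssumedTypeInfoThreshold)
      return "hot and stable";
    if (s.profiler_ticks >= kAssumedTicksWhenNotEnoughTypeInfo)
      return "not much type info but very hot";
    return "";  // hot, but type feedback is still unstable: keep ticking
  }
  if (!s.any_ic_changed && s.instruction_size < kAssumedMaxSizeEarlyOpt)
    return "small function";  // optimistic early optimization
  return "";  // keep ticking
}

int main() {
  FrameSample s = {3, 80, 4000, true};
  std::string reason = DecideOptimization(s);
  std::printf("decision: %s\n", reason.empty() ? "tick only" : reason.c_str());
  return 0;
}

The point of the sketch is only that every path either optimizes with a named reason or increments profiler_ticks, which is why the CL can delete the sampler window and its GC hooks (UpdateSamplesAfterScavenge, RemoveDeadSamples, UpdateSamplesAfterCompact) along with the SetUp()/Reset() state without changing the behavior of the previously default FLAG_watch_ic_patching path.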