Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 27 matching lines...) | |
| 38 #include "../include/v8-profiler.h" | 38 #include "../include/v8-profiler.h" |
| 39 | 39 |
| 40 namespace v8 { | 40 namespace v8 { |
| 41 namespace internal { | 41 namespace internal { |
| 42 | 42 |
| 43 static const int kTickSamplesBufferChunkSize = 64 * KB; | 43 static const int kTickSamplesBufferChunkSize = 64 * KB; |
| 44 static const int kTickSamplesBufferChunksCount = 16; | 44 static const int kTickSamplesBufferChunksCount = 16; |
| 45 static const int kProfilerStackSize = 64 * KB; | 45 static const int kProfilerStackSize = 64 * KB; |
| 46 | 46 |
| 47 | 47 |
| 48 ProfilerEventsProcessor::ProfilerEventsProcessor(ProfileGenerator* generator) | 48 ProfilerEventsProcessor::ProfilerEventsProcessor( |
| 49 ProfileGenerator* generator, | |
| 50 Sampler* sampler, | |
| 51 int period_in_useconds) | |
| 49 : Thread(Thread::Options("v8:ProfEvntProc", kProfilerStackSize)), | 52 : Thread(Thread::Options("v8:ProfEvntProc", kProfilerStackSize)), |
| 50 generator_(generator), | 53 generator_(generator), |
| 54 sampler_(sampler), | |
| 51 running_(true), | 55 running_(true), |
| 56 period_in_useconds_(period_in_useconds), | |
| 52 ticks_buffer_(sizeof(TickSampleEventRecord), | 57 ticks_buffer_(sizeof(TickSampleEventRecord), |
| 53 kTickSamplesBufferChunkSize, | 58 kTickSamplesBufferChunkSize, |
| 54 kTickSamplesBufferChunksCount), | 59 kTickSamplesBufferChunksCount), |
| 55 last_code_event_id_(0), last_processed_code_event_id_(0) { | 60 last_code_event_id_(0), last_processed_code_event_id_(0) { |
| 56 } | 61 } |
| 57 | 62 |
| 58 | 63 |
| 59 void ProfilerEventsProcessor::Enqueue(const CodeEventsContainer& event) { | 64 void ProfilerEventsProcessor::Enqueue(const CodeEventsContainer& event) { |
| 60 event.generic.order = ++last_code_event_id_; | 65 event.generic.order = ++last_code_event_id_; |
| 61 events_buffer_.Enqueue(event); | 66 events_buffer_.Enqueue(event); |
| (...skipping 67 matching lines...) | |
| 129 // in case of frames_count having a wild value. | 134 // in case of frames_count having a wild value. |
| 130 if (record.sample.frames_count < 0 | 135 if (record.sample.frames_count < 0 |
| 131 || record.sample.frames_count > TickSample::kMaxFramesCount) | 136 || record.sample.frames_count > TickSample::kMaxFramesCount) |
| 132 record.sample.frames_count = 0; | 137 record.sample.frames_count = 0; |
| 133 generator_->RecordTickSample(record.sample); | 138 generator_->RecordTickSample(record.sample); |
| 134 ticks_buffer_.FinishDequeue(); | 139 ticks_buffer_.FinishDequeue(); |
| 135 } | 140 } |
| 136 } | 141 } |
| 137 | 142 |
| 138 | 143 |
| 144 void ProfilerEventsProcessor::ProcessEventsAndDoSample() { | |
| 145 int64_t stop_time = OS::Ticks() + period_in_useconds_; | |
| 146 // Keep processing existing events until we need to do next sample. | |
| 147 while (OS::Ticks() < stop_time) { | |
| 148 if (ProcessTicks()) { | |
| 149 // All ticks of the current dequeue_order are processed, | |
| 150 // proceed to the next code event. | |
| 151 ProcessCodeEvent(); | |
| 152 } | |
| 153 } | |
| 154 // Schedule next sample. sampler_ is NULL in tests. | |
| 155 if (sampler_) sampler_->DoSample(); | |
| 156 } | |
| 157 | |
| 158 | |
| 159 void ProfilerEventsProcessor::ProcessEventsAndYield() { | |
| 160 // Process ticks until we have any. | |
| 161 if (ProcessTicks()) { | |
| 162 // All ticks of the current dequeue_order are processed, | |
| 163 // proceed to the next code event. | |
| 164 ProcessCodeEvent(); | |
| 165 } | |
| 166 YieldCPU(); | |
| 167 } | |
| 168 | |
| 169 | |
| 139 void ProfilerEventsProcessor::Run() { | 170 void ProfilerEventsProcessor::Run() { |
| 140 while (running_) { | 171 while (running_) { |
| 141 // Process ticks until we have any. | 172 if (Sampler::CanSampleOnProfilerEventsProcessorThread()) { |
| 142 if (ProcessTicks()) { | 173 ProcessEventsAndDoSample(); |
| 143 // All ticks of the current last_processed_code_event_id_ are processed, | 174 } else { |
| 144 // proceed to the next code event. | 175 ProcessEventsAndYield(); |
| Benedikt Meurer (2013/07/30 06:41:04): I think there are two severe issues with this loop | |
| yurys (2013/07/30 08:52:26): The profiler code tries to avoid usage of synchron | |
| 145 ProcessCodeEvent(); | |
| 146 } | 176 } |
| 147 YieldCPU(); | |
| 148 } | 177 } |
| 149 | 178 |
| 150 // Process remaining tick events. | 179 // Process remaining tick events. |
| 151 ticks_buffer_.FlushResidualRecords(); | 180 ticks_buffer_.FlushResidualRecords(); |
| 152 do { | 181 do { |
| 153 ProcessTicks(); | 182 ProcessTicks(); |
| 154 } while (ProcessCodeEvent()); | 183 } while (ProcessCodeEvent()); |
| 155 } | 184 } |
| 156 | 185 |
| 157 | 186 |
| (...skipping 263 matching lines...) | |
| 421 } | 450 } |
| 422 | 451 |
| 423 | 452 |
| 424 void CpuProfiler::StartProcessorIfNotStarted() { | 453 void CpuProfiler::StartProcessorIfNotStarted() { |
| 425 if (processor_ == NULL) { | 454 if (processor_ == NULL) { |
| 426 Logger* logger = isolate_->logger(); | 455 Logger* logger = isolate_->logger(); |
| 427 // Disable logging when using the new implementation. | 456 // Disable logging when using the new implementation. |
| 428 saved_logging_nesting_ = logger->logging_nesting_; | 457 saved_logging_nesting_ = logger->logging_nesting_; |
| 429 logger->logging_nesting_ = 0; | 458 logger->logging_nesting_ = 0; |
| 430 generator_ = new ProfileGenerator(profiles_); | 459 generator_ = new ProfileGenerator(profiles_); |
| 431 processor_ = new ProfilerEventsProcessor(generator_); | 460 Sampler* sampler = logger->sampler(); |
| 461 processor_ = new ProfilerEventsProcessor( | |
| 462 generator_, sampler, FLAG_cpu_profiler_sampling_interval); | |
| 432 is_profiling_ = true; | 463 is_profiling_ = true; |
| 433 processor_->StartSynchronously(); | 464 processor_->StartSynchronously(); |
| 434 // Enumerate stuff we already have in the heap. | 465 // Enumerate stuff we already have in the heap. |
| 435 ASSERT(isolate_->heap()->HasBeenSetUp()); | 466 ASSERT(isolate_->heap()->HasBeenSetUp()); |
| 436 if (!FLAG_prof_browser_mode) { | 467 if (!FLAG_prof_browser_mode) { |
| 437 logger->LogCodeObjects(); | 468 logger->LogCodeObjects(); |
| 438 } | 469 } |
| 439 logger->LogCompiledFunctions(); | 470 logger->LogCompiledFunctions(); |
| 440 logger->LogAccessorCallbacks(); | 471 logger->LogAccessorCallbacks(); |
| 441 LogBuiltins(); | 472 LogBuiltins(); |
| 442 // Enable stack sampling. | 473 // Enable stack sampling. |
| 443 Sampler* sampler = logger->sampler(); | 474 if (Sampler::CanSampleOnProfilerEventsProcessorThread()) { |
| 475 sampler->SetHasProcessingThread(true); | |
| 476 } | |
| 444 sampler->IncreaseProfilingDepth(); | 477 sampler->IncreaseProfilingDepth(); |
| 445 if (!sampler->IsActive()) { | 478 if (!sampler->IsActive()) { |
| 446 sampler->Start(); | 479 sampler->Start(); |
| 447 need_to_stop_sampler_ = true; | 480 need_to_stop_sampler_ = true; |
| 448 } | 481 } |
| 449 } | 482 } |
| 450 } | 483 } |
| 451 | 484 |
| 452 | 485 |
| 453 CpuProfile* CpuProfiler::StopProfiling(const char* title) { | 486 CpuProfile* CpuProfiler::StopProfiling(const char* title) { |
| (...skipping 19 matching lines...) | |
| 473 | 506 |
| 474 void CpuProfiler::StopProcessorIfLastProfile(const char* title) { | 507 void CpuProfiler::StopProcessorIfLastProfile(const char* title) { |
| 475 if (profiles_->IsLastProfile(title)) StopProcessor(); | 508 if (profiles_->IsLastProfile(title)) StopProcessor(); |
| 476 } | 509 } |
| 477 | 510 |
| 478 | 511 |
| 479 void CpuProfiler::StopProcessor() { | 512 void CpuProfiler::StopProcessor() { |
| 480 Logger* logger = isolate_->logger(); | 513 Logger* logger = isolate_->logger(); |
| 481 Sampler* sampler = reinterpret_cast<Sampler*>(logger->ticker_); | 514 Sampler* sampler = reinterpret_cast<Sampler*>(logger->ticker_); |
| 482 sampler->DecreaseProfilingDepth(); | 515 sampler->DecreaseProfilingDepth(); |
| 483 if (need_to_stop_sampler_) { | |
| 484 sampler->Stop(); | |
| 485 need_to_stop_sampler_ = false; | |
| 486 } | |
| 487 is_profiling_ = false; | 516 is_profiling_ = false; |
| 488 processor_->StopSynchronously(); | 517 processor_->StopSynchronously(); |
| 489 delete processor_; | 518 delete processor_; |
| 490 delete generator_; | 519 delete generator_; |
| 491 processor_ = NULL; | 520 processor_ = NULL; |
| 492 generator_ = NULL; | 521 generator_ = NULL; |
| 522 if (Sampler::CanSampleOnProfilerEventsProcessorThread()) { | |
| 523 sampler->SetHasProcessingThread(false); | |
| 524 } | |
| 525 if (need_to_stop_sampler_) { | |
| 526 sampler->Stop(); | |
| 527 need_to_stop_sampler_ = false; | |
| 528 } | |
| 493 logger->logging_nesting_ = saved_logging_nesting_; | 529 logger->logging_nesting_ = saved_logging_nesting_; |
| 494 } | 530 } |
| 495 | 531 |
| 496 | 532 |
| 497 void CpuProfiler::LogBuiltins() { | 533 void CpuProfiler::LogBuiltins() { |
| 498 Builtins* builtins = isolate_->builtins(); | 534 Builtins* builtins = isolate_->builtins(); |
| 499 ASSERT(builtins->is_initialized()); | 535 ASSERT(builtins->is_initialized()); |
| 500 for (int i = 0; i < Builtins::builtin_count; i++) { | 536 for (int i = 0; i < Builtins::builtin_count; i++) { |
| 501 CodeEventsContainer evt_rec(CodeEventRecord::REPORT_BUILTIN); | 537 CodeEventsContainer evt_rec(CodeEventRecord::REPORT_BUILTIN); |
| 502 ReportBuiltinEventRecord* rec = &evt_rec.ReportBuiltinEventRecord_; | 538 ReportBuiltinEventRecord* rec = &evt_rec.ReportBuiltinEventRecord_; |
| 503 Builtins::Name id = static_cast<Builtins::Name>(i); | 539 Builtins::Name id = static_cast<Builtins::Name>(i); |
| 504 rec->start = builtins->builtin(id)->address(); | 540 rec->start = builtins->builtin(id)->address(); |
| 505 rec->builtin_id = id; | 541 rec->builtin_id = id; |
| 506 processor_->Enqueue(evt_rec); | 542 processor_->Enqueue(evt_rec); |
| 507 } | 543 } |
| 508 } | 544 } |
| 509 | 545 |
| 510 | 546 |
| 511 } } // namespace v8::internal | 547 } } // namespace v8::internal |
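
The heart of this change is the new Run() loop: when Sampler::CanSampleOnProfilerEventsProcessorThread() returns true, the ProfilerEventsProcessor thread drives sampling itself via ProcessEventsAndDoSample(), draining queued code events until the sampling period (period_in_useconds_) elapses and then calling sampler_->DoSample(); otherwise it falls back to the old ProcessEventsAndYield() path. The sketch below illustrates that drain-then-sample pattern with standard C++ threads. It is an illustration only, not V8 code: the EventsProcessor class, the std::function-based queue, and the mutex are stand-ins (V8 uses its own Thread wrapper and circular sample buffers precisely to avoid such locking, as yurys notes above).

```cpp
// Minimal sketch (not V8 code): a processor thread that drains an event
// queue for one sampling period, then triggers the sampler itself.
#include <atomic>
#include <chrono>
#include <functional>
#include <iostream>
#include <mutex>
#include <queue>
#include <thread>
#include <utility>

class EventsProcessor {
 public:
  EventsProcessor(std::function<void()> do_sample,
                  std::chrono::microseconds period)
      : do_sample_(std::move(do_sample)), period_(period) {}

  void Start() { thread_ = std::thread([this] { Run(); }); }

  void StopSynchronously() {
    running_.store(false);
    thread_.join();
  }

  void Enqueue(std::function<void()> event) {
    std::lock_guard<std::mutex> lock(mutex_);
    events_.push(std::move(event));
  }

 private:
  void Run() {
    while (running_.load()) {
      // Analogous to ProcessEventsAndDoSample(): consume queued code events
      // until the next sample is due, then trigger the sample ourselves.
      auto stop_time = std::chrono::steady_clock::now() + period_;
      while (std::chrono::steady_clock::now() < stop_time) {
        if (!ProcessOneEvent()) {
          std::this_thread::yield();  // queue empty: give up the CPU briefly
        }
      }
      do_sample_();  // corresponds to sampler_->DoSample()
    }
    // Mirror the flush after the main loop: drain any remaining events.
    while (ProcessOneEvent()) {
    }
  }

  // Pops and runs one queued event; returns false if the queue was empty.
  bool ProcessOneEvent() {
    std::function<void()> event;
    {
      std::lock_guard<std::mutex> lock(mutex_);
      if (events_.empty()) return false;
      event = std::move(events_.front());
      events_.pop();
    }
    event();
    return true;
  }

  std::function<void()> do_sample_;
  std::chrono::microseconds period_;
  std::atomic<bool> running_{true};
  std::thread thread_;
  std::mutex mutex_;
  std::queue<std::function<void()>> events_;
};

int main() {
  EventsProcessor processor([] { std::cout << "sample\n"; },
                            std::chrono::microseconds(1000));
  processor.Start();
  processor.Enqueue([] { std::cout << "code event\n"; });
  std::this_thread::sleep_for(std::chrono::milliseconds(5));
  processor.StopSynchronously();
}
```

A related detail in the new StopProcessor() is the ordering: processor_->StopSynchronously() now runs before SetHasProcessingThread(false) and sampler->Stop(), presumably so the sampler stays configured while the processing thread may still issue its final DoSample() calls.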