OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_PROFILER_CPU_PROFILER_H_ | 5 #ifndef V8_PROFILER_CPU_PROFILER_H_ |
6 #define V8_PROFILER_CPU_PROFILER_H_ | 6 #define V8_PROFILER_CPU_PROFILER_H_ |
7 | 7 |
8 #include "src/allocation.h" | 8 #include "src/allocation.h" |
9 #include "src/atomic-utils.h" | 9 #include "src/atomic-utils.h" |
10 #include "src/base/atomicops.h" | 10 #include "src/base/atomicops.h" |
(...skipping 121 matching lines...)
132 base::TimeDelta period); | 132 base::TimeDelta period); |
133 virtual ~ProfilerEventsProcessor(); | 133 virtual ~ProfilerEventsProcessor(); |
134 | 134 |
135 // Thread control. | 135 // Thread control. |
136 virtual void Run(); | 136 virtual void Run(); |
137 void StopSynchronously(); | 137 void StopSynchronously(); |
138 INLINE(bool running()) { return !!base::NoBarrier_Load(&running_); } | 138 INLINE(bool running()) { return !!base::NoBarrier_Load(&running_); } |
139 void Enqueue(const CodeEventsContainer& event); | 139 void Enqueue(const CodeEventsContainer& event); |
140 | 140 |
141 // Puts current stack into tick sample events buffer. | 141 // Puts current stack into tick sample events buffer. |
142 void AddCurrentStack(Isolate* isolate); | 142 void AddCurrentStack(Isolate* isolate, bool update_stats = false); |
143 void AddDeoptStack(Isolate* isolate, Address from, int fp_to_sp_delta); | 143 void AddDeoptStack(Isolate* isolate, Address from, int fp_to_sp_delta); |
144 | 144 |
145 // Tick sample events are filled directly in the buffer of the circular | 145 // Tick sample events are filled directly in the buffer of the circular |
146 // queue (the record structure has a fixed width, but usually not all | 146 // queue (the record structure has a fixed width, but usually not all |
147 // stack frame entries are filled). StartTickSample() returns a pointer | 147 // stack frame entries are filled). StartTickSample() returns a pointer |
148 // to the next record of the buffer. | 148 // to the next record of the buffer. |
149 inline TickSample* StartTickSample(); | 149 inline TickSample* StartTickSample(); |
150 inline void FinishTickSample(); | 150 inline void FinishTickSample(); |
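
For context, the StartTickSample()/FinishTickSample() pair follows a reserve-then-publish pattern on a fixed-width circular queue: the sampler fills a slot in place and only then makes it visible to the processor thread. Below is an illustrative, self-contained sketch of that pattern; the names (StartFinishRing, SampleRecord) and memory-ordering details are hypothetical and are not V8's SamplingCircularQueue.

#include <array>
#include <atomic>
#include <cstddef>

// Hypothetical record type; in V8 the real type is TickSampleEventRecord.
struct SampleRecord {
  void* frames[64];            // fixed-width slot; not all entries get filled
  std::size_t frame_count = 0;
};

template <typename T, std::size_t kLength>
class StartFinishRing {
 public:
  // Producer (sampler): reserve the next slot so it can be filled in place.
  // Returns nullptr when the buffer is full.
  T* StartEnqueue() {
    std::size_t next = (write_ + 1) % kLength;
    if (next == read_pos_.load(std::memory_order_acquire)) return nullptr;
    return &buffer_[write_];
  }
  // Producer: publish the slot previously returned by StartEnqueue().
  void FinishEnqueue() {
    write_ = (write_ + 1) % kLength;
    write_pub_.store(write_, std::memory_order_release);
  }

  // Consumer (processor thread): oldest published slot, or nullptr if empty.
  const T* Peek() const {
    std::size_t read = read_pos_.load(std::memory_order_relaxed);
    if (read == write_pub_.load(std::memory_order_acquire)) return nullptr;
    return &buffer_[read];
  }
  // Consumer: drop the slot returned by the last successful Peek().
  void Remove() {
    std::size_t read = read_pos_.load(std::memory_order_relaxed);
    read_pos_.store((read + 1) % kLength, std::memory_order_release);
  }

 private:
  std::array<T, kLength> buffer_{};
  std::size_t write_ = 0;                  // owned by the producer
  std::atomic<std::size_t> write_pub_{0};  // last published write position
  std::atomic<std::size_t> read_pos_{0};   // owned by the consumer
};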
151 | 151 |
152 // SamplingCircularQueue has stricter alignment requirements than a normal new | 152 // SamplingCircularQueue has stricter alignment requirements than a normal new |
153 // can fulfil, so we need to provide our own new/delete here. | 153 // can fulfil, so we need to provide our own new/delete here. |
154 void* operator new(size_t size); | 154 void* operator new(size_t size); |
155 void operator delete(void* ptr); | 155 void operator delete(void* ptr); |
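
A note on the custom new/delete: when a member (here the SamplingCircularQueue) needs stronger alignment than the default heap guarantees, the class must obtain suitably aligned storage itself. V8 of this era uses its own aligned-allocation helpers; as a rough modern equivalent (assuming C++17 aligned new, which this code predates), the idea looks like this sketch with a hypothetical class and alignment:

#include <cstddef>
#include <new>

class OverAlignedProcessor {
 public:
  static constexpr std::size_t kAlignment = 64;  // hypothetical requirement

  // Class-specific operator new: forward to the aligned form of the global
  // allocator so `new OverAlignedProcessor` always yields storage aligned to
  // kAlignment, even if the default operator new guarantees less.
  void* operator new(std::size_t size) {
    return ::operator new(size, std::align_val_t(kAlignment));
  }
  void operator delete(void* ptr) noexcept {
    ::operator delete(ptr, std::align_val_t(kAlignment));
  }

 private:
  alignas(kAlignment) unsigned char queue_storage_[256];  // over-aligned member
};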
156 | 156 |
157 private: | 157 private: |
158 // Called from events processing thread (Run() method.) | 158 // Called from events processing thread (Run() method.) |
159 bool ProcessCodeEvent(); | 159 bool ProcessCodeEvent(); |
160 | 160 |
161 enum SampleProcessingResult { | 161 enum SampleProcessingResult { |
162 OneSampleProcessed, | 162 OneSampleProcessed, |
163 FoundSampleForNextCodeEvent, | 163 FoundSampleForNextCodeEvent, |
164 NoSamplesInQueue | 164 NoSamplesInQueue |
165 }; | 165 }; |
166 SampleProcessingResult ProcessOneSample(); | 166 SampleProcessingResult ProcessOneSample(); |
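
The SampleProcessingResult enum drives how the processing thread interleaves tick samples with code events. The following is a plausible, self-contained sketch of such a loop, not V8's actual Run() body; the stub methods, back-off interval, and class name are hypothetical.

#include <chrono>
#include <thread>

enum SampleProcessingResult {
  OneSampleProcessed,
  FoundSampleForNextCodeEvent,
  NoSamplesInQueue
};

class ProcessorLoopSketch {
 public:
  void Run() {
    // In the real processor, running_ is cleared by StopSynchronously().
    while (running_) {
      // Drain every sample that can already be attributed to known code.
      SampleProcessingResult result;
      do {
        result = ProcessOneSample();
      } while (result == OneSampleProcessed);

      // Either the sample queue is empty, or the next sample refers to a
      // code event not yet consumed: handle code events before looking at
      // samples again, and back off briefly if there was nothing to do.
      bool had_code_event = ProcessCodeEvent();
      if (!had_code_event && result == NoSamplesInQueue) {
        std::this_thread::sleep_for(std::chrono::microseconds(100));
      }
    }
  }

 private:
  // Stand-ins for the real queue-backed methods declared above.
  SampleProcessingResult ProcessOneSample() { return NoSamplesInQueue; }
  bool ProcessCodeEvent() { return false; }
  bool running_ = true;
};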
167 | 167 |
168 ProfileGenerator* generator_; | 168 ProfileGenerator* generator_; |
169 Sampler* sampler_; | 169 Sampler* sampler_; |
170 base::Atomic32 running_; | 170 base::Atomic32 running_; |
171 // Sampling period in microseconds. | 171 const base::TimeDelta period_; // Samples & code events processing period. |
172 const base::TimeDelta period_; | |
173 LockedQueue<CodeEventsContainer> events_buffer_; | 172 LockedQueue<CodeEventsContainer> events_buffer_; |
174 static const size_t kTickSampleBufferSize = 1 * MB; | 173 static const size_t kTickSampleBufferSize = 1 * MB; |
175 static const size_t kTickSampleQueueLength = | 174 static const size_t kTickSampleQueueLength = |
176 kTickSampleBufferSize / sizeof(TickSampleEventRecord); | 175 kTickSampleBufferSize / sizeof(TickSampleEventRecord); |
177 SamplingCircularQueue<TickSampleEventRecord, | 176 SamplingCircularQueue<TickSampleEventRecord, |
178 kTickSampleQueueLength> ticks_buffer_; | 177 kTickSampleQueueLength> ticks_buffer_; |
179 LockedQueue<TickSampleEventRecord> ticks_from_vm_buffer_; | 178 LockedQueue<TickSampleEventRecord> ticks_from_vm_buffer_; |
180 AtomicNumber<unsigned> last_code_event_id_; | 179 AtomicNumber<unsigned> last_code_event_id_; |
181 unsigned last_processed_code_event_id_; | 180 unsigned last_processed_code_event_id_; |
182 }; | 181 }; |
(...skipping 15 matching lines...)
198 explicit CpuProfiler(Isolate* isolate); | 197 explicit CpuProfiler(Isolate* isolate); |
199 | 198 |
200 CpuProfiler(Isolate* isolate, | 199 CpuProfiler(Isolate* isolate, |
201 CpuProfilesCollection* test_collection, | 200 CpuProfilesCollection* test_collection, |
202 ProfileGenerator* test_generator, | 201 ProfileGenerator* test_generator, |
203 ProfilerEventsProcessor* test_processor); | 202 ProfilerEventsProcessor* test_processor); |
204 | 203 |
205 virtual ~CpuProfiler(); | 204 virtual ~CpuProfiler(); |
206 | 205 |
207 void set_sampling_interval(base::TimeDelta value); | 206 void set_sampling_interval(base::TimeDelta value); |
| 207 void CollectSample(); |
208 void StartProfiling(const char* title, bool record_samples = false); | 208 void StartProfiling(const char* title, bool record_samples = false); |
209 void StartProfiling(String* title, bool record_samples); | 209 void StartProfiling(String* title, bool record_samples); |
210 CpuProfile* StopProfiling(const char* title); | 210 CpuProfile* StopProfiling(const char* title); |
211 CpuProfile* StopProfiling(String* title); | 211 CpuProfile* StopProfiling(String* title); |
212 int GetProfilesCount(); | 212 int GetProfilesCount(); |
213 CpuProfile* GetProfile(int index); | 213 CpuProfile* GetProfile(int index); |
214 void DeleteAllProfiles(); | 214 void DeleteAllProfiles(); |
215 void DeleteProfile(CpuProfile* profile); | 215 void DeleteProfile(CpuProfile* profile); |
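
Taken together, the control surface above can be exercised roughly as follows. This is a minimal sketch assuming V8 internal headers are usable by the caller and `isolate` is a live, initialized internal Isolate; the helper name and the 500 microsecond interval are made up for illustration.

#include "src/profiler/cpu-profiler.h"

namespace v8 {
namespace internal {

void ProfileWorkload(Isolate* isolate) {
  CpuProfiler profiler(isolate);
  profiler.set_sampling_interval(base::TimeDelta::FromMicroseconds(500));
  profiler.StartProfiling("workload", /*record_samples=*/true);

  // ... run the JavaScript workload of interest here ...

  // Force one extra sample at a point of interest, independent of the
  // sampling timer; this is what the newly added CollectSample() enables.
  profiler.CollectSample();

  CpuProfile* profile = profiler.StopProfiling("workload");
  if (profile != nullptr) {
    // Inspect or serialize the profile, then release it.
    profiler.DeleteProfile(profile);
  }
}

}  // namespace internal
}  // namespace v8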
216 | 216 |
217 // Invoked from stack sampler (thread or signal handler.) | 217 // Invoked from stack sampler (thread or signal handler.) |
(...skipping 51 matching lines...)
269 bool is_profiling_; | 269 bool is_profiling_; |
270 | 270 |
271 DISALLOW_COPY_AND_ASSIGN(CpuProfiler); | 271 DISALLOW_COPY_AND_ASSIGN(CpuProfiler); |
272 }; | 272 }; |
273 | 273 |
274 } // namespace internal | 274 } // namespace internal |
275 } // namespace v8 | 275 } // namespace v8 |
276 | 276 |
277 | 277 |
278 #endif // V8_PROFILER_CPU_PROFILER_H_ | 278 #endif // V8_PROFILER_CPU_PROFILER_H_ |