OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 26 matching lines...) |
37 namespace v8 { | 37 namespace v8 { |
38 namespace internal { | 38 namespace internal { |
39 | 39 |
40 // Forward declarations. | 40 // Forward declarations. |
41 class CodeEntry; | 41 class CodeEntry; |
42 class CodeMap; | 42 class CodeMap; |
43 class CompilationInfo; | 43 class CompilationInfo; |
44 class CpuProfile; | 44 class CpuProfile; |
45 class CpuProfilesCollection; | 45 class CpuProfilesCollection; |
46 class ProfileGenerator; | 46 class ProfileGenerator; |
47 class TokenEnumerator; | |
48 | 47 |
49 #define CODE_EVENTS_TYPE_LIST(V) \ | 48 #define CODE_EVENTS_TYPE_LIST(V) \ |
50 V(CODE_CREATION, CodeCreateEventRecord) \ | 49 V(CODE_CREATION, CodeCreateEventRecord) \ |
51 V(CODE_MOVE, CodeMoveEventRecord) \ | 50 V(CODE_MOVE, CodeMoveEventRecord) \ |
52 V(SHARED_FUNC_MOVE, SharedFunctionInfoMoveEventRecord) | 51 V(SHARED_FUNC_MOVE, SharedFunctionInfoMoveEventRecord) \ |
| 52 V(REPORT_BUILTIN, ReportBuiltinEventRecord) |
53 | 53 |
54 | 54 |
55 class CodeEventRecord { | 55 class CodeEventRecord { |
56 public: | 56 public: |
57 #define DECLARE_TYPE(type, ignore) type, | 57 #define DECLARE_TYPE(type, ignore) type, |
58 enum Type { | 58 enum Type { |
59 NONE = 0, | 59 NONE = 0, |
60 CODE_EVENTS_TYPE_LIST(DECLARE_TYPE) | 60 CODE_EVENTS_TYPE_LIST(DECLARE_TYPE) |
61 NUMBER_OF_TYPES | 61 NUMBER_OF_TYPES |
62 }; | 62 }; |
63 #undef DECLARE_TYPE | 63 #undef DECLARE_TYPE |
64 | 64 |
65 Type type; | 65 Type type; |
66 unsigned order; | 66 mutable unsigned order; |
67 }; | 67 }; |
68 | 68 |
69 | 69 |
70 class CodeCreateEventRecord : public CodeEventRecord { | 70 class CodeCreateEventRecord : public CodeEventRecord { |
71 public: | 71 public: |
72 Address start; | 72 Address start; |
73 CodeEntry* entry; | 73 CodeEntry* entry; |
74 unsigned size; | 74 unsigned size; |
75 Address shared; | 75 Address shared; |
76 | 76 |
(...skipping 12 matching lines...) |
89 | 89 |
90 class SharedFunctionInfoMoveEventRecord : public CodeEventRecord { | 90 class SharedFunctionInfoMoveEventRecord : public CodeEventRecord { |
91 public: | 91 public: |
92 Address from; | 92 Address from; |
93 Address to; | 93 Address to; |
94 | 94 |
95 INLINE(void UpdateCodeMap(CodeMap* code_map)); | 95 INLINE(void UpdateCodeMap(CodeMap* code_map)); |
96 }; | 96 }; |
97 | 97 |
98 | 98 |
| 99 class ReportBuiltinEventRecord : public CodeEventRecord { |
| 100 public: |
| 101 Address start; |
| 102 Builtins::Name builtin_id; |
| 103 |
| 104 INLINE(void UpdateCodeMap(CodeMap* code_map)); |
| 105 }; |
| 106 |
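Editor's note: the new REPORT_BUILTIN record lets the processor thread tag builtin code ranges in its CodeMap without the sampler touching the heap. As a hedged sketch of how a LogBuiltins-style walk might enqueue one record per builtin (the accessors isolate_->builtins(), Builtins::builtin_count and Code::address() are assumptions about the surrounding V8 sources, not part of this header):

// Sketch only: enqueue one REPORT_BUILTIN event per builtin so the processor
// thread can associate that address range with a builtin id in the CodeMap.
void CpuProfiler::LogBuiltins() {
  Builtins* builtins = isolate_->builtins();            // assumed accessor
  for (int i = 0; i < Builtins::builtin_count; i++) {   // assumed enum bound
    CodeEventsContainer evt_rec(CodeEventRecord::REPORT_BUILTIN);
    ReportBuiltinEventRecord* rec = &evt_rec.ReportBuiltinEventRecord_;
    Builtins::Name id = static_cast<Builtins::Name>(i);
    rec->start = builtins->builtin(id)->address();      // assumed accessor
    rec->builtin_id = id;
    processor_->Enqueue(evt_rec);
  }
}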
| 107 |
99 class TickSampleEventRecord { | 108 class TickSampleEventRecord { |
100 public: | 109 public: |
101 // The parameterless constructor is used when we dequeue data from | 110 // The parameterless constructor is used when we dequeue data from |
102 // the ticks buffer. | 111 // the ticks buffer. |
103 TickSampleEventRecord() { } | 112 TickSampleEventRecord() { } |
104 explicit TickSampleEventRecord(unsigned order) | 113 explicit TickSampleEventRecord(unsigned order) |
105 : filler(1), | 114 : filler(1), |
106 order(order) { | 115 order(order) { |
107 ASSERT(filler != SamplingCircularQueue::kClear); | 116 ASSERT(filler != SamplingCircularQueue::kClear); |
108 } | 117 } |
109 | 118 |
110 // The first machine word of a TickSampleEventRecord must not ever | 119 // The first machine word of a TickSampleEventRecord must not ever |
111 // become equal to SamplingCircularQueue::kClear. As both order and | 120 // become equal to SamplingCircularQueue::kClear. As both order and |
112 // TickSample's first field are not reliable in this sense (order | 121 // TickSample's first field are not reliable in this sense (order |
113 // can overflow, TickSample can have all fields reset), we are | 122 // can overflow, TickSample can have all fields reset), we are |
114 // forced to use an artificial filler field. | 123 // forced to use an artificial filler field. |
115 int filler; | 124 int filler; |
116 unsigned order; | 125 unsigned order; |
117 TickSample sample; | 126 TickSample sample; |
118 | 127 |
119 static TickSampleEventRecord* cast(void* value) { | 128 static TickSampleEventRecord* cast(void* value) { |
120 return reinterpret_cast<TickSampleEventRecord*>(value); | 129 return reinterpret_cast<TickSampleEventRecord*>(value); |
121 } | 130 } |
122 }; | 131 }; |
123 | 132 |
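Editor's note: the filler comment above encodes a real invariant — the sampling circular queue uses the first machine word of each slot as its empty/occupied marker, so that word must never read as kClear for a live record. A standalone sketch of the idea with simplified stand-in types (not V8 code):

#include <cassert>
#include <cstddef>

static const int kClear = 0;  // value a queue could use to mark a free slot

struct Record {
  int filler;       // producers always store a non-kClear value here
  unsigned order;   // may legitimately wrap to 0, hence the separate filler
  explicit Record(unsigned o) : filler(1), order(o) {}
};

int main() {
  Record r(42);
  // The marker only works if it really is the first word of the record.
  assert(offsetof(Record, filler) == 0);
  // A consumer can test slot occupancy by reading just that word.
  assert(*reinterpret_cast<int*>(&r) != kClear);
  return 0;
}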
124 | 133 |
| 134 class CodeEventsContainer { |
| 135 public: |
| 136 explicit CodeEventsContainer( |
| 137 CodeEventRecord::Type type = CodeEventRecord::NONE) { |
| 138 generic.type = type; |
| 139 } |
| 140 union { |
| 141 CodeEventRecord generic; |
| 142 #define DECLARE_CLASS(ignore, type) type type##_; |
| 143 CODE_EVENTS_TYPE_LIST(DECLARE_CLASS) |
| 144 #undef DECLARE_CLASS |
| 145 }; |
| 146 }; |
| 147 |
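Editor's note: CODE_EVENTS_TYPE_LIST is an X macro — the same list is expanded once with DECLARE_TYPE to produce the CodeEventRecord::Type enum and once with DECLARE_CLASS to produce the union members of CodeEventsContainer, so adding an event kind touches only the list. A minimal standalone illustration of the pattern with toy record types (not the real V8 records):

#include <cstdio>

struct FooEventRecord { int type; int payload; };
struct BarEventRecord { int type; double payload; };

#define EVENTS_TYPE_LIST(V) \
  V(FOO, FooEventRecord)    \
  V(BAR, BarEventRecord)

// First expansion: one enum constant per list entry.
#define DECLARE_TYPE(type, ignore) type,
enum Type { NONE = 0, EVENTS_TYPE_LIST(DECLARE_TYPE) NUMBER_OF_TYPES };
#undef DECLARE_TYPE

// Second expansion: one union member per record type, named <Record>_.
union EventsContainer {
  struct { int type; } generic;  // overlays the leading type tag of every record
#define DECLARE_CLASS(ignore, type) type type##_;
  EVENTS_TYPE_LIST(DECLARE_CLASS)
#undef DECLARE_CLASS
};

int main() {
  EventsContainer c;
  c.FooEventRecord_.type = FOO;
  c.FooEventRecord_.payload = 123;
  std::printf("%d event kinds, tag = %d\n", NUMBER_OF_TYPES - 1, c.generic.type);
  return 0;
}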
| 148 |
125 // This class implements both the profile events processor thread and | 149 // This class implements both the profile events processor thread and |
126 // methods called by event producers: VM and stack sampler threads. | 150 // methods called by event producers: VM and stack sampler threads. |
127 class ProfilerEventsProcessor : public Thread { | 151 class ProfilerEventsProcessor : public Thread { |
128 public: | 152 public: |
129 ProfilerEventsProcessor(ProfileGenerator* generator, | 153 explicit ProfilerEventsProcessor(ProfileGenerator* generator); |
130 CpuProfilesCollection* profiles); | |
131 virtual ~ProfilerEventsProcessor() {} | 154 virtual ~ProfilerEventsProcessor() {} |
132 | 155 |
133 // Thread control. | 156 // Thread control. |
134 virtual void Run(); | 157 virtual void Run(); |
135 inline void Stop() { running_ = false; } | 158 void StopSynchronously(); |
136 INLINE(bool running()) { return running_; } | 159 INLINE(bool running()) { return running_; } |
| 160 void Enqueue(const CodeEventsContainer& event); |
137 | 161 |
138 // Events adding methods. Called by VM threads. | |
139 void CallbackCreateEvent(Logger::LogEventsAndTags tag, | |
140 const char* prefix, Name* name, | |
141 Address start); | |
142 void CodeCreateEvent(Logger::LogEventsAndTags tag, | |
143 Name* name, | |
144 String* resource_name, int line_number, | |
145 Address start, unsigned size, | |
146 Address shared, | |
147 CompilationInfo* info); | |
148 void CodeCreateEvent(Logger::LogEventsAndTags tag, | |
149 const char* name, | |
150 Address start, unsigned size); | |
151 void CodeCreateEvent(Logger::LogEventsAndTags tag, | |
152 int args_count, | |
153 Address start, unsigned size); | |
154 void CodeMoveEvent(Address from, Address to); | |
155 void CodeDeleteEvent(Address from); | |
156 void SharedFunctionInfoMoveEvent(Address from, Address to); | |
157 void RegExpCodeCreateEvent(Logger::LogEventsAndTags tag, | |
158 const char* prefix, String* name, | |
159 Address start, unsigned size); | |
160 // Puts current stack into tick sample events buffer. | 162 // Puts current stack into tick sample events buffer. |
161 void AddCurrentStack(); | 163 void AddCurrentStack(Isolate* isolate); |
162 | 164 |
163 // Tick sample events are filled directly in the buffer of the circular | 165 // Tick sample events are filled directly in the buffer of the circular |
164 // queue (because the structure is of fixed width, but usually not all | 166 // queue (because the structure is of fixed width, but usually not all |
165 // stack frame entries are filled.) This method returns a pointer to the | 167 // stack frame entries are filled.) This method returns a pointer to the |
166 // next record of the buffer. | 168 // next record of the buffer. |
167 INLINE(TickSample* TickSampleEvent()); | 169 INLINE(TickSample* TickSampleEvent()); |
168 | 170 |
169 private: | 171 private: |
170 union CodeEventsContainer { | |
171 CodeEventRecord generic; | |
172 #define DECLARE_CLASS(ignore, type) type type##_; | |
173 CODE_EVENTS_TYPE_LIST(DECLARE_CLASS) | |
174 #undef DECLARE_TYPE | |
175 }; | |
176 | |
177 // Called from events processing thread (Run() method.) | 172 // Called from events processing thread (Run() method.) |
178 bool ProcessCodeEvent(unsigned* dequeue_order); | 173 bool ProcessCodeEvent(); |
179 bool ProcessTicks(unsigned dequeue_order); | 174 bool ProcessTicks(); |
180 | |
181 INLINE(static bool FilterOutCodeCreateEvent(Logger::LogEventsAndTags tag)); | |
182 | 175 |
183 ProfileGenerator* generator_; | 176 ProfileGenerator* generator_; |
184 CpuProfilesCollection* profiles_; | |
185 bool running_; | 177 bool running_; |
186 UnboundQueue<CodeEventsContainer> events_buffer_; | 178 UnboundQueue<CodeEventsContainer> events_buffer_; |
187 SamplingCircularQueue ticks_buffer_; | 179 SamplingCircularQueue ticks_buffer_; |
188 UnboundQueue<TickSampleEventRecord> ticks_from_vm_buffer_; | 180 UnboundQueue<TickSampleEventRecord> ticks_from_vm_buffer_; |
189 unsigned enqueue_order_; | 181 unsigned last_code_event_id_; |
| 182 unsigned last_processed_code_event_id_; |
190 }; | 183 }; |
191 | 184 |
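Editor's note: Run() drains two queues — code events Enqueue()d by VM threads, each stamped with an increasing id, and tick samples from the sampler — and a tick can only be resolved against the CodeMap once the code events it may refer to have been applied; that is what last_code_event_id_ and last_processed_code_event_id_ coordinate. A purely conceptual, standalone sketch of that ordering rule, using std::queue stand-ins rather than the lock-free V8 queues:

#include <cstdio>
#include <queue>

struct CodeEvent { unsigned id; };                  // stand-in for CodeEventsContainer
struct Tick { unsigned needs_code_events_up_to; };  // stand-in for TickSampleEventRecord

int main() {
  std::queue<CodeEvent> code_events;  // filled by VM threads via Enqueue()
  std::queue<Tick> ticks;             // filled by the sampler
  unsigned last_processed_code_event_id = 0;

  for (unsigned id = 1; id <= 3; ++id) code_events.push(CodeEvent{id});
  ticks.push(Tick{2});
  ticks.push(Tick{3});

  // The Run() loop idea: only fold a tick into the profile after every code
  // event it may reference has been applied to the CodeMap.
  while (!ticks.empty()) {
    while (!code_events.empty() &&
           last_processed_code_event_id < ticks.front().needs_code_events_up_to) {
      last_processed_code_event_id = code_events.front().id;  // "apply" it
      code_events.pop();
    }
    if (last_processed_code_event_id < ticks.front().needs_code_events_up_to) {
      break;  // the real processor would wait for more code events here
    }
    std::printf("tick recorded (code events applied: %u)\n",
                last_processed_code_event_id);
    ticks.pop();
  }
  return 0;
}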
192 | 185 |
193 #define PROFILE(IsolateGetter, Call) \ | 186 #define PROFILE(IsolateGetter, Call) \ |
194 do { \ | 187 do { \ |
195 Isolate* cpu_profiler_isolate = (IsolateGetter); \ | 188 Isolate* cpu_profiler_isolate = (IsolateGetter); \ |
196 LOG_CODE_EVENT(cpu_profiler_isolate, Call); \ | 189 LOG_CODE_EVENT(cpu_profiler_isolate, Call); \ |
197 CpuProfiler* cpu_profiler = cpu_profiler_isolate->cpu_profiler(); \ | 190 CpuProfiler* cpu_profiler = cpu_profiler_isolate->cpu_profiler(); \ |
198 if (cpu_profiler->is_profiling()) { \ | 191 if (cpu_profiler->is_profiling()) { \ |
199 cpu_profiler->Call; \ | 192 cpu_profiler->Call; \ |
200 } \ | 193 } \ |
201 } while (false) | 194 } while (false) |
202 | 195 |
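Editor's note: for context, call sites invoke the PROFILE macro roughly as below (illustrative shape only; the real call sites live in the logger code, and isolate/code are free variables here):

// Sends the event to the code log unconditionally, and forwards it to the
// CpuProfiler only while a profile is being recorded.
PROFILE(isolate, CodeCreateEvent(Logger::BUILTIN_TAG, code, "builtin comment"));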
203 | 196 |
204 class CpuProfiler { | 197 class CpuProfiler { |
205 public: | 198 public: |
206 explicit CpuProfiler(Isolate* isolate); | 199 explicit CpuProfiler(Isolate* isolate); |
| 200 |
| 201 CpuProfiler(Isolate* isolate, |
| 202 CpuProfilesCollection* test_collection, |
| 203 ProfileGenerator* test_generator, |
| 204 ProfilerEventsProcessor* test_processor); |
| 205 |
207 ~CpuProfiler(); | 206 ~CpuProfiler(); |
208 | 207 |
209 void StartProfiling(const char* title, bool record_samples = false); | 208 void StartProfiling(const char* title, bool record_samples = false); |
210 void StartProfiling(String* title, bool record_samples); | 209 void StartProfiling(String* title, bool record_samples); |
211 CpuProfile* StopProfiling(const char* title); | 210 CpuProfile* StopProfiling(const char* title); |
212 CpuProfile* StopProfiling(Object* security_token, String* title); | 211 CpuProfile* StopProfiling(String* title); |
213 int GetProfilesCount(); | 212 int GetProfilesCount(); |
214 CpuProfile* GetProfile(Object* security_token, int index); | 213 CpuProfile* GetProfile(int index); |
215 CpuProfile* FindProfile(Object* security_token, unsigned uid); | |
216 void DeleteAllProfiles(); | 214 void DeleteAllProfiles(); |
217 void DeleteProfile(CpuProfile* profile); | 215 void DeleteProfile(CpuProfile* profile); |
218 bool HasDetachedProfiles(); | |
219 | 216 |
220 // Invoked from stack sampler (thread or signal handler.) | 217 // Invoked from stack sampler (thread or signal handler.) |
221 TickSample* TickSampleEvent(); | 218 TickSample* TickSampleEvent(); |
222 | 219 |
223 // Must be called via PROFILE macro, otherwise will crash when | 220 // Must be called via PROFILE macro, otherwise will crash when |
224 // profiling is not enabled. | 221 // profiling is not enabled. |
225 void CallbackEvent(Name* name, Address entry_point); | 222 void CallbackEvent(Name* name, Address entry_point); |
226 void CodeCreateEvent(Logger::LogEventsAndTags tag, | 223 void CodeCreateEvent(Logger::LogEventsAndTags tag, |
227 Code* code, const char* comment); | 224 Code* code, const char* comment); |
228 void CodeCreateEvent(Logger::LogEventsAndTags tag, | 225 void CodeCreateEvent(Logger::LogEventsAndTags tag, |
(...skipping 16 matching lines...) |
245 void GetterCallbackEvent(Name* name, Address entry_point); | 242 void GetterCallbackEvent(Name* name, Address entry_point); |
246 void RegExpCodeCreateEvent(Code* code, String* source); | 243 void RegExpCodeCreateEvent(Code* code, String* source); |
247 void SetterCallbackEvent(Name* name, Address entry_point); | 244 void SetterCallbackEvent(Name* name, Address entry_point); |
248 void SharedFunctionInfoMoveEvent(Address from, Address to); | 245 void SharedFunctionInfoMoveEvent(Address from, Address to); |
249 | 246 |
250 INLINE(bool is_profiling() const) { return is_profiling_; } | 247 INLINE(bool is_profiling() const) { return is_profiling_; } |
251 bool* is_profiling_address() { | 248 bool* is_profiling_address() { |
252 return &is_profiling_; | 249 return &is_profiling_; |
253 } | 250 } |
254 | 251 |
| 252 ProfileGenerator* generator() const { return generator_; } |
| 253 ProfilerEventsProcessor* processor() const { return processor_; } |
| 254 |
255 private: | 255 private: |
256 void StartProcessorIfNotStarted(); | 256 void StartProcessorIfNotStarted(); |
257 void StopProcessorIfLastProfile(const char* title); | 257 void StopProcessorIfLastProfile(const char* title); |
258 void StopProcessor(); | 258 void StopProcessor(); |
259 void ResetProfiles(); | 259 void ResetProfiles(); |
| 260 void LogBuiltins(); |
260 | 261 |
261 Isolate* isolate_; | 262 Isolate* isolate_; |
262 CpuProfilesCollection* profiles_; | 263 CpuProfilesCollection* profiles_; |
263 unsigned next_profile_uid_; | 264 unsigned next_profile_uid_; |
264 TokenEnumerator* token_enumerator_; | |
265 ProfileGenerator* generator_; | 265 ProfileGenerator* generator_; |
266 ProfilerEventsProcessor* processor_; | 266 ProfilerEventsProcessor* processor_; |
267 int saved_logging_nesting_; | 267 int saved_logging_nesting_; |
268 bool need_to_stop_sampler_; | 268 bool need_to_stop_sampler_; |
269 bool is_profiling_; | 269 bool is_profiling_; |
270 | 270 |
271 private: | 271 private: |
272 DISALLOW_COPY_AND_ASSIGN(CpuProfiler); | 272 DISALLOW_COPY_AND_ASSIGN(CpuProfiler); |
273 }; | 273 }; |
274 | 274 |
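Editor's note: a hedged usage sketch of the internal API after this change — StopProfiling() and GetProfile() no longer take a security token (variable names are illustrative):

// Sketch: drive one profiling session through the internal CpuProfiler.
CpuProfiler* profiler = isolate->cpu_profiler();
profiler->StartProfiling("my-profile", true /* record_samples */);
// ... run the code to be profiled ...
CpuProfile* profile = profiler->StopProfiling("my-profile");
if (profile != NULL) {
  // Inspect or serialize the profile, then release it.
  profiler->DeleteProfile(profile);
}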
275 } } // namespace v8::internal | 275 } } // namespace v8::internal |
276 | 276 |
277 | 277 |
278 #endif // V8_CPU_PROFILER_H_ | 278 #endif // V8_CPU_PROFILER_H_ |