Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(326)

Side by Side Diff: src/log.cc

Issue 6529055: [Isolates] Merge crankshaft (r5922 from bleeding_edge). (Closed)
Patch Set: Win32 port Created 9 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/log.h ('k') | src/mark-compact.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2009 the V8 project authors. All rights reserved. 1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 13 matching lines...) Expand all
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #include <stdarg.h> 28 #include <stdarg.h>
29 29
30 #include "v8.h" 30 #include "v8.h"
31 31
32 #include "bootstrapper.h" 32 #include "bootstrapper.h"
33 #include "code-stubs.h" 33 #include "code-stubs.h"
34 #include "deoptimizer.h"
34 #include "global-handles.h" 35 #include "global-handles.h"
35 #include "log.h" 36 #include "log.h"
36 #include "macro-assembler.h" 37 #include "macro-assembler.h"
38 #include "runtime-profiler.h"
37 #include "serialize.h" 39 #include "serialize.h"
38 #include "string-stream.h" 40 #include "string-stream.h"
41 #include "vm-state-inl.h"
39 42
40 namespace v8 { 43 namespace v8 {
41 namespace internal { 44 namespace internal {
42 45
43 #ifdef ENABLE_LOGGING_AND_PROFILING 46 #ifdef ENABLE_LOGGING_AND_PROFILING
44 47
45 // 48 //
46 // Sliding state window. Updates counters to keep track of the last 49 // Sliding state window. Updates counters to keep track of the last
47 // window of kBufferSize states. This is useful to track where we 50 // window of kBufferSize states. This is useful to track where we
48 // spent our time. 51 // spent our time.
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after
134 bool running_; 137 bool running_;
135 138
136 // Tells whether we are currently recording tick samples. 139 // Tells whether we are currently recording tick samples.
137 bool paused_; 140 bool paused_;
138 }; 141 };
139 142
140 143
141 // 144 //
142 // StackTracer implementation 145 // StackTracer implementation
143 // 146 //
// Records in |sample| the currently executing JS function and a trace
// of JS stack frames.  Runs from the sampler's tick delivery, so it
// only inspects memory through the bounds-checked Safe* helpers below.
void StackTracer::Trace(Isolate* isolate, TickSample* sample) {
  sample->function = NULL;
  sample->frames_count = 0;

  // Avoid collecting traces while doing GC.
  if (sample->state == GC) return;

  const Address js_entry_sp =
      Isolate::js_entry_sp(isolate->thread_local_top());
  if (js_entry_sp == 0) {
    // Not executing JS now.
    return;
  }

  // The function slot lives at a fixed offset from the sampled frame
  // pointer; only read it if it is within the current JS stack bounds.
  const Address function_address =
      sample->fp + JavaScriptFrameConstants::kFunctionOffset;
  if (SafeStackFrameIterator::IsWithinBounds(sample->sp, js_entry_sp,
                                             function_address)) {
    Object* object = Memory::Object_at(function_address);
    // The slot may hold a Smi or garbage mid-construction; only record
    // genuine heap objects.
    if (object->IsHeapObject()) {
      sample->function = HeapObject::cast(object)->address();
    }
  }

  int i = 0;
  const Address callback = isolate->external_callback();
  // Surprisingly, PC can point _exactly_ to callback start, with good
  // probability, and this will result in reporting fake nested
  // callback call.
  if (callback != NULL && callback != sample->pc) {
    sample->stack[i++] = callback;
  }

  SafeStackTraceFrameIterator it(isolate,
                                 sample->fp, sample->sp,
                                 sample->sp, js_entry_sp);
  while (!it.done() && i < TickSample::kMaxFramesCount) {
    Object* object = it.frame()->function_slot_object();
    if (object->IsHeapObject()) {
      sample->stack[i++] = HeapObject::cast(object)->address();
    }
    it.Advance();
  }
  sample->frames_count = i;
}
183 192
184 193
185 // 194 //
186 // Ticker used to provide ticks to the profiler and the sliding state 195 // Ticker used to provide ticks to the profiler and the sliding state
187 // window. 196 // window.
188 // 197 //
class Ticker: public Sampler {
 public:
  explicit Ticker(Isolate* isolate, int interval):
      Sampler(isolate, interval),
      window_(NULL),
      profiler_(NULL) {}

  ~Ticker() { if (IsActive()) Stop(); }

  // Delivers one tick to whichever consumers are attached: the
  // profiler's sample queue and/or the sliding state window.
  virtual void Tick(TickSample* sample) {
    if (profiler_) profiler_->Insert(sample);
    if (window_) window_->AddState(sample->state);
  }

  void SetWindow(SlidingStateWindow* window) {
    window_ = window;
    if (!IsActive()) Start();
  }

  void ClearWindow() {
    window_ = NULL;
    // Keep sampling while the runtime profiler still needs ticks.
    if (!profiler_ && IsActive() && !RuntimeProfiler::IsEnabled()) Stop();
  }

  void SetProfiler(Profiler* profiler) {
    ASSERT(profiler_ == NULL);
    profiler_ = profiler;
    IncreaseProfilingDepth();
    if (!FLAG_prof_lazy && !IsActive()) Start();
  }

  void ClearProfiler() {
    DecreaseProfilingDepth();
    profiler_ = NULL;
    if (!window_ && IsActive() && !RuntimeProfiler::IsEnabled()) Stop();
  }

 protected:
  virtual void DoSampleStack(TickSample* sample) {
    StackTracer::Trace(isolate(), sample);
  }

 private:
  SlidingStateWindow* window_;
  Profiler* profiler_;
};
231 244
232 245
233 // 246 //
234 // SlidingStateWindow implementation. 247 // SlidingStateWindow implementation.
(...skipping 542 matching lines...) Expand 10 before | Expand all | Expand 10 after
// Logs a callback event for a native property setter, tagging the
// property |name| with a "set " prefix.
void Logger::SetterCallbackEvent(String* name, Address entry_point) {
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (!log_->IsEnabled() || !FLAG_log_code) return;
  SmartPointer<char> str =
      name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
  CallbackEventInternal("set ", *str, entry_point);
#endif
}
785 798
786 799
800 static const char* ComputeMarker(Code* code) {
801 switch (code->kind()) {
802 case Code::FUNCTION: return code->optimizable() ? "~" : "";
803 case Code::OPTIMIZED_FUNCTION: return "*";
804 default: return "";
805 }
806 }
807
808
// Logs creation of a code object described by a free-form |comment|.
void Logger::CodeCreateEvent(LogEventsAndTags tag,
                             Code* code,
                             const char* comment) {
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (!log_->IsEnabled() || !FLAG_log_code) return;
  LogMessageBuilder msg(this);
  msg.Append("%s,%s,", log_events_[CODE_CREATION_EVENT], log_events_[tag]);
  msg.AppendAddress(code->address());
  msg.Append(",%d,\"%s", code->ExecutableSize(), ComputeMarker(code));
  for (const char* p = comment; *p != '\0'; p++) {
    // Escape embedded quotes so the quoted log field stays parseable.
    if (*p == '"') {
      msg.Append('\\');
    }
    msg.Append(*p);
  }
  msg.Append('"');
  LowLevelCodeCreateEvent(code, &msg);
  if (FLAG_compress_log) {
    ASSERT(compression_helper_ != NULL);
    if (!compression_helper_->HandleMessage(&msg)) return;
  }
  msg.Append('\n');
  msg.WriteToLogFile();
#endif
}
812 834
813 835
// Logs creation of a code object labeled with the function |name|.
void Logger::CodeCreateEvent(LogEventsAndTags tag, Code* code, String* name) {
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (!log_->IsEnabled() || !FLAG_log_code) return;
  LogMessageBuilder msg(this);
  SmartPointer<char> str =
      name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
  msg.Append("%s,%s,", log_events_[CODE_CREATION_EVENT], log_events_[tag]);
  msg.AppendAddress(code->address());
  msg.Append(",%d,\"%s%s\"", code->ExecutableSize(), ComputeMarker(code), *str);
  LowLevelCodeCreateEvent(code, &msg);
  if (FLAG_compress_log) {
    ASSERT(compression_helper_ != NULL);
    if (!compression_helper_->HandleMessage(&msg)) return;
  }
  msg.Append('\n');
  msg.WriteToLogFile();
#endif
}
832 854
833 855
// Logs creation of a code object labeled with the function |name| and
// its script |source| location ("name source:line").
void Logger::CodeCreateEvent(LogEventsAndTags tag,
                             Code* code, String* name,
                             String* source, int line) {
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (!log_->IsEnabled() || !FLAG_log_code) return;
  LogMessageBuilder msg(this);
  SmartPointer<char> str =
      name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
  SmartPointer<char> sourcestr =
      source->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
  msg.Append("%s,%s,", log_events_[CODE_CREATION_EVENT], log_events_[tag]);
  msg.AppendAddress(code->address());
  msg.Append(",%d,\"%s%s %s:%d\"",
             code->ExecutableSize(),
             ComputeMarker(code),
             *str,
             *sourcestr,
             line);
  LowLevelCodeCreateEvent(code, &msg);
  if (FLAG_compress_log) {
    ASSERT(compression_helper_ != NULL);
    if (!compression_helper_->HandleMessage(&msg)) return;
  }
  msg.Append('\n');
  msg.WriteToLogFile();
#endif
}
857 883
(...skipping 383 matching lines...) Expand 10 before | Expand all | Expand 10 after
1241 } 1267 }
1242 1268
1243 1269
1244 void Logger::PauseProfiler(int flags, int tag) { 1270 void Logger::PauseProfiler(int flags, int tag) {
1245 if (!log_->IsEnabled()) return; 1271 if (!log_->IsEnabled()) return;
1246 if (profiler_ != NULL && (flags & PROFILER_MODULE_CPU)) { 1272 if (profiler_ != NULL && (flags & PROFILER_MODULE_CPU)) {
1247 // It is OK to have negative nesting. 1273 // It is OK to have negative nesting.
1248 if (--cpu_profiler_nesting_ == 0) { 1274 if (--cpu_profiler_nesting_ == 0) {
1249 profiler_->pause(); 1275 profiler_->pause();
1250 if (FLAG_prof_lazy) { 1276 if (FLAG_prof_lazy) {
1251 if (!FLAG_sliding_state_window) ticker_->Stop(); 1277 if (!FLAG_sliding_state_window && !RuntimeProfiler::IsEnabled()) {
1278 ticker_->Stop();
1279 }
1252 FLAG_log_code = false; 1280 FLAG_log_code = false;
1253 // Must be the same message as Log::kDynamicBufferSeal. 1281 // Must be the same message as Log::kDynamicBufferSeal.
1254 LOG(UncheckedStringEvent("profiler", "pause")); 1282 LOG(UncheckedStringEvent("profiler", "pause"));
1255 } 1283 }
1256 --logging_nesting_; 1284 --logging_nesting_;
1257 } 1285 }
1258 } 1286 }
1259 if (flags & 1287 if (flags &
1260 (PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) { 1288 (PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
1261 if (--heap_profiler_nesting_ == 0) { 1289 if (--heap_profiler_nesting_ == 0) {
(...skipping 15 matching lines...) Expand all
1277 if (profiler_ != NULL && (flags & PROFILER_MODULE_CPU)) { 1305 if (profiler_ != NULL && (flags & PROFILER_MODULE_CPU)) {
1278 if (cpu_profiler_nesting_++ == 0) { 1306 if (cpu_profiler_nesting_++ == 0) {
1279 ++logging_nesting_; 1307 ++logging_nesting_;
1280 if (FLAG_prof_lazy) { 1308 if (FLAG_prof_lazy) {
1281 profiler_->Engage(); 1309 profiler_->Engage();
1282 LOG(UncheckedStringEvent("profiler", "resume")); 1310 LOG(UncheckedStringEvent("profiler", "resume"));
1283 FLAG_log_code = true; 1311 FLAG_log_code = true;
1284 LogCompiledFunctions(); 1312 LogCompiledFunctions();
1285 LogFunctionObjects(); 1313 LogFunctionObjects();
1286 LogAccessorCallbacks(); 1314 LogAccessorCallbacks();
1287 if (!FLAG_sliding_state_window) ticker_->Start(); 1315 if (!FLAG_sliding_state_window && !ticker_->IsActive()) {
1316 ticker_->Start();
1317 }
1288 } 1318 }
1289 profiler_->resume(); 1319 profiler_->resume();
1290 } 1320 }
1291 } 1321 }
1292 if (flags & 1322 if (flags &
1293 (PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) { 1323 (PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
1294 if (heap_profiler_nesting_++ == 0) { 1324 if (heap_profiler_nesting_++ == 0) {
1295 ++logging_nesting_; 1325 ++logging_nesting_;
1296 FLAG_log_gc = true; 1326 FLAG_log_gc = true;
1297 } 1327 }
(...skipping 11 matching lines...) Expand all
// True while the tick sampler is running.
bool Logger::IsProfilerSamplerActive() {
  return ticker_->IsActive();
}
1312 1342
1313 1343
// Delegates to Log::GetLogLines; returns the number of bytes copied
// into |dest_buf|.
int Logger::GetLogLines(int from_pos, char* dest_buf, int max_size) {
  return log_->GetLogLines(from_pos, dest_buf, max_size);
}
1317 1347
1318 1348
// Visitor that appends each optimized JSFunction's SharedFunctionInfo
// and optimized code into the caller-supplied arrays, advancing the
// shared |count_|.  Either array may be NULL to just count.
class EnumerateOptimizedFunctionsVisitor: public OptimizedFunctionVisitor {
 public:
  EnumerateOptimizedFunctionsVisitor(Handle<SharedFunctionInfo>* sfis,
                                     Handle<Code>* code_objects,
                                     int* count)
      : sfis_(sfis), code_objects_(code_objects), count_(count) { }

  virtual void EnterContext(Context* context) {}
  virtual void LeaveContext(Context* context) {}

  virtual void VisitFunction(JSFunction* function) {
    if (sfis_ != NULL) {
      sfis_[*count_] = Handle<SharedFunctionInfo>(function->shared());
    }
    if (code_objects_ != NULL) {
      ASSERT(function->code()->kind() == Code::OPTIMIZED_FUNCTION);
      code_objects_[*count_] = Handle<Code>(function->code());
    }
    *count_ = *count_ + 1;
  }

 private:
  Handle<SharedFunctionInfo>* sfis_;   // May be NULL (count-only mode).
  Handle<Code>* code_objects_;         // May be NULL (count-only mode).
  int* count_;                         // Shared running index/counter.
};
1375
1376
// Enumerates all compiled functions: unoptimized code found via a heap
// walk over SharedFunctionInfo objects, then optimized code via the
// deoptimizer's per-context function lists.  |sfis| and |code_objects|
// may each be NULL to only count.  Returns the total number found.
static int EnumerateCompiledFunctions(Handle<SharedFunctionInfo>* sfis,
                                      Handle<Code>* code_objects) {
  AssertNoAllocation no_alloc;
  int compiled_funcs_count = 0;

  // Iterate the heap to find shared function info objects and record
  // the unoptimized code for them.
  HeapIterator iterator;
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    if (!obj->IsSharedFunctionInfo()) continue;
    SharedFunctionInfo* sfi = SharedFunctionInfo::cast(obj);
    if (sfi->is_compiled()
        && (!sfi->script()->IsScript()
            || Script::cast(sfi->script())->HasValidSource())) {
      if (sfis != NULL) {
        sfis[compiled_funcs_count] = Handle<SharedFunctionInfo>(sfi);
      }
      if (code_objects != NULL) {
        code_objects[compiled_funcs_count] = Handle<Code>(sfi->code());
      }
      ++compiled_funcs_count;
    }
  }

  // Iterate all optimized functions in all contexts.
  EnumerateOptimizedFunctionsVisitor visitor(sfis,
                                             code_objects,
                                             &compiled_funcs_count);
  Deoptimizer::VisitAllOptimizedFunctions(&visitor);

  return compiled_funcs_count;
}
1336 1409
1337 1410
1338 void Logger::LogCodeObject(Object* object) { 1411 void Logger::LogCodeObject(Object* object) {
1339 if (FLAG_log_code) { 1412 if (FLAG_log_code) {
1340 Code* code_object = Code::cast(object); 1413 Code* code_object = Code::cast(object);
1341 LogEventsAndTags tag = Logger::STUB_TAG; 1414 LogEventsAndTags tag = Logger::STUB_TAG;
1342 const char* description = "Unknown code from the snapshot"; 1415 const char* description = "Unknown code from the snapshot";
1343 switch (code_object->kind()) { 1416 switch (code_object->kind()) {
1344 case Code::FUNCTION: 1417 case Code::FUNCTION:
1418 case Code::OPTIMIZED_FUNCTION:
1345 return; // We log this later using LogCompiledFunctions. 1419 return; // We log this later using LogCompiledFunctions.
1346 case Code::BINARY_OP_IC: 1420 case Code::BINARY_OP_IC: // fall through
1347 // fall through 1421 case Code::TYPE_RECORDING_BINARY_OP_IC: // fall through
1422 case Code::COMPARE_IC: // fall through
1348 case Code::STUB: 1423 case Code::STUB:
1349 description = 1424 description =
1350 CodeStub::MajorName(CodeStub::GetMajorKey(code_object), true); 1425 CodeStub::MajorName(CodeStub::GetMajorKey(code_object), true);
1351 if (description == NULL) 1426 if (description == NULL)
1352 description = "A stub from the snapshot"; 1427 description = "A stub from the snapshot";
1353 tag = Logger::STUB_TAG; 1428 tag = Logger::STUB_TAG;
1354 break; 1429 break;
1355 case Code::BUILTIN: 1430 case Code::BUILTIN:
1356 description = "A builtin from the snapshot"; 1431 description = "A builtin from the snapshot";
1357 tag = Logger::BUILTIN_TAG; 1432 tag = Logger::BUILTIN_TAG;
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
1420 AssertNoAllocation no_alloc; 1495 AssertNoAllocation no_alloc;
1421 HeapIterator iterator; 1496 HeapIterator iterator;
1422 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) { 1497 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1423 if (obj->IsCode()) LogCodeObject(obj); 1498 if (obj->IsCode()) LogCodeObject(obj);
1424 } 1499 }
1425 } 1500 }
1426 1501
1427 1502
// Emits code-creation (or callback) events for every already-compiled
// function, so that a lazily-started profiler log still names all code.
void Logger::LogCompiledFunctions() {
  HandleScope scope;
  // First pass counts, second pass fills the handle arrays.
  const int compiled_funcs_count = EnumerateCompiledFunctions(NULL, NULL);
  ScopedVector< Handle<SharedFunctionInfo> > sfis(compiled_funcs_count);
  ScopedVector< Handle<Code> > code_objects(compiled_funcs_count);
  EnumerateCompiledFunctions(sfis.start(), code_objects.start());

  // During iteration, there can be heap allocation due to
  // GetScriptLineNumber call.
  for (int i = 0; i < compiled_funcs_count; ++i) {
    Handle<SharedFunctionInfo> shared = sfis[i];
    Handle<String> name(String::cast(shared->name()));
    Handle<String> func_name(name->length() > 0 ?
                             *name : shared->inferred_name());
    if (shared->script()->IsScript()) {
      Handle<Script> script(Script::cast(shared->script()));
      if (script->name()->IsString()) {
        Handle<String> script_name(String::cast(script->name()));
        int line_num = GetScriptLineNumber(script, shared->start_position());
        if (line_num > 0) {
          PROFILE(CodeCreateEvent(
              Logger::ToNativeByScript(Logger::LAZY_COMPILE_TAG, *script),
              *code_objects[i], *func_name,
              *script_name, line_num + 1));
        } else {
          // Can't distinguish eval and script here, so always use Script.
          PROFILE(CodeCreateEvent(
              Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
              *code_objects[i], *script_name));
        }
      } else {
        PROFILE(CodeCreateEvent(
            Logger::ToNativeByScript(Logger::LAZY_COMPILE_TAG, *script),
            *code_objects[i], *func_name));
      }
    } else if (shared->IsApiFunction()) {
      // API function.
      FunctionTemplateInfo* fun_data = shared->get_api_func_data();
      Object* raw_call_data = fun_data->call_code();
      if (!raw_call_data->IsUndefined()) {
        CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
        Object* callback_obj = call_data->callback();
        Address entry_point = v8::ToCData<Address>(callback_obj);
        PROFILE(CallbackEvent(*func_name, entry_point));
      }
    } else {
      PROFILE(CodeCreateEvent(
          Logger::LAZY_COMPILE_TAG, *code_objects[i], *func_name));
    }
  }
}
1478 1554
1479 1555
1480 void Logger::LogFunctionObjects() { 1556 void Logger::LogFunctionObjects() {
1481 AssertNoAllocation no_alloc; 1557 AssertNoAllocation no_alloc;
1482 HeapIterator iterator; 1558 HeapIterator iterator;
1483 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) { 1559 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1484 if (!obj->IsJSFunction()) continue; 1560 if (!obj->IsJSFunction()) continue;
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after
1570 } 1646 }
1571 1647
1572 return true; 1648 return true;
1573 1649
1574 #else 1650 #else
1575 return false; 1651 return false;
1576 #endif 1652 #endif
1577 } 1653 }
1578 1654
1579 1655
// Returns the logger's ticker as a plain Sampler.
Sampler* Logger::sampler() {
  return ticker_;
}
1659
1660
// Starts the ticker if it is not already running.
void Logger::EnsureTickerStarted() {
  ASSERT(ticker_ != NULL);
  if (!ticker_->IsActive()) ticker_->Start();
}
1665
1666
// Stops the ticker if it exists and is running.
void Logger::EnsureTickerStopped() {
  if (ticker_ != NULL && ticker_->IsActive()) ticker_->Stop();
}
1670
1671
1580 void Logger::TearDown() { 1672 void Logger::TearDown() {
1581 #ifdef ENABLE_LOGGING_AND_PROFILING 1673 #ifdef ENABLE_LOGGING_AND_PROFILING
1582 if (!is_initialized_) return; 1674 if (!is_initialized_) return;
1583 is_initialized_ = false; 1675 is_initialized_ = false;
1584 1676
1585 // Stop the profiler before closing the file. 1677 // Stop the profiler before closing the file.
1586 if (profiler_ != NULL) { 1678 if (profiler_ != NULL) {
1587 profiler_->Disengage(); 1679 profiler_->Disengage();
1588 delete profiler_; 1680 delete profiler_;
1589 profiler_ = NULL; 1681 profiler_ = NULL;
(...skipping 24 matching lines...) Expand all
1614 return; 1706 return;
1615 } 1707 }
1616 // Otherwise, if the sliding state window computation has not been 1708 // Otherwise, if the sliding state window computation has not been
1617 // started we do it now. 1709 // started we do it now.
1618 if (sliding_state_window_ == NULL) { 1710 if (sliding_state_window_ == NULL) {
1619 sliding_state_window_ = new SlidingStateWindow(); 1711 sliding_state_window_ = new SlidingStateWindow();
1620 } 1712 }
1621 #endif 1713 #endif
1622 } 1714 }
1623 1715
1716
// Process-wide registry state; |active_samplers_| is lazily created
// and accesses are guarded by |mutex_|.
Mutex* SamplerRegistry::mutex_ = OS::CreateMutex();
List<Sampler*>* SamplerRegistry::active_samplers_ = NULL;
1719
1720
// Invokes |func| on every registered sampler while holding the
// registry lock.  Returns whether any samplers remain registered.
bool SamplerRegistry::IterateActiveSamplers(VisitSampler func, void* param) {
  ScopedLock lock(mutex_);
  // Re-check existence each iteration in case the list is emptied.
  for (int i = 0;
       ActiveSamplersExist() && i < active_samplers_->length();
       ++i) {
    func(active_samplers_->at(i), param);
  }
  return ActiveSamplersExist();
}
1730
1731
// Visitor callback: ORs each sampler's IsProfiling() into the bool
// pointed to by |flag_ptr|.
static void ComputeCpuProfiling(Sampler* sampler, void* flag_ptr) {
  bool* flag = reinterpret_cast<bool*>(flag_ptr);
  *flag |= sampler->IsProfiling();
}
1736
1737
// Classifies the registry: no samplers, samplers present, or at least
// one sampler actively CPU-profiling.
SamplerRegistry::State SamplerRegistry::GetState() {
  bool flag = false;
  if (!IterateActiveSamplers(&ComputeCpuProfiling, &flag)) {
    return HAS_NO_SAMPLERS;
  }
  return flag ? HAS_CPU_PROFILING_SAMPLERS : HAS_SAMPLERS;
}
1745
1746
// Registers an active sampler, creating the list on first use.
// A sampler must not be added twice.
void SamplerRegistry::AddActiveSampler(Sampler* sampler) {
  ASSERT(sampler->IsActive());
  ScopedLock lock(mutex_);
  if (active_samplers_ == NULL) {
    active_samplers_ = new List<Sampler*>;
  } else {
    ASSERT(!active_samplers_->Contains(sampler));
  }
  active_samplers_->Add(sampler);
}
1757
1758
// Unregisters a previously added sampler; it must still be present.
void SamplerRegistry::RemoveActiveSampler(Sampler* sampler) {
  ASSERT(sampler->IsActive());
  ScopedLock lock(mutex_);
  ASSERT(active_samplers_ != NULL);
  bool removed = active_samplers_->RemoveElement(sampler);
  ASSERT(removed);
  USE(removed);  // Silence unused-variable warning in release builds.
}
1767
1624 } } // namespace v8::internal 1768 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/log.h ('k') | src/mark-compact.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698