| Index: src/log.cc
|
| diff --git a/src/log.cc b/src/log.cc
|
| index fa6fe24234d835b900d3baf8b4d7c91088880955..c393fccccb5e4836bbc84d4546ceb031f7d8a6a5 100644
|
| --- a/src/log.cc
|
| +++ b/src/log.cc
|
| @@ -31,11 +31,14 @@
|
|
|
| #include "bootstrapper.h"
|
| #include "code-stubs.h"
|
| +#include "deoptimizer.h"
|
| #include "global-handles.h"
|
| #include "log.h"
|
| #include "macro-assembler.h"
|
| +#include "runtime-profiler.h"
|
| #include "serialize.h"
|
| #include "string-stream.h"
|
| +#include "vm-state-inl.h"
|
|
|
| namespace v8 {
|
| namespace internal {
|
| @@ -141,28 +144,32 @@ class Profiler: public Thread {
|
| //
|
| // StackTracer implementation
|
| //
|
| -void StackTracer::Trace(TickSample* sample) {
|
| +void StackTracer::Trace(Isolate* isolate, TickSample* sample) {
|
| sample->function = NULL;
|
| sample->frames_count = 0;
|
|
|
| + // Avoid collecting traces while doing GC.
|
| if (sample->state == GC) return;
|
|
|
| - const Address js_entry_sp = Isolate::js_entry_sp(
|
| - Isolate::Current()->thread_local_top());
|
| + const Address js_entry_sp =
|
| + Isolate::js_entry_sp(isolate->thread_local_top());
|
| if (js_entry_sp == 0) {
|
| // Not executing JS now.
|
| return;
|
| }
|
|
|
| - const Address functionAddr =
|
| + const Address function_address =
|
| sample->fp + JavaScriptFrameConstants::kFunctionOffset;
|
| if (SafeStackFrameIterator::IsWithinBounds(sample->sp, js_entry_sp,
|
| - functionAddr)) {
|
| - sample->function = Memory::Address_at(functionAddr) - kHeapObjectTag;
|
| + function_address)) {
|
| + Object* object = Memory::Object_at(function_address);
|
| + if (object->IsHeapObject()) {
|
| + sample->function = HeapObject::cast(object)->address();
|
| + }
|
| }
|
|
|
| int i = 0;
|
| - const Address callback = VMState::external_callback();
|
| + const Address callback = isolate->external_callback();
|
| // Surprisingly, PC can point _exactly_ to callback start, with good
|
| // probability, and this will result in reporting fake nested
|
| // callback call.
|
| @@ -170,12 +177,14 @@ void StackTracer::Trace(TickSample* sample) {
|
| sample->stack[i++] = callback;
|
| }
|
|
|
| - SafeStackTraceFrameIterator it(sample->fp, sample->sp,
|
| + SafeStackTraceFrameIterator it(isolate,
|
| + sample->fp, sample->sp,
|
| sample->sp, js_entry_sp);
|
| while (!it.done() && i < TickSample::kMaxFramesCount) {
|
| - sample->stack[i++] =
|
| - reinterpret_cast<Address>(it.frame()->function_slot_object()) -
|
| - kHeapObjectTag;
|
| + Object* object = it.frame()->function_slot_object();
|
| + if (object->IsHeapObject()) {
|
| + sample->stack[i++] = HeapObject::cast(object)->address();
|
| + }
|
| it.Advance();
|
| }
|
| sample->frames_count = i;
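
The change above replaces pointer arithmetic that trusted the frame's function
slot unconditionally: the old code computed
"Memory::Address_at(functionAddr) - kHeapObjectTag", but a tick can arrive
while a frame is only partially set up, so the slot may hold a Smi or raw data
and subtracting the tag would then produce a bogus address. The new code reads
the slot as an Object* and records it only when IsHeapObject() holds, and the
stack-walking loop applies the same guard to each frame's function slot. A
minimal standalone sketch of the tag check involved (constants are assumed,
not taken from V8's headers):

    #include <stddef.h>
    #include <stdint.h>

    // Assumed V8-style pointer tagging: heap pointers carry 01 in the low two
    // bits, Smis end in 0.
    static const intptr_t kHeapObjectTag     = 1;
    static const intptr_t kHeapObjectTagMask = 3;

    // Yields the untagged object address, or NULL when the slot does not
    // currently hold a tagged heap pointer (e.g. it holds a Smi or raw data).
    static inline unsigned char* SafeFunctionAddress(intptr_t slot_value) {
      if ((slot_value & kHeapObjectTagMask) != kHeapObjectTag) return NULL;
      return reinterpret_cast<unsigned char*>(slot_value - kHeapObjectTag);
    }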
|
| @@ -189,7 +198,9 @@ void StackTracer::Trace(TickSample* sample) {
|
| class Ticker: public Sampler {
|
| public:
|
| explicit Ticker(Isolate* isolate, int interval):
|
| - Sampler(isolate, interval, FLAG_prof), window_(NULL), profiler_(NULL) {}
|
| + Sampler(isolate, interval),
|
| + window_(NULL),
|
| + profiler_(NULL) {}
|
|
|
| ~Ticker() { if (IsActive()) Stop(); }
|
|
|
| @@ -205,23 +216,25 @@ class Ticker: public Sampler {
|
|
|
| void ClearWindow() {
|
| window_ = NULL;
|
| - if (!profiler_ && IsActive()) Stop();
|
| + if (!profiler_ && IsActive() && !RuntimeProfiler::IsEnabled()) Stop();
|
| }
|
|
|
| void SetProfiler(Profiler* profiler) {
|
| + ASSERT(profiler_ == NULL);
|
| profiler_ = profiler;
|
| + IncreaseProfilingDepth();
|
| if (!FLAG_prof_lazy && !IsActive()) Start();
|
| }
|
|
|
| void ClearProfiler() {
|
| + DecreaseProfilingDepth();
|
| profiler_ = NULL;
|
| - if (!window_ && IsActive()) Stop();
|
| + if (!window_ && IsActive() && !RuntimeProfiler::IsEnabled()) Stop();
|
| }
|
|
|
| protected:
|
| virtual void DoSampleStack(TickSample* sample) {
|
| - ASSERT(IsSynchronous());
|
| - StackTracer::Trace(sample);
|
| + StackTracer::Trace(isolate(), sample);
|
| }
|
|
|
| private:
|
| @@ -784,6 +797,15 @@ void Logger::SetterCallbackEvent(String* name, Address entry_point) {
|
| }
|
|
|
|
|
| +static const char* ComputeMarker(Code* code) {
|
| + switch (code->kind()) {
|
| + case Code::FUNCTION: return code->optimizable() ? "~" : "";
|
| + case Code::OPTIMIZED_FUNCTION: return "*";
|
| + default: return "";
|
| + }
|
| +}
|
| +
|
| +
|
| void Logger::CodeCreateEvent(LogEventsAndTags tag,
|
| Code* code,
|
| const char* comment) {
|
| @@ -792,7 +814,7 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
|
| LogMessageBuilder msg(this);
|
| msg.Append("%s,%s,", log_events_[CODE_CREATION_EVENT], log_events_[tag]);
|
| msg.AppendAddress(code->address());
|
| - msg.Append(",%d,\"", code->ExecutableSize());
|
| + msg.Append(",%d,\"%s", code->ExecutableSize(), ComputeMarker(code));
|
| for (const char* p = comment; *p != '\0'; p++) {
|
| if (*p == '"') {
|
| msg.Append('\\');
|
| @@ -819,7 +841,7 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag, Code* code, String* name) {
|
| name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
|
| msg.Append("%s,%s,", log_events_[CODE_CREATION_EVENT], log_events_[tag]);
|
| msg.AppendAddress(code->address());
|
| - msg.Append(",%d,\"%s\"", code->ExecutableSize(), *str);
|
| + msg.Append(",%d,\"%s%s\"", code->ExecutableSize(), ComputeMarker(code), *str);
|
| LowLevelCodeCreateEvent(code, &msg);
|
| if (FLAG_compress_log) {
|
| ASSERT(compression_helper_ != NULL);
|
| @@ -843,8 +865,12 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
|
| source->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
|
| msg.Append("%s,%s,", log_events_[CODE_CREATION_EVENT], log_events_[tag]);
|
| msg.AppendAddress(code->address());
|
| - msg.Append(",%d,\"%s %s:%d\"",
|
| - code->ExecutableSize(), *str, *sourcestr, line);
|
| + msg.Append(",%d,\"%s%s %s:%d\"",
|
| + code->ExecutableSize(),
|
| + ComputeMarker(code),
|
| + *str,
|
| + *sourcestr,
|
| + line);
|
| LowLevelCodeCreateEvent(code, &msg);
|
| if (FLAG_compress_log) {
|
| ASSERT(compression_helper_ != NULL);
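
ComputeMarker(), introduced earlier in this patch, is what feeds the extra
"%s" in the three CodeCreateEvent variants: the quoted name now starts with
"*" for Code::OPTIMIZED_FUNCTION objects, with "~" for unoptimized code that
is still optimizable, and with nothing otherwise, so log post-processors can
tell the optimized and unoptimized versions of a function apart. Illustrative
output for the name-plus-source variant (addresses and sizes are made up; the
event and tag strings are assumed to be the usual "code-creation" and
"LazyCompile"):

    code-creation,LazyCompile,0x2b8cd680,812,"~foo bar.js:42"
    code-creation,LazyCompile,0x2b8d1a00,540,"*foo bar.js:42"

The first line is the not-yet-optimized (but optimizable) version, the second
the optimized code object later generated for the same function.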
|
| @@ -1248,7 +1274,9 @@ void Logger::PauseProfiler(int flags, int tag) {
|
| if (--cpu_profiler_nesting_ == 0) {
|
| profiler_->pause();
|
| if (FLAG_prof_lazy) {
|
| - if (!FLAG_sliding_state_window) ticker_->Stop();
|
| + if (!FLAG_sliding_state_window && !RuntimeProfiler::IsEnabled()) {
|
| + ticker_->Stop();
|
| + }
|
| FLAG_log_code = false;
|
| // Must be the same message as Log::kDynamicBufferSeal.
|
| LOG(UncheckedStringEvent("profiler", "pause"));
|
| @@ -1284,7 +1312,9 @@ void Logger::ResumeProfiler(int flags, int tag) {
|
| LogCompiledFunctions();
|
| LogFunctionObjects();
|
| LogAccessorCallbacks();
|
| - if (!FLAG_sliding_state_window) ticker_->Start();
|
| + if (!FLAG_sliding_state_window && !ticker_->IsActive()) {
|
| + ticker_->Start();
|
| + }
|
| }
|
| profiler_->resume();
|
| }
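
The two hunks above, like the ClearWindow()/ClearProfiler() changes earlier,
all add the same guard: the ticker is no longer stopped merely because CPU
profiling released it, since the sampler's ticks also feed the runtime
profiler, and ResumeProfiler() now only starts the ticker when it is not
already running. Restated as a hypothetical helper (not part of the patch):

    // Hypothetical restatement of the rule now applied at the Stop() sites:
    // sampling may cease only when no client still needs ticks.
    static bool MayStopTicker(bool has_window, bool has_profiler,
                              bool runtime_profiler_enabled) {
      return !has_window && !has_profiler && !runtime_profiler_enabled;
    }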
|
| @@ -1316,9 +1346,41 @@ int Logger::GetLogLines(int from_pos, char* dest_buf, int max_size) {
|
| }
|
|
|
|
|
| -static int EnumerateCompiledFunctions(Handle<SharedFunctionInfo>* sfis) {
|
| +class EnumerateOptimizedFunctionsVisitor: public OptimizedFunctionVisitor {
|
| + public:
|
| + EnumerateOptimizedFunctionsVisitor(Handle<SharedFunctionInfo>* sfis,
|
| + Handle<Code>* code_objects,
|
| + int* count)
|
| + : sfis_(sfis), code_objects_(code_objects), count_(count) { }
|
| +
|
| + virtual void EnterContext(Context* context) {}
|
| + virtual void LeaveContext(Context* context) {}
|
| +
|
| + virtual void VisitFunction(JSFunction* function) {
|
| + if (sfis_ != NULL) {
|
| + sfis_[*count_] = Handle<SharedFunctionInfo>(function->shared());
|
| + }
|
| + if (code_objects_ != NULL) {
|
| + ASSERT(function->code()->kind() == Code::OPTIMIZED_FUNCTION);
|
| + code_objects_[*count_] = Handle<Code>(function->code());
|
| + }
|
| + *count_ = *count_ + 1;
|
| + }
|
| +
|
| + private:
|
| + Handle<SharedFunctionInfo>* sfis_;
|
| + Handle<Code>* code_objects_;
|
| + int* count_;
|
| +};
|
| +
|
| +
|
| +static int EnumerateCompiledFunctions(Handle<SharedFunctionInfo>* sfis,
|
| + Handle<Code>* code_objects) {
|
| AssertNoAllocation no_alloc;
|
| int compiled_funcs_count = 0;
|
| +
|
| + // Iterate the heap to find shared function info objects and record
|
| + // the unoptimized code for them.
|
| HeapIterator iterator;
|
| for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
|
| if (!obj->IsSharedFunctionInfo()) continue;
|
| @@ -1326,11 +1388,22 @@ static int EnumerateCompiledFunctions(Handle<SharedFunctionInfo>* sfis) {
|
| if (sfi->is_compiled()
|
| && (!sfi->script()->IsScript()
|
| || Script::cast(sfi->script())->HasValidSource())) {
|
| - if (sfis != NULL)
|
| + if (sfis != NULL) {
|
| sfis[compiled_funcs_count] = Handle<SharedFunctionInfo>(sfi);
|
| + }
|
| + if (code_objects != NULL) {
|
| + code_objects[compiled_funcs_count] = Handle<Code>(sfi->code());
|
| + }
|
| ++compiled_funcs_count;
|
| }
|
| }
|
| +
|
| + // Iterate all optimized functions in all contexts.
|
| + EnumerateOptimizedFunctionsVisitor visitor(sfis,
|
| + code_objects,
|
| + &compiled_funcs_count);
|
| + Deoptimizer::VisitAllOptimizedFunctions(&visitor);
|
| +
|
| return compiled_funcs_count;
|
| }
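
EnumerateCompiledFunctions() keeps its count-then-fill calling convention:
LogCompiledFunctions() below first calls it with NULL arrays to learn the
number of entries, then again with preallocated arrays to fill them. The new
second pass walks every optimized JSFunction through
Deoptimizer::VisitAllOptimizedFunctions(); since the visitor shares the count
pointer, optimized entries are appended after the heap-iteration results, and
the later hunks switch LogCompiledFunctions() to logging code_objects[i]
instead of shared->code(), so optimized code objects get code-creation events
of their own. The count-then-fill idiom in isolation (an illustrative
stand-in, not V8 API):

    #include <stddef.h>
    #include <vector>

    // Stand-in for EnumerateCompiledFunctions(): writes into 'out' only when
    // storage was provided, and always reports how many items exist.
    static int Enumerate(int* out) {
      int count = 0;
      for (int v = 10; v < 15; ++v) {   // stand-in for the heap/visitor walks
        if (out != NULL) out[count] = v;
        ++count;
      }
      return count;
    }

    static void CollectAll() {
      const int n = Enumerate(NULL);    // pass 1: count only
      std::vector<int> items(n);
      Enumerate(&items[0]);             // pass 2: fill preallocated storage
    }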
|
|
|
| @@ -1342,9 +1415,11 @@ void Logger::LogCodeObject(Object* object) {
|
| const char* description = "Unknown code from the snapshot";
|
| switch (code_object->kind()) {
|
| case Code::FUNCTION:
|
| + case Code::OPTIMIZED_FUNCTION:
|
| return; // We log this later using LogCompiledFunctions.
|
| - case Code::BINARY_OP_IC:
|
| - // fall through
|
| + case Code::BINARY_OP_IC: // fall through
|
| + case Code::TYPE_RECORDING_BINARY_OP_IC: // fall through
|
| + case Code::COMPARE_IC: // fall through
|
| case Code::STUB:
|
| description =
|
| CodeStub::MajorName(CodeStub::GetMajorKey(code_object), true);
|
| @@ -1427,9 +1502,10 @@ void Logger::LogCodeObjects() {
|
|
|
| void Logger::LogCompiledFunctions() {
|
| HandleScope scope;
|
| - const int compiled_funcs_count = EnumerateCompiledFunctions(NULL);
|
| + const int compiled_funcs_count = EnumerateCompiledFunctions(NULL, NULL);
|
| ScopedVector< Handle<SharedFunctionInfo> > sfis(compiled_funcs_count);
|
| - EnumerateCompiledFunctions(sfis.start());
|
| + ScopedVector< Handle<Code> > code_objects(compiled_funcs_count);
|
| + EnumerateCompiledFunctions(sfis.start(), code_objects.start());
|
|
|
| // During iteration, there can be heap allocation due to
|
| // GetScriptLineNumber call.
|
| @@ -1446,18 +1522,18 @@ void Logger::LogCompiledFunctions() {
|
| if (line_num > 0) {
|
| PROFILE(CodeCreateEvent(
|
| Logger::ToNativeByScript(Logger::LAZY_COMPILE_TAG, *script),
|
| - shared->code(), *func_name,
|
| + *code_objects[i], *func_name,
|
| *script_name, line_num + 1));
|
| } else {
|
| // Can't distinguish eval and script here, so always use Script.
|
| PROFILE(CodeCreateEvent(
|
| Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
|
| - shared->code(), *script_name));
|
| + *code_objects[i], *script_name));
|
| }
|
| } else {
|
| PROFILE(CodeCreateEvent(
|
| Logger::ToNativeByScript(Logger::LAZY_COMPILE_TAG, *script),
|
| - shared->code(), *func_name));
|
| + *code_objects[i], *func_name));
|
| }
|
| } else if (shared->IsApiFunction()) {
|
| // API function.
|
| @@ -1471,7 +1547,7 @@ void Logger::LogCompiledFunctions() {
|
| }
|
| } else {
|
| PROFILE(CodeCreateEvent(
|
| - Logger::LAZY_COMPILE_TAG, shared->code(), *func_name));
|
| + Logger::LAZY_COMPILE_TAG, *code_objects[i], *func_name));
|
| }
|
| }
|
| }
|
| @@ -1577,6 +1653,22 @@ bool Logger::Setup() {
|
| }
|
|
|
|
|
| +Sampler* Logger::sampler() {
|
| + return ticker_;
|
| +}
|
| +
|
| +
|
| +void Logger::EnsureTickerStarted() {
|
| + ASSERT(ticker_ != NULL);
|
| + if (!ticker_->IsActive()) ticker_->Start();
|
| +}
|
| +
|
| +
|
| +void Logger::EnsureTickerStopped() {
|
| + if (ticker_ != NULL && ticker_->IsActive()) ticker_->Stop();
|
| +}
|
| +
|
| +
|
| void Logger::TearDown() {
|
| #ifdef ENABLE_LOGGING_AND_PROFILING
|
| if (!is_initialized_) return;
|
| @@ -1621,4 +1713,56 @@ void Logger::EnableSlidingStateWindow() {
|
| #endif
|
| }
|
|
|
| +
|
| +Mutex* SamplerRegistry::mutex_ = OS::CreateMutex();
|
| +List<Sampler*>* SamplerRegistry::active_samplers_ = NULL;
|
| +
|
| +
|
| +bool SamplerRegistry::IterateActiveSamplers(VisitSampler func, void* param) {
|
| + ScopedLock lock(mutex_);
|
| + for (int i = 0;
|
| + ActiveSamplersExist() && i < active_samplers_->length();
|
| + ++i) {
|
| + func(active_samplers_->at(i), param);
|
| + }
|
| + return ActiveSamplersExist();
|
| +}
|
| +
|
| +
|
| +static void ComputeCpuProfiling(Sampler* sampler, void* flag_ptr) {
|
| + bool* flag = reinterpret_cast<bool*>(flag_ptr);
|
| + *flag |= sampler->IsProfiling();
|
| +}
|
| +
|
| +
|
| +SamplerRegistry::State SamplerRegistry::GetState() {
|
| + bool flag = false;
|
| + if (!IterateActiveSamplers(&ComputeCpuProfiling, &flag)) {
|
| + return HAS_NO_SAMPLERS;
|
| + }
|
| + return flag ? HAS_CPU_PROFILING_SAMPLERS : HAS_SAMPLERS;
|
| +}
|
| +
|
| +
|
| +void SamplerRegistry::AddActiveSampler(Sampler* sampler) {
|
| + ASSERT(sampler->IsActive());
|
| + ScopedLock lock(mutex_);
|
| + if (active_samplers_ == NULL) {
|
| + active_samplers_ = new List<Sampler*>;
|
| + } else {
|
| + ASSERT(!active_samplers_->Contains(sampler));
|
| + }
|
| + active_samplers_->Add(sampler);
|
| +}
|
| +
|
| +
|
| +void SamplerRegistry::RemoveActiveSampler(Sampler* sampler) {
|
| + ASSERT(sampler->IsActive());
|
| + ScopedLock lock(mutex_);
|
| + ASSERT(active_samplers_ != NULL);
|
| + bool removed = active_samplers_->RemoveElement(sampler);
|
| + ASSERT(removed);
|
| + USE(removed);
|
| +}
|
| +
|
| } } // namespace v8::internal
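
The new SamplerRegistry is a process-wide, mutex-guarded list of the active
Sampler objects. AddActiveSampler()/RemoveActiveSampler() maintain the list,
IterateActiveSamplers() applies a callback to every entry and reports whether
any samplers remain, and GetState() folds that into three answers: no samplers
at all, samplers present but none profiling, or at least one sampler currently
doing CPU profiling. A sketch of how a periodic sampling thread might consume
this (a hypothetical consumer, not part of this patch):

    // Hypothetical consumer: decide once per tick whether there is any work.
    static void DoSample(Sampler* sampler, void* param) {
      // A real consumer would take one stack sample for this sampler here.
      (void) sampler;
      (void) param;
    }

    static void SamplerThreadTick() {
      switch (SamplerRegistry::GetState()) {
        case SamplerRegistry::HAS_NO_SAMPLERS:
          return;  // nothing registered at the moment
        case SamplerRegistry::HAS_SAMPLERS:
          // Samplers exist but none is CPU-profiling; a runtime-profiler-only
          // client might still want plain ticks at this point.
          return;
        case SamplerRegistry::HAS_CPU_PROFILING_SAMPLERS:
          SamplerRegistry::IterateActiveSamplers(&DoSample, NULL);
          return;
      }
    }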
|
|
|