Index: src/inspector/v8-debugger.cc |
diff --git a/src/inspector/v8-debugger.cc b/src/inspector/v8-debugger.cc |
index 91d37b10881db7939ce298a5fcd649988a841f85..3fa2061077435bc25ef3b223929f5c0fd096104d 100644 |
--- a/src/inspector/v8-debugger.cc |
+++ b/src/inspector/v8-debugger.cc |
@@ -21,7 +21,10 @@ |
namespace { |
-static const int kMaxAsyncTaskStacks = 1024 * 1024; |
+// Based on DevTools frontend measurement, with asyncCallStackDepth = 4, |
+// average async call stack tail requires ~1 Kb. Let's reserve ~128 Mb |
+// for async stacks. |
+static const int kMaxAsyncTaskStacks = 128 * 1024; |
inline v8::Local<v8::Boolean> v8Boolean(bool value, v8::Isolate* isolate) { |
return value ? v8::True(isolate) : v8::False(isolate); |
@@ -164,6 +167,7 @@ |
m_runningNestedMessageLoop(false), |
m_ignoreScriptParsedEventsCounter(0), |
m_maxAsyncCallStacks(kMaxAsyncTaskStacks), |
+ m_lastTaskId(0), |
m_maxAsyncCallStackDepth(0), |
m_pauseOnExceptionsState(v8::debug::NoBreakOnException), |
m_wasmTranslation(isolate) {} |
@@ -658,6 +662,10 @@ |
case v8::debug::kDebugEnqueuePromiseReject: |
asyncTaskScheduledForStack("Promise.reject", task, true); |
break; |
+ case v8::debug::kDebugPromiseCollected: |
+ asyncTaskCanceledForStack(task); |
+ asyncTaskCanceledForStepping(task); |
+ break; |
case v8::debug::kDebugWillHandle: |
asyncTaskStartedForStack(task); |
asyncTaskStartedForStepping(task); |
@@ -666,20 +674,17 @@ |
asyncTaskFinishedForStack(task); |
asyncTaskFinishedForStepping(task); |
break; |
- case v8::debug::kDebugPromiseCollected: |
- asyncTaskCanceledForStack(task); |
- asyncTaskCanceledForStepping(task); |
- break; |
- } |
-} |
- |
-std::shared_ptr<AsyncStackTrace> V8Debugger::currentAsyncParent() { |
- return m_currentAsyncParent.empty() ? nullptr : m_currentAsyncParent.back(); |
-} |
- |
-std::shared_ptr<AsyncStackTrace> V8Debugger::currentAsyncCreation() { |
- return m_currentAsyncCreation.empty() ? nullptr |
- : m_currentAsyncCreation.back(); |
+ } |
+} |
+ |
+V8StackTraceImpl* V8Debugger::currentAsyncCallChain() { |
+ if (!m_currentStacks.size()) return nullptr; |
+ return m_currentStacks.back().get(); |
+} |
+ |
+V8StackTraceImpl* V8Debugger::currentAsyncTaskCreationStack() { |
+ if (!m_currentCreationStacks.size()) return nullptr; |
+ return m_currentCreationStacks.back().get(); |
} |
void V8Debugger::compileDebuggerScript() { |
@@ -820,8 +825,8 @@ |
} |
std::unique_ptr<V8StackTraceImpl> V8Debugger::createStackTrace( |
- v8::Local<v8::StackTrace> v8StackTrace) { |
- return V8StackTraceImpl::create(this, currentContextGroupId(), v8StackTrace, |
+ v8::Local<v8::StackTrace> stackTrace) { |
+ return V8StackTraceImpl::create(this, currentContextGroupId(), stackTrace, |
V8StackTraceImpl::maxCallStackSizeToCapture); |
} |
@@ -842,18 +847,31 @@ |
if (!maxAsyncCallStackDepth) allAsyncTasksCanceled(); |
} |
+void V8Debugger::registerAsyncTaskIfNeeded(void* task) { |
+ if (m_taskToId.find(task) != m_taskToId.end()) return; |
+ |
+ int id = ++m_lastTaskId; |
+ m_taskToId[task] = id; |
+ m_idToTask[id] = task; |
+ if (static_cast<int>(m_idToTask.size()) > m_maxAsyncCallStacks) { |
+ void* taskToRemove = m_idToTask.begin()->second; |
+ asyncTaskCanceledForStack(taskToRemove); |
+ } |
+} |
+ |
void V8Debugger::asyncTaskCreatedForStack(void* task, void* parentTask) { |
if (!m_maxAsyncCallStackDepth) return; |
if (parentTask) m_parentTask[task] = parentTask; |
v8::HandleScope scope(m_isolate); |
- std::shared_ptr<AsyncStackTrace> asyncCreation = |
- AsyncStackTrace::capture(this, currentContextGroupId(), String16(), 1); |
- // Passing one as maxStackSize forces no async chain for the new stack. |
- if (asyncCreation && !asyncCreation->isEmpty()) { |
- m_asyncTaskCreationStacks[task] = asyncCreation; |
- m_allAsyncStacks.push_back(asyncCreation); |
- ++m_asyncStacksCount; |
- collectOldAsyncStacksIfNeeded(); |
+ // We don't need to pass context group id here because we get this callback |
+ // from V8 for promise events only. |
+ // Passing one as maxStackSize forces no async chain for the new stack and |
+ // allows us to not grow exponentially. |
+ std::unique_ptr<V8StackTraceImpl> creationStack = |
+ V8StackTraceImpl::capture(this, 0, 1, String16()); |
+ if (creationStack && !creationStack->isEmpty()) { |
+ m_asyncTaskCreationStacks[task] = std::move(creationStack); |
+ registerAsyncTaskIfNeeded(task); |
} |
} |
@@ -882,16 +900,13 @@ |
void* task, bool recurring) { |
if (!m_maxAsyncCallStackDepth) return; |
v8::HandleScope scope(m_isolate); |
- std::shared_ptr<AsyncStackTrace> asyncStack = |
- AsyncStackTrace::capture(this, currentContextGroupId(), taskName, |
- V8StackTraceImpl::maxCallStackSizeToCapture); |
- if (asyncStack) { |
- m_asyncTaskStacks[task] = asyncStack; |
+ std::unique_ptr<V8StackTraceImpl> chain = V8StackTraceImpl::capture( |
+ this, currentContextGroupId(), |
+ V8StackTraceImpl::maxCallStackSizeToCapture, taskName); |
+ if (chain) { |
+ m_asyncTaskStacks[task] = std::move(chain); |
if (recurring) m_recurringTasks.insert(task); |
- |
- m_allAsyncStacks.push_back(asyncStack); |
- ++m_asyncStacksCount; |
- collectOldAsyncStacksIfNeeded(); |
+ registerAsyncTaskIfNeeded(task); |
} |
} |
@@ -901,6 +916,10 @@ |
m_recurringTasks.erase(task); |
m_parentTask.erase(task); |
m_asyncTaskCreationStacks.erase(task); |
+ auto it = m_taskToId.find(task); |
+ if (it == m_taskToId.end()) return; |
+ m_idToTask.erase(it->second); |
+ m_taskToId.erase(it); |
} |
void V8Debugger::asyncTaskStartedForStack(void* task) { |
@@ -916,28 +935,28 @@ |
// - asyncTaskCanceled <-- canceled before finished |
// <-- async stack requested here --> |
// - asyncTaskFinished |
- std::shared_ptr<AsyncStackTrace> asyncParent; |
- if (stackIt != m_asyncTaskStacks.end()) asyncParent = stackIt->second.lock(); |
+ std::unique_ptr<V8StackTraceImpl> stack; |
+ if (stackIt != m_asyncTaskStacks.end() && stackIt->second) |
+ stack = stackIt->second->cloneImpl(); |
auto itCreation = m_asyncTaskCreationStacks.find(task); |
- if (asyncParent && itCreation != m_asyncTaskCreationStacks.end()) { |
- m_currentAsyncCreation.push_back(itCreation->second.lock()); |
+ if (stack && itCreation != m_asyncTaskCreationStacks.end()) { |
+ m_currentCreationStacks.push_back(itCreation->second->cloneImpl()); |
} else { |
- m_currentAsyncCreation.push_back(nullptr); |
- } |
- m_currentAsyncParent.push_back(asyncParent); |
+ m_currentCreationStacks.push_back(nullptr); |
+ } |
+ m_currentStacks.push_back(std::move(stack)); |
} |
void V8Debugger::asyncTaskFinishedForStack(void* task) { |
if (!m_maxAsyncCallStackDepth) return; |
// We could start instrumenting half way and the stack is empty. |
- if (!m_currentTasks.size()) return; |
+ if (!m_currentStacks.size()) return; |
DCHECK(m_currentTasks.back() == task); |
m_currentTasks.pop_back(); |
- DCHECK(m_currentAsyncParent.size() == m_currentAsyncCreation.size()); |
- m_currentAsyncParent.pop_back(); |
- m_currentAsyncCreation.pop_back(); |
- |
+ DCHECK(m_currentStacks.size() == m_currentCreationStacks.size()); |
+ m_currentStacks.pop_back(); |
+ m_currentCreationStacks.pop_back(); |
if (m_recurringTasks.find(task) == m_recurringTasks.end()) { |
asyncTaskCanceledForStack(task); |
} |
@@ -974,14 +993,14 @@ |
void V8Debugger::allAsyncTasksCanceled() { |
m_asyncTaskStacks.clear(); |
m_recurringTasks.clear(); |
- m_currentAsyncParent.clear(); |
- m_currentAsyncCreation.clear(); |
+ m_currentStacks.clear(); |
+ m_currentCreationStacks.clear(); |
m_currentTasks.clear(); |
m_parentTask.clear(); |
m_asyncTaskCreationStacks.clear(); |
- |
- m_allAsyncStacks.clear(); |
- m_asyncStacksCount = 0; |
+ m_idToTask.clear(); |
+ m_taskToId.clear(); |
+ m_lastTaskId = 0; |
} |
void V8Debugger::muteScriptParsedEvents() { |
@@ -1001,10 +1020,11 @@ |
int contextGroupId = currentContextGroupId(); |
if (!contextGroupId) return nullptr; |
- int stackSize = 1; |
- if (fullStack || m_inspector->enabledRuntimeAgentForGroup(contextGroupId)) { |
+ size_t stackSize = |
+ fullStack ? V8StackTraceImpl::maxCallStackSizeToCapture : 1; |
+ if (m_inspector->enabledRuntimeAgentForGroup(contextGroupId)) |
stackSize = V8StackTraceImpl::maxCallStackSizeToCapture; |
- } |
+ |
return V8StackTraceImpl::capture(this, contextGroupId, stackSize); |
} |
@@ -1013,30 +1033,4 @@ |
return m_inspector->contextGroupId(m_isolate->GetCurrentContext()); |
} |
-void V8Debugger::collectOldAsyncStacksIfNeeded() { |
- if (m_asyncStacksCount <= m_maxAsyncCallStacks) return; |
- int halfOfLimitRoundedUp = |
- m_maxAsyncCallStacks / 2 + m_maxAsyncCallStacks % 2; |
- while (m_asyncStacksCount > halfOfLimitRoundedUp) { |
- m_allAsyncStacks.pop_front(); |
- --m_asyncStacksCount; |
- } |
- removeOldAsyncTasks(m_asyncTaskStacks); |
- removeOldAsyncTasks(m_asyncTaskCreationStacks); |
-} |
- |
-void V8Debugger::removeOldAsyncTasks(AsyncTaskToStackTrace& map) { |
- AsyncTaskToStackTrace cleanCopy; |
- for (auto it : map) { |
- if (!it.second.expired()) cleanCopy.insert(it); |
- } |
- map.swap(cleanCopy); |
-} |
- |
-void V8Debugger::setMaxAsyncTaskStacksForTest(int limit) { |
- m_maxAsyncCallStacks = 0; |
- collectOldAsyncStacksIfNeeded(); |
- m_maxAsyncCallStacks = limit; |
-} |
- |
} // namespace v8_inspector |