Index: src/inspector/v8-debugger.cc
diff --git a/src/inspector/v8-debugger.cc b/src/inspector/v8-debugger.cc
index 3d3f18189440609a561c57157cae8a681db0db1c..09c7f89cd4c64a6c5d511202c5e51b216069789d 100644
--- a/src/inspector/v8-debugger.cc
+++ b/src/inspector/v8-debugger.cc
@@ -26,6 +26,11 @@ static const char v8AsyncTaskEventWillHandle[] = "willHandle";
 static const char v8AsyncTaskEventDidHandle[] = "didHandle";
 static const char v8AsyncTaskEventCancel[] = "cancel";
 
+// Based on DevTools frontend measurements, with asyncCallStackDepth = 4 an
+// average async call stack tail requires ~1 KB, so reserve ~128 MB for
+// async stacks.
+static const int kMaxAsyncTaskStacks = 128 * 1024;
+
 inline v8::Local<v8::Boolean> v8Boolean(bool value, v8::Isolate* isolate) {
   return value ? v8::True(isolate) : v8::False(isolate);
 }
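
The new constant caps how many async task stacks the debugger retains at once. As a quick check of the budget stated in the comment, here is a minimal standalone restatement of the arithmetic; kAssumedBytesPerStack is an illustrative name, and the ~1 KB figure is the comment's estimate, not something measured here:

    #include <cstddef>

    // Illustrative restatement of the budget in the comment above; the
    // ~1 KB-per-stack figure is an assumption taken from that comment.
    static const int kMaxAsyncTaskStacks = 128 * 1024;  // 131072 stacks
    static const size_t kAssumedBytesPerStack = 1024;    // ~1 KB each (assumed)
    static_assert(static_cast<size_t>(kMaxAsyncTaskStacks) *
                          kAssumedBytesPerStack ==
                      static_cast<size_t>(128) * 1024 * 1024,
                  "128 * 1024 stacks at ~1 KB each is ~128 MB");
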
@@ -901,12 +906,23 @@ void V8Debugger::asyncTaskScheduled(const String16& taskName, void* task,
   if (chain) {
     m_asyncTaskStacks[task] = std::move(chain);
     if (recurring) m_recurringTasks.insert(task);
+    m_scheduledOrder.push_back(task);
+    while (m_scheduledOrder.size() - m_scheduledOrderDeleted.size() >
+           kMaxAsyncTaskStacks) {
+      void* task = m_scheduledOrder.front();
+      m_scheduledOrder.pop_front();
+      m_asyncTaskStacks.erase(task);
+      m_recurringTasks.erase(task);
+      m_scheduledOrderDeleted.erase(task);
+    }
   }
 }
 
 void V8Debugger::asyncTaskCanceled(void* task) {
   if (!m_maxAsyncCallStackDepth) return;
   m_asyncTaskStacks.erase(task);
+  m_scheduledOrderDeleted.insert(task);
+  cleanupScheduledOrderIfNeeded();
   m_recurringTasks.erase(task);
 }
 
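
Together, these two changes bound the number of stored stacks: each scheduled task is appended to m_scheduledOrder, cancellation only records a tombstone in m_scheduledOrderDeleted, and the eviction loop drops the oldest entries (and their stacks) whenever the live count, deque size minus tombstones, exceeds kMaxAsyncTaskStacks. A self-contained sketch of the same bookkeeping, with illustrative names (BoundedAsyncStacks and its members are not part of the patch, and plain std containers stand in for the inspector's own types):

    #include <cstddef>
    #include <deque>
    #include <map>
    #include <set>

    // Sketch of the capped bookkeeping used above. Scheduling appends to an
    // order deque, cancellation only tombstones, and eviction walks the front
    // until the live count is back under the cap.
    class BoundedAsyncStacks {
     public:
      explicit BoundedAsyncStacks(size_t maxStacks) : m_maxStacks(maxStacks) {}

      void scheduled(void* task, int stack) {
        m_stacks[task] = stack;   // stands in for m_asyncTaskStacks
        m_order.push_back(task);  // stands in for m_scheduledOrder
        // Evict the oldest entries once more than m_maxStacks live stacks exist.
        while (m_order.size() - m_deleted.size() > m_maxStacks) {
          void* oldest = m_order.front();
          m_order.pop_front();
          m_stacks.erase(oldest);
          m_deleted.erase(oldest);  // a stale tombstone, if any, is obsolete now
        }
      }

      void canceled(void* task) {
        m_stacks.erase(task);
        m_deleted.insert(task);  // tombstone only; the deque is compacted lazily
      }

      bool hasStack(void* task) const { return m_stacks.count(task) > 0; }

     private:
      size_t m_maxStacks;
      std::map<void*, int> m_stacks;  // task -> stored stack (int as placeholder)
      std::deque<void*> m_order;      // scheduling order, oldest at the front
      std::set<void*> m_deleted;      // tombstones for canceled/finished tasks
    };

Keeping cancellation to a map erase plus a set insert avoids scanning the deque on the hot path; stale front entries are only paid for when a later schedule pushes the live count over the cap, or during the compaction added in the next hunk.
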
@@ -936,17 +952,34 @@ void V8Debugger::asyncTaskFinished(void* task) {
   m_currentTasks.pop_back();
 
   m_currentStacks.pop_back();
-  if (m_recurringTasks.find(task) == m_recurringTasks.end())
+  if (m_recurringTasks.find(task) == m_recurringTasks.end()) {
     m_asyncTaskStacks.erase(task);
+    m_scheduledOrderDeleted.insert(task);
+    cleanupScheduledOrderIfNeeded();
+  }
 }
 
 void V8Debugger::allAsyncTasksCanceled() {
   m_asyncTaskStacks.clear();
+  m_scheduledOrder.clear();
+  m_scheduledOrderDeleted.clear();
   m_recurringTasks.clear();
   m_currentStacks.clear();
   m_currentTasks.clear();
 }
 
+void V8Debugger::cleanupScheduledOrderIfNeeded() {
+  if (m_scheduledOrder.size() < kMaxAsyncTaskStacks * 2) return;
+  std::deque<void*> scheduledOrder;
+  for (void* task : m_scheduledOrder) {
+    if (m_scheduledOrderDeleted.find(task) == m_scheduledOrderDeleted.end()) {
+      scheduledOrder.push_back(task);
+    }
+  }
+  m_scheduledOrder.swap(scheduledOrder);
+  m_scheduledOrderDeleted.clear();
+}
+
 void V8Debugger::muteScriptParsedEvents() {
   ++m_ignoreScriptParsedEventsCounter;
 }
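
The new cleanupScheduledOrderIfNeeded() keeps m_scheduledOrder itself from growing without bound: once the deque reaches twice the cap it is rebuilt without the tombstoned tasks and the tombstone set is cleared. A standalone sketch of that lazy compaction (the free function and parameter names are illustrative, not part of the patch):

    #include <cstddef>
    #include <deque>
    #include <set>

    // Sketch of the lazy compaction in cleanupScheduledOrderIfNeeded():
    // rebuild the order deque without tombstoned tasks once it has grown to
    // twice the cap, then drop the now-useless tombstones.
    static void compactIfNeeded(std::deque<void*>& order,
                                std::set<void*>& deleted, size_t maxStacks) {
      if (order.size() < maxStacks * 2) return;
      std::deque<void*> compacted;
      for (void* task : order) {
        if (deleted.find(task) == deleted.end()) compacted.push_back(task);
      }
      order.swap(compacted);
      deleted.clear();
    }

A compaction scans at most 2 * kMaxAsyncTaskStacks pointers, and because the eviction loop keeps the live count at or below the cap, it only triggers after at least kMaxAsyncTaskStacks tombstones have built up, so the amortized cost per canceled or finished task stays roughly constant.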