| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/debug/debug.h" | 5 #include "src/debug/debug.h" |
| 6 | 6 |
| 7 #include <memory> | 7 #include <memory> |
| 8 | 8 |
| 9 #include "src/api.h" | 9 #include "src/api.h" |
| 10 #include "src/arguments.h" | 10 #include "src/arguments.h" |
| (...skipping 1158 matching lines...) |
| 1169 Code* current = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 1169 Code* current = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
| 1170 if (MatchingCodeTargets(target, current)) index--; | 1170 if (MatchingCodeTargets(target, current)) index--; |
| 1171 if (index == 0) return rinfo->pc() + delta; | 1171 if (index == 0) return rinfo->pc() + delta; |
| 1172 } | 1172 } |
| 1173 | 1173 |
| 1174 UNREACHABLE(); | 1174 UNREACHABLE(); |
| 1175 return NULL; | 1175 return NULL; |
| 1176 } | 1176 } |
| 1177 | 1177 |
| 1178 | 1178 |
| 1179 // Compute the continuation index corresponding to the current pc offset. | |
| 1180 static int ComputeContinuationIndexFromPcOffset(Code* code, int pc_offset) { | |
| 1181 UNREACHABLE(); | |
| 1182 return 666; | |
| 1183 } | |
| 1184 | |
| 1185 | |
| 1186 // Find the pc offset for the given continuation index. | |
| 1187 static int ComputePcOffsetFromContinuationIndex(Code* code, int index) { | |
| 1188 UNREACHABLE(); | |
| 1189 return 666; | |
| 1190 } | |
| 1191 | |
| 1192 | |
| 1193 class RedirectActiveFunctions : public ThreadVisitor { | 1179 class RedirectActiveFunctions : public ThreadVisitor { |
| 1194 public: | 1180 public: |
| 1195 explicit RedirectActiveFunctions(SharedFunctionInfo* shared) | 1181 explicit RedirectActiveFunctions(SharedFunctionInfo* shared) |
| 1196 : shared_(shared) { | 1182 : shared_(shared) { |
| 1197 DCHECK(shared->HasDebugCode()); | 1183 DCHECK(shared->HasDebugCode()); |
| 1198 } | 1184 } |
| 1199 | 1185 |
| 1200 void VisitThread(Isolate* isolate, ThreadLocalTop* top) { | 1186 void VisitThread(Isolate* isolate, ThreadLocalTop* top) { |
| 1201 for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) { | 1187 for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) { |
| 1202 JavaScriptFrame* frame = it.frame(); | 1188 JavaScriptFrame* frame = it.frame(); |
| (...skipping 43 matching lines...) |
| 1246 | 1232 |
| 1247 | 1233 |
| 1248 bool Debug::PrepareFunctionForBreakPoints(Handle<SharedFunctionInfo> shared) { | 1234 bool Debug::PrepareFunctionForBreakPoints(Handle<SharedFunctionInfo> shared) { |
| 1249 DCHECK(shared->is_compiled()); | 1235 DCHECK(shared->is_compiled()); |
| 1250 | 1236 |
| 1251 if (isolate_->concurrent_recompilation_enabled()) { | 1237 if (isolate_->concurrent_recompilation_enabled()) { |
| 1252 isolate_->optimizing_compile_dispatcher()->Flush(); | 1238 isolate_->optimizing_compile_dispatcher()->Flush(); |
| 1253 } | 1239 } |
| 1254 | 1240 |
| 1255 List<Handle<JSFunction> > functions; | 1241 List<Handle<JSFunction> > functions; |
| 1256 List<Handle<JSGeneratorObject> > suspended_generators; | |
| 1257 | 1242 |
| 1258 // Flush all optimized code maps. Note that the below heap iteration does not | 1243 // Flush all optimized code maps. Note that the below heap iteration does not |
| 1259 // cover this, because the given function might have been inlined into code | 1244 // cover this, because the given function might have been inlined into code |
| 1260 // for which no JSFunction exists. | 1245 // for which no JSFunction exists. |
| 1261 { | 1246 { |
| 1262 SharedFunctionInfo::Iterator iterator(isolate_); | 1247 SharedFunctionInfo::Iterator iterator(isolate_); |
| 1263 while (SharedFunctionInfo* shared = iterator.Next()) { | 1248 while (SharedFunctionInfo* shared = iterator.Next()) { |
| 1264 shared->ClearCodeFromOptimizedCodeMap(); | 1249 shared->ClearCodeFromOptimizedCodeMap(); |
| 1265 } | 1250 } |
| 1266 } | 1251 } |
| 1267 | 1252 |
| 1268 // Make sure we abort incremental marking. | 1253 // Make sure we abort incremental marking. |
| 1269 isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask, | 1254 isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask, |
| 1270 GarbageCollectionReason::kDebugger); | 1255 GarbageCollectionReason::kDebugger); |
| 1271 | 1256 |
| 1272 DCHECK(shared->is_compiled()); | 1257 DCHECK(shared->is_compiled()); |
| 1273 bool baseline_exists = shared->HasBaselineCode(); | 1258 bool baseline_exists = shared->HasBaselineCode(); |
| 1274 | 1259 |
| 1275 { | 1260 { |
| 1276 // TODO(yangguo): with bytecode, we still walk the heap to find all | 1261 // TODO(yangguo): with bytecode, we still walk the heap to find all |
| 1277 // optimized code for the function to deoptimize. We can probably be | 1262 // optimized code for the function to deoptimize. We can probably be |
| 1278 // smarter here and avoid the heap walk. | 1263 // smarter here and avoid the heap walk. |
| 1279 HeapIterator iterator(isolate_->heap()); | 1264 HeapIterator iterator(isolate_->heap()); |
| 1280 HeapObject* obj; | 1265 HeapObject* obj; |
| 1281 // Continuations from old-style generators need to be recomputed. | |
| 1282 // TODO(yangguo): Remove code for old-style generators. | |
| 1283 bool find_resumables = | |
| 1284 baseline_exists && IsResumableFunction(shared->kind()); | |
| 1285 | 1266 |
| 1286 while ((obj = iterator.next())) { | 1267 while ((obj = iterator.next())) { |
| 1287 if (obj->IsJSFunction()) { | 1268 if (obj->IsJSFunction()) { |
| 1288 JSFunction* function = JSFunction::cast(obj); | 1269 JSFunction* function = JSFunction::cast(obj); |
| 1289 if (!function->Inlines(*shared)) continue; | 1270 if (!function->Inlines(*shared)) continue; |
| 1290 if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) { | 1271 if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) { |
| 1291 Deoptimizer::DeoptimizeFunction(function); | 1272 Deoptimizer::DeoptimizeFunction(function); |
| 1292 } | 1273 } |
| 1293 if (baseline_exists && function->shared() == *shared) { | 1274 if (baseline_exists && function->shared() == *shared) { |
| 1294 functions.Add(handle(function)); | 1275 functions.Add(handle(function)); |
| 1295 } | 1276 } |
| 1296 } else if (find_resumables && obj->IsJSGeneratorObject()) { | |
| 1297 // This case handles async functions as well, as they use generator | |
| 1298 // objects for in-progress async function execution. | |
| 1299 JSGeneratorObject* generator_obj = JSGeneratorObject::cast(obj); | |
| 1300 if (!generator_obj->is_suspended()) continue; | |
| 1301 JSFunction* function = generator_obj->function(); | |
| 1302 if (!function->Inlines(*shared)) continue; | |
| 1303 int pc_offset = generator_obj->continuation(); | |
| 1304 int index = | |
| 1305 ComputeContinuationIndexFromPcOffset(function->code(), pc_offset); | |
| 1306 generator_obj->set_continuation(index); | |
| 1307 suspended_generators.Add(handle(generator_obj)); | |
| 1308 } | 1277 } |
| 1309 } | 1278 } |
| 1310 } | 1279 } |
| 1311 | 1280 |
| 1312 // We do not need to replace code to debug bytecode. | 1281 // We do not need to replace code to debug bytecode. |
| 1313 DCHECK(baseline_exists || functions.is_empty()); | 1282 DCHECK(baseline_exists || functions.is_empty()); |
| 1314 DCHECK(baseline_exists || suspended_generators.is_empty()); | |
| 1315 | 1283 |
| 1316 // We do not need to recompile to debug bytecode. | 1284 // We do not need to recompile to debug bytecode. |
| 1317 if (baseline_exists && !shared->code()->has_debug_break_slots()) { | 1285 if (baseline_exists && !shared->code()->has_debug_break_slots()) { |
| 1318 if (!Compiler::CompileDebugCode(shared)) return false; | 1286 if (!Compiler::CompileDebugCode(shared)) return false; |
| 1319 } | 1287 } |
| 1320 | 1288 |
| 1321 for (Handle<JSFunction> const function : functions) { | 1289 for (Handle<JSFunction> const function : functions) { |
| 1322 function->ReplaceCode(shared->code()); | 1290 function->ReplaceCode(shared->code()); |
| 1323 JSFunction::EnsureLiterals(function); | 1291 JSFunction::EnsureLiterals(function); |
| 1324 } | 1292 } |
| 1325 | 1293 |
| 1326 for (Handle<JSGeneratorObject> const generator_obj : suspended_generators) { | |
| 1327 int index = generator_obj->continuation(); | |
| 1328 int pc_offset = ComputePcOffsetFromContinuationIndex(shared->code(), index); | |
| 1329 generator_obj->set_continuation(pc_offset); | |
| 1330 } | |
| 1331 | |
| 1332 // Update PCs on the stack to point to recompiled code. | 1294 // Update PCs on the stack to point to recompiled code. |
| 1333 RedirectActiveFunctions redirect_visitor(*shared); | 1295 RedirectActiveFunctions redirect_visitor(*shared); |
| 1334 redirect_visitor.VisitThread(isolate_, isolate_->thread_local_top()); | 1296 redirect_visitor.VisitThread(isolate_, isolate_->thread_local_top()); |
| 1335 isolate_->thread_manager()->IterateArchivedThreads(&redirect_visitor); | 1297 isolate_->thread_manager()->IterateArchivedThreads(&redirect_visitor); |
| 1336 | 1298 |
| 1337 return true; | 1299 return true; |
| 1338 } | 1300 } |
| 1339 | 1301 |
| 1340 namespace { | 1302 namespace { |
| 1341 template <typename Iterator> | 1303 template <typename Iterator> |
| (...skipping 1269 matching lines...) |
| 2611 } | 2573 } |
| 2612 | 2574 |
| 2613 | 2575 |
| 2614 void LockingCommandMessageQueue::Clear() { | 2576 void LockingCommandMessageQueue::Clear() { |
| 2615 base::LockGuard<base::Mutex> lock_guard(&mutex_); | 2577 base::LockGuard<base::Mutex> lock_guard(&mutex_); |
| 2616 queue_.Clear(); | 2578 queue_.Clear(); |
| 2617 } | 2579 } |
| 2618 | 2580 |
| 2619 } // namespace internal | 2581 } // namespace internal |
| 2620 } // namespace v8 | 2582 } // namespace v8 |
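The two stub functions deleted above (`ComputeContinuationIndexFromPcOffset` and `ComputePcOffsetFromContinuationIndex`) were remnants of the old-style generator path: while a function's code was being replaced with debug code, a suspended generator's continuation, stored as a raw pc offset into the old code, was translated to a stable ordinal index and translated back to an offset into the new code once recompilation finished. As a rough illustration of that round trip, here is a minimal standalone C++ sketch; it is a toy model rather than V8 code, and the `ContinuationTable` type, helper names, and offset values are invented for the example.

```cpp
// Toy model of rebasing a suspended generator's continuation across a
// code swap. Real V8 recovered the resume points by scanning relocation
// info; here they are simply hard-coded offset tables.
#include <cassert>
#include <cstddef>
#include <vector>

// Hypothetical list of pc offsets at which a generator may resume,
// in the order they appear in the generated code.
using ContinuationTable = std::vector<int>;

// Map a raw pc offset to its stable ordinal index.
static int IndexFromPcOffset(const ContinuationTable& table, int pc_offset) {
  for (std::size_t i = 0; i < table.size(); ++i) {
    if (table[i] == pc_offset) return static_cast<int>(i);
  }
  assert(false && "pc offset is not a known continuation");
  return -1;
}

// Map a stable ordinal index back to a pc offset in the (new) code.
static int PcOffsetFromIndex(const ContinuationTable& table, int index) {
  return table.at(static_cast<std::size_t>(index));
}

int main() {
  ContinuationTable old_code = {12, 40, 96};   // resume points in old code
  ContinuationTable new_code = {20, 64, 130};  // same points after recompile

  int suspended_at = 40;  // generator was parked at this old-code offset
  int index = IndexFromPcOffset(old_code, suspended_at);  // stable index 1
  int rebased = PcOffsetFromIndex(new_code, index);       // new offset 64
  assert(rebased == 64);
  return 0;
}
```

After this CL, `PrepareFunctionForBreakPoints` no longer collects suspended generators at all: the `find_resumables` branch of the heap walk, the `suspended_generators` list, and the post-recompile rebasing loop are removed along with the stubs.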