| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/frames.h" | 5 #include "src/frames.h" |
| 6 | 6 |
| 7 #include <memory> | 7 #include <memory> |
| 8 #include <sstream> | 8 #include <sstream> |
| 9 | 9 |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| (...skipping 2251 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2262 | 2262 |
| 2263 Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) { | 2263 Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) { |
| 2264 ZoneList<StackFrame*> list(10, zone); | 2264 ZoneList<StackFrame*> list(10, zone); |
| 2265 for (StackFrameIterator it(isolate); !it.done(); it.Advance()) { | 2265 for (StackFrameIterator it(isolate); !it.done(); it.Advance()) { |
| 2266 StackFrame* frame = AllocateFrameCopy(it.frame(), zone); | 2266 StackFrame* frame = AllocateFrameCopy(it.frame(), zone); |
| 2267 list.Add(frame, zone); | 2267 list.Add(frame, zone); |
| 2268 } | 2268 } |
| 2269 return list.ToVector(); | 2269 return list.ToVector(); |
| 2270 } | 2270 } |
| 2271 | 2271 |
| 2272 #ifdef DEBUG |
| 2273 |
// Debug-only helper that scans the C++ portions of the stack and clobbers
// any word that looks like a pointer to a movable object in the V8 heap
// (by masking it with kMask below). Raw heap pointers held in C++ locals
// across a GC are thereby corrupted deterministically, so stale-pointer
// bugs crash instead of silently reading moved objects. Known-safe
// on-stack structures (Relocatables, v8::TryCatch blocks, ranges reported
// via VisitRootPointers) are skipped.
class HeapPointerZapper : public RootVisitor {
 public:
  explicit HeapPointerZapper(Isolate* isolate)
      : isolate_(isolate),
        heap_(isolate->heap()),
        // Head of the isolate's linked list of Relocatables (walked via
        // prev_ in ZapRange).
        next_relocatable_(isolate->relocatable_top()),
        // Head of the isolate's linked list of v8::TryCatch handlers.
        next_trycatch_(isolate->try_catch_handler()) {}

  // Stack layout (in case there is an exit frame); lower addresses are at
  // the top of this diagram:
  // - entry frame's FP <-- frame->fp()
  // - saved C++ PC
  // - last C++ stack slot <-- cpp_start
  //   [C++ stuff...]
  // - saved FP <-- cpp_limit (exclusive)
  // - saved CEntry PC
  // - exit frame's SP <-- caller_sp
  //   [exit frame's contents...]
  // - exit frame's FP <-- caller_fp

  // Zaps all C++ stack sections of thread |t|: the C++ frames currently on
  // top of the stack, plus any C++ sections sandwiched between JS frames.
  void Zap(ThreadLocalTop* t) {
    // Part One: the current bunch of C++ frames on top of the stack.
    ZapRange(reinterpret_cast<Address>(GetCurrentStackPosition()),
             isolate_->c_entry_fp(t));

    // Part Two: any nested C++ stack sections buried under some JS.
    // Each ENTRY frame marks a JS->C++ transition; the C++ section runs
    // from just above the entry frame's FP up to the caller's FP.
    for (StackFrameIterator it(isolate_, t); !it.done(); it.Advance()) {
      if (it.frame()->type() != StackFrame::ENTRY) continue;

      EntryFrame* frame = reinterpret_cast<EntryFrame*>(it.frame());
      // Add 2 * kPointerSize for next FP and caller's PC.
      Address cpp_start = frame->fp() + 2 * kPointerSize;
      const int offset = EntryFrameConstants::kCallerFPOffset;
      Address caller_fp = Memory::Address_at(frame->fp() + offset);
      ZapRange(cpp_start, caller_fp);
    }
  }

  // RootVisitor interface. Called back (from IterateInstance in ZapRange)
  // by on-stack objects that want a range of their slots left unzapped.
  void VisitRootPointers(Root root, Object** start, Object** end) override {
    // Support for nested constellations of Relocatables and their skipped
    // ranges is currently unimplemented: only one pending range at a time.
    DCHECK_NULL(next_relocatable_start_);
    // Remember that the range [start, end) should be skipped.
    next_relocatable_start_ = start;
    next_relocatable_end_ = end;
  }

 private:
  // Zaps heap-pointer-like words in [start, limit), where |limit| is
  // derived from |maybe_exit_frame_fp|: the top of the previous exit frame
  // if there is one, otherwise the saved base of the stack.
  void ZapRange(Address start, Address maybe_exit_frame_fp) {
    Address limit;
    if (maybe_exit_frame_fp == nullptr) {
      // Walk until the end of the stack, which has been saved by
      // StackGuard::ThreadLocal::Initialize().
      limit = reinterpret_cast<Address>(
          isolate_->stack_guard()->stack_base_position());
    } else {
      // If there's a previous JavaScript exit frame, stop iterating at
      // its top.
      Address exit_frame_sp =
          ExitFrame::ComputeStackPointer(maybe_exit_frame_fp);
      // Subtract 2 * kPointerSize for CEntry PC and next FP.
      limit = exit_frame_sp - 2 * kPointerSize;
    }

    Object** start_obj = reinterpret_cast<Object**>(start);
    Object** limit_obj = reinterpret_cast<Object**>(limit);
    for (cursor_ = start_obj; cursor_ < limit_obj; ++cursor_) {
      // Check if we've found a Relocatable (a GC safe on-stack object that
      // must not be zapped).
      if (reinterpret_cast<intptr_t>(cursor_) ==
          reinterpret_cast<intptr_t>(next_relocatable_)) {
        // FlatStringReader restores its fields in PostGarbageCollection()
        // (after we may have zapped them).
        if (prev_relocatable_ != nullptr) {
          prev_relocatable_->PostGarbageCollection();
        }
        // CustomArgumentsBase can inform us about its skippable fields
        // via IterateInstance() (which calls back into
        // VisitRootPointers above).
        next_relocatable_->IterateInstance(this);
        prev_relocatable_ = next_relocatable_;
        next_relocatable_ = next_relocatable_->prev_;
        continue;
      }

      // Check if we're running into a Relocatable's skippable range.
      // (The end slot itself is processed normally: the range is
      // half-open, and the loop's ++cursor_ has not run yet.)
      if (cursor_ == next_relocatable_start_) {
        cursor_ = next_relocatable_end_;
        next_relocatable_start_ = nullptr;  // Mark as consumed.
      }

      // Check if we've found a v8::TryCatch; skip over its fields.
      // (-1 compensates for the loop's ++cursor_.)
      if (reinterpret_cast<intptr_t>(cursor_) ==
          reinterpret_cast<intptr_t>(next_trycatch_)) {
        cursor_ += (sizeof(v8::TryCatch) / kPointerSize) - 1;
        next_trycatch_ = Isolate::next_try_catch(next_trycatch_);
        continue;
      }

      // Only words with a heap-object tag are candidates for zapping.
      if (!(*cursor_)->IsHeapObject()) continue;
      HeapObject* obj = HeapObject::cast(*cursor_);
      // If something has a heap pointer tag but doesn't point into the
      // heap, ignore it. (We'll crash anyway if we try to use it later.)
      if (!heap_->ContainsSlow(obj->address())) continue;
      // TODO(jkummerow): Maps don't move, but they can die.
      // Should we be more strict here?
      // The difficulty is the "Allocate(map, allocation_space)" pattern.
      MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
      if (chunk->NeverEvacuate()) continue;
      // Keeping raw pointers to immortal immovables is fine.
      if (SlowIsImmortalImmovable(obj)) continue;
      // PrintF("Found heap pointer: *%p = %p, zapping it!\n", cursor_, obj);

      // Local variables of type bool present a difficulty. When they get
      // allocated over a garbage heap pointer, they tend to change it so
      // little that it still looks like a heap pointer; but if we zap it
      // by overwriting it with kZapValue, that makes such bools invalid.
      // To avoid this, zap the values in such a way that the values of any
      // bytes used for bools are preserved.
#if V8_HOST_ARCH_64_BIT
      const uintptr_t kMask = 0x0101010101010101ul;
#else
      const uintptr_t kMask = 0x01010101u;
#endif
      *(reinterpret_cast<uintptr_t*>(cursor_)) &= kMask;
    }
    // Give the last Relocatable we zapped over a chance to restore itself.
    if (prev_relocatable_ != nullptr) {
      prev_relocatable_->PostGarbageCollection();
    }
  }

  // Returns true if |obj| is one of the heap's immortal immovable roots
  // (which are safe to keep raw pointers to). Linear scan over the root
  // list; only for DEBUG-mode use.
  bool SlowIsImmortalImmovable(HeapObject* obj) {
    // Fast paths.
    if (obj->IsUndefined(isolate_)) return true;
    // Slow lookup.
    Object** roots = heap_->roots_array_start();
    for (int index = 0; index < Heap::kRootListLength; index++) {
      if (obj == roots[index]) {
        return Heap::RootIsImmortalImmovable(index);
      }
    }
    return false;
  }

  Isolate* isolate_;
  Heap* heap_;
  Relocatable* next_relocatable_;            // Next Relocatable to expect.
  Relocatable* prev_relocatable_ = nullptr;  // Last Relocatable passed.
  Object** cursor_ = nullptr;                // Current slot being scanned.
  Object** next_relocatable_start_ = nullptr;  // Pending skip range start.
  Object** next_relocatable_end_ = nullptr;    // Pending skip range end.
  v8::TryCatch* next_trycatch_;              // Next TryCatch to expect.
};
| 2425 |
| 2426 void ZapHeapPointersInCppFrames(Isolate* isolate, ThreadLocalTop* t) { |
| 2427 DCHECK(FLAG_zap_cpp_pointers); |
| 2428 if (t == NULL) t = isolate->thread_local_top(); |
| 2429 HeapPointerZapper zapper(isolate); |
| 2430 zapper.Zap(t); |
| 2431 } |
| 2432 |
| 2433 #endif |
| 2272 | 2434 |
| 2273 } // namespace internal | 2435 } // namespace internal |
| 2274 } // namespace v8 | 2436 } // namespace v8 |
| OLD | NEW |