| Index: src/frames.cc
|
| diff --git a/src/frames.cc b/src/frames.cc
|
| index 90610fafb67500e67ce868e13e21276212831ad8..f5b9e8551c2625a9cda21298aeebbe0e7f941b70 100644
|
| --- a/src/frames.cc
|
| +++ b/src/frames.cc
|
| @@ -2269,6 +2269,168 @@ Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
|
| return list.ToVector();
|
| }
|
|
|
| +#ifdef DEBUG
|
| +
|
| +class HeapPointerZapper : public RootVisitor {
|
| + public:
|
| + explicit HeapPointerZapper(Isolate* isolate)
|
| + : isolate_(isolate),
|
| + heap_(isolate->heap()),
|
| + next_relocatable_(isolate->relocatable_top()),
|
| + next_trycatch_(isolate->try_catch_handler()) {}
|
| +
|
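| + // Relocatables and TryCatch handlers each form an intrusive list headed
|
| + // by the most recently constructed (topmost-on-stack) object, so walking
|
| + // the stack from its top towards its base encounters them in list order.
|
| +
|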
| + // Stack layout (in case there is an exit frame):
|
| + // - entry frame's FP <-- frame->fp()
|
| + // - saved C++ PC
|
| + // - last C++ stack slot <-- cpp_start
|
| + // [C++ stuff...]
|
| + // - saved FP <-- cpp_limit (exclusive)
|
| + // - saved CEntry PC
|
| + // - exit frame's SP <-- caller_sp
|
| + // [exit frame's contents...]
|
| + // - exit frame's FP <-- caller_fp
|
| +
|
| + void Zap(ThreadLocalTop* t) {
|
| + // Part One: the topmost run of C++ frames on the stack.
|
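| + // c_entry_fp(t) is the FP of the topmost exit frame, or null when this
|
| + // C++ code was not called from JavaScript; ZapRange() handles both cases.
|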
| + ZapRange(reinterpret_cast<Address>(GetCurrentStackPosition()),
|
| + isolate_->c_entry_fp(t));
|
| +
|
| + // Part Two: any nested C++ stack sections buried below JavaScript frames.
|
| + for (StackFrameIterator it(isolate_, t); !it.done(); it.Advance()) {
|
| + if (it.frame()->type() != StackFrame::ENTRY) continue;
|
| +
|
| + EntryFrame* frame = reinterpret_cast<EntryFrame*>(it.frame());
|
| + // Add 2 * kPointerSize to skip the saved FP and the saved C++ PC.
|
| + Address cpp_start = frame->fp() + 2 * kPointerSize;
|
| + const int offset = EntryFrameConstants::kCallerFPOffset;
|
| + Address caller_fp = Memory::Address_at(frame->fp() + offset);
|
| + ZapRange(cpp_start, caller_fp);
|
| + }
|
| + }
|
| +
|
| + // RootVisitor interface.
|
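| + // Called via Relocatable::IterateInstance(); the reported range holds
|
| + // slots that the GC already tracks, so they must not be zapped.
|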
| + void VisitRootPointers(Root root, Object** start, Object** end) override {
|
| + // Nested Relocatables (more than one pending skipped range at a time)
|
| + // are currently unsupported.
|
| + DCHECK_NULL(next_relocatable_start_);
|
| + // Remember that the range [start, end) should be skipped.
|
| + next_relocatable_start_ = start;
|
| + next_relocatable_end_ = end;
|
| + }
|
| +
|
| + private:
|
| + void ZapRange(Address start, Address maybe_exit_frame_fp) {
|
| + Address limit;
|
| + if (maybe_exit_frame_fp == nullptr) {
|
| + // Walk until the end of the stack, which has been saved by
|
| + // StackGuard::ThreadLocal::Initialize().
|
| + limit = reinterpret_cast<Address>(
|
| + isolate_->stack_guard()->stack_base_position());
|
| + } else {
|
| + // If there's a previous JavaScript exit frame, stop iterating at its top.
|
| + Address exit_frame_sp =
|
| + ExitFrame::ComputeStackPointer(maybe_exit_frame_fp);
|
| + // Subtract 2 * kPointerSize to skip the saved CEntry PC and the saved FP.
|
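| + // This lands on the saved-FP slot, i.e. cpp_limit (exclusive) in the
|
| + // layout above.
|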
| + limit = exit_frame_sp - 2 * kPointerSize;
|
| + }
|
| +
|
| + Object** start_obj = reinterpret_cast<Object**>(start);
|
| + Object** limit_obj = reinterpret_cast<Object**>(limit);
|
| + for (cursor_ = start_obj; cursor_ < limit_obj; ++cursor_) {
|
| + // Check if we've found a Relocatable (a GC-safe on-stack object that
|
| + // must not be zapped).
|
| + if (reinterpret_cast<intptr_t>(cursor_) ==
|
| + reinterpret_cast<intptr_t>(next_relocatable_)) {
|
| + // FlatStringReader restores its fields in PostGarbageCollection()
|
| + // (after we may have zapped them).
|
| + if (prev_relocatable_ != nullptr) {
|
| + prev_relocatable_->PostGarbageCollection();
|
| + }
|
| + // CustomArgumentsBase can inform us about its skippable fields
|
| + // via IterateInstance().
|
| + next_relocatable_->IterateInstance(this);
|
| + prev_relocatable_ = next_relocatable_;
|
| + next_relocatable_ = next_relocatable_->prev_;
|
| + continue;
|
| + }
|
| +
|
| + // Check if we're running into a Relocatable's skippable range.
|
| + if (cursor_ == next_relocatable_start_) {
|
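| + // The range [start, end) is half-open, so the slot at
|
| + // next_relocatable_end_ itself is processed normally below.
|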
| + cursor_ = next_relocatable_end_;
|
| + next_relocatable_start_ = nullptr; // Mark as consumed.
|
| + }
|
| +
|
| + // Check if we've found a v8::TryCatch.
|
| + if (reinterpret_cast<intptr_t>(cursor_) ==
|
| + reinterpret_cast<intptr_t>(next_trycatch_)) {
|
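| + // Skip over the TryCatch's slots; the "- 1" compensates for the loop's
|
| + // ++cursor_, leaving the cursor on the first slot past the object.
|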
| + cursor_ += (sizeof(v8::TryCatch) / kPointerSize) - 1;
|
| + next_trycatch_ = Isolate::next_try_catch(next_trycatch_);
|
| + continue;
|
| + }
|
| +
|
| + if (!(*cursor_)->IsHeapObject()) continue;
|
| + HeapObject* obj = HeapObject::cast(*cursor_);
|
| + // If something has a heap pointer tag but doesn't point into the heap,
|
| + // ignore it. (We'll crash anyway if we try to use it later.)
|
| + if (!heap_->ContainsSlow(obj->address())) continue;
|
| + // TODO(jkummerow): Maps don't move, but they can die.
|
| + // Should we be more strict here?
|
| + // The difficulty is the "Allocate(map, allocation_space)" pattern.
|
| + MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
|
| + if (chunk->NeverEvacuate()) continue;
|
| + // Keeping raw pointers to immortal immovables is fine.
|
| + if (SlowIsImmortalImmovable(obj)) continue;
|
| + // PrintF("Found heap pointer: *%p = %p, zapping it!\n", cursor_, obj);
|
| +
|
| + // Local variables of type bool present a difficulty. When they get
|
| + // allocated over a garbage heap pointer, they tend to change it so
|
| + // little that it still looks like a heap pointer; but if we zap it
|
| + // by overwriting it with kZapValue, that makes such bools invalid.
|
| + // To avoid this, zap the values in such a way that the values of any
|
| + // bytes used for bools are preserved.
|
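| + // For example, a stale tagged pointer such as 0x00007f1234abcd01 becomes
|
| + // 0x0000010000010101 (clearly junk), while a bool byte holding 0x00 or
|
| + // 0x01 keeps its value.
|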
| +#if V8_HOST_ARCH_64_BIT
|
| + const uintptr_t kMask = 0x0101010101010101ul;
|
| +#else
|
| + const uintptr_t kMask = 0x01010101u;
|
| +#endif
|
| + *(reinterpret_cast<uintptr_t*>(cursor_)) &= kMask;
|
| + }
|
| + if (prev_relocatable_ != nullptr) {
|
| + prev_relocatable_->PostGarbageCollection();
|
| + }
|
| + }
|
| +
|
| + bool SlowIsImmortalImmovable(HeapObject* obj) {
|
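| + // Immortal immovable roots never move and are never collected, so raw
|
| + // on-stack pointers to them can never become stale.
|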
| + // Fast path.
|
| + if (obj->IsUndefined(isolate_)) return true;
|
| + // Slow lookup.
|
| + Object** roots = heap_->roots_array_start();
|
| + for (int index = 0; index < Heap::kRootListLength; index++) {
|
| + if (obj == roots[index]) {
|
| + return Heap::RootIsImmortalImmovable(index);
|
| + }
|
| + }
|
| + return false;
|
| + }
|
| +
|
| + Isolate* isolate_;
|
| + Heap* heap_;
|
| + Relocatable* next_relocatable_;
|
| + Relocatable* prev_relocatable_ = nullptr;
|
| + Object** cursor_ = nullptr;
|
| + Object** next_relocatable_start_ = nullptr;
|
| + Object** next_relocatable_end_ = nullptr;
|
| + v8::TryCatch* next_trycatch_;
|
| +};
|
| +
|
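| +// Debug-only entry point. A hypothetical call site (e.g. from a GC
|
| +// prologue) could look like:
|
| +//   if (FLAG_zap_cpp_pointers) ZapHeapPointersInCppFrames(isolate, nullptr);
|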
| +void ZapHeapPointersInCppFrames(Isolate* isolate, ThreadLocalTop* t) {
|
| + DCHECK(FLAG_zap_cpp_pointers);
|
| + if (t == nullptr) t = isolate->thread_local_top();
|
| + HeapPointerZapper zapper(isolate);
|
| + zapper.Zap(t);
|
| +}
|
| +
|
| +#endif
|
|
|
| } // namespace internal
|
| } // namespace v8
|
|
|