Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2017 the V8 project authors. All rights reserved. | 1 // Copyright 2017 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/debug/debug-coverage.h" | 5 #include "src/debug/debug-coverage.h" |
| 6 | 6 |
| 7 #include "src/base/hashmap.h" | 7 #include "src/base/hashmap.h" |
| 8 #include "src/deoptimizer.h" | |
| 9 #include "src/isolate.h" | |
| 8 #include "src/objects-inl.h" | 10 #include "src/objects-inl.h" |
| 9 #include "src/objects.h" | 11 #include "src/objects.h" |
| 10 | 12 |
| 11 namespace v8 { | 13 namespace v8 { |
| 12 namespace internal { | 14 namespace internal { |
| 13 | 15 |
| 14 class SharedToCounterMap | 16 class SharedToCounterMap |
| 15 : public base::TemplateHashMapImpl<SharedFunctionInfo*, uint32_t, | 17 : public base::TemplateHashMapImpl<SharedFunctionInfo*, uint32_t, |
| 16 base::KeyEqualityMatcher<void*>, | 18 base::KeyEqualityMatcher<void*>, |
| 17 base::DefaultAllocationPolicy> { | 19 base::DefaultAllocationPolicy> { |
| (...skipping 36 matching lines...) | |
| 54 } | 56 } |
| 55 } | 57 } |
| 56 std::vector<Coverage::RangeEntry> Finish() { return std::move(entries_); } | 58 std::vector<Coverage::RangeEntry> Finish() { return std::move(entries_); } |
| 57 | 59 |
| 58 private: | 60 private: |
| 59 std::vector<Coverage::RangeEntry> entries_; | 61 std::vector<Coverage::RangeEntry> entries_; |
| 60 }; | 62 }; |
| 61 | 63 |
| 62 std::vector<Coverage::ScriptData> Coverage::Collect(Isolate* isolate) { | 64 std::vector<Coverage::ScriptData> Coverage::Collect(Isolate* isolate) { |
| 63 SharedToCounterMap counter_map; | 65 SharedToCounterMap counter_map; |
| 64 // Iterate the heap to find all feedback vectors and accumulate the | 66 |
| 65 // invocation counts into the map for each shared function info. | 67 if (isolate->IsCodeCoverageEnabled()) { |
| 66 HeapIterator heap_iterator(isolate->heap()); | 68 // Feedback vectors are already listed to prevent losing them to GC. |
| 67 HeapObject* current_obj; | 69 Handle<ArrayList> list = |
| 68 while ((current_obj = heap_iterator.next())) { | 70 Handle<ArrayList>::cast(isolate->factory()->code_coverage_list()); |
| 69 if (!current_obj->IsFeedbackVector()) continue; | 71 for (int i = 0; i < list->Length(); i++) { |
| 70 FeedbackVector* vector = FeedbackVector::cast(current_obj); | 72 FeedbackVector* vector = FeedbackVector::cast(list->Get(i)); |
| 71 SharedFunctionInfo* shared = vector->shared_function_info(); | 73 SharedFunctionInfo* shared = vector->shared_function_info(); |
| 72 if (!shared->IsSubjectToDebugging()) continue; | 74 DCHECK(shared->IsSubjectToDebugging()); |
| 73 uint32_t count = static_cast<uint32_t>(vector->invocation_count()); | 75 uint32_t count = static_cast<uint32_t>(vector->invocation_count()); |
| 74 counter_map.Add(shared, count); | 76 counter_map.Add(shared, count); |
| 77 } | |
| 78 } else { | |
| 79 // Iterate the heap to find all feedback vectors and accumulate the | |
| 80 // invocation counts into the map for each shared function info. | |
| 81 HeapIterator heap_iterator(isolate->heap()); | |
| 82 // Initializing the heap iterator might have triggered a GC, which | |
| 83 // invalidates entries in the counter_map. | |
| 84 DCHECK_EQ(0, counter_map.occupancy()); | |
| 85 while (HeapObject* current_obj = heap_iterator.next()) { | |
| 86 if (!current_obj->IsFeedbackVector()) continue; | |
| 87 FeedbackVector* vector = FeedbackVector::cast(current_obj); | |
| 88 SharedFunctionInfo* shared = vector->shared_function_info(); | |
| 89 if (!shared->IsSubjectToDebugging()) continue; | |
| 90 uint32_t count = static_cast<uint32_t>(vector->invocation_count()); | |
| 91 counter_map.Add(shared, count); | |
| 92 } | |
| 75 } | 93 } |
| 76 | 94 |
| 77 // Make sure entries in the counter map are not invalidated by GC. | 95 // Make sure entries in the counter map are not invalidated by GC. |
| 78 DisallowHeapAllocation no_gc; | 96 DisallowHeapAllocation no_gc; |
| 79 | 97 |
| 80 // Stack to track nested functions. | 98 // Stack to track nested functions. |
| 81 struct FunctionNode { | 99 struct FunctionNode { |
| 82 int start; | 100 int start; |
| 83 int end; | 101 int end; |
| 84 uint32_t count; | 102 uint32_t count; |
| (...skipping 41 matching lines...) | |
| 126 while (!stack.empty()) { | 144 while (!stack.empty()) { |
| 127 // Write out rest of function being dropped. | 145 // Write out rest of function being dropped. |
| 128 builder.Add(stack.back().end, stack.back().count); | 146 builder.Add(stack.back().end, stack.back().count); |
| 129 stack.pop_back(); | 147 stack.pop_back(); |
| 130 } | 148 } |
| 131 result.push_back({script->id(), builder.Finish()}); | 149 result.push_back({script->id(), builder.Finish()}); |
| 132 } | 150 } |
| 133 return result; | 151 return result; |
| 134 } | 152 } |
| 135 | 153 |
| 154 void Coverage::EnablePrecise(Isolate* isolate) { | |
| 155 HandleScope scope(isolate); | |
| 156 // Remove all optimized functions. Optimized and inlined functions do not | |
| 157 // increment invocation count. | |
| 158 Deoptimizer::DeoptimizeAll(isolate); | |
| 159 // Collect existing feedback vectors. | |
| 160 std::vector<Handle<FeedbackVector>> vectors; | |
| 161 { | |
| 162 HeapIterator heap_iterator(isolate->heap()); | |
| 163 while (HeapObject* current_obj = heap_iterator.next()) { | |
| 164 if (!current_obj->IsFeedbackVector()) continue; | |
| 165 FeedbackVector* vector = FeedbackVector::cast(current_obj); | |
| 166 SharedFunctionInfo* shared = vector->shared_function_info(); | |
| 167 if (!shared->IsSubjectToDebugging()) continue; | |
| 168 vector->clear_invocation_count(); | |
| 169 vectors.push_back(Handle<FeedbackVector>(vector, isolate)); | |

jgruber, 2017/02/09 16:08:47: Nit: emplace_back
Yang, 2017/02/09 17:42:15: Done.
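A minimal sketch of how the addressed nit presumably reads after the follow-up (the final patch set is not shown here): `emplace_back` constructs the `Handle` in place rather than building a temporary for `push_back` to copy.

```cpp
// Construct the Handle<FeedbackVector> directly in the vector's storage;
// the (vector, isolate) arguments are forwarded to the Handle constructor.
vectors.emplace_back(vector, isolate);
```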
| 170 } | |
| 171 } | |
| 172 // Add collected feedback vectors to the root list lest we lose them to GC. | |
| 173 Handle<ArrayList> list = | |
| 174 Handle<ArrayList>::cast(isolate->factory()->empty_fixed_array()); | |

jgruber, 2017/02/09 16:08:48: We could preallocate a fixed array of the right size.
Yang, 2017/02/09 17:42:15: Done.
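The follow-up presumably sizes the list up front instead of growing it from the empty fixed array one `ArrayList::Add` at a time. A rough sketch under that assumption (`ArrayList::New` is assumed here; the exact factory call in the landed patch set may differ):

```cpp
// Assumed shape of the fix: allocate an ArrayList with enough capacity for
// all collected vectors, then append them and publish the list on the isolate.
Handle<ArrayList> list =
    ArrayList::New(isolate, static_cast<int>(vectors.size()));
for (const auto& vector : vectors) list = ArrayList::Add(list, vector);
isolate->SetCodeCoverageList(*list);
```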
| 175 for (const auto& vector : vectors) list = ArrayList::Add(list, vector); | |
| 176 isolate->SetCodeCoverageList(*list); | |
| 177 } | |
| 178 | |
| 179 void Coverage::DisablePrecise(Isolate* isolate) { | |
| 180 isolate->SetCodeCoverageList(isolate->heap()->undefined_value()); | |
| 181 } | |
| 182 | |
| 136 } // namespace internal | 183 } // namespace internal |
| 137 } // namespace v8 | 184 } // namespace v8 |
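Taken together, the new entry points suggest a flow roughly like the sketch below (not part of this CL): enabling precise coverage deoptimizes existing code and pins feedback vectors on the isolate's code coverage list, so a later `Collect` can read invocation counts without a full heap walk.

```cpp
// Sketch of the intended usage, inferred from the functions added in this CL.
Coverage::EnablePrecise(isolate);        // deopt + pin feedback vectors
// ... run the JavaScript under test ...
std::vector<Coverage::ScriptData> data =
    Coverage::Collect(isolate);          // per-script range/count entries
Coverage::DisablePrecise(isolate);       // drop the code coverage list
```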