OLD | NEW |
---|---|
1 // Copyright 2017 the V8 project authors. All rights reserved. | 1 // Copyright 2017 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/debug/debug-coverage.h" | 5 #include "src/debug/debug-coverage.h" |
6 | 6 |
7 #include "src/base/hashmap.h" | 7 #include "src/base/hashmap.h" |
8 #include "src/deoptimizer.h" | 8 #include "src/deoptimizer.h" |
9 #include "src/frames-inl.h" | 9 #include "src/frames-inl.h" |
10 #include "src/isolate.h" | 10 #include "src/isolate.h" |
(...skipping 41 matching lines...) | |
52 } | 52 } |
53 | 53 |
54 bool CompareSharedFunctionInfo(SharedFunctionInfo* a, SharedFunctionInfo* b) { | 54 bool CompareSharedFunctionInfo(SharedFunctionInfo* a, SharedFunctionInfo* b) { |
55 int a_start = StartPosition(a); | 55 int a_start = StartPosition(a); |
56 int b_start = StartPosition(b); | 56 int b_start = StartPosition(b); |
57 if (a_start == b_start) return a->end_position() > b->end_position(); | 57 if (a_start == b_start) return a->end_position() > b->end_position(); |
58 return a_start < b_start; | 58 return a_start < b_start; |
59 } | 59 } |
60 } // anonymous namespace | 60 } // anonymous namespace |
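As an aside on the comparator above: sorting by ascending start position and, on ties, by descending end position guarantees that an enclosing function is visited before any function nested inside it, which is what the nesting reconstruction later in Collect() relies on. A minimal standalone sketch of that ordering (Range, CompareRange, and the sample positions are illustrative, not V8 code):

```cpp
#include <algorithm>
#include <cstdio>
#include <vector>

struct Range {
  const char* name;
  int start;
  int end;
};

// Same ordering as CompareSharedFunctionInfo: by start position, and on ties
// the longer (enclosing) range first.
bool CompareRange(const Range& a, const Range& b) {
  if (a.start == b.start) return a.end > b.end;
  return a.start < b.start;
}

int main() {
  // "outer" encloses both nested ranges; names and positions are made up.
  std::vector<Range> ranges = {
      {"inner", 10, 20}, {"outer", 0, 30}, {"same_start_inner", 0, 15}};
  std::sort(ranges.begin(), ranges.end(), CompareRange);
  for (const Range& r : ranges)
    std::printf("%s [%d, %d)\n", r.name, r.start, r.end);
  // Prints outer, same_start_inner, inner: enclosing ranges always come first.
  return 0;
}
```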
61 | 61 |
62 Coverage* Coverage::CollectPrecise(Isolate* isolate) { | |
63 DCHECK(!isolate->is_best_effort_code_coverage()); | |
64 Coverage* result = Collect(isolate, true); | |
65 if (isolate->is_precise_binary_code_coverage()) { | |
66 // We do not have to hold onto feedback vectors for invocations we already | |
67 // reported. So we can reset the list. | |
68 isolate->SetCodeCoverageList(*ArrayList::New(isolate, 0)); | |
69 } | |
70 return result; | |
71 } | |
72 | |
73 Coverage* Coverage::CollectBestEffort(Isolate* isolate) { | |
74 return Collect(isolate, false); | |
75 } | |
76 | |
62 Coverage* Coverage::Collect(Isolate* isolate, bool reset_count) { | 77 Coverage* Coverage::Collect(Isolate* isolate, bool reset_count) { |
63 SharedToCounterMap counter_map; | 78 SharedToCounterMap counter_map; |
64 | 79 |
65 // Feed invocation count into the counter map. | 80 if (isolate->factory()->code_coverage_list()->IsArrayList()) { |
66 switch (isolate->code_coverage_mode()) { | 81 DCHECK(!isolate->is_best_effort_code_coverage()); |
67 case debug::Coverage::kPreciseCount: { | 82 Handle<ArrayList> list = |
68 // Feedback vectors are already listed to prevent losing them to GC. | 83 Handle<ArrayList>::cast(isolate->factory()->code_coverage_list()); |
jgruber 2017/03/21 16:54:34
Nit: These comments were pretty useful. I also pre
| |
69 Handle<ArrayList> list = | 84 for (int i = 0; i < list->Length(); i++) { |
70 Handle<ArrayList>::cast(isolate->factory()->code_coverage_list()); | 85 FeedbackVector* vector = FeedbackVector::cast(list->Get(i)); |
71 for (int i = 0; i < list->Length(); i++) { | 86 SharedFunctionInfo* shared = vector->shared_function_info(); |
72 FeedbackVector* vector = FeedbackVector::cast(list->Get(i)); | 87 DCHECK(shared->IsSubjectToDebugging()); |
73 SharedFunctionInfo* shared = vector->shared_function_info(); | 88 uint32_t count = static_cast<uint32_t>(vector->invocation_count()); |
74 DCHECK(shared->IsSubjectToDebugging()); | 89 if (reset_count) vector->clear_invocation_count(); |
jgruber 2017/03/21 16:54:34
If counts are reset iff collecting precise coverag
Yang 2017/03/22 06:46:00
One use case I expect is that precise coverage has
| |
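To make the reset_count discussion concrete: when the invocation count is cleared at every collection, each precise-count collection reports the number of invocations since the previous collection rather than a running total. A standalone sketch modeling just that effect (FakeVector and the counts are illustrative, not V8 code):

```cpp
#include <cstdint>
#include <cstdio>

struct FakeVector {
  uint32_t invocation_count = 0;
};

// Mirrors the per-vector step in Collect(): read the count, optionally clear it.
uint32_t Collect(FakeVector* v, bool reset_count) {
  uint32_t count = v->invocation_count;
  if (reset_count) v->invocation_count = 0;
  return count;
}

int main() {
  FakeVector v;
  v.invocation_count += 3;                                   // function ran 3 times
  std::printf("%u\n", Collect(&v, /*reset_count=*/true));    // 3
  v.invocation_count += 2;                                   // 2 more invocations
  std::printf("%u\n", Collect(&v, /*reset_count=*/true));    // 2 (delta, not total)
  std::printf("%u\n", Collect(&v, /*reset_count=*/false));   // 0 (nothing ran since)
  return 0;
}
```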
75 uint32_t count = static_cast<uint32_t>(vector->invocation_count()); | 90 counter_map.Add(shared, count); |
76 if (reset_count) vector->clear_invocation_count(); | |
77 counter_map.Add(shared, count); | |
78 } | |
79 break; | |
80 } | 91 } |
81 case debug::Coverage::kBestEffort: { | 92 } else { |
82 // Iterate the heap to find all feedback vectors and accumulate the | 93 DCHECK(isolate->is_best_effort_code_coverage()); |
83 // invocation counts into the map for each shared function info. | 94 HeapIterator heap_iterator(isolate->heap()); |
84 HeapIterator heap_iterator(isolate->heap()); | 95 while (HeapObject* current_obj = heap_iterator.next()) { |
85 while (HeapObject* current_obj = heap_iterator.next()) { | 96 if (!current_obj->IsFeedbackVector()) continue; |
86 if (!current_obj->IsFeedbackVector()) continue; | 97 FeedbackVector* vector = FeedbackVector::cast(current_obj); |
87 FeedbackVector* vector = FeedbackVector::cast(current_obj); | 98 SharedFunctionInfo* shared = vector->shared_function_info(); |
88 SharedFunctionInfo* shared = vector->shared_function_info(); | 99 if (!shared->IsSubjectToDebugging()) continue; |
89 if (!shared->IsSubjectToDebugging()) continue; | 100 uint32_t count = static_cast<uint32_t>(vector->invocation_count()); |
90 uint32_t count = static_cast<uint32_t>(vector->invocation_count()); | 101 if (reset_count) vector->clear_invocation_count(); |
91 if (reset_count) vector->clear_invocation_count(); | 102 counter_map.Add(shared, count); |
92 counter_map.Add(shared, count); | |
93 } | |
94 break; | |
95 } | 103 } |
96 } | 104 } |
97 | 105 |
98 // Iterate shared function infos of every script and build a mapping | 106 // Iterate shared function infos of every script and build a mapping |
99 // between source ranges and invocation counts. | 107 // between source ranges and invocation counts. |
100 Coverage* result = new Coverage(); | 108 Coverage* result = new Coverage(); |
101 Script::Iterator scripts(isolate); | 109 Script::Iterator scripts(isolate); |
102 while (Script* script = scripts.Next()) { | 110 while (Script* script = scripts.Next()) { |
103 if (!script->IsUserJavaScript()) continue; | 111 if (!script->IsUserJavaScript()) continue; |
104 | 112 |
(...skipping 11 matching lines...) | |
116 sorted.push_back(info); | 124 sorted.push_back(info); |
117 } | 125 } |
118 std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo); | 126 std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo); |
119 } | 127 } |
120 | 128 |
121 // Use sorted list to reconstruct function nesting. | 129 // Use sorted list to reconstruct function nesting. |
122 for (SharedFunctionInfo* info : sorted) { | 130 for (SharedFunctionInfo* info : sorted) { |
123 int start = StartPosition(info); | 131 int start = StartPosition(info); |
124 int end = info->end_position(); | 132 int end = info->end_position(); |
125 uint32_t count = counter_map.Get(info); | 133 uint32_t count = counter_map.Get(info); |
134 if (isolate->is_precise_binary_code_coverage() && count > 0) { | |
135 count = info->has_reported_binary_coverage() ? 0 : 1; | |
136 info->set_has_reported_binary_coverage(true); | |
137 } | |
126 Handle<String> name(info->DebugName(), isolate); | 138 Handle<String> name(info->DebugName(), isolate); |
127 functions->emplace_back(start, end, count, name); | 139 functions->emplace_back(start, end, count, name); |
128 } | 140 } |
129 } | 141 } |
130 return result; | 142 return result; |
131 } | 143 } |
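The new precise-binary handling in Collect() reports each executed function at most once: the first collection after the function has run yields a count of 1, and later collections yield 0 because has_reported_binary_coverage is now set. A standalone sketch modeling that one-shot behavior (FakeShared and BinaryCount are illustrative, not V8 code):

```cpp
#include <cstdint>
#include <cstdio>

struct FakeShared {
  bool has_reported_binary_coverage = false;
};

// Mirrors the precise-binary branch: a nonzero raw count collapses to 1 on the
// first report and to 0 afterwards; a zero count stays 0.
uint32_t BinaryCount(FakeShared* shared, uint32_t raw_count) {
  uint32_t count = raw_count;
  if (count > 0) {
    count = shared->has_reported_binary_coverage ? 0 : 1;
    shared->has_reported_binary_coverage = true;
  }
  return count;
}

int main() {
  FakeShared shared;
  std::printf("%u\n", BinaryCount(&shared, 5));  // 1: first report of this function
  std::printf("%u\n", BinaryCount(&shared, 7));  // 0: already reported earlier
  std::printf("%u\n", BinaryCount(&shared, 0));  // 0: never executed
  return 0;
}
```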
132 | 144 |
133 void Coverage::SelectMode(Isolate* isolate, debug::Coverage::Mode mode) { | 145 void Coverage::SelectMode(Isolate* isolate, debug::Coverage::Mode mode) { |
134 switch (mode) { | 146 switch (mode) { |
135 case debug::Coverage::kBestEffort: | 147 case debug::Coverage::kBestEffort: |
136 isolate->SetCodeCoverageList(isolate->heap()->undefined_value()); | 148 isolate->SetCodeCoverageList(isolate->heap()->undefined_value()); |
137 break; | 149 break; |
150 case debug::Coverage::kPreciseBinary: | |
138 case debug::Coverage::kPreciseCount: { | 151 case debug::Coverage::kPreciseCount: { |
139 HandleScope scope(isolate); | 152 HandleScope scope(isolate); |
140 // Remove all optimized functions. Optimized and inlined functions do not | 153 // Remove all optimized functions. Optimized and inlined functions do not |
141 // increment invocation count. | 154 // increment invocation count. |
142 Deoptimizer::DeoptimizeAll(isolate); | 155 Deoptimizer::DeoptimizeAll(isolate); |
143 // Collect existing feedback vectors. | 156 // Collect existing feedback vectors. |
144 std::vector<Handle<FeedbackVector>> vectors; | 157 std::vector<Handle<FeedbackVector>> vectors; |
145 { | 158 { |
146 HeapIterator heap_iterator(isolate->heap()); | 159 HeapIterator heap_iterator(isolate->heap()); |
147 while (HeapObject* current_obj = heap_iterator.next()) { | 160 while (HeapObject* current_obj = heap_iterator.next()) { |
148 if (!current_obj->IsFeedbackVector()) continue; | 161 if (current_obj->IsSharedFunctionInfo()) { |
149 FeedbackVector* vector = FeedbackVector::cast(current_obj); | 162 SharedFunctionInfo* shared = SharedFunctionInfo::cast(current_obj); |
150 SharedFunctionInfo* shared = vector->shared_function_info(); | 163 shared->set_has_reported_binary_coverage(false); |
151 if (!shared->IsSubjectToDebugging()) continue; | 164 } else if (current_obj->IsFeedbackVector()) { |
152 vectors.emplace_back(vector, isolate); | 165 FeedbackVector* vector = FeedbackVector::cast(current_obj); |
166 SharedFunctionInfo* shared = vector->shared_function_info(); | |
167 if (!shared->IsSubjectToDebugging()) continue; | |
168 vectors.emplace_back(vector, isolate); | |
169 } | |
153 } | 170 } |
154 } | 171 } |
155 // Add collected feedback vectors to the root list lest we lose them to | 172 // Add collected feedback vectors to the root list lest we lose them to |
156 // GC. | 173 // GC. |
157 Handle<ArrayList> list = | 174 Handle<ArrayList> list = |
158 ArrayList::New(isolate, static_cast<int>(vectors.size())); | 175 ArrayList::New(isolate, static_cast<int>(vectors.size())); |
159 for (const auto& vector : vectors) list = ArrayList::Add(list, vector); | 176 for (const auto& vector : vectors) list = ArrayList::Add(list, vector); |
160 isolate->SetCodeCoverageList(*list); | 177 isolate->SetCodeCoverageList(*list); |
161 break; | 178 break; |
162 } | 179 } |
163 } | 180 } |
164 isolate->set_code_coverage_mode(mode); | 181 isolate->set_code_coverage_mode(mode); |
165 } | 182 } |
166 | 183 |
167 } // namespace internal | 184 } // namespace internal |
168 } // namespace v8 | 185 } // namespace v8 |
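One consequence of the SelectMode() change: because every SharedFunctionInfo on the heap has has_reported_binary_coverage cleared when a precise mode is selected, functions that were already reported in an earlier coverage session are reported once more in the new session. A standalone sketch of that reset (FakeShared and SelectPreciseMode are illustrative, not V8 code):

```cpp
#include <cstdio>
#include <vector>

struct FakeShared {
  bool has_reported_binary_coverage = false;
};

// Mirrors the heap walk in SelectMode(): entering a precise mode starts
// binary reporting from scratch for every function.
void SelectPreciseMode(std::vector<FakeShared>& heap) {
  for (FakeShared& shared : heap) shared.has_reported_binary_coverage = false;
}

int main() {
  std::vector<FakeShared> heap(2);
  heap[0].has_reported_binary_coverage = true;  // reported in a prior session
  SelectPreciseMode(heap);
  for (const FakeShared& s : heap)
    std::printf("%d\n", s.has_reported_binary_coverage);  // 0 0: both reportable again
  return 0;
}
```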