OLD | NEW |
1 // Copyright 2009-2010 the V8 project authors. All rights reserved. | 1 // Copyright 2009-2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 10 matching lines...) Expand all Loading... |
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include "v8.h" | 28 #include "v8.h" |
29 | 29 |
30 #include "heap-profiler.h" | 30 #include "heap-profiler.h" |
31 #include "frames-inl.h" | |
32 #include "global-handles.h" | |
33 #include "profile-generator.h" | 31 #include "profile-generator.h" |
34 #include "string-stream.h" | |
35 | 32 |
36 namespace v8 { | 33 namespace v8 { |
37 namespace internal { | 34 namespace internal { |
38 | 35 |
39 | 36 |
40 #ifdef ENABLE_LOGGING_AND_PROFILING | 37 #ifdef ENABLE_LOGGING_AND_PROFILING |
41 namespace { | |
42 | |
43 // Clusterizer is a set of helper functions for converting | |
44 // object references into clusters. | |
45 class Clusterizer : public AllStatic { | |
46 public: | |
47 static JSObjectsCluster Clusterize(HeapObject* obj) { | |
48 return Clusterize(obj, true); | |
49 } | |
50 static void InsertIntoTree(JSObjectsClusterTree* tree, | |
51 HeapObject* obj, bool fine_grain); | |
52 static void InsertReferenceIntoTree(JSObjectsClusterTree* tree, | |
53 const JSObjectsCluster& cluster) { | |
54 InsertIntoTree(tree, cluster, 0); | |
55 } | |
56 | |
57 private: | |
58 static JSObjectsCluster Clusterize(HeapObject* obj, bool fine_grain); | |
59 static int CalculateNetworkSize(JSObject* obj); | |
60 static int GetObjectSize(HeapObject* obj) { | |
61 return obj->IsJSObject() ? | |
62 CalculateNetworkSize(JSObject::cast(obj)) : obj->Size(); | |
63 } | |
64 static void InsertIntoTree(JSObjectsClusterTree* tree, | |
65 const JSObjectsCluster& cluster, int size); | |
66 }; | |
67 | |
68 | |
69 JSObjectsCluster Clusterizer::Clusterize(HeapObject* obj, bool fine_grain) { | |
70 if (obj->IsJSObject()) { | |
71 JSObject* js_obj = JSObject::cast(obj); | |
72 String* constructor = GetConstructorNameForHeapProfile( | |
73 JSObject::cast(js_obj)); | |
74 // Differentiate Object and Array instances. | |
75 if (fine_grain && (constructor == HEAP->Object_symbol() || | |
76 constructor == HEAP->Array_symbol())) { | |
77 return JSObjectsCluster(constructor, obj); | |
78 } else { | |
79 return JSObjectsCluster(constructor); | |
80 } | |
81 } else if (obj->IsString()) { | |
82 return JSObjectsCluster(HEAP->String_symbol()); | |
83 } else if (obj->IsJSGlobalPropertyCell()) { | |
84 return JSObjectsCluster(JSObjectsCluster::GLOBAL_PROPERTY); | |
85 } else if (obj->IsCode() || obj->IsSharedFunctionInfo() || obj->IsScript()) { | |
86 return JSObjectsCluster(JSObjectsCluster::CODE); | |
87 } | |
88 return JSObjectsCluster(); | |
89 } | |
90 | |
91 | |
92 void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree, | |
93 HeapObject* obj, bool fine_grain) { | |
94 JSObjectsCluster cluster = Clusterize(obj, fine_grain); | |
95 if (cluster.is_null()) return; | |
96 InsertIntoTree(tree, cluster, GetObjectSize(obj)); | |
97 } | |
98 | |
99 | |
100 void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree, | |
101 const JSObjectsCluster& cluster, int size) { | |
102 JSObjectsClusterTree::Locator loc; | |
103 tree->Insert(cluster, &loc); | |
104 NumberAndSizeInfo number_and_size = loc.value(); | |
105 number_and_size.increment_number(1); | |
106 number_and_size.increment_bytes(size); | |
107 loc.set_value(number_and_size); | |
108 } | |
109 | |
110 | |
111 int Clusterizer::CalculateNetworkSize(JSObject* obj) { | |
112 int size = obj->Size(); | |
113 // If 'properties' and 'elements' are non-empty (thus, non-shared), | |
114 // take their size into account. | |
115 if (obj->properties() != HEAP->empty_fixed_array()) { | |
116 size += obj->properties()->Size(); | |
117 } | |
118 if (obj->elements() != HEAP->empty_fixed_array()) { | |
119 size += obj->elements()->Size(); | |
120 } | |
121 // For functions, also account non-empty context and literals sizes. | |
122 if (obj->IsJSFunction()) { | |
123 JSFunction* f = JSFunction::cast(obj); | |
124 if (f->unchecked_context()->IsContext()) { | |
125 size += f->context()->Size(); | |
126 } | |
127 if (f->literals()->length() != 0) { | |
128 size += f->literals()->Size(); | |
129 } | |
130 } | |
131 return size; | |
132 } | |
133 | |
134 | |
135 // A helper class for recording back references. | |
136 class ReferencesExtractor : public ObjectVisitor { | |
137 public: | |
138 ReferencesExtractor(const JSObjectsCluster& cluster, | |
139 RetainerHeapProfile* profile) | |
140 : cluster_(cluster), | |
141 profile_(profile), | |
142 inside_array_(false) { | |
143 } | |
144 | |
145 void VisitPointer(Object** o) { | |
146 if ((*o)->IsFixedArray() && !inside_array_) { | |
147 // Traverse one level deep for data members that are fixed arrays. | |
148 // This covers the case of 'elements' and 'properties' of JSObject, | |
149 // and function contexts. | |
150 inside_array_ = true; | |
151 FixedArray::cast(*o)->Iterate(this); | |
152 inside_array_ = false; | |
153 } else if ((*o)->IsHeapObject()) { | |
154 profile_->StoreReference(cluster_, HeapObject::cast(*o)); | |
155 } | |
156 } | |
157 | |
158 void VisitPointers(Object** start, Object** end) { | |
159 for (Object** p = start; p < end; p++) VisitPointer(p); | |
160 } | |
161 | |
162 private: | |
163 const JSObjectsCluster& cluster_; | |
164 RetainerHeapProfile* profile_; | |
165 bool inside_array_; | |
166 }; | |
167 | |
168 | |
169 // A printer interface implementation for the Retainers profile. | |
170 class RetainersPrinter : public RetainerHeapProfile::Printer { | |
171 public: | |
172 void PrintRetainers(const JSObjectsCluster& cluster, | |
173 const StringStream& retainers) { | |
174 HeapStringAllocator allocator; | |
175 StringStream stream(&allocator); | |
176 cluster.Print(&stream); | |
177 LOG(ISOLATE, | |
178 HeapSampleJSRetainersEvent( | |
179 *(stream.ToCString()), *(retainers.ToCString()))); | |
180 } | |
181 }; | |
182 | |
183 | |
184 // Visitor for printing a cluster tree. | |
185 class ClusterTreePrinter BASE_EMBEDDED { | |
186 public: | |
187 explicit ClusterTreePrinter(StringStream* stream) : stream_(stream) {} | |
188 void Call(const JSObjectsCluster& cluster, | |
189 const NumberAndSizeInfo& number_and_size) { | |
190 Print(stream_, cluster, number_and_size); | |
191 } | |
192 static void Print(StringStream* stream, | |
193 const JSObjectsCluster& cluster, | |
194 const NumberAndSizeInfo& number_and_size); | |
195 | |
196 private: | |
197 StringStream* stream_; | |
198 }; | |
199 | |
200 | |
201 void ClusterTreePrinter::Print(StringStream* stream, | |
202 const JSObjectsCluster& cluster, | |
203 const NumberAndSizeInfo& number_and_size) { | |
204 stream->Put(','); | |
205 cluster.Print(stream); | |
206 stream->Add(";%d", number_and_size.number()); | |
207 } | |
208 | |
209 | |
210 // Visitor for printing a retainer tree. | |
211 class SimpleRetainerTreePrinter BASE_EMBEDDED { | |
212 public: | |
213 explicit SimpleRetainerTreePrinter(RetainerHeapProfile::Printer* printer) | |
214 : printer_(printer) {} | |
215 void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree); | |
216 | |
217 private: | |
218 RetainerHeapProfile::Printer* printer_; | |
219 }; | |
220 | |
221 | |
222 void SimpleRetainerTreePrinter::Call(const JSObjectsCluster& cluster, | |
223 JSObjectsClusterTree* tree) { | |
224 HeapStringAllocator allocator; | |
225 StringStream stream(&allocator); | |
226 ClusterTreePrinter retainers_printer(&stream); | |
227 tree->ForEach(&retainers_printer); | |
228 printer_->PrintRetainers(cluster, stream); | |
229 } | |
230 | |
231 | |
232 // Visitor for aggregating references count of equivalent clusters. | |
233 class RetainersAggregator BASE_EMBEDDED { | |
234 public: | |
235 RetainersAggregator(ClustersCoarser* coarser, JSObjectsClusterTree* dest_tree) | |
236 : coarser_(coarser), dest_tree_(dest_tree) {} | |
237 void Call(const JSObjectsCluster& cluster, | |
238 const NumberAndSizeInfo& number_and_size); | |
239 | |
240 private: | |
241 ClustersCoarser* coarser_; | |
242 JSObjectsClusterTree* dest_tree_; | |
243 }; | |
244 | |
245 | |
246 void RetainersAggregator::Call(const JSObjectsCluster& cluster, | |
247 const NumberAndSizeInfo& number_and_size) { | |
248 JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster); | |
249 if (eq.is_null()) eq = cluster; | |
250 JSObjectsClusterTree::Locator loc; | |
251 dest_tree_->Insert(eq, &loc); | |
252 NumberAndSizeInfo aggregated_number = loc.value(); | |
253 aggregated_number.increment_number(number_and_size.number()); | |
254 loc.set_value(aggregated_number); | |
255 } | |
256 | |
257 | |
258 // Visitor for printing retainers tree. Aggregates equivalent retainer clusters. | |
259 class AggregatingRetainerTreePrinter BASE_EMBEDDED { | |
260 public: | |
261 AggregatingRetainerTreePrinter(ClustersCoarser* coarser, | |
262 RetainerHeapProfile::Printer* printer) | |
263 : coarser_(coarser), printer_(printer) {} | |
264 void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree); | |
265 | |
266 private: | |
267 ClustersCoarser* coarser_; | |
268 RetainerHeapProfile::Printer* printer_; | |
269 }; | |
270 | |
271 | |
272 void AggregatingRetainerTreePrinter::Call(const JSObjectsCluster& cluster, | |
273 JSObjectsClusterTree* tree) { | |
274 if (!coarser_->GetCoarseEquivalent(cluster).is_null()) return; | |
275 JSObjectsClusterTree dest_tree_; | |
276 RetainersAggregator retainers_aggregator(coarser_, &dest_tree_); | |
277 tree->ForEach(&retainers_aggregator); | |
278 HeapStringAllocator allocator; | |
279 StringStream stream(&allocator); | |
280 ClusterTreePrinter retainers_printer(&stream); | |
281 dest_tree_.ForEach(&retainers_printer); | |
282 printer_->PrintRetainers(cluster, stream); | |
283 } | |
284 | |
285 } // namespace | |
286 | |
287 | |
288 // A helper class for building a retainers tree, that aggregates | |
289 // all equivalent clusters. | |
290 class RetainerTreeAggregator { | |
291 public: | |
292 explicit RetainerTreeAggregator(ClustersCoarser* coarser) | |
293 : coarser_(coarser) {} | |
294 void Process(JSObjectsRetainerTree* input_tree) { | |
295 input_tree->ForEach(this); | |
296 } | |
297 void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree); | |
298 JSObjectsRetainerTree& output_tree() { return output_tree_; } | |
299 | |
300 private: | |
301 ClustersCoarser* coarser_; | |
302 JSObjectsRetainerTree output_tree_; | |
303 }; | |
304 | |
305 | |
306 void RetainerTreeAggregator::Call(const JSObjectsCluster& cluster, | |
307 JSObjectsClusterTree* tree) { | |
308 JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster); | |
309 if (eq.is_null()) return; | |
310 JSObjectsRetainerTree::Locator loc; | |
311 if (output_tree_.Insert(eq, &loc)) { | |
312 loc.set_value(new JSObjectsClusterTree()); | |
313 } | |
314 RetainersAggregator retainers_aggregator(coarser_, loc.value()); | |
315 tree->ForEach(&retainers_aggregator); | |
316 } | |
317 | |
318 | |
319 HeapProfiler::HeapProfiler() | 38 HeapProfiler::HeapProfiler() |
320 : snapshots_(new HeapSnapshotsCollection()), | 39 : snapshots_(new HeapSnapshotsCollection()), |
321 next_snapshot_uid_(1) { | 40 next_snapshot_uid_(1) { |
322 } | 41 } |
323 | 42 |
324 | 43 |
325 HeapProfiler::~HeapProfiler() { | 44 HeapProfiler::~HeapProfiler() { |
326 delete snapshots_; | 45 delete snapshots_; |
327 } | 46 } |
328 | 47 |
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
402 HeapSnapshot* result = | 121 HeapSnapshot* result = |
403 snapshots_->NewSnapshot(s_type, name, next_snapshot_uid_++); | 122 snapshots_->NewSnapshot(s_type, name, next_snapshot_uid_++); |
404 bool generation_completed = true; | 123 bool generation_completed = true; |
405 switch (s_type) { | 124 switch (s_type) { |
406 case HeapSnapshot::kFull: { | 125 case HeapSnapshot::kFull: { |
407 HEAP->CollectAllGarbage(true); | 126 HEAP->CollectAllGarbage(true); |
408 HeapSnapshotGenerator generator(result, control); | 127 HeapSnapshotGenerator generator(result, control); |
409 generation_completed = generator.GenerateSnapshot(); | 128 generation_completed = generator.GenerateSnapshot(); |
410 break; | 129 break; |
411 } | 130 } |
412 case HeapSnapshot::kAggregated: { | |
413 HEAP->CollectAllGarbage(true); | |
414 AggregatedHeapSnapshot agg_snapshot; | |
415 AggregatedHeapSnapshotGenerator generator(&agg_snapshot); | |
416 generator.GenerateSnapshot(); | |
417 generator.FillHeapSnapshot(result); | |
418 break; | |
419 } | |
420 default: | 131 default: |
421 UNREACHABLE(); | 132 UNREACHABLE(); |
422 } | 133 } |
423 if (!generation_completed) { | 134 if (!generation_completed) { |
424 delete result; | 135 delete result; |
425 result = NULL; | 136 result = NULL; |
426 } | 137 } |
427 snapshots_->SnapshotGenerationFinished(result); | 138 snapshots_->SnapshotGenerationFinished(result); |
428 return result; | 139 return result; |
429 } | 140 } |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
461 HeapProfiler* profiler = Isolate::Current()->heap_profiler(); | 172 HeapProfiler* profiler = Isolate::Current()->heap_profiler(); |
462 ASSERT(profiler != NULL); | 173 ASSERT(profiler != NULL); |
463 profiler->ResetSnapshots(); | 174 profiler->ResetSnapshots(); |
464 } | 175 } |
465 | 176 |
466 | 177 |
467 void HeapProfiler::ObjectMoveEvent(Address from, Address to) { | 178 void HeapProfiler::ObjectMoveEvent(Address from, Address to) { |
468 snapshots_->ObjectMoveEvent(from, to); | 179 snapshots_->ObjectMoveEvent(from, to); |
469 } | 180 } |
470 | 181 |
471 | |
472 const JSObjectsClusterTreeConfig::Key JSObjectsClusterTreeConfig::kNoKey; | |
473 const JSObjectsClusterTreeConfig::Value JSObjectsClusterTreeConfig::kNoValue; | |
474 | |
475 | |
476 ConstructorHeapProfile::ConstructorHeapProfile() | |
477 : zscope_(Isolate::Current(), DELETE_ON_EXIT) { | |
478 } | |
479 | |
480 | |
481 void ConstructorHeapProfile::Call(const JSObjectsCluster& cluster, | |
482 const NumberAndSizeInfo& number_and_size) { | |
483 HeapStringAllocator allocator; | |
484 StringStream stream(&allocator); | |
485 cluster.Print(&stream); | |
486 LOG(ISOLATE, | |
487 HeapSampleJSConstructorEvent(*(stream.ToCString()), | |
488 number_and_size.number(), | |
489 number_and_size.bytes())); | |
490 } | |
491 | |
492 | |
493 void ConstructorHeapProfile::CollectStats(HeapObject* obj) { | |
494 Clusterizer::InsertIntoTree(&js_objects_info_tree_, obj, false); | |
495 } | |
496 | |
497 | |
498 void ConstructorHeapProfile::PrintStats() { | |
499 js_objects_info_tree_.ForEach(this); | |
500 } | |
501 | |
502 | |
503 static const char* GetConstructorName(const char* name) { | |
504 return name[0] != '\0' ? name : "(anonymous)"; | |
505 } | |
506 | |
507 | |
508 const char* JSObjectsCluster::GetSpecialCaseName() const { | |
509 if (constructor_ == FromSpecialCase(ROOTS)) { | |
510 return "(roots)"; | |
511 } else if (constructor_ == FromSpecialCase(GLOBAL_PROPERTY)) { | |
512 return "(global property)"; | |
513 } else if (constructor_ == FromSpecialCase(CODE)) { | |
514 return "(code)"; | |
515 } else if (constructor_ == FromSpecialCase(SELF)) { | |
516 return "(self)"; | |
517 } | |
518 return NULL; | |
519 } | |
520 | |
521 | |
522 void JSObjectsCluster::Print(StringStream* accumulator) const { | |
523 ASSERT(!is_null()); | |
524 const char* special_case_name = GetSpecialCaseName(); | |
525 if (special_case_name != NULL) { | |
526 accumulator->Add(special_case_name); | |
527 } else { | |
528 SmartPointer<char> s_name( | |
529 constructor_->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)); | |
530 accumulator->Add("%s", GetConstructorName(*s_name)); | |
531 if (instance_ != NULL) { | |
532 accumulator->Add(":%p", static_cast<void*>(instance_)); | |
533 } | |
534 } | |
535 } | |
536 | |
537 | |
538 void JSObjectsCluster::DebugPrint(StringStream* accumulator) const { | |
539 if (!is_null()) { | |
540 Print(accumulator); | |
541 } else { | |
542 accumulator->Add("(null cluster)"); | |
543 } | |
544 } | |
545 | |
546 | |
547 inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs( | |
548 const JSObjectsCluster& cluster_) | |
549 : cluster(cluster_), refs(kInitialBackrefsListCapacity) { | |
550 } | |
551 | |
552 | |
553 inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs( | |
554 const ClustersCoarser::ClusterBackRefs& src) | |
555 : cluster(src.cluster), refs(src.refs.capacity()) { | |
556 refs.AddAll(src.refs); | |
557 } | |
558 | |
559 | |
560 inline ClustersCoarser::ClusterBackRefs& | |
561 ClustersCoarser::ClusterBackRefs::operator=( | |
562 const ClustersCoarser::ClusterBackRefs& src) { | |
563 if (this == &src) return *this; | |
564 cluster = src.cluster; | |
565 refs.Clear(); | |
566 refs.AddAll(src.refs); | |
567 return *this; | |
568 } | |
569 | |
570 | |
571 inline int ClustersCoarser::ClusterBackRefs::Compare( | |
572 const ClustersCoarser::ClusterBackRefs& a, | |
573 const ClustersCoarser::ClusterBackRefs& b) { | |
574 int cmp = JSObjectsCluster::CompareConstructors(a.cluster, b.cluster); | |
575 if (cmp != 0) return cmp; | |
576 if (a.refs.length() < b.refs.length()) return -1; | |
577 if (a.refs.length() > b.refs.length()) return 1; | |
578 for (int i = 0; i < a.refs.length(); ++i) { | |
579 int cmp = JSObjectsCluster::Compare(a.refs[i], b.refs[i]); | |
580 if (cmp != 0) return cmp; | |
581 } | |
582 return 0; | |
583 } | |
584 | |
585 | |
586 ClustersCoarser::ClustersCoarser() | |
587 : zscope_(Isolate::Current(), DELETE_ON_EXIT), | |
588 sim_list_(ClustersCoarser::kInitialSimilarityListCapacity), | |
589 current_pair_(NULL), | |
590 current_set_(NULL), | |
591 self_(NULL) { | |
592 } | |
593 | |
594 | |
595 void ClustersCoarser::Call(const JSObjectsCluster& cluster, | |
596 JSObjectsClusterTree* tree) { | |
597 if (!cluster.can_be_coarsed()) return; | |
598 ClusterBackRefs pair(cluster); | |
599 ASSERT(current_pair_ == NULL); | |
600 current_pair_ = &pair; | |
601 current_set_ = new JSObjectsRetainerTree(); | |
602 self_ = &cluster; | |
603 tree->ForEach(this); | |
604 sim_list_.Add(pair); | |
605 current_pair_ = NULL; | |
606 current_set_ = NULL; | |
607 self_ = NULL; | |
608 } | |
609 | |
610 | |
611 void ClustersCoarser::Call(const JSObjectsCluster& cluster, | |
612 const NumberAndSizeInfo& number_and_size) { | |
613 ASSERT(current_pair_ != NULL); | |
614 ASSERT(current_set_ != NULL); | |
615 ASSERT(self_ != NULL); | |
616 JSObjectsRetainerTree::Locator loc; | |
617 if (JSObjectsCluster::Compare(*self_, cluster) == 0) { | |
618 current_pair_->refs.Add(JSObjectsCluster(JSObjectsCluster::SELF)); | |
619 return; | |
620 } | |
621 JSObjectsCluster eq = GetCoarseEquivalent(cluster); | |
622 if (!eq.is_null()) { | |
623 if (current_set_->Find(eq, &loc)) return; | |
624 current_pair_->refs.Add(eq); | |
625 current_set_->Insert(eq, &loc); | |
626 } else { | |
627 current_pair_->refs.Add(cluster); | |
628 } | |
629 } | |
630 | |
631 | |
632 void ClustersCoarser::Process(JSObjectsRetainerTree* tree) { | |
633 int last_eq_clusters = -1; | |
634 for (int i = 0; i < kMaxPassesCount; ++i) { | |
635 sim_list_.Clear(); | |
636 const int curr_eq_clusters = DoProcess(tree); | |
637 // If no new cluster equivalents discovered, abort processing. | |
638 if (last_eq_clusters == curr_eq_clusters) break; | |
639 last_eq_clusters = curr_eq_clusters; | |
640 } | |
641 } | |
642 | |
643 | |
644 int ClustersCoarser::DoProcess(JSObjectsRetainerTree* tree) { | |
645 tree->ForEach(this); | |
646 sim_list_.Iterate(ClusterBackRefs::SortRefsIterator); | |
647 sim_list_.Sort(ClusterBackRefsCmp); | |
648 return FillEqualityTree(); | |
649 } | |
650 | |
651 | |
652 JSObjectsCluster ClustersCoarser::GetCoarseEquivalent( | |
653 const JSObjectsCluster& cluster) { | |
654 if (!cluster.can_be_coarsed()) return JSObjectsCluster(); | |
655 EqualityTree::Locator loc; | |
656 return eq_tree_.Find(cluster, &loc) ? loc.value() : JSObjectsCluster(); | |
657 } | |
658 | |
659 | |
660 bool ClustersCoarser::HasAnEquivalent(const JSObjectsCluster& cluster) { | |
661 // Return true for coarsible clusters that have a non-identical equivalent. | |
662 if (!cluster.can_be_coarsed()) return false; | |
663 JSObjectsCluster eq = GetCoarseEquivalent(cluster); | |
664 return !eq.is_null() && JSObjectsCluster::Compare(cluster, eq) != 0; | |
665 } | |
666 | |
667 | |
668 int ClustersCoarser::FillEqualityTree() { | |
669 int eq_clusters_count = 0; | |
670 int eq_to = 0; | |
671 bool first_added = false; | |
672 for (int i = 1; i < sim_list_.length(); ++i) { | |
673 if (ClusterBackRefs::Compare(sim_list_[i], sim_list_[eq_to]) == 0) { | |
674 EqualityTree::Locator loc; | |
675 if (!first_added) { | |
676 // Add self-equivalence, if we have more than one item in this | |
677 // equivalence class. | |
678 eq_tree_.Insert(sim_list_[eq_to].cluster, &loc); | |
679 loc.set_value(sim_list_[eq_to].cluster); | |
680 first_added = true; | |
681 } | |
682 eq_tree_.Insert(sim_list_[i].cluster, &loc); | |
683 loc.set_value(sim_list_[eq_to].cluster); | |
684 ++eq_clusters_count; | |
685 } else { | |
686 eq_to = i; | |
687 first_added = false; | |
688 } | |
689 } | |
690 return eq_clusters_count; | |
691 } | |
692 | |
693 | |
694 const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoKey; | |
695 const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoValue; | |
696 const JSObjectsRetainerTreeConfig::Key JSObjectsRetainerTreeConfig::kNoKey; | |
697 const JSObjectsRetainerTreeConfig::Value JSObjectsRetainerTreeConfig::kNoValue = | |
698 NULL; | |
699 | |
700 | |
701 RetainerHeapProfile::RetainerHeapProfile() | |
702 : zscope_(Isolate::Current(), DELETE_ON_EXIT), | |
703 aggregator_(NULL) { | |
704 JSObjectsCluster roots(JSObjectsCluster::ROOTS); | |
705 ReferencesExtractor extractor(roots, this); | |
706 HEAP->IterateRoots(&extractor, VISIT_ONLY_STRONG); | |
707 } | |
708 | |
709 | |
710 RetainerHeapProfile::~RetainerHeapProfile() { | |
711 delete aggregator_; | |
712 } | |
713 | |
714 | |
715 void RetainerHeapProfile::StoreReference(const JSObjectsCluster& cluster, | |
716 HeapObject* ref) { | |
717 JSObjectsCluster ref_cluster = Clusterizer::Clusterize(ref); | |
718 if (ref_cluster.is_null()) return; | |
719 JSObjectsRetainerTree::Locator ref_loc; | |
720 if (retainers_tree_.Insert(ref_cluster, &ref_loc)) { | |
721 ref_loc.set_value(new JSObjectsClusterTree()); | |
722 } | |
723 JSObjectsClusterTree* referenced_by = ref_loc.value(); | |
724 Clusterizer::InsertReferenceIntoTree(referenced_by, cluster); | |
725 } | |
726 | |
727 | |
728 void RetainerHeapProfile::CollectStats(HeapObject* obj) { | |
729 const JSObjectsCluster cluster = Clusterizer::Clusterize(obj); | |
730 if (cluster.is_null()) return; | |
731 ReferencesExtractor extractor(cluster, this); | |
732 obj->Iterate(&extractor); | |
733 } | |
734 | |
735 | |
736 void RetainerHeapProfile::CoarseAndAggregate() { | |
737 coarser_.Process(&retainers_tree_); | |
738 ASSERT(aggregator_ == NULL); | |
739 aggregator_ = new RetainerTreeAggregator(&coarser_); | |
740 aggregator_->Process(&retainers_tree_); | |
741 } | |
742 | |
743 | |
744 void RetainerHeapProfile::DebugPrintStats( | |
745 RetainerHeapProfile::Printer* printer) { | |
746 // Print clusters that have no equivalents, aggregating their retainers. | |
747 AggregatingRetainerTreePrinter agg_printer(&coarser_, printer); | |
748 retainers_tree_.ForEach(&agg_printer); | |
749 // Print clusters that have equivalents. | |
750 SimpleRetainerTreePrinter s_printer(printer); | |
751 aggregator_->output_tree().ForEach(&s_printer); | |
752 } | |
753 | |
754 | |
755 void RetainerHeapProfile::PrintStats() { | |
756 RetainersPrinter printer; | |
757 DebugPrintStats(&printer); | |
758 } | |
759 | |
760 | |
761 // | |
762 // HeapProfiler class implementation. | |
763 // | |
764 static void StackWeakReferenceCallback(Persistent<Value> object, | |
765 void* trace) { | |
766 DeleteArray(static_cast<Address*>(trace)); | |
767 object.Dispose(); | |
768 } | |
769 | |
770 | |
771 static void PrintProducerStackTrace(Object* obj, void* trace) { | |
772 if (!obj->IsJSObject()) return; | |
773 String* constructor = GetConstructorNameForHeapProfile(JSObject::cast(obj)); | |
774 SmartPointer<char> s_name( | |
775 constructor->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)); | |
776 LOG(ISOLATE, | |
777 HeapSampleJSProducerEvent(GetConstructorName(*s_name), | |
778 reinterpret_cast<Address*>(trace))); | |
779 } | |
780 | |
781 | |
782 void HeapProfiler::WriteSample() { | |
783 Isolate* isolate = Isolate::Current(); | |
784 LOG(isolate, HeapSampleBeginEvent("Heap", "allocated")); | |
785 LOG(isolate, | |
786 HeapSampleStats( | |
787 "Heap", "allocated", HEAP->CommittedMemory(), HEAP->SizeOfObjects())); | |
788 | |
789 AggregatedHeapSnapshot snapshot; | |
790 AggregatedHeapSnapshotGenerator generator(&snapshot); | |
791 generator.GenerateSnapshot(); | |
792 | |
793 HistogramInfo* info = snapshot.info(); | |
794 for (int i = FIRST_NONSTRING_TYPE; | |
795 i <= AggregatedHeapSnapshotGenerator::kAllStringsType; | |
796 ++i) { | |
797 if (info[i].bytes() > 0) { | |
798 LOG(isolate, | |
799 HeapSampleItemEvent(info[i].name(), info[i].number(), | |
800 info[i].bytes())); | |
801 } | |
802 } | |
803 | |
804 snapshot.js_cons_profile()->PrintStats(); | |
805 snapshot.js_retainer_profile()->PrintStats(); | |
806 | |
807 isolate->global_handles()->IterateWeakRoots(PrintProducerStackTrace, | |
808 StackWeakReferenceCallback); | |
809 | |
810 LOG(isolate, HeapSampleEndEvent("Heap", "allocated")); | |
811 } | |
812 | |
813 | |
814 AggregatedHeapSnapshot::AggregatedHeapSnapshot() | |
815 : info_(NewArray<HistogramInfo>( | |
816 AggregatedHeapSnapshotGenerator::kAllStringsType + 1)) { | |
817 #define DEF_TYPE_NAME(name) info_[name].set_name(#name); | |
818 INSTANCE_TYPE_LIST(DEF_TYPE_NAME); | |
819 #undef DEF_TYPE_NAME | |
820 info_[AggregatedHeapSnapshotGenerator::kAllStringsType].set_name( | |
821 "STRING_TYPE"); | |
822 } | |
823 | |
824 | |
825 AggregatedHeapSnapshot::~AggregatedHeapSnapshot() { | |
826 DeleteArray(info_); | |
827 } | |
828 | |
829 | |
830 AggregatedHeapSnapshotGenerator::AggregatedHeapSnapshotGenerator( | |
831 AggregatedHeapSnapshot* agg_snapshot) | |
832 : agg_snapshot_(agg_snapshot) { | |
833 } | |
834 | |
835 | |
836 void AggregatedHeapSnapshotGenerator::CalculateStringsStats() { | |
837 HistogramInfo* info = agg_snapshot_->info(); | |
838 HistogramInfo& strings = info[kAllStringsType]; | |
839 // Lump all the string types together. | |
840 #define INCREMENT_SIZE(type, size, name, camel_name) \ | |
841 strings.increment_number(info[type].number()); \ | |
842 strings.increment_bytes(info[type].bytes()); | |
843 STRING_TYPE_LIST(INCREMENT_SIZE); | |
844 #undef INCREMENT_SIZE | |
845 } | |
846 | |
847 | |
848 void AggregatedHeapSnapshotGenerator::CollectStats(HeapObject* obj) { | |
849 InstanceType type = obj->map()->instance_type(); | |
850 ASSERT(0 <= type && type <= LAST_TYPE); | |
851 agg_snapshot_->info()[type].increment_number(1); | |
852 agg_snapshot_->info()[type].increment_bytes(obj->Size()); | |
853 } | |
854 | |
855 | |
856 void AggregatedHeapSnapshotGenerator::GenerateSnapshot() { | |
857 HeapIterator iterator(HeapIterator::kFilterUnreachable); | |
858 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) { | |
859 CollectStats(obj); | |
860 agg_snapshot_->js_cons_profile()->CollectStats(obj); | |
861 agg_snapshot_->js_retainer_profile()->CollectStats(obj); | |
862 } | |
863 CalculateStringsStats(); | |
864 agg_snapshot_->js_retainer_profile()->CoarseAndAggregate(); | |
865 } | |
866 | |
867 | |
868 class CountingConstructorHeapProfileIterator { | |
869 public: | |
870 CountingConstructorHeapProfileIterator() | |
871 : entities_count_(0), children_count_(0) { | |
872 } | |
873 | |
874 void Call(const JSObjectsCluster& cluster, | |
875 const NumberAndSizeInfo& number_and_size) { | |
876 ++entities_count_; | |
877 children_count_ += number_and_size.number(); | |
878 } | |
879 | |
880 int entities_count() { return entities_count_; } | |
881 int children_count() { return children_count_; } | |
882 | |
883 private: | |
884 int entities_count_; | |
885 int children_count_; | |
886 }; | |
887 | |
888 | |
889 static HeapEntry* AddEntryFromAggregatedSnapshot(HeapSnapshot* snapshot, | |
890 int* root_child_index, | |
891 HeapEntry::Type type, | |
892 const char* name, | |
893 int count, | |
894 int size, | |
895 int children_count, | |
896 int retainers_count) { | |
897 HeapEntry* entry = snapshot->AddEntry( | |
898 type, name, count, size, children_count, retainers_count); | |
899 ASSERT(entry != NULL); | |
900 snapshot->root()->SetUnidirElementReference(*root_child_index, | |
901 *root_child_index + 1, | |
902 entry); | |
903 *root_child_index = *root_child_index + 1; | |
904 return entry; | |
905 } | |
906 | |
907 | |
908 class AllocatingConstructorHeapProfileIterator { | |
909 public: | |
910 AllocatingConstructorHeapProfileIterator(HeapSnapshot* snapshot, | |
911 int* root_child_index) | |
912 : snapshot_(snapshot), | |
913 root_child_index_(root_child_index) { | |
914 } | |
915 | |
916 void Call(const JSObjectsCluster& cluster, | |
917 const NumberAndSizeInfo& number_and_size) { | |
918 const char* name = cluster.GetSpecialCaseName(); | |
919 if (name == NULL) { | |
920 name = snapshot_->collection()->names()->GetFunctionName( | |
921 cluster.constructor()); | |
922 } | |
923 AddEntryFromAggregatedSnapshot(snapshot_, | |
924 root_child_index_, | |
925 HeapEntry::kObject, | |
926 name, | |
927 number_and_size.number(), | |
928 number_and_size.bytes(), | |
929 0, | |
930 0); | |
931 } | |
932 | |
933 private: | |
934 HeapSnapshot* snapshot_; | |
935 int* root_child_index_; | |
936 }; | |
937 | |
938 | |
939 static HeapObject* ClusterAsHeapObject(const JSObjectsCluster& cluster) { | |
940 return cluster.can_be_coarsed() ? | |
941 reinterpret_cast<HeapObject*>(cluster.instance()) : cluster.constructor(); | |
942 } | |
943 | |
944 | |
945 static JSObjectsCluster HeapObjectAsCluster(HeapObject* object) { | |
946 if (object->IsString()) { | |
947 return JSObjectsCluster(String::cast(object)); | |
948 } else { | |
949 JSObject* js_obj = JSObject::cast(object); | |
950 String* constructor = GetConstructorNameForHeapProfile( | |
951 JSObject::cast(js_obj)); | |
952 return JSObjectsCluster(constructor, object); | |
953 } | |
954 } | |
955 | |
956 | |
957 class CountingRetainersIterator { | |
958 public: | |
959 CountingRetainersIterator(const JSObjectsCluster& child_cluster, | |
960 HeapEntriesAllocator* allocator, | |
961 HeapEntriesMap* map) | |
962 : child_(ClusterAsHeapObject(child_cluster)), | |
963 allocator_(allocator), | |
964 map_(map) { | |
965 if (map_->Map(child_) == NULL) | |
966 map_->Pair(child_, allocator_, HeapEntriesMap::kHeapEntryPlaceholder); | |
967 } | |
968 | |
969 void Call(const JSObjectsCluster& cluster, | |
970 const NumberAndSizeInfo& number_and_size) { | |
971 if (map_->Map(ClusterAsHeapObject(cluster)) == NULL) | |
972 map_->Pair(ClusterAsHeapObject(cluster), | |
973 allocator_, | |
974 HeapEntriesMap::kHeapEntryPlaceholder); | |
975 map_->CountReference(ClusterAsHeapObject(cluster), child_); | |
976 } | |
977 | |
978 private: | |
979 HeapObject* child_; | |
980 HeapEntriesAllocator* allocator_; | |
981 HeapEntriesMap* map_; | |
982 }; | |
983 | |
984 | |
985 class AllocatingRetainersIterator { | |
986 public: | |
987 AllocatingRetainersIterator(const JSObjectsCluster& child_cluster, | |
988 HeapEntriesAllocator*, | |
989 HeapEntriesMap* map) | |
990 : child_(ClusterAsHeapObject(child_cluster)), map_(map) { | |
991 child_entry_ = map_->Map(child_); | |
992 ASSERT(child_entry_ != NULL); | |
993 } | |
994 | |
995 void Call(const JSObjectsCluster& cluster, | |
996 const NumberAndSizeInfo& number_and_size) { | |
997 int child_index, retainer_index; | |
998 map_->CountReference(ClusterAsHeapObject(cluster), | |
999 child_, | |
1000 &child_index, | |
1001 &retainer_index); | |
1002 map_->Map(ClusterAsHeapObject(cluster))->SetIndexedReference( | |
1003 HeapGraphEdge::kElement, | |
1004 child_index, | |
1005 number_and_size.number(), | |
1006 child_entry_, | |
1007 retainer_index); | |
1008 } | |
1009 | |
1010 private: | |
1011 HeapObject* child_; | |
1012 HeapEntriesMap* map_; | |
1013 HeapEntry* child_entry_; | |
1014 }; | |
1015 | |
1016 | |
// Walks a retainer tree and runs RetainersIterator over each cluster's
// retainers.  When a coarser is supplied, retainers are first re-bucketed
// into their coarse equivalents, and clusters that themselves have a
// coarse equivalent are skipped (they are covered by the aggregated
// output tree, which is iterated with coarser == NULL).
template<class RetainersIterator>
class AggregatingRetainerTreeIterator {
 public:
  explicit AggregatingRetainerTreeIterator(ClustersCoarser* coarser,
                                           HeapEntriesAllocator* allocator,
                                           HeapEntriesMap* map)
      : coarser_(coarser), allocator_(allocator), map_(map) {
  }

  // Invoked once per (cluster, tree-of-retaining-clusters) pair.
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree) {
    // Skip clusters that the coarser folds into another cluster.
    if (coarser_ != NULL &&
        !coarser_->GetCoarseEquivalent(cluster).is_null()) return;
    JSObjectsClusterTree* tree_to_iterate = tree;
    // dest_tree_ is zone-allocated; DELETE_ON_EXIT frees it when this
    // scope ends, i.e. after the iteration below completes.
    ZoneScope zs(Isolate::Current(), DELETE_ON_EXIT);
    JSObjectsClusterTree dest_tree_;
    if (coarser_ != NULL) {
      // Re-aggregate the retainers by their coarse equivalents, then
      // iterate the aggregated tree instead of the raw one.
      RetainersAggregator retainers_aggregator(coarser_, &dest_tree_);
      tree->ForEach(&retainers_aggregator);
      tree_to_iterate = &dest_tree_;
    }
    RetainersIterator iterator(cluster, allocator_, map_);
    tree_to_iterate->ForEach(&iterator);
  }

 private:
  ClustersCoarser* coarser_;
  HeapEntriesAllocator* allocator_;
  HeapEntriesMap* map_;
};
1046 | |
1047 | |
1048 class AggregatedRetainerTreeAllocator : public HeapEntriesAllocator { | |
1049 public: | |
1050 AggregatedRetainerTreeAllocator(HeapSnapshot* snapshot, | |
1051 int* root_child_index) | |
1052 : snapshot_(snapshot), root_child_index_(root_child_index) { | |
1053 } | |
1054 ~AggregatedRetainerTreeAllocator() { } | |
1055 | |
1056 HeapEntry* AllocateEntry( | |
1057 HeapThing ptr, int children_count, int retainers_count) { | |
1058 HeapObject* obj = reinterpret_cast<HeapObject*>(ptr); | |
1059 JSObjectsCluster cluster = HeapObjectAsCluster(obj); | |
1060 const char* name = cluster.GetSpecialCaseName(); | |
1061 if (name == NULL) { | |
1062 name = snapshot_->collection()->names()->GetFunctionName( | |
1063 cluster.constructor()); | |
1064 } | |
1065 return AddEntryFromAggregatedSnapshot( | |
1066 snapshot_, root_child_index_, HeapEntry::kObject, name, | |
1067 0, 0, children_count, retainers_count); | |
1068 } | |
1069 | |
1070 private: | |
1071 HeapSnapshot* snapshot_; | |
1072 int* root_child_index_; | |
1073 }; | |
1074 | |
1075 | |
1076 template<class Iterator> | |
1077 void AggregatedHeapSnapshotGenerator::IterateRetainers( | |
1078 HeapEntriesAllocator* allocator, HeapEntriesMap* entries_map) { | |
1079 RetainerHeapProfile* p = agg_snapshot_->js_retainer_profile(); | |
1080 AggregatingRetainerTreeIterator<Iterator> agg_ret_iter_1( | |
1081 p->coarser(), allocator, entries_map); | |
1082 p->retainers_tree()->ForEach(&agg_ret_iter_1); | |
1083 AggregatingRetainerTreeIterator<Iterator> agg_ret_iter_2( | |
1084 NULL, allocator, entries_map); | |
1085 p->aggregator()->output_tree().ForEach(&agg_ret_iter_2); | |
1086 } | |
1087 | |
1088 | |
// Converts the aggregated snapshot into a regular HeapSnapshot.  Works in
// two passes over the same data: first everything is counted so that the
// snapshot's entry/reference arrays can be allocated up front, then the
// entries and references are actually filled in.  Both passes must visit
// the data in the same order for the precomputed counts to line up.
void AggregatedHeapSnapshotGenerator::FillHeapSnapshot(HeapSnapshot* snapshot) {
  // Count the number of entities.
  int histogram_entities_count = 0;
  int histogram_children_count = 0;
  int histogram_retainers_count = 0;
  // One hidden entry per non-empty instance-type histogram bucket.
  for (int i = FIRST_NONSTRING_TYPE; i <= kAllStringsType; ++i) {
    if (agg_snapshot_->info()[i].bytes() > 0) {
      ++histogram_entities_count;
    }
  }
  // One object entry per constructor-profile cluster.
  CountingConstructorHeapProfileIterator counting_cons_iter;
  agg_snapshot_->js_cons_profile()->ForEach(&counting_cons_iter);
  histogram_entities_count += counting_cons_iter.entities_count();
  // Entries and references contributed by the retainer profile; the
  // counting pass also populates entries_map with placeholders.
  HeapEntriesMap entries_map;
  int root_child_index = 0;
  AggregatedRetainerTreeAllocator allocator(snapshot, &root_child_index);
  IterateRetainers<CountingRetainersIterator>(&allocator, &entries_map);
  histogram_entities_count += entries_map.entries_count();
  histogram_children_count += entries_map.total_children_count();
  histogram_retainers_count += entries_map.total_retainers_count();

  // Root entry references all other entries.
  histogram_children_count += histogram_entities_count;
  int root_children_count = histogram_entities_count;
  ++histogram_entities_count;  // Account for the root entry itself.

  // Allocate and fill entries in the snapshot, allocate references.
  snapshot->AllocateEntries(histogram_entities_count,
                            histogram_children_count,
                            histogram_retainers_count);
  snapshot->AddRootEntry(root_children_count);
  // Second pass: must mirror the counting loop above exactly.
  for (int i = FIRST_NONSTRING_TYPE; i <= kAllStringsType; ++i) {
    if (agg_snapshot_->info()[i].bytes() > 0) {
      AddEntryFromAggregatedSnapshot(snapshot,
                                     &root_child_index,
                                     HeapEntry::kHidden,
                                     agg_snapshot_->info()[i].name(),
                                     agg_snapshot_->info()[i].number(),
                                     agg_snapshot_->info()[i].bytes(),
                                     0,
                                     0);
    }
  }
  AllocatingConstructorHeapProfileIterator alloc_cons_iter(
      snapshot, &root_child_index);
  agg_snapshot_->js_cons_profile()->ForEach(&alloc_cons_iter);
  // Turn the placeholders registered during the counting pass into real
  // entries before references are wired up.
  entries_map.AllocateEntries();

  // Fill up references.
  IterateRetainers<AllocatingRetainersIterator>(&allocator, &entries_map);

  snapshot->SetDominatorsToSelf();
}
1142 | |
1143 | |
// Enables producer-profile recording; DoRecordJSObjectAllocation is a
// no-op until this has been called.
void ProducerHeapProfile::Setup() {
  can_log_ = true;
}
1147 | |
// Records the current JS stack as the "producer" of a freshly allocated
// object.  The captured stack is heap-allocated and handed to a weak
// global handle; presumably StackWeakReferenceCallback frees it when the
// object dies -- confirm in the callback's implementation.
void ProducerHeapProfile::DoRecordJSObjectAllocation(Object* obj) {
  ASSERT(FLAG_log_producers);
  if (!can_log_) return;
  // First walk: count the JS frames so the stack array can be sized.
  int framesCount = 0;
  for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
    ++framesCount;
  }
  if (framesCount == 0) return;  // No JS frames -- nothing to attribute.
  ++framesCount;  // Reserve place for the terminator item.
  Vector<Address> stack(NewArray<Address>(framesCount), framesCount);
  // Second walk: record each frame's pc, in the same top-down order.
  int i = 0;
  for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
    stack[i++] = it.frame()->pc();
  }
  stack[i] = NULL;  // NULL terminator marks the end for consumers.
  // Track the object weakly; the callback fires when it is collected and
  // receives the stack array as its parameter.
  Handle<Object> handle = isolate_->global_handles()->Create(obj);
  isolate_->global_handles()->MakeWeak(handle.location(),
                                       static_cast<void*>(stack.start()),
                                       StackWeakReferenceCallback);
}
1168 | |
1169 | |
1170 #endif // ENABLE_LOGGING_AND_PROFILING | 182 #endif // ENABLE_LOGGING_AND_PROFILING |
1171 | 183 |
1172 | 184 |
1173 } } // namespace v8::internal | 185 } } // namespace v8::internal |
OLD | NEW |