Chromium Code Reviews

Side by Side Diff: tools/clang/blink_gc_plugin/RecordInfo.cpp

Issue 1385193002: Bisect clang
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: 246985 Created 5 years, 2 months ago
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "Config.h"
#include "RecordInfo.h"

using namespace clang;
using std::string;

RecordInfo::RecordInfo(CXXRecordDecl* record, RecordCache* cache)
    : cache_(cache),
      record_(record),
      name_(record->getName()),
      fields_need_tracing_(TracingStatus::Unknown()),
      bases_(0),
      fields_(0),
      is_stack_allocated_(kNotComputed),
      is_non_newable_(kNotComputed),
      is_only_placement_newable_(kNotComputed),
      does_need_finalization_(kNotComputed),
      has_gc_mixin_methods_(kNotComputed),
      is_declaring_local_trace_(kNotComputed),
      is_eagerly_finalized_(kNotComputed),
      determined_trace_methods_(false),
      trace_method_(0),
      trace_dispatch_method_(0),
      finalize_dispatch_method_(0),
      is_gc_derived_(false) {}

RecordInfo::~RecordInfo() {
  delete fields_;
  delete bases_;
}

// Collect |count| template arguments. Returns false if there are fewer
// than |count| arguments or if any argument does not have a valid Type
// structure. If |count| is zero, all arguments are collected.
bool RecordInfo::GetTemplateArgs(size_t count, TemplateArgs* output_args) {
  ClassTemplateSpecializationDecl* tmpl =
      dyn_cast<ClassTemplateSpecializationDecl>(record_);
  if (!tmpl)
    return false;
  const TemplateArgumentList& args = tmpl->getTemplateArgs();
  if (args.size() < count)
    return false;
  if (count == 0)
    count = args.size();
  for (unsigned i = 0; i < count; ++i) {
    TemplateArgument arg = args[i];
    if (arg.getKind() == TemplateArgument::Type && !arg.getAsType().isNull()) {
      output_args->push_back(arg.getAsType().getTypePtr());
    } else {
      return false;
    }
  }
  return true;
}

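// Usage sketch (illustrative only; |info| stands for a hypothetical
// RecordInfo* of some specialization such as Member<Node>):
//
//   TemplateArgs args;
//   if (info->GetTemplateArgs(1, &args)) {
//     // args[0] is the clang::Type* of the first template argument.
//   }
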
// Test if a record is a HeapAllocated collection.
bool RecordInfo::IsHeapAllocatedCollection() {
  if (!Config::IsGCCollection(name_) && !Config::IsWTFCollection(name_))
    return false;

  TemplateArgs args;
  if (GetTemplateArgs(0, &args)) {
    for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
      if (CXXRecordDecl* decl = (*it)->getAsCXXRecordDecl())
        if (decl->getName() == kHeapAllocatorName)
          return true;
    }
  }

  return Config::IsGCCollection(name_);
}

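// Illustrative sketch (assumption: HeapVector and HeapAllocator are among
// the Blink collection/allocator names known to Config):
//
//   HeapVector<Member<Node>> a;  // a GC collection by name.
//   WTF::Vector<Member<Node>, 0, HeapAllocator> b;  // a WTF collection with
//       // an allocator argument named kHeapAllocatorName; also heap
//       // allocated.
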
// Test if a record is derived from a garbage collected base.
bool RecordInfo::IsGCDerived() {
  // If already computed, return the known result.
  if (gc_base_names_.size())
    return is_gc_derived_;

  if (!record_->hasDefinition())
    return false;

  // The base classes are not themselves considered garbage collected objects.
  if (Config::IsGCBase(name_))
    return false;

  // Walk the inheritance tree to find GC base classes.
  walkBases();
  return is_gc_derived_;
}

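// Illustrative sketch (hypothetical Blink-style class; GarbageCollected is
// one of the GC bases recognized by Config::IsGCBase):
//
//   class Node : public GarbageCollected<Node> {  // IsGCDerived() is true.
//    public:
//     void trace(Visitor* visitor) { visitor->trace(child_); }
//    private:
//     Member<Node> child_;
//   };
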
CXXRecordDecl* RecordInfo::GetDependentTemplatedDecl(const Type& type) {
  const TemplateSpecializationType* tmpl_type =
      type.getAs<TemplateSpecializationType>();
  if (!tmpl_type)
    return 0;

  TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl();
  if (!tmpl_decl)
    return 0;

  return dyn_cast_or_null<CXXRecordDecl>(tmpl_decl->getTemplatedDecl());
}

void RecordInfo::walkBases() {
  // This traversal is akin to CXXRecordDecl::forallBases()'s,
  // but without stepping over dependent bases -- these might also
  // have a "GC base name", so are to be included and considered.
  SmallVector<const CXXRecordDecl*, 8> queue;

  const CXXRecordDecl* base_record = record();
  while (true) {
    for (const auto& it : base_record->bases()) {
      const RecordType* type = it.getType()->getAs<RecordType>();
      CXXRecordDecl* base;
      if (!type)
        base = GetDependentTemplatedDecl(*it.getType());
      else {
        base = cast_or_null<CXXRecordDecl>(type->getDecl()->getDefinition());
        if (base)
          queue.push_back(base);
      }
      if (!base)
        continue;

      const std::string& name = base->getName();
      if (Config::IsGCBase(name)) {
        gc_base_names_.push_back(name);
        is_gc_derived_ = true;
      }
    }

    if (queue.empty())
      break;
    base_record = queue.pop_back_val();  // not actually a queue.
  }
}

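// Illustrative sketch of why dependent bases matter (hypothetical template):
//
//   template <typename T>
//   class TypedNode : public GarbageCollected<TypedNode<T>> { ... };
//
// The base depends on T, so forallBases() would step over it; walkBases()
// still resolves the templated declaration and records the GC base name.
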
bool RecordInfo::IsGCFinalized() {
  if (!IsGCDerived())
    return false;
  for (const auto& gc_base : gc_base_names_) {
    if (Config::IsGCFinalizedBase(gc_base))
      return true;
  }
  return false;
}

// A GC mixin is a class that inherits from a GC mixin base and has
// not yet been "mixed in" with another GC base class.
bool RecordInfo::IsGCMixin() {
  if (!IsGCDerived() || !gc_base_names_.size())
    return false;
  for (const auto& gc_base : gc_base_names_) {
    // If it is not a mixin base we are done.
    if (!Config::IsGCMixinBase(gc_base))
      return false;
  }
  // This is a mixin if all GC bases are mixins.
  return true;
}

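// Illustrative sketch (hypothetical classes; GarbageCollectedMixin is the
// base matched by Config::IsGCMixinBase):
//
//   class Observer : public GarbageCollectedMixin { ... };  // a GC mixin.
//   class Widget : public GarbageCollectedFinalized<Widget>,
//                  public Observer { ... };  // mixed in; no longer a mixin.
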
// Test if a record is allocated on the managed heap.
bool RecordInfo::IsGCAllocated() {
  return IsGCDerived() || IsHeapAllocatedCollection();
}

bool RecordInfo::IsEagerlyFinalized() {
  if (is_eagerly_finalized_ == kNotComputed) {
    is_eagerly_finalized_ = kFalse;
    if (IsGCFinalized()) {
      for (Decl* decl : record_->decls()) {
        if (TypedefDecl* typedef_decl = dyn_cast<TypedefDecl>(decl)) {
          if (typedef_decl->getNameAsString() == kIsEagerlyFinalizedName) {
            is_eagerly_finalized_ = kTrue;
            break;
          }
        }
      }
    }
  }
  return is_eagerly_finalized_;
}

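// Illustrative sketch (assumption: Blink's EAGERLY_FINALIZE() macro expands
// to a marker typedef whose name matches kIsEagerlyFinalizedName):
//
//   class AudioHandler : public GarbageCollectedFinalized<AudioHandler> {
//    public:
//     EAGERLY_FINALIZE();  // declares the marker typedef in this record.
//   };
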
bool RecordInfo::HasDefinition() {
  return record_->hasDefinition();
}

RecordInfo* RecordCache::Lookup(CXXRecordDecl* record) {
  // Ignore classes annotated with the GC_PLUGIN_IGNORE macro.
  if (!record || Config::IsIgnoreAnnotated(record))
    return 0;
  Cache::iterator it = cache_.find(record);
  if (it != cache_.end())
    return &it->second;
  return &cache_.insert(std::make_pair(record, RecordInfo(record, this)))
              .first->second;
}

bool RecordInfo::IsStackAllocated() {
  if (is_stack_allocated_ == kNotComputed) {
    is_stack_allocated_ = kFalse;
    for (Bases::iterator it = GetBases().begin();
         it != GetBases().end();
         ++it) {
      if (it->second.info()->IsStackAllocated()) {
        is_stack_allocated_ = kTrue;
        return is_stack_allocated_;
      }
    }
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName &&
          it->isDeleted() &&
          Config::IsStackAnnotated(*it)) {
        is_stack_allocated_ = kTrue;
        return is_stack_allocated_;
      }
    }
  }
  return is_stack_allocated_;
}

bool RecordInfo::IsNonNewable() {
  if (is_non_newable_ == kNotComputed) {
    bool deleted = false;
    bool all_deleted = true;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        deleted = it->isDeleted();
        all_deleted = all_deleted && deleted;
      }
    }
    is_non_newable_ = (deleted && all_deleted) ? kTrue : kFalse;
  }
  return is_non_newable_;
}

bool RecordInfo::IsOnlyPlacementNewable() {
  if (is_only_placement_newable_ == kNotComputed) {
    bool placement = false;
    bool new_deleted = false;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        if (it->getNumParams() == 1) {
          new_deleted = it->isDeleted();
        } else if (it->getNumParams() == 2) {
          placement = !it->isDeleted();
        }
      }
    }
    is_only_placement_newable_ = (placement && new_deleted) ? kTrue : kFalse;
  }
  return is_only_placement_newable_;
}

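// Illustrative sketch (hypothetical class): operator new with one parameter
// is the ordinary form and with two parameters the placement form, so this
// class is "only placement newable":
//
//   class InlineBox {
//    public:
//     void* operator new(size_t) = delete;          // ordinary new deleted.
//     void* operator new(size_t, void* location) {  // placement new allowed.
//       return location;
//     }
//   };
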
CXXMethodDecl* RecordInfo::DeclaresNewOperator() {
  for (CXXRecordDecl::method_iterator it = record_->method_begin();
       it != record_->method_end();
       ++it) {
    if (it->getNameAsString() == kNewOperatorName && it->getNumParams() == 1)
      return *it;
  }
  return 0;
}

// An object requires a tracing method if it has any fields that need tracing
// or if it inherits from multiple bases that need tracing.
bool RecordInfo::RequiresTraceMethod() {
  if (IsStackAllocated())
    return false;
  unsigned bases_with_trace = 0;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.NeedsTracing().IsNeeded())
      ++bases_with_trace;
  }
  if (bases_with_trace > 1)
    return true;
  GetFields();
  return fields_need_tracing_.IsNeeded();
}

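// Illustrative sketch (hypothetical classes): even with no fields of its
// own, a class inheriting two traced bases needs a trace method that
// forwards to both:
//
//   class MixinA : public GarbageCollectedMixin { ... };
//   class MixinB : public GarbageCollectedMixin { ... };
//   class Thing : public GarbageCollected<Thing>,
//                 public MixinA,
//                 public MixinB {
//    public:
//     void trace(Visitor* visitor) {
//       MixinA::trace(visitor);
//       MixinB::trace(visitor);
//     }
//   };
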
// Get the actual tracing method (i.e., it can be traceAfterDispatch if there
// is a dispatch method).
CXXMethodDecl* RecordInfo::GetTraceMethod() {
  DetermineTracingMethods();
  return trace_method_;
}

// Get the static trace dispatch method.
CXXMethodDecl* RecordInfo::GetTraceDispatchMethod() {
  DetermineTracingMethods();
  return trace_dispatch_method_;
}

CXXMethodDecl* RecordInfo::GetFinalizeDispatchMethod() {
  DetermineTracingMethods();
  return finalize_dispatch_method_;
}

RecordInfo::Bases& RecordInfo::GetBases() {
  if (!bases_)
    bases_ = CollectBases();
  return *bases_;
}

bool RecordInfo::InheritsTrace() {
  if (GetTraceMethod())
    return true;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.info()->InheritsTrace())
      return true;
  }
  return false;
}

CXXMethodDecl* RecordInfo::InheritsNonVirtualTrace() {
  if (CXXMethodDecl* trace = GetTraceMethod())
    return trace->isVirtual() ? 0 : trace;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (CXXMethodDecl* trace = it->second.info()->InheritsNonVirtualTrace())
      return trace;
  }
  return 0;
}

bool RecordInfo::DeclaresGCMixinMethods() {
  DetermineTracingMethods();
  return has_gc_mixin_methods_;
}

bool RecordInfo::DeclaresLocalTraceMethod() {
  if (is_declaring_local_trace_ != kNotComputed)
    return is_declaring_local_trace_;
  DetermineTracingMethods();
  is_declaring_local_trace_ = trace_method_ ? kTrue : kFalse;
  if (is_declaring_local_trace_) {
    for (auto it = record_->method_begin();
         it != record_->method_end(); ++it) {
      if (*it == trace_method_) {
        is_declaring_local_trace_ = kTrue;
        break;
      }
    }
  }
  return is_declaring_local_trace_;
}

bool RecordInfo::IsGCMixinInstance() {
  assert(IsGCDerived());
  if (record_->isAbstract())
    return false;

  assert(!IsGCMixin());

  // True iff the class derives from GCMixin and
  // one or more other GC base classes.
  bool seen_gc_mixin = false;
  bool seen_gc_derived = false;
  for (const auto& gc_base : gc_base_names_) {
    if (Config::IsGCMixinBase(gc_base))
      seen_gc_mixin = true;
    else if (Config::IsGCBase(gc_base))
      seen_gc_derived = true;
  }
  return seen_gc_derived && seen_gc_mixin;
}

// A (non-virtual) class is considered abstract in Blink if it has
// no public constructors and no create methods.
bool RecordInfo::IsConsideredAbstract() {
  for (CXXRecordDecl::ctor_iterator it = record_->ctor_begin();
       it != record_->ctor_end();
       ++it) {
    if (!it->isCopyOrMoveConstructor() && it->getAccess() == AS_public)
      return false;
  }
  for (CXXRecordDecl::method_iterator it = record_->method_begin();
       it != record_->method_end();
       ++it) {
    if (it->getNameAsString() == kCreateName)
      return false;
  }
  return true;
}

RecordInfo::Bases* RecordInfo::CollectBases() {
  // Compute the collection locally to avoid inconsistent states.
  Bases* bases = new Bases;
  if (!record_->hasDefinition())
    return bases;
  for (CXXRecordDecl::base_class_iterator it = record_->bases_begin();
       it != record_->bases_end();
       ++it) {
    const CXXBaseSpecifier& spec = *it;
    RecordInfo* info = cache_->Lookup(spec.getType());
    if (!info)
      continue;
    CXXRecordDecl* base = info->record();
    TracingStatus status = info->InheritsTrace()
                               ? TracingStatus::Needed()
                               : TracingStatus::Unneeded();
    bases->insert(std::make_pair(base, BasePoint(spec, info, status)));
  }
  return bases;
}

RecordInfo::Fields& RecordInfo::GetFields() {
  if (!fields_)
    fields_ = CollectFields();
  return *fields_;
}

RecordInfo::Fields* RecordInfo::CollectFields() {
  // Compute the collection locally to avoid inconsistent states.
  Fields* fields = new Fields;
  if (!record_->hasDefinition())
    return fields;
  TracingStatus fields_status = TracingStatus::Unneeded();
  for (RecordDecl::field_iterator it = record_->field_begin();
       it != record_->field_end();
       ++it) {
    FieldDecl* field = *it;
    // Ignore fields annotated with the GC_PLUGIN_IGNORE macro.
    if (Config::IsIgnoreAnnotated(field))
      continue;
    if (Edge* edge = CreateEdge(field->getType().getTypePtrOrNull())) {
      fields_status = fields_status.LUB(edge->NeedsTracing(Edge::kRecursive));
      fields->insert(std::make_pair(field, FieldPoint(field, edge)));
    }
  }
  fields_need_tracing_ = fields_status;
  return fields;
}

void RecordInfo::DetermineTracingMethods() {
  if (determined_trace_methods_)
    return;
  determined_trace_methods_ = true;
  if (Config::IsGCBase(name_))
    return;
  CXXMethodDecl* trace = nullptr;
  CXXMethodDecl* trace_impl = nullptr;
  CXXMethodDecl* trace_after_dispatch = nullptr;
  bool has_adjust_and_mark = false;
  bool has_is_heap_object_alive = false;
  for (Decl* decl : record_->decls()) {
    CXXMethodDecl* method = dyn_cast<CXXMethodDecl>(decl);
    if (!method) {
      if (FunctionTemplateDecl* func_template =
              dyn_cast<FunctionTemplateDecl>(decl))
        method = dyn_cast<CXXMethodDecl>(func_template->getTemplatedDecl());
    }
    if (!method)
      continue;

    switch (Config::GetTraceMethodType(method)) {
      case Config::TRACE_METHOD:
        trace = method;
        break;
      case Config::TRACE_AFTER_DISPATCH_METHOD:
        trace_after_dispatch = method;
        break;
      case Config::TRACE_IMPL_METHOD:
        trace_impl = method;
        break;
      case Config::TRACE_AFTER_DISPATCH_IMPL_METHOD:
        break;
      case Config::NOT_TRACE_METHOD:
        if (method->getNameAsString() == kFinalizeName) {
          finalize_dispatch_method_ = method;
        } else if (method->getNameAsString() == kAdjustAndMarkName) {
          has_adjust_and_mark = true;
        } else if (method->getNameAsString() == kIsHeapObjectAliveName) {
          has_is_heap_object_alive = true;
        }
        break;
    }
  }

  // Record whether the class defines the two GCMixin methods.
  has_gc_mixin_methods_ =
      has_adjust_and_mark && has_is_heap_object_alive ? kTrue : kFalse;
  if (trace_after_dispatch) {
    trace_method_ = trace_after_dispatch;
    trace_dispatch_method_ = trace_impl ? trace_impl : trace;
  } else {
    // TODO: Can we never have a dispatch method called trace without the same
    // class defining a traceAfterDispatch method?
    trace_method_ = trace;
    trace_dispatch_method_ = nullptr;
  }
  if (trace_dispatch_method_ && finalize_dispatch_method_)
    return;
  // If this class does not define dispatching methods, inherit them.
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    // TODO: Does it make sense to inherit multiple dispatch methods?
    if (CXXMethodDecl* dispatch = it->second.info()->GetTraceDispatchMethod()) {
      assert(!trace_dispatch_method_ && "Multiple trace dispatching methods");
      trace_dispatch_method_ = dispatch;
    }
    if (CXXMethodDecl* dispatch =
            it->second.info()->GetFinalizeDispatchMethod()) {
      assert(!finalize_dispatch_method_ &&
             "Multiple finalize dispatching methods");
      finalize_dispatch_method_ = dispatch;
    }
  }
}

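// Illustrative sketch of the manual trace-dispatch pattern detected above
// (hypothetical Blink-style classes):
//
//   class Shape : public GarbageCollected<Shape> {
//    public:
//     void trace(Visitor* visitor) {  // the static dispatch method.
//       if (type_ == kCircle)
//         static_cast<Circle*>(this)->traceAfterDispatch(visitor);
//     }
//     void traceAfterDispatch(Visitor* visitor) {}
//    protected:
//     int type_;
//   };
//
//   class Circle : public Shape {
//    public:
//     void traceAfterDispatch(Visitor* visitor) {
//       visitor->trace(center_);
//       Shape::traceAfterDispatch(visitor);
//     }
//    private:
//     Member<Point> center_;
//   };
//
// For Circle, trace_method_ becomes traceAfterDispatch, while the dispatching
// trace method is inherited from Shape via GetTraceDispatchMethod().
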
// TODO: Add classes with a finalize() method that specialize FinalizerTrait.
bool RecordInfo::NeedsFinalization() {
  if (does_need_finalization_ == kNotComputed) {
    // Rely on hasNonTrivialDestructor(), but if the only
    // identifiable reason for it being true is the presence
    // of a safely ignorable class as a direct base,
    // or we're processing such an 'ignorable' class, then it does
    // not need finalization.
    does_need_finalization_ =
        record_->hasNonTrivialDestructor() ? kTrue : kFalse;
    if (!does_need_finalization_)
      return does_need_finalization_;

    // Processing a class with a safely-ignorable destructor.
    NamespaceDecl* ns =
        dyn_cast<NamespaceDecl>(record_->getDeclContext());
    if (ns && Config::HasIgnorableDestructor(ns->getName(), name_)) {
      does_need_finalization_ = kFalse;
      return does_need_finalization_;
    }

    CXXDestructorDecl* dtor = record_->getDestructor();
    if (dtor && dtor->isUserProvided())
      return does_need_finalization_;
    for (Fields::iterator it = GetFields().begin();
         it != GetFields().end();
         ++it) {
      if (it->second.edge()->NeedsFinalization())
        return does_need_finalization_;
    }

    for (Bases::iterator it = GetBases().begin();
         it != GetBases().end();
         ++it) {
      if (it->second.info()->NeedsFinalization())
        return does_need_finalization_;
    }
    // The destructor was non-trivial only due to bases with destructors that
    // can be safely ignored. Hence, no need for finalization.
    does_need_finalization_ = kFalse;
  }
  return does_need_finalization_;
}

// A class needs tracing if:
// - it is allocated on the managed heap,
// - it is derived from a class that needs tracing, or
// - it contains fields that need tracing.
// TODO: Defining NeedsTracing based on whether a class defines a trace method
// (of the proper signature) over-approximates too much. The use of transition
// types causes some classes to have trace methods without themselves needing
// to be traced.
TracingStatus RecordInfo::NeedsTracing(Edge::NeedsTracingOption option) {
  if (IsGCAllocated())
    return TracingStatus::Needed();

  if (IsStackAllocated())
    return TracingStatus::Unneeded();

  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.info()->NeedsTracing(option).IsNeeded())
      return TracingStatus::Needed();
  }

  if (option == Edge::kRecursive)
    GetFields();

  return fields_need_tracing_;
}

Edge* RecordInfo::CreateEdge(const Type* type) {
  if (!type) {
    return 0;
  }

  if (type->isPointerType() || type->isReferenceType()) {
    if (Edge* ptr = CreateEdge(type->getPointeeType().getTypePtrOrNull()))
      return new RawPtr(ptr, false, type->isReferenceType());
    return 0;
  }

  RecordInfo* info = cache_->Lookup(type);

  // If the type is neither a pointer nor a C++ record we ignore it.
  if (!info) {
    return 0;
  }

  TemplateArgs args;

  if (Config::IsRawPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new RawPtr(ptr, true, false);
    return 0;
  }

  if (Config::IsRefPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new RefPtr(ptr);
    return 0;
  }

  if (Config::IsOwnPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new OwnPtr(ptr);
    return 0;
  }

  if (Config::IsMember(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new Member(ptr);
    return 0;
  }

  if (Config::IsWeakMember(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new WeakMember(ptr);
    return 0;
  }

  if (Config::IsPersistent(info->name())) {
    // Persistent might refer to v8::Persistent, so check the namespace.
    // TODO: Consider using a more canonical identification than names.
    NamespaceDecl* ns =
        dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
    if (!ns || ns->getName() != "blink")
      return 0;
    if (!info->GetTemplateArgs(1, &args))
      return 0;
    if (Edge* ptr = CreateEdge(args[0]))
      return new Persistent(ptr);
    return 0;
  }

  if (Config::IsGCCollection(info->name()) ||
      Config::IsWTFCollection(info->name())) {
    bool is_root = Config::IsPersistentGCCollection(info->name());
    bool on_heap = is_root || info->IsHeapAllocatedCollection();
    size_t count = Config::CollectionDimension(info->name());
    if (!info->GetTemplateArgs(count, &args))
      return 0;
    Collection* edge = new Collection(info, on_heap, is_root);
    for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
      if (Edge* member = CreateEdge(*it)) {
        edge->members().push_back(member);
      }
      // TODO: Handle the case where we fail to create an edge (e.g., if the
      // argument is a primitive type or just not fully known yet).
    }
    return edge;
  }

  return new Value(info);
}
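
// Illustrative sketch of the edges built for some hypothetical fields (the
// resulting Edge chain is shown informally on the right):
//
//   Member<Node> child_;              // Member(Value(Node))
//   RefPtr<StringImpl> string_;       // RefPtr(Value(StringImpl))
//   Node* raw_;                       // RawPtr(Value(Node))
//   HeapVector<Member<Node>> nodes_;  // Collection{Member(Value(Node))}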