Chromium Code Reviews

Side by Side Diff: tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp

Issue 1412893004: Rolling forward clang, hiding some warnings (Closed) | Base URL: https://github.com/domokit/mojo.git@master
Patch Set: Added some untracked files which have been added by Chromium | Created 5 years, 2 months ago
OLD | NEW
1 // Copyright 2014 The Chromium Authors. All rights reserved. 1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 // This clang plugin checks various invariants of the Blink garbage 5 // This clang plugin checks various invariants of the Blink garbage
6 // collection infrastructure. 6 // collection infrastructure.
7 // 7 //
8 // Errors are described at: 8 // Errors are described at:
9 // http://www.chromium.org/developers/blink-gc-plugin-errors 9 // http://www.chromium.org/developers/blink-gc-plugin-errors
10 10
11 #include <algorithm> 11 #include "BlinkGCPluginConsumer.h"
12 #include "BlinkGCPluginOptions.h"
13 #include "Config.h"
12 14
13 #include "Config.h"
14 #include "JsonWriter.h"
15 #include "RecordInfo.h"
16
17 #include "clang/AST/AST.h"
18 #include "clang/AST/ASTConsumer.h"
19 #include "clang/AST/RecursiveASTVisitor.h"
20 #include "clang/Frontend/CompilerInstance.h" 15 #include "clang/Frontend/CompilerInstance.h"
21 #include "clang/Frontend/FrontendPluginRegistry.h" 16 #include "clang/Frontend/FrontendPluginRegistry.h"
22 #include "clang/Sema/Sema.h"
23 17
24 using namespace clang; 18 using namespace clang;
25 using std::string;
26
27 namespace {
28
29 const char kClassMustLeftMostlyDeriveGC[] =
30 "[blink-gc] Class %0 must derive its GC base in the left-most position.";
31
32 const char kClassRequiresTraceMethod[] =
33 "[blink-gc] Class %0 requires a trace method.";
34
35 const char kBaseRequiresTracing[] =
36 "[blink-gc] Base class %0 of derived class %1 requires tracing.";
37
38 const char kBaseRequiresTracingNote[] =
39 "[blink-gc] Untraced base class %0 declared here:";
40
41 const char kFieldsRequireTracing[] =
42 "[blink-gc] Class %0 has untraced fields that require tracing.";
43
44 const char kFieldRequiresTracingNote[] =
45 "[blink-gc] Untraced field %0 declared here:";
46
47 const char kClassContainsInvalidFields[] =
48 "[blink-gc] Class %0 contains invalid fields.";
49
50 const char kClassContainsGCRoot[] =
51 "[blink-gc] Class %0 contains GC root in field %1.";
52
53 const char kClassRequiresFinalization[] =
54 "[blink-gc] Class %0 requires finalization.";
55
56 const char kClassDoesNotRequireFinalization[] =
57 "[blink-gc] Class %0 may not require finalization.";
58
59 const char kFinalizerAccessesFinalizedField[] =
60 "[blink-gc] Finalizer %0 accesses potentially finalized field %1.";
61
62 const char kFinalizerAccessesEagerlyFinalizedField[] =
63 "[blink-gc] Finalizer %0 accesses eagerly finalized field %1.";
64
65 const char kRawPtrToGCManagedClassNote[] =
66 "[blink-gc] Raw pointer field %0 to a GC managed class declared here:";
67
68 const char kRefPtrToGCManagedClassNote[] =
69 "[blink-gc] RefPtr field %0 to a GC managed class declared here:";
70
71 const char kOwnPtrToGCManagedClassNote[] =
72 "[blink-gc] OwnPtr field %0 to a GC managed class declared here:";
73
74 const char kMemberToGCUnmanagedClassNote[] =
75 "[blink-gc] Member field %0 to non-GC managed class declared here:";
76
77 const char kStackAllocatedFieldNote[] =
78 "[blink-gc] Stack-allocated field %0 declared here:";
79
80 const char kMemberInUnmanagedClassNote[] =
81 "[blink-gc] Member field %0 in unmanaged class declared here:";
82
83 const char kPartObjectToGCDerivedClassNote[] =
84 "[blink-gc] Part-object field %0 to a GC derived class declared here:";
85
86 const char kPartObjectContainsGCRootNote[] =
87 "[blink-gc] Field %0 with embedded GC root in %1 declared here:";
88
89 const char kFieldContainsGCRootNote[] =
90 "[blink-gc] Field %0 defining a GC root declared here:";
91
92 const char kOverriddenNonVirtualTrace[] =
93 "[blink-gc] Class %0 overrides non-virtual trace of base class %1.";
94
95 const char kOverriddenNonVirtualTraceNote[] =
96 "[blink-gc] Non-virtual trace method declared here:";
97
98 const char kMissingTraceDispatchMethod[] =
99 "[blink-gc] Class %0 is missing manual trace dispatch.";
100
101 const char kMissingFinalizeDispatchMethod[] =
102 "[blink-gc] Class %0 is missing manual finalize dispatch.";
103
104 const char kVirtualAndManualDispatch[] =
105 "[blink-gc] Class %0 contains or inherits virtual methods"
106 " but implements manual dispatching.";
107
108 const char kMissingTraceDispatch[] =
109 "[blink-gc] Missing dispatch to class %0 in manual trace dispatch.";
110
111 const char kMissingFinalizeDispatch[] =
112 "[blink-gc] Missing dispatch to class %0 in manual finalize dispatch.";
113
114 const char kFinalizedFieldNote[] =
115 "[blink-gc] Potentially finalized field %0 declared here:";
116
117 const char kEagerlyFinalizedFieldNote[] =
118 "[blink-gc] Field %0 having eagerly finalized value, declared here:";
119
120 const char kUserDeclaredDestructorNote[] =
121 "[blink-gc] User-declared destructor declared here:";
122
123 const char kUserDeclaredFinalizerNote[] =
124 "[blink-gc] User-declared finalizer declared here:";
125
126 const char kBaseRequiresFinalizationNote[] =
127 "[blink-gc] Base class %0 requiring finalization declared here:";
128
129 const char kFieldRequiresFinalizationNote[] =
130 "[blink-gc] Field %0 requiring finalization declared here:";
131
132 const char kManualDispatchMethodNote[] =
133 "[blink-gc] Manual dispatch %0 declared here:";
134
135 const char kDerivesNonStackAllocated[] =
136 "[blink-gc] Stack-allocated class %0 derives class %1"
137 " which is not stack allocated.";
138
139 const char kClassOverridesNew[] =
140 "[blink-gc] Garbage collected class %0"
141 " is not permitted to override its new operator.";
142
143 const char kClassDeclaresPureVirtualTrace[] =
144 "[blink-gc] Garbage collected class %0"
145 " is not permitted to declare a pure-virtual trace method.";
146
147 const char kLeftMostBaseMustBePolymorphic[] =
148 "[blink-gc] Left-most base class %0 of derived class %1"
149 " must be polymorphic.";
150
151 const char kBaseClassMustDeclareVirtualTrace[] =
152 "[blink-gc] Left-most base class %0 of derived class %1"
153 " must define a virtual trace method.";
154
155 const char kClassMustDeclareGCMixinTraceMethod[] =
156 "[blink-gc] Class %0 which inherits from GarbageCollectedMixin must"
157 " locally declare and override trace(Visitor*)";
158
159 // Use a local RAV implementation to simply collect all FunctionDecls marked for
160 // late template parsing. This happens with the flag -fdelayed-template-parsing,
161 // which is on by default in MSVC-compatible mode.
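// For illustration, a minimal sketch (class and member names are illustrative,
// and Blink's GarbageCollected<>, Member<> and Visitor declarations are
// assumed) of a templated trace body that stays late-parsed under
// -fdelayed-template-parsing until it is explicitly forced:
//
//   template <typename T>
//   class HeapBox : public GarbageCollected<HeapBox<T>> {
//    public:
//     void trace(Visitor* visitor) { visitor->trace(m_value); }  // late-parsed
//    private:
//     Member<T> m_value;
//   };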
162 std::set<FunctionDecl*> GetLateParsedFunctionDecls(TranslationUnitDecl* decl) {
163 struct Visitor : public RecursiveASTVisitor<Visitor> {
164 bool VisitFunctionDecl(FunctionDecl* function_decl) {
165 if (function_decl->isLateTemplateParsed())
166 late_parsed_decls.insert(function_decl);
167 return true;
168 }
169
170 std::set<FunctionDecl*> late_parsed_decls;
171 } v;
172 v.TraverseDecl(decl);
173 return v.late_parsed_decls;
174 }
175
176 struct BlinkGCPluginOptions {
177 BlinkGCPluginOptions()
178 : enable_oilpan(false)
179 , dump_graph(false)
180 , warn_raw_ptr(false)
181 , warn_unneeded_finalizer(false) {}
182 bool enable_oilpan;
183 bool dump_graph;
184 bool warn_raw_ptr;
185 bool warn_unneeded_finalizer;
186 std::set<std::string> ignored_classes;
187 std::set<std::string> checked_namespaces;
188 std::vector<std::string> ignored_directories;
189 };
190
191 typedef std::vector<CXXRecordDecl*> RecordVector;
192 typedef std::vector<CXXMethodDecl*> MethodVector;
193
194 // Test if a template specialization is an instantiation.
195 static bool IsTemplateInstantiation(CXXRecordDecl* record) {
196 ClassTemplateSpecializationDecl* spec =
197 dyn_cast<ClassTemplateSpecializationDecl>(record);
198 if (!spec)
199 return false;
200 switch (spec->getTemplateSpecializationKind()) {
201 case TSK_ImplicitInstantiation:
202 case TSK_ExplicitInstantiationDefinition:
203 return true;
204 case TSK_Undeclared:
205 case TSK_ExplicitSpecialization:
206 return false;
207 // TODO: unsupported cases.
208 case TSK_ExplicitInstantiationDeclaration:
209 return false;
210 }
211 assert(false && "Unknown template specialization kind");
212 }
213
214 // This visitor collects the entry points for the checker.
215 class CollectVisitor : public RecursiveASTVisitor<CollectVisitor> {
216 public:
217 CollectVisitor() {}
218
219 RecordVector& record_decls() { return record_decls_; }
220 MethodVector& trace_decls() { return trace_decls_; }
221
222 bool shouldVisitTemplateInstantiations() { return false; }
223
224 // Collect record declarations, including nested declarations.
225 bool VisitCXXRecordDecl(CXXRecordDecl* record) {
226 if (record->hasDefinition() && record->isCompleteDefinition())
227 record_decls_.push_back(record);
228 return true;
229 }
230
231 // Collect tracing method definitions, but don't traverse method bodies.
232 bool TraverseCXXMethodDecl(CXXMethodDecl* method) {
233 if (method->isThisDeclarationADefinition() && Config::IsTraceMethod(method))
234 trace_decls_.push_back(method);
235 return true;
236 }
237
238 private:
239 RecordVector record_decls_;
240 MethodVector trace_decls_;
241 };
242
243 // This visitor checks that a finalizer method does not have invalid access to
244 // fields that are potentially finalized. A potentially finalized field is
245 // either a Member, a heap-allocated collection or an off-heap collection that
246 // contains Members. Invalid uses are currently identified as passing the field
247 // as the argument of a procedure call or using the -> or [] operators on it.
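// For illustration, a minimal sketch of a finalizer this visitor reports
// (names are illustrative; Blink's GarbageCollectedFinalized<>, Member<> and
// Visitor declarations are assumed):
//
//   class Peer;
//   class Holder : public GarbageCollectedFinalized<Holder> {
//    public:
//     ~Holder() { m_peer->detach(); }  // '->' on a potentially finalized
//                                      // Member field is flagged.
//     void trace(Visitor* visitor) { visitor->trace(m_peer); }
//    private:
//     Member<Peer> m_peer;
//   };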
248 class CheckFinalizerVisitor
249 : public RecursiveASTVisitor<CheckFinalizerVisitor> {
250 private:
251 // Simple visitor to determine if the content of a field might be collected
252 // during finalization.
253 class MightBeCollectedVisitor : public EdgeVisitor {
254 public:
255 MightBeCollectedVisitor(bool is_eagerly_finalized)
256 : might_be_collected_(false)
257 , is_eagerly_finalized_(is_eagerly_finalized)
258 , as_eagerly_finalized_(false) {}
259 bool might_be_collected() { return might_be_collected_; }
260 bool as_eagerly_finalized() { return as_eagerly_finalized_; }
261 void VisitMember(Member* edge) override {
262 if (is_eagerly_finalized_) {
263 if (edge->ptr()->IsValue()) {
264 Value* member = static_cast<Value*>(edge->ptr());
265 if (member->value()->IsEagerlyFinalized()) {
266 might_be_collected_ = true;
267 as_eagerly_finalized_ = true;
268 }
269 }
270 return;
271 }
272 might_be_collected_ = true;
273 }
274 void VisitCollection(Collection* edge) override {
275 if (edge->on_heap() && !is_eagerly_finalized_) {
276 might_be_collected_ = !edge->is_root();
277 } else {
278 edge->AcceptMembers(this);
279 }
280 }
281
282 private:
283 bool might_be_collected_;
284 bool is_eagerly_finalized_;
285 bool as_eagerly_finalized_;
286 };
287
288 public:
289 class Error {
290 public:
291 Error(MemberExpr *member,
292 bool as_eagerly_finalized,
293 FieldPoint* field)
294 : member_(member)
295 , as_eagerly_finalized_(as_eagerly_finalized)
296 , field_(field) {}
297
298 MemberExpr* member_;
299 bool as_eagerly_finalized_;
300 FieldPoint* field_;
301 };
302
303 typedef std::vector<Error> Errors;
304
305 CheckFinalizerVisitor(RecordCache* cache, bool is_eagerly_finalized)
306 : blacklist_context_(false)
307 , cache_(cache)
308 , is_eagerly_finalized_(is_eagerly_finalized) {}
309
310 Errors& finalized_fields() { return finalized_fields_; }
311
312 bool WalkUpFromCXXOperatorCallExpr(CXXOperatorCallExpr* expr) {
313 // Only continue the walk-up if the operator is a blacklisted one.
314 switch (expr->getOperator()) {
315 case OO_Arrow:
316 case OO_Subscript:
317 this->WalkUpFromCallExpr(expr);
318 default:
319 return true;
320 }
321 }
322
323 // We consider all non-operator calls to be blacklisted contexts.
324 bool WalkUpFromCallExpr(CallExpr* expr) {
325 bool prev_blacklist_context = blacklist_context_;
326 blacklist_context_ = true;
327 for (size_t i = 0; i < expr->getNumArgs(); ++i)
328 this->TraverseStmt(expr->getArg(i));
329 blacklist_context_ = prev_blacklist_context;
330 return true;
331 }
332
333 bool VisitMemberExpr(MemberExpr* member) {
334 FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl());
335 if (!field)
336 return true;
337
338 RecordInfo* info = cache_->Lookup(field->getParent());
339 if (!info)
340 return true;
341
342 RecordInfo::Fields::iterator it = info->GetFields().find(field);
343 if (it == info->GetFields().end())
344 return true;
345
346 if (seen_members_.find(member) != seen_members_.end())
347 return true;
348
349 bool as_eagerly_finalized = false;
350 if (blacklist_context_ &&
351 MightBeCollected(&it->second, as_eagerly_finalized)) {
352 finalized_fields_.push_back(
353 Error(member, as_eagerly_finalized, &it->second));
354 seen_members_.insert(member);
355 }
356 return true;
357 }
358
359 bool MightBeCollected(FieldPoint* point, bool& as_eagerly_finalized) {
360 MightBeCollectedVisitor visitor(is_eagerly_finalized_);
361 point->edge()->Accept(&visitor);
362 as_eagerly_finalized = visitor.as_eagerly_finalized();
363 return visitor.might_be_collected();
364 }
365
366 private:
367 bool blacklist_context_;
368 Errors finalized_fields_;
369 std::set<MemberExpr*> seen_members_;
370 RecordCache* cache_;
371 bool is_eagerly_finalized_;
372 };
373
374 // This visitor checks that a method contains within its body, a call to a
375 // method on the provided receiver class. This is used to check manual
376 // dispatching for trace and finalize methods.
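// For illustration, a sketch of the manual dispatch pattern being verified
// (names are illustrative; Blink's GarbageCollected<> and Visitor
// declarations are assumed):
//
//   void Base::trace(Visitor* visitor) {
//     if (isDerived())
//       static_cast<Derived*>(this)->traceAfterDispatch(visitor);  // dispatch
//     else                                                         // to the
//       traceAfterDispatch(visitor);                               // receiver.
//   }
//
// When checking Derived, this visitor traverses Base::trace and requires a
// call that resolves to one of Derived's methods.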
377 class CheckDispatchVisitor : public RecursiveASTVisitor<CheckDispatchVisitor> {
378 public:
379 CheckDispatchVisitor(RecordInfo* receiver)
380 : receiver_(receiver), dispatched_to_receiver_(false) {}
381
382 bool dispatched_to_receiver() { return dispatched_to_receiver_; }
383
384 bool VisitMemberExpr(MemberExpr* member) {
385 if (CXXMethodDecl* fn = dyn_cast<CXXMethodDecl>(member->getMemberDecl())) {
386 if (fn->getParent() == receiver_->record())
387 dispatched_to_receiver_ = true;
388 }
389 return true;
390 }
391
392 bool VisitUnresolvedMemberExpr(UnresolvedMemberExpr* member) {
393 for (Decl* decl : member->decls()) {
394 if (CXXMethodDecl* method = dyn_cast<CXXMethodDecl>(decl)) {
395 if (method->getParent() == receiver_->record() &&
396 Config::GetTraceMethodType(method) ==
397 Config::TRACE_AFTER_DISPATCH_METHOD) {
398 dispatched_to_receiver_ = true;
399 return true;
400 }
401 }
402 }
403 return true;
404 }
405
406 private:
407 RecordInfo* receiver_;
408 bool dispatched_to_receiver_;
409 };
410
411 // This visitor checks a tracing method by traversing its body.
412 // - A member field is considered traced if it is referenced in the body.
413 // - A base is traced if a base-qualified call to a trace method is found.
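// For illustration, a trace method that satisfies both rules (names are
// illustrative; Blink's Member<> and Visitor declarations are assumed):
//
//   void Derived::trace(Visitor* visitor) {
//     visitor->trace(m_child);  // references the field, so it counts as traced.
//     Base::trace(visitor);     // base-qualified call, so Base counts as traced.
//   }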
414 class CheckTraceVisitor : public RecursiveASTVisitor<CheckTraceVisitor> {
415 public:
416 CheckTraceVisitor(CXXMethodDecl* trace, RecordInfo* info, RecordCache* cache)
417 : trace_(trace),
418 info_(info),
419 cache_(cache),
420 delegates_to_traceimpl_(false) {
421 }
422
423 bool delegates_to_traceimpl() const { return delegates_to_traceimpl_; }
424
425 bool VisitMemberExpr(MemberExpr* member) {
426 // In weak callbacks, consider any occurrence as a correct usage.
427 // TODO: We really want to require that isAlive is checked on manually
428 // processed weak fields.
429 if (IsWeakCallback()) {
430 if (FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl()))
431 FoundField(field);
432 }
433 return true;
434 }
435
436 bool VisitCallExpr(CallExpr* call) {
437 // In weak callbacks we don't check calls (see VisitMemberExpr).
438 if (IsWeakCallback())
439 return true;
440
441 Expr* callee = call->getCallee();
442
443 // Trace calls from a templated derived class result in a
444 // DependentScopeMemberExpr because the concrete trace call depends on the
445 // instantiation of any shared template parameters. In this case the call is
446 // "unresolved" and we resort to comparing the syntactic type names.
447 if (CXXDependentScopeMemberExpr* expr =
448 dyn_cast<CXXDependentScopeMemberExpr>(callee)) {
449 CheckCXXDependentScopeMemberExpr(call, expr);
450 return true;
451 }
452
453 // A tracing call will have either a |visitor| or a |m_field| argument.
454 // A registerWeakMembers call will have a |this| argument.
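// For example (forms assumed from typical Blink usage; exact signatures may
// differ):
//
//   visitor->trace(m_field);                                    // |m_field|
//   Base::trace(visitor);                                       // |visitor|
//   visitor->registerWeakMembers<Self, &Self::clearWeak>(this); // |this|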
455 if (call->getNumArgs() != 1)
456 return true;
457 Expr* arg = call->getArg(0);
458
459 if (UnresolvedMemberExpr* expr = dyn_cast<UnresolvedMemberExpr>(callee)) {
460 // This could be a trace call of a base class, as explained in the
461 // comments of CheckTraceBaseCall().
462 if (CheckTraceBaseCall(call))
463 return true;
464
465 if (expr->getMemberName().getAsString() == kRegisterWeakMembersName)
466 MarkAllWeakMembersTraced();
467
468 QualType base = expr->getBaseType();
469 if (!base->isPointerType())
470 return true;
471 CXXRecordDecl* decl = base->getPointeeType()->getAsCXXRecordDecl();
472 if (decl)
473 CheckTraceFieldCall(expr->getMemberName().getAsString(), decl, arg);
474 if (Config::IsTraceImplName(expr->getMemberName().getAsString()))
475 delegates_to_traceimpl_ = true;
476 return true;
477 }
478
479 if (CXXMemberCallExpr* expr = dyn_cast<CXXMemberCallExpr>(call)) {
480 if (CheckTraceFieldCall(expr) || CheckRegisterWeakMembers(expr))
481 return true;
482
483 if (Config::IsTraceImplName(expr->getMethodDecl()->getNameAsString())) {
484 delegates_to_traceimpl_ = true;
485 return true;
486 }
487 }
488
489 CheckTraceBaseCall(call);
490 return true;
491 }
492
493 private:
494 bool IsTraceCallName(const std::string& name) {
495 if (trace_->getName() == kTraceImplName)
496 return name == kTraceName;
497 if (trace_->getName() == kTraceAfterDispatchImplName)
498 return name == kTraceAfterDispatchName;
499 // Currently, a manually dispatched class cannot have mixin bases (having
500 // one would add a vtable which we explicitly check against). This means
501 // that we can only make calls to a trace method of the same name. Revisit
502 // this if our mixin/vtable assumption changes.
503 return name == trace_->getName();
504 }
505
506 CXXRecordDecl* GetDependentTemplatedDecl(CXXDependentScopeMemberExpr* expr) {
507 NestedNameSpecifier* qual = expr->getQualifier();
508 if (!qual)
509 return 0;
510
511 const Type* type = qual->getAsType();
512 if (!type)
513 return 0;
514
515 return RecordInfo::GetDependentTemplatedDecl(*type);
516 }
517
518 void CheckCXXDependentScopeMemberExpr(CallExpr* call,
519 CXXDependentScopeMemberExpr* expr) {
520 string fn_name = expr->getMember().getAsString();
521
522 // Check for VisitorDispatcher::trace(field) and
523 // VisitorDispatcher::registerWeakMembers.
524 if (!expr->isImplicitAccess()) {
525 if (clang::DeclRefExpr* base_decl =
526 clang::dyn_cast<clang::DeclRefExpr>(expr->getBase())) {
527 if (Config::IsVisitorDispatcherType(base_decl->getType())) {
528 if (call->getNumArgs() == 1 && fn_name == kTraceName) {
529 FindFieldVisitor finder;
530 finder.TraverseStmt(call->getArg(0));
531 if (finder.field())
532 FoundField(finder.field());
533
534 return;
535 } else if (call->getNumArgs() == 1 &&
536 fn_name == kRegisterWeakMembersName) {
537 MarkAllWeakMembersTraced();
538 }
539 }
540 }
541 }
542
543 CXXRecordDecl* tmpl = GetDependentTemplatedDecl(expr);
544 if (!tmpl)
545 return;
546
547 // Check for Super<T>::trace(visitor)
548 if (call->getNumArgs() == 1 && IsTraceCallName(fn_name)) {
549 RecordInfo::Bases::iterator it = info_->GetBases().begin();
550 for (; it != info_->GetBases().end(); ++it) {
551 if (it->first->getName() == tmpl->getName())
552 it->second.MarkTraced();
553 }
554 }
555
556 // Check for TraceIfNeeded<T>::trace(visitor, &field)
557 if (call->getNumArgs() == 2 && fn_name == kTraceName &&
558 tmpl->getName() == kTraceIfNeededName) {
559 FindFieldVisitor finder;
560 finder.TraverseStmt(call->getArg(1));
561 if (finder.field())
562 FoundField(finder.field());
563 }
564 }
565
566 bool CheckTraceBaseCall(CallExpr* call) {
567 // Checks for "Base::trace(visitor)"-like calls.
568
569 // The code checking these two variables is shared between the MemberExpr*
570 // case and the UnresolvedMemberExpr* case below.
571 //
572 // For example, if we've got "Base::trace(visitor)" as |call|,
573 // callee_record will be "Base", and func_name will be "trace".
574 CXXRecordDecl* callee_record = nullptr;
575 std::string func_name;
576
577 if (MemberExpr* callee = dyn_cast<MemberExpr>(call->getCallee())) {
578 if (!callee->hasQualifier())
579 return false;
580
581 FunctionDecl* trace_decl =
582 dyn_cast<FunctionDecl>(callee->getMemberDecl());
583 if (!trace_decl || !Config::IsTraceMethod(trace_decl))
584 return false;
585
586 const Type* type = callee->getQualifier()->getAsType();
587 if (!type)
588 return false;
589
590 callee_record = type->getAsCXXRecordDecl();
591 func_name = trace_decl->getName();
592 } else if (UnresolvedMemberExpr* callee =
593 dyn_cast<UnresolvedMemberExpr>(call->getCallee())) {
594 // Callee part may become unresolved if the type of the argument
595 // ("visitor") is a template parameter and the called function is
596 // overloaded (i.e. trace(Visitor*) and
597 // trace(InlinedGlobalMarkingVisitor)).
598 //
599 // Here, we try to find a function that looks like trace() from the
600 // candidate overloaded functions, and if we find one, we assume it is
601 // called here.
602
603 CXXMethodDecl* trace_decl = nullptr;
604 for (NamedDecl* named_decl : callee->decls()) {
605 if (CXXMethodDecl* method_decl = dyn_cast<CXXMethodDecl>(named_decl)) {
606 if (Config::IsTraceMethod(method_decl)) {
607 trace_decl = method_decl;
608 break;
609 }
610 }
611 }
612 if (!trace_decl)
613 return false;
614
615 // Check if the passed argument is named "visitor".
616 if (call->getNumArgs() != 1)
617 return false;
618 DeclRefExpr* arg = dyn_cast<DeclRefExpr>(call->getArg(0));
619 if (!arg || arg->getNameInfo().getAsString() != kVisitorVarName)
620 return false;
621
622 callee_record = trace_decl->getParent();
623 func_name = callee->getMemberName().getAsString();
624 }
625
626 if (!callee_record)
627 return false;
628
629 if (!IsTraceCallName(func_name))
630 return false;
631
632 for (auto& base : info_->GetBases()) {
633 // We want to handle an omitted trace() function in an intermediary
634 // class in the class hierarchy, e.g.:
635 // class A : public GarbageCollected<A> { trace() { ... } };
636 // class B : public A { /* No trace(); have nothing to trace. */ };
637 // class C : public B { trace() { B::trace(visitor); } }
638 // where B::trace() is actually A::trace(), and in some cases we get
639 // A as |callee_record| instead of B. We then need to mark B as
640 // traced if we find an A::trace() call.
641 //
642 // To solve this, here we keep going up the class hierarchy as long as
643 // they are not required to have a trace method. The implementation is
644 // a simple DFS, where |base_records| represents the set of base classes
645 // we need to visit.
646
647 std::vector<CXXRecordDecl*> base_records;
648 base_records.push_back(base.first);
649
650 while (!base_records.empty()) {
651 CXXRecordDecl* base_record = base_records.back();
652 base_records.pop_back();
653
654 if (base_record == callee_record) {
655 // If we find a matching trace method, pretend the user has written
656 // a correct trace() method of the base; in the example above, we
657 // find A::trace() here and mark B as correctly traced.
658 base.second.MarkTraced();
659 return true;
660 }
661
662 if (RecordInfo* base_info = cache_->Lookup(base_record)) {
663 if (!base_info->RequiresTraceMethod()) {
664 // If this base class is not required to have a trace method, then
665 // the actual trace method may be defined in an ancestor.
666 for (auto& inner_base : base_info->GetBases())
667 base_records.push_back(inner_base.first);
668 }
669 }
670 }
671 }
672
673 return false;
674 }
675
676 bool CheckTraceFieldCall(CXXMemberCallExpr* call) {
677 return CheckTraceFieldCall(call->getMethodDecl()->getNameAsString(),
678 call->getRecordDecl(),
679 call->getArg(0));
680 }
681
682 bool CheckTraceFieldCall(string name, CXXRecordDecl* callee, Expr* arg) {
683 if (name != kTraceName || !Config::IsVisitor(callee->getName()))
684 return false;
685
686 FindFieldVisitor finder;
687 finder.TraverseStmt(arg);
688 if (finder.field())
689 FoundField(finder.field());
690
691 return true;
692 }
693
694 bool CheckRegisterWeakMembers(CXXMemberCallExpr* call) {
695 CXXMethodDecl* fn = call->getMethodDecl();
696 if (fn->getName() != kRegisterWeakMembersName)
697 return false;
698
699 if (fn->isTemplateInstantiation()) {
700 const TemplateArgumentList& args =
701 *fn->getTemplateSpecializationInfo()->TemplateArguments;
702 // The second template argument is the callback method.
703 if (args.size() > 1 &&
704 args[1].getKind() == TemplateArgument::Declaration) {
705 if (FunctionDecl* callback =
706 dyn_cast<FunctionDecl>(args[1].getAsDecl())) {
707 if (callback->hasBody()) {
708 CheckTraceVisitor nested_visitor(info_);
709 nested_visitor.TraverseStmt(callback->getBody());
710 }
711 }
712 }
713 }
714 return true;
715 }
716
717 class FindFieldVisitor : public RecursiveASTVisitor<FindFieldVisitor> {
718 public:
719 FindFieldVisitor() : member_(0), field_(0) {}
720 MemberExpr* member() const { return member_; }
721 FieldDecl* field() const { return field_; }
722 bool TraverseMemberExpr(MemberExpr* member) {
723 if (FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl())) {
724 member_ = member;
725 field_ = field;
726 return false;
727 }
728 return true;
729 }
730 private:
731 MemberExpr* member_;
732 FieldDecl* field_;
733 };
734
735 // Nested checking for weak callbacks.
736 CheckTraceVisitor(RecordInfo* info)
737 : trace_(nullptr), info_(info), cache_(nullptr) {}
738
739 bool IsWeakCallback() { return !trace_; }
740
741 void MarkTraced(RecordInfo::Fields::iterator it) {
742 // In a weak callback we can't mark strong fields as traced.
743 if (IsWeakCallback() && !it->second.edge()->IsWeakMember())
744 return;
745 it->second.MarkTraced();
746 }
747
748 void FoundField(FieldDecl* field) {
749 if (IsTemplateInstantiation(info_->record())) {
750 // Pointer equality on fields does not work for template instantiations.
751 // The trace method refers to fields of the template definition which
752 // are different from the instantiated fields that need to be traced.
753 const string& name = field->getNameAsString();
754 for (RecordInfo::Fields::iterator it = info_->GetFields().begin();
755 it != info_->GetFields().end();
756 ++it) {
757 if (it->first->getNameAsString() == name) {
758 MarkTraced(it);
759 break;
760 }
761 }
762 } else {
763 RecordInfo::Fields::iterator it = info_->GetFields().find(field);
764 if (it != info_->GetFields().end())
765 MarkTraced(it);
766 }
767 }
768
769 void MarkAllWeakMembersTraced() {
770 // If we find a call to registerWeakMembers which is unresolved we
771 // unsoundly consider all weak members as traced.
772 // TODO: Find out how to validate weak member tracing for an unresolved call.
773 for (auto& field : info_->GetFields()) {
774 if (field.second.edge()->IsWeakMember())
775 field.second.MarkTraced();
776 }
777 }
778
779 CXXMethodDecl* trace_;
780 RecordInfo* info_;
781 RecordCache* cache_;
782 bool delegates_to_traceimpl_;
783 };
784
785 // This visitor checks that the fields of a class and the fields of
786 // its part objects don't define GC roots.
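// For illustration, a field this visitor reports (names are illustrative;
// Blink's GarbageCollected<> and Persistent<> declarations are assumed):
//
//   class Node : public GarbageCollected<Node> {
//    private:
//     Persistent<Node> m_root;  // a GC root embedded in a GC managed class.
//   };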
787 class CheckGCRootsVisitor : public RecursiveEdgeVisitor {
788 public:
789 typedef std::vector<FieldPoint*> RootPath;
790 typedef std::set<RecordInfo*> VisitingSet;
791 typedef std::vector<RootPath> Errors;
792
793 CheckGCRootsVisitor() {}
794
795 Errors& gc_roots() { return gc_roots_; }
796
797 bool ContainsGCRoots(RecordInfo* info) {
798 for (RecordInfo::Fields::iterator it = info->GetFields().begin();
799 it != info->GetFields().end();
800 ++it) {
801 current_.push_back(&it->second);
802 it->second.edge()->Accept(this);
803 current_.pop_back();
804 }
805 return !gc_roots_.empty();
806 }
807
808 void VisitValue(Value* edge) override {
809 // TODO: what should we do to check unions?
810 if (edge->value()->record()->isUnion())
811 return;
812
813 // Prevent infinite regress for cyclic part objects.
814 if (visiting_set_.find(edge->value()) != visiting_set_.end())
815 return;
816
817 visiting_set_.insert(edge->value());
818 // If the value is a part object, then continue checking for roots.
819 for (Context::iterator it = context().begin();
820 it != context().end();
821 ++it) {
822 if (!(*it)->IsCollection())
823 return;
824 }
825 ContainsGCRoots(edge->value());
826 visiting_set_.erase(edge->value());
827 }
828
829 void VisitPersistent(Persistent* edge) override {
830 gc_roots_.push_back(current_);
831 }
832
833 void AtCollection(Collection* edge) override {
834 if (edge->is_root())
835 gc_roots_.push_back(current_);
836 }
837
838 protected:
839 RootPath current_;
840 VisitingSet visiting_set_;
841 Errors gc_roots_;
842 };
843
844 // This visitor checks that the fields of a class are "well formed".
845 // - OwnPtr, RefPtr and RawPtr must not point to GC derived types.
846 // - Part objects must not be GC derived types.
847 // - An on-heap class must never contain GC roots.
848 // - Only stack-allocated types may point to stack-allocated types.
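// For illustration, sketches of fields violating these rules (names are
// illustrative; Blink's GarbageCollected<>, RefPtr<> and OwnPtr<>
// declarations are assumed):
//
//   class Managed : public GarbageCollected<Managed> { /* ... */ };
//
//   class Holder : public GarbageCollected<Holder> {
//    private:
//     RefPtr<Managed> m_ref;  // RefPtr to a GC derived type.
//     OwnPtr<Managed> m_own;  // OwnPtr to a GC derived type.
//     Managed m_part;         // part object of a GC derived type.
//   };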
849 class CheckFieldsVisitor : public RecursiveEdgeVisitor {
850 public:
851
852 enum Error {
853 kRawPtrToGCManaged,
854 kRawPtrToGCManagedWarning,
855 kRefPtrToGCManaged,
856 kOwnPtrToGCManaged,
857 kMemberToGCUnmanaged,
858 kMemberInUnmanaged,
859 kPtrFromHeapToStack,
860 kGCDerivedPartObject
861 };
862
863 typedef std::vector<std::pair<FieldPoint*, Error> > Errors;
864
865 CheckFieldsVisitor(const BlinkGCPluginOptions& options)
866 : options_(options), current_(0), stack_allocated_host_(false) {}
867
868 Errors& invalid_fields() { return invalid_fields_; }
869
870 bool ContainsInvalidFields(RecordInfo* info) {
871 stack_allocated_host_ = info->IsStackAllocated();
872 managed_host_ = stack_allocated_host_ ||
873 info->IsGCAllocated() ||
874 info->IsNonNewable() ||
875 info->IsOnlyPlacementNewable();
876 for (RecordInfo::Fields::iterator it = info->GetFields().begin();
877 it != info->GetFields().end();
878 ++it) {
879 context().clear();
880 current_ = &it->second;
881 current_->edge()->Accept(this);
882 }
883 return !invalid_fields_.empty();
884 }
885
886 void AtMember(Member* edge) override {
887 if (managed_host_)
888 return;
889 // A member is allowed to appear in the context of a root.
890 for (Context::iterator it = context().begin();
891 it != context().end();
892 ++it) {
893 if ((*it)->Kind() == Edge::kRoot)
894 return;
895 }
896 invalid_fields_.push_back(std::make_pair(current_, kMemberInUnmanaged));
897 }
898
899 void AtValue(Value* edge) override {
900 // TODO: what should we do to check unions?
901 if (edge->value()->record()->isUnion())
902 return;
903
904 if (!stack_allocated_host_ && edge->value()->IsStackAllocated()) {
905 invalid_fields_.push_back(std::make_pair(current_, kPtrFromHeapToStack));
906 return;
907 }
908
909 if (!Parent() &&
910 edge->value()->IsGCDerived() &&
911 !edge->value()->IsGCMixin()) {
912 invalid_fields_.push_back(std::make_pair(current_, kGCDerivedPartObject));
913 return;
914 }
915
916 // If in a stack allocated context, be fairly insistent that T in Member<T>
917 // is GC allocated, as stack allocated objects do not have a trace()
918 // that separately verifies the validity of Member<T>.
919 //
920 // Notice that an error is only reported if T's definition is in scope;
921 // we do not require that it must be brought into scope as that would
922 // prevent declarations of mutually dependent class types.
923 //
924 // (Note: Member<>'s constructor will at run-time verify that the
925 // pointer it wraps is indeed heap allocated.)
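// For example (sketch; STACK_ALLOCATED() and Member<> are assumed from
// Blink, and the names are illustrative):
//
//   class OnStack {
//     STACK_ALLOCATED();
//    private:
//     Member<NotManaged> m_field;  // reported if NotManaged's definition is
//   };                             // in scope and it is not GC allocated.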
926 if (stack_allocated_host_ && Parent() && Parent()->IsMember() &&
927 edge->value()->HasDefinition() && !edge->value()->IsGCAllocated()) {
928 invalid_fields_.push_back(std::make_pair(current_,
929 kMemberToGCUnmanaged));
930 return;
931 }
932
933 if (!Parent() || !edge->value()->IsGCAllocated())
934 return;
935
936 // In transition mode, disallow OwnPtr<T> and RawPtr<T> to GC allocated T's;
937 // also disallow T* in stack-allocated types.
938 if (options_.enable_oilpan) {
939 if (Parent()->IsOwnPtr() ||
940 Parent()->IsRawPtrClass() ||
941 (stack_allocated_host_ && Parent()->IsRawPtr())) {
942 invalid_fields_.push_back(std::make_pair(
943 current_, InvalidSmartPtr(Parent())));
944 return;
945 }
946 if (options_.warn_raw_ptr && Parent()->IsRawPtr()) {
947 invalid_fields_.push_back(std::make_pair(
948 current_, kRawPtrToGCManagedWarning));
949 }
950 return;
951 }
952
953 if (Parent()->IsRawPtr() || Parent()->IsRefPtr() || Parent()->IsOwnPtr()) {
954 invalid_fields_.push_back(std::make_pair(
955 current_, InvalidSmartPtr(Parent())));
956 return;
957 }
958 }
959
960 void AtCollection(Collection* edge) override {
961 if (edge->on_heap() && Parent() && Parent()->IsOwnPtr())
962 invalid_fields_.push_back(std::make_pair(current_, kOwnPtrToGCManaged));
963 }
964
965 private:
966 Error InvalidSmartPtr(Edge* ptr) {
967 if (ptr->IsRawPtr())
968 return kRawPtrToGCManaged;
969 if (ptr->IsRefPtr())
970 return kRefPtrToGCManaged;
971 if (ptr->IsOwnPtr())
972 return kOwnPtrToGCManaged;
973 assert(false && "Unknown smart pointer kind");
974 }
975
976 const BlinkGCPluginOptions& options_;
977 FieldPoint* current_;
978 bool stack_allocated_host_;
979 bool managed_host_;
980 Errors invalid_fields_;
981 };
982
983 class EmptyStmtVisitor
984 : public RecursiveASTVisitor<EmptyStmtVisitor> {
985 public:
986 static bool isEmpty(Stmt* stmt) {
987 EmptyStmtVisitor visitor;
988 visitor.TraverseStmt(stmt);
989 return visitor.empty_;
990 }
991
992 bool WalkUpFromCompoundStmt(CompoundStmt* stmt) {
993 empty_ = stmt->body_empty();
994 return false;
995 }
996 bool VisitStmt(Stmt*) {
997 empty_ = false;
998 return false;
999 }
1000 private:
1001 EmptyStmtVisitor() : empty_(true) {}
1002 bool empty_;
1003 };
1004
1005 // Main class containing checks for various invariants of the Blink
1006 // garbage collection infrastructure.
1007 class BlinkGCPluginConsumer : public ASTConsumer {
1008 public:
1009 BlinkGCPluginConsumer(CompilerInstance& instance,
1010 const BlinkGCPluginOptions& options)
1011 : instance_(instance),
1012 diagnostic_(instance.getDiagnostics()),
1013 options_(options),
1014 json_(0) {
1015
1016 // Only check structures in the blink and WebKit namespaces.
1017 options_.checked_namespaces.insert("blink");
1018 options_.checked_namespaces.insert("WebKit");
1019
1020 // Ignore GC implementation files.
1021 options_.ignored_directories.push_back("/heap/");
1022
1023 // Register warning/error messages.
1024 diag_class_must_left_mostly_derive_gc_ = diagnostic_.getCustomDiagID(
1025 getErrorLevel(), kClassMustLeftMostlyDeriveGC);
1026 diag_class_requires_trace_method_ =
1027 diagnostic_.getCustomDiagID(getErrorLevel(), kClassRequiresTraceMethod);
1028 diag_base_requires_tracing_ =
1029 diagnostic_.getCustomDiagID(getErrorLevel(), kBaseRequiresTracing);
1030 diag_fields_require_tracing_ =
1031 diagnostic_.getCustomDiagID(getErrorLevel(), kFieldsRequireTracing);
1032 diag_class_contains_invalid_fields_ = diagnostic_.getCustomDiagID(
1033 getErrorLevel(), kClassContainsInvalidFields);
1034 diag_class_contains_invalid_fields_warning_ = diagnostic_.getCustomDiagID(
1035 DiagnosticsEngine::Warning, kClassContainsInvalidFields);
1036 diag_class_contains_gc_root_ =
1037 diagnostic_.getCustomDiagID(getErrorLevel(), kClassContainsGCRoot);
1038 diag_class_requires_finalization_ = diagnostic_.getCustomDiagID(
1039 getErrorLevel(), kClassRequiresFinalization);
1040 diag_class_does_not_require_finalization_ = diagnostic_.getCustomDiagID(
1041 DiagnosticsEngine::Warning, kClassDoesNotRequireFinalization);
1042 diag_finalizer_accesses_finalized_field_ = diagnostic_.getCustomDiagID(
1043 getErrorLevel(), kFinalizerAccessesFinalizedField);
1044 diag_finalizer_eagerly_finalized_field_ = diagnostic_.getCustomDiagID(
1045 getErrorLevel(), kFinalizerAccessesEagerlyFinalizedField);
1046 diag_overridden_non_virtual_trace_ = diagnostic_.getCustomDiagID(
1047 getErrorLevel(), kOverriddenNonVirtualTrace);
1048 diag_missing_trace_dispatch_method_ = diagnostic_.getCustomDiagID(
1049 getErrorLevel(), kMissingTraceDispatchMethod);
1050 diag_missing_finalize_dispatch_method_ = diagnostic_.getCustomDiagID(
1051 getErrorLevel(), kMissingFinalizeDispatchMethod);
1052 diag_virtual_and_manual_dispatch_ =
1053 diagnostic_.getCustomDiagID(getErrorLevel(), kVirtualAndManualDispatch);
1054 diag_missing_trace_dispatch_ =
1055 diagnostic_.getCustomDiagID(getErrorLevel(), kMissingTraceDispatch);
1056 diag_missing_finalize_dispatch_ =
1057 diagnostic_.getCustomDiagID(getErrorLevel(), kMissingFinalizeDispatch);
1058 diag_derives_non_stack_allocated_ =
1059 diagnostic_.getCustomDiagID(getErrorLevel(), kDerivesNonStackAllocated);
1060 diag_class_overrides_new_ =
1061 diagnostic_.getCustomDiagID(getErrorLevel(), kClassOverridesNew);
1062 diag_class_declares_pure_virtual_trace_ = diagnostic_.getCustomDiagID(
1063 getErrorLevel(), kClassDeclaresPureVirtualTrace);
1064 diag_left_most_base_must_be_polymorphic_ = diagnostic_.getCustomDiagID(
1065 getErrorLevel(), kLeftMostBaseMustBePolymorphic);
1066 diag_base_class_must_declare_virtual_trace_ = diagnostic_.getCustomDiagID(
1067 getErrorLevel(), kBaseClassMustDeclareVirtualTrace);
1068 diag_class_must_declare_gc_mixin_trace_method_ =
1069 diagnostic_.getCustomDiagID(getErrorLevel(),
1070 kClassMustDeclareGCMixinTraceMethod);
1071
1072 // Register note messages.
1073 diag_base_requires_tracing_note_ = diagnostic_.getCustomDiagID(
1074 DiagnosticsEngine::Note, kBaseRequiresTracingNote);
1075 diag_field_requires_tracing_note_ = diagnostic_.getCustomDiagID(
1076 DiagnosticsEngine::Note, kFieldRequiresTracingNote);
1077 diag_raw_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
1078 DiagnosticsEngine::Note, kRawPtrToGCManagedClassNote);
1079 diag_ref_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
1080 DiagnosticsEngine::Note, kRefPtrToGCManagedClassNote);
1081 diag_own_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
1082 DiagnosticsEngine::Note, kOwnPtrToGCManagedClassNote);
1083 diag_member_to_gc_unmanaged_class_note_ = diagnostic_.getCustomDiagID(
1084 DiagnosticsEngine::Note, kMemberToGCUnmanagedClassNote);
1085 diag_stack_allocated_field_note_ = diagnostic_.getCustomDiagID(
1086 DiagnosticsEngine::Note, kStackAllocatedFieldNote);
1087 diag_member_in_unmanaged_class_note_ = diagnostic_.getCustomDiagID(
1088 DiagnosticsEngine::Note, kMemberInUnmanagedClassNote);
1089 diag_part_object_to_gc_derived_class_note_ = diagnostic_.getCustomDiagID(
1090 DiagnosticsEngine::Note, kPartObjectToGCDerivedClassNote);
1091 diag_part_object_contains_gc_root_note_ = diagnostic_.getCustomDiagID(
1092 DiagnosticsEngine::Note, kPartObjectContainsGCRootNote);
1093 diag_field_contains_gc_root_note_ = diagnostic_.getCustomDiagID(
1094 DiagnosticsEngine::Note, kFieldContainsGCRootNote);
1095 diag_finalized_field_note_ = diagnostic_.getCustomDiagID(
1096 DiagnosticsEngine::Note, kFinalizedFieldNote);
1097 diag_eagerly_finalized_field_note_ = diagnostic_.getCustomDiagID(
1098 DiagnosticsEngine::Note, kEagerlyFinalizedFieldNote);
1099 diag_user_declared_destructor_note_ = diagnostic_.getCustomDiagID(
1100 DiagnosticsEngine::Note, kUserDeclaredDestructorNote);
1101 diag_user_declared_finalizer_note_ = diagnostic_.getCustomDiagID(
1102 DiagnosticsEngine::Note, kUserDeclaredFinalizerNote);
1103 diag_base_requires_finalization_note_ = diagnostic_.getCustomDiagID(
1104 DiagnosticsEngine::Note, kBaseRequiresFinalizationNote);
1105 diag_field_requires_finalization_note_ = diagnostic_.getCustomDiagID(
1106 DiagnosticsEngine::Note, kFieldRequiresFinalizationNote);
1107 diag_overridden_non_virtual_trace_note_ = diagnostic_.getCustomDiagID(
1108 DiagnosticsEngine::Note, kOverriddenNonVirtualTraceNote);
1109 diag_manual_dispatch_method_note_ = diagnostic_.getCustomDiagID(
1110 DiagnosticsEngine::Note, kManualDispatchMethodNote);
1111 }
1112
1113 void HandleTranslationUnit(ASTContext& context) override {
1114 // Don't run the plugin if the compilation unit is already invalid.
1115 if (diagnostic_.hasErrorOccurred())
1116 return;
1117
1118 ParseFunctionTemplates(context.getTranslationUnitDecl());
1119
1120 CollectVisitor visitor;
1121 visitor.TraverseDecl(context.getTranslationUnitDecl());
1122
1123 if (options_.dump_graph) {
1124 std::error_code err;
1125 // TODO: Make createDefaultOutputFile or a shorter createOutputFile work.
1126 json_ = JsonWriter::from(instance_.createOutputFile(
1127 "", // OutputPath
1128 err, // Errors
1129 true, // Binary
1130 true, // RemoveFileOnSignal
1131 instance_.getFrontendOpts().OutputFile, // BaseInput
1132 "graph.json", // Extension
1133 false, // UseTemporary
1134 false, // CreateMissingDirectories
1135 0, // ResultPathName
1136 0)); // TempPathName
1137 if (!err && json_) {
1138 json_->OpenList();
1139 } else {
1140 json_ = 0;
1141 llvm::errs()
1142 << "[blink-gc] "
1143 << "Failed to create an output file for the object graph.\n";
1144 }
1145 }
1146
1147 for (RecordVector::iterator it = visitor.record_decls().begin();
1148 it != visitor.record_decls().end();
1149 ++it) {
1150 CheckRecord(cache_.Lookup(*it));
1151 }
1152
1153 for (MethodVector::iterator it = visitor.trace_decls().begin();
1154 it != visitor.trace_decls().end();
1155 ++it) {
1156 CheckTracingMethod(*it);
1157 }
1158
1159 if (json_) {
1160 json_->CloseList();
1161 delete json_;
1162 json_ = 0;
1163 }
1164 }
1165
1166 void ParseFunctionTemplates(TranslationUnitDecl* decl) {
1167 if (!instance_.getLangOpts().DelayedTemplateParsing)
1168 return; // Nothing to do.
1169
1170 std::set<FunctionDecl*> late_parsed_decls =
1171 GetLateParsedFunctionDecls(decl);
1172 clang::Sema& sema = instance_.getSema();
1173
1174 for (const FunctionDecl* fd : late_parsed_decls) {
1175 assert(fd->isLateTemplateParsed());
1176
1177 if (!Config::IsTraceMethod(fd))
1178 continue;
1179
1180 if (instance_.getSourceManager().isInSystemHeader(
1181 instance_.getSourceManager().getSpellingLoc(fd->getLocation())))
1182 continue;
1183
1184 // Force parsing and AST building of the yet-uninstantiated function
1185 // template trace method bodies.
1186 clang::LateParsedTemplate* lpt = sema.LateParsedTemplateMap[fd];
1187 sema.LateTemplateParser(sema.OpaqueParser, *lpt);
1188 }
1189 }
1190
1191 // Main entry for checking a record declaration.
1192 void CheckRecord(RecordInfo* info) {
1193 if (IsIgnored(info))
1194 return;
1195
1196 CXXRecordDecl* record = info->record();
1197
1198 // TODO: what should we do to check unions?
1199 if (record->isUnion())
1200 return;
1201
1202 // If this is the primary template declaration, check its specializations.
1203 if (record->isThisDeclarationADefinition() &&
1204 record->getDescribedClassTemplate()) {
1205 ClassTemplateDecl* tmpl = record->getDescribedClassTemplate();
1206 for (ClassTemplateDecl::spec_iterator it = tmpl->spec_begin();
1207 it != tmpl->spec_end();
1208 ++it) {
1209 CheckClass(cache_.Lookup(*it));
1210 }
1211 return;
1212 }
1213
1214 CheckClass(info);
1215 }
1216
1217 // Check a class-like object (eg, class, specialization, instantiation).
1218 void CheckClass(RecordInfo* info) {
1219 if (!info)
1220 return;
1221
1222 // Check consistency of stack-allocated hierarchies.
1223 if (info->IsStackAllocated()) {
1224 for (RecordInfo::Bases::iterator it = info->GetBases().begin();
1225 it != info->GetBases().end();
1226 ++it) {
1227 if (!it->second.info()->IsStackAllocated())
1228 ReportDerivesNonStackAllocated(info, &it->second);
1229 }
1230 }
1231
1232 if (CXXMethodDecl* trace = info->GetTraceMethod()) {
1233 if (trace->isPure())
1234 ReportClassDeclaresPureVirtualTrace(info, trace);
1235 } else if (info->RequiresTraceMethod()) {
1236 ReportClassRequiresTraceMethod(info);
1237 }
1238
1239 // Check polymorphic classes that are GC-derived or have a trace method.
1240 if (info->record()->hasDefinition() && info->record()->isPolymorphic()) {
1241 // TODO: Check classes that inherit a trace method.
1242 CXXMethodDecl* trace = info->GetTraceMethod();
1243 if (trace || info->IsGCDerived())
1244 CheckPolymorphicClass(info, trace);
1245 }
1246
1247 {
1248 CheckFieldsVisitor visitor(options_);
1249 if (visitor.ContainsInvalidFields(info))
1250 ReportClassContainsInvalidFields(info, &visitor.invalid_fields());
1251 }
1252
1253 if (info->IsGCDerived()) {
1254
1255 if (!info->IsGCMixin()) {
1256 CheckLeftMostDerived(info);
1257 CheckDispatch(info);
1258 if (CXXMethodDecl* newop = info->DeclaresNewOperator())
1259 if (!Config::IsIgnoreAnnotated(newop))
1260 ReportClassOverridesNew(info, newop);
1261 if (info->IsGCMixinInstance()) {
1262 // Require that declared GCMixin implementations
1263 // also provide a trace() override.
1264 if (info->DeclaresGCMixinMethods()
1265 && !info->DeclaresLocalTraceMethod())
1266 ReportClassMustDeclareGCMixinTraceMethod(info);
1267 }
1268 }
1269
1270 {
1271 CheckGCRootsVisitor visitor;
1272 if (visitor.ContainsGCRoots(info))
1273 ReportClassContainsGCRoots(info, &visitor.gc_roots());
1274 }
1275
1276 if (info->NeedsFinalization())
1277 CheckFinalization(info);
1278
1279 if (options_.warn_unneeded_finalizer && info->IsGCFinalized())
1280 CheckUnneededFinalization(info);
1281 }
1282
1283 DumpClass(info);
1284 }
1285
1286 CXXRecordDecl* GetDependentTemplatedDecl(const Type& type) {
1287 const TemplateSpecializationType* tmpl_type =
1288 type.getAs<TemplateSpecializationType>();
1289 if (!tmpl_type)
1290 return 0;
1291
1292 TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl();
1293 if (!tmpl_decl)
1294 return 0;
1295
1296 return dyn_cast<CXXRecordDecl>(tmpl_decl->getTemplatedDecl());
1297 }
1298
1299 // The GC infrastructure assumes that if the vtable of a polymorphic
1300 // base-class is not initialized for a given object (ie, it is partially
1301 // initialized) then the object does not need to be traced. Thus, we must
1302 // ensure that any polymorphic class with a trace method does not have any
1303 // tractable fields that are initialized before we are sure that the vtable
1304 // and the trace method are both defined. There are two cases that need to
1305 // hold to satisfy that assumption:
1306 //
1307 // 1. If trace is virtual, then it must be defined in the left-most base.
1308 // This ensures that if the vtable is initialized then it contains a pointer
1309 // to the trace method.
1310 //
1311 // 2. If trace is non-virtual, then the trace method is defined and we must
1312 // ensure that the left-most base defines a vtable. This ensures that the
1313 // first thing to be initialized when constructing the object is the vtable
1314 // itself.
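// For illustration, a sketch of a violation of condition (1) (names are
// illustrative; Blink's GarbageCollected<> and Visitor declarations are
// assumed):
//
//   class Left : public GarbageCollected<Left> { };  // no virtual trace.
//   class Derived : public Left {
//    public:
//     virtual void trace(Visitor*);  // virtual trace, but the left-most base
//   };                               // declares none, so the plugin reports
//                                    // kBaseClassMustDeclareVirtualTrace.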
1315 void CheckPolymorphicClass(RecordInfo* info, CXXMethodDecl* trace) {
1316 CXXRecordDecl* left_most = info->record();
1317 CXXRecordDecl::base_class_iterator it = left_most->bases_begin();
1318 CXXRecordDecl* left_most_base = 0;
1319 while (it != left_most->bases_end()) {
1320 left_most_base = it->getType()->getAsCXXRecordDecl();
1321 if (!left_most_base && it->getType()->isDependentType())
1322 left_most_base = RecordInfo::GetDependentTemplatedDecl(*it->getType());
1323
1324 // TODO: Find a way to correctly check actual instantiations
1325 // for dependent types. The escape below will be hit, eg, when
1326 // we have a primary template with no definition and
1327 // specializations for each case (such as SupplementBase) in
1328 // which case we don't succeed in checking the required
1329 // properties.
1330 if (!left_most_base || !left_most_base->hasDefinition())
1331 return;
1332
1333 StringRef name = left_most_base->getName();
1334 // We know GCMixin base defines virtual trace.
1335 if (Config::IsGCMixinBase(name))
1336 return;
1337
1338 // Stop with the left-most prior to a safe polymorphic base (a safe base
1339 // is non-polymorphic and contains no fields).
1340 if (Config::IsSafePolymorphicBase(name))
1341 break;
1342
1343 left_most = left_most_base;
1344 it = left_most->bases_begin();
1345 }
1346
1347 if (RecordInfo* left_most_info = cache_.Lookup(left_most)) {
1348
1349 // Check condition (1):
1350 if (trace && trace->isVirtual()) {
1351 if (CXXMethodDecl* trace = left_most_info->GetTraceMethod()) {
1352 if (trace->isVirtual())
1353 return;
1354 }
1355 ReportBaseClassMustDeclareVirtualTrace(info, left_most);
1356 return;
1357 }
1358
1359 // Check condition (2):
1360 if (DeclaresVirtualMethods(left_most))
1361 return;
1362 if (left_most_base) {
1363 // Get the base next to the "safe polymorphic base"
1364 if (it != left_most->bases_end())
1365 ++it;
1366 if (it != left_most->bases_end()) {
1367 if (CXXRecordDecl* next_base = it->getType()->getAsCXXRecordDecl()) {
1368 if (CXXRecordDecl* next_left_most = GetLeftMostBase(next_base)) {
1369 if (DeclaresVirtualMethods(next_left_most))
1370 return;
1371 ReportLeftMostBaseMustBePolymorphic(info, next_left_most);
1372 return;
1373 }
1374 }
1375 }
1376 }
1377 ReportLeftMostBaseMustBePolymorphic(info, left_most);
1378 }
1379 }
1380
1381 CXXRecordDecl* GetLeftMostBase(CXXRecordDecl* left_most) {
1382 CXXRecordDecl::base_class_iterator it = left_most->bases_begin();
1383 while (it != left_most->bases_end()) {
1384 if (it->getType()->isDependentType())
1385 left_most = RecordInfo::GetDependentTemplatedDecl(*it->getType());
1386 else
1387 left_most = it->getType()->getAsCXXRecordDecl();
1388 if (!left_most || !left_most->hasDefinition())
1389 return 0;
1390 it = left_most->bases_begin();
1391 }
1392 return left_most;
1393 }
1394
1395 bool DeclaresVirtualMethods(CXXRecordDecl* decl) {
1396 CXXRecordDecl::method_iterator it = decl->method_begin();
1397 for (; it != decl->method_end(); ++it)
1398 if (it->isVirtual() && !it->isPure())
1399 return true;
1400 return false;
1401 }
1402
1403 void CheckLeftMostDerived(RecordInfo* info) {
1404 CXXRecordDecl* left_most = GetLeftMostBase(info->record());
1405 if (!left_most)
1406 return;
1407 if (!Config::IsGCBase(left_most->getName()))
1408 ReportClassMustLeftMostlyDeriveGC(info);
1409 }
1410
1411 void CheckDispatch(RecordInfo* info) {
1412 bool finalized = info->IsGCFinalized();
1413 CXXMethodDecl* trace_dispatch = info->GetTraceDispatchMethod();
1414 CXXMethodDecl* finalize_dispatch = info->GetFinalizeDispatchMethod();
1415 if (!trace_dispatch && !finalize_dispatch)
1416 return;
1417
1418 CXXRecordDecl* base = trace_dispatch ? trace_dispatch->getParent()
1419 : finalize_dispatch->getParent();
1420
1421 // Check that dispatch methods are defined at the base.
1422 if (base == info->record()) {
1423 if (!trace_dispatch)
1424 ReportMissingTraceDispatchMethod(info);
1425 if (finalized && !finalize_dispatch)
1426 ReportMissingFinalizeDispatchMethod(info);
1427 if (!finalized && finalize_dispatch) {
1428 ReportClassRequiresFinalization(info);
1429 NoteUserDeclaredFinalizer(finalize_dispatch);
1430 }
1431 }
1432
1433 // Check that classes implementing manual dispatch do not have vtables.
1434 if (info->record()->isPolymorphic())
1435 ReportVirtualAndManualDispatch(
1436 info, trace_dispatch ? trace_dispatch : finalize_dispatch);
1437
1438 // If this is a non-abstract class, check that it is dispatched to.
1439 // TODO: Create a global variant of this local check. We can only check if
1440 // the dispatch body is known in this compilation unit.
1441 if (info->IsConsideredAbstract())
1442 return;
1443
1444 const FunctionDecl* defn;
1445
1446 if (trace_dispatch && trace_dispatch->isDefined(defn)) {
1447 CheckDispatchVisitor visitor(info);
1448 visitor.TraverseStmt(defn->getBody());
1449 if (!visitor.dispatched_to_receiver())
1450 ReportMissingTraceDispatch(defn, info);
1451 }
1452
1453 if (finalized && finalize_dispatch && finalize_dispatch->isDefined(defn)) {
1454 CheckDispatchVisitor visitor(info);
1455 visitor.TraverseStmt(defn->getBody());
1456 if (!visitor.dispatched_to_receiver())
1457 ReportMissingFinalizeDispatch(defn, info);
1458 }
1459 }
1460
1461 // TODO: Should we collect destructors similar to trace methods?
1462 void CheckFinalization(RecordInfo* info) {
1463 CXXDestructorDecl* dtor = info->record()->getDestructor();
1464
1465 // For finalized classes, check the finalization method if possible.
1466 if (info->IsGCFinalized()) {
1467 if (dtor && dtor->hasBody()) {
1468 CheckFinalizerVisitor visitor(&cache_, info->IsEagerlyFinalized());
1469 visitor.TraverseCXXMethodDecl(dtor);
1470 if (!visitor.finalized_fields().empty()) {
1471 ReportFinalizerAccessesFinalizedFields(
1472 dtor, &visitor.finalized_fields());
1473 }
1474 }
1475 return;
1476 }
1477
1478 // Don't require finalization of a mixin that has not yet been "mixed in".
1479 if (info->IsGCMixin())
1480 return;
1481
1482 // Report the finalization error, and proceed to print possible causes for
1483 // the finalization requirement.
1484 ReportClassRequiresFinalization(info);
1485
1486 if (dtor && dtor->isUserProvided())
1487 NoteUserDeclaredDestructor(dtor);
1488
1489 for (RecordInfo::Bases::iterator it = info->GetBases().begin();
1490 it != info->GetBases().end();
1491 ++it) {
1492 if (it->second.info()->NeedsFinalization())
1493 NoteBaseRequiresFinalization(&it->second);
1494 }
1495
1496 for (RecordInfo::Fields::iterator it = info->GetFields().begin();
1497 it != info->GetFields().end();
1498 ++it) {
1499 if (it->second.edge()->NeedsFinalization())
1500 NoteField(&it->second, diag_field_requires_finalization_note_);
1501 }
1502 }
1503
1504 void CheckUnneededFinalization(RecordInfo* info) {
1505 if (!HasNonEmptyFinalizer(info))
1506 ReportClassDoesNotRequireFinalization(info);
1507 }
1508
1509 bool HasNonEmptyFinalizer(RecordInfo* info) {
1510 CXXDestructorDecl* dtor = info->record()->getDestructor();
1511 if (dtor && dtor->isUserProvided()) {
1512 if (!dtor->hasBody() || !EmptyStmtVisitor::isEmpty(dtor->getBody()))
1513 return true;
1514 }
1515 for (RecordInfo::Bases::iterator it = info->GetBases().begin();
1516 it != info->GetBases().end();
1517 ++it) {
1518 if (HasNonEmptyFinalizer(it->second.info()))
1519 return true;
1520 }
1521 for (RecordInfo::Fields::iterator it = info->GetFields().begin();
1522 it != info->GetFields().end();
1523 ++it) {
1524 if (it->second.edge()->NeedsFinalization())
1525 return true;
1526 }
1527 return false;
1528 }
1529
1530 // This is the main entry for tracing method definitions.
1531 void CheckTracingMethod(CXXMethodDecl* method) {
1532 RecordInfo* parent = cache_.Lookup(method->getParent());
1533 if (IsIgnored(parent))
1534 return;
1535
1536 // Check templated tracing methods by checking the template instantiations.
1537 // Specialized templates are handled as ordinary classes.
1538 if (ClassTemplateDecl* tmpl =
1539 parent->record()->getDescribedClassTemplate()) {
1540 for (ClassTemplateDecl::spec_iterator it = tmpl->spec_begin();
1541 it != tmpl->spec_end();
1542 ++it) {
1543 // Check trace using each template instantiation as the holder.
1544 if (IsTemplateInstantiation(*it))
1545 CheckTraceOrDispatchMethod(cache_.Lookup(*it), method);
1546 }
1547 return;
1548 }
1549
1550 CheckTraceOrDispatchMethod(parent, method);
1551 }
1552
1553 // Determine what type of tracing method this is (dispatch or trace).
1554 void CheckTraceOrDispatchMethod(RecordInfo* parent, CXXMethodDecl* method) {
1555 Config::TraceMethodType trace_type = Config::GetTraceMethodType(method);
1556 if (trace_type == Config::TRACE_AFTER_DISPATCH_METHOD ||
1557 trace_type == Config::TRACE_AFTER_DISPATCH_IMPL_METHOD ||
1558 !parent->GetTraceDispatchMethod()) {
1559 CheckTraceMethod(parent, method, trace_type);
1560 }
1561 // Dispatch methods are checked when we identify subclasses.
1562 }
1563
1564 // Check an actual trace method.
1565 void CheckTraceMethod(RecordInfo* parent,
1566 CXXMethodDecl* trace,
1567 Config::TraceMethodType trace_type) {
1568 // A trace method must not override any non-virtual trace methods.
1569 if (trace_type == Config::TRACE_METHOD) {
1570 for (RecordInfo::Bases::iterator it = parent->GetBases().begin();
1571 it != parent->GetBases().end();
1572 ++it) {
1573 RecordInfo* base = it->second.info();
1574 if (CXXMethodDecl* other = base->InheritsNonVirtualTrace())
1575 ReportOverriddenNonVirtualTrace(parent, trace, other);
1576 }
1577 }
1578
1579 CheckTraceVisitor visitor(trace, parent, &cache_);
1580 visitor.TraverseCXXMethodDecl(trace);
1581
1582     // Skip reporting if this trace method just delegates to the traceImpl
1583     // (or traceAfterDispatchImpl) method; CheckTraceMethod will report on
1584     // the traceImpl method instead.
1585 if (visitor.delegates_to_traceimpl())
1586 return;
1587
1588 for (RecordInfo::Bases::iterator it = parent->GetBases().begin();
1589 it != parent->GetBases().end();
1590 ++it) {
1591 if (!it->second.IsProperlyTraced())
1592 ReportBaseRequiresTracing(parent, trace, it->first);
1593 }
1594
1595 for (RecordInfo::Fields::iterator it = parent->GetFields().begin();
1596 it != parent->GetFields().end();
1597 ++it) {
1598 if (!it->second.IsProperlyTraced()) {
1599 // Discontinue once an untraced-field error is found.
1600 ReportFieldsRequireTracing(parent, trace);
1601 break;
1602 }
1603 }
1604 }
1605
1606 void DumpClass(RecordInfo* info) {
1607 if (!json_)
1608 return;
1609
1610 json_->OpenObject();
1611 json_->Write("name", info->record()->getQualifiedNameAsString());
1612 json_->Write("loc", GetLocString(info->record()->getLocStart()));
1613 json_->CloseObject();
1614
1615 class DumpEdgeVisitor : public RecursiveEdgeVisitor {
1616 public:
1617 DumpEdgeVisitor(JsonWriter* json) : json_(json) {}
1618 void DumpEdge(RecordInfo* src,
1619 RecordInfo* dst,
1620 const string& lbl,
1621 const Edge::LivenessKind& kind,
1622 const string& loc) {
1623 json_->OpenObject();
1624 json_->Write("src", src->record()->getQualifiedNameAsString());
1625 json_->Write("dst", dst->record()->getQualifiedNameAsString());
1626 json_->Write("lbl", lbl);
1627 json_->Write("kind", kind);
1628 json_->Write("loc", loc);
1629 json_->Write("ptr",
1630 !Parent() ? "val" :
1631 Parent()->IsRawPtr() ? "raw" :
1632 Parent()->IsRefPtr() ? "ref" :
1633 Parent()->IsOwnPtr() ? "own" :
1634 (Parent()->IsMember() ||
1635 Parent()->IsWeakMember()) ? "mem" :
1636 "val");
1637 json_->CloseObject();
1638 }
1639
1640 void DumpField(RecordInfo* src, FieldPoint* point, const string& loc) {
1641 src_ = src;
1642 point_ = point;
1643 loc_ = loc;
1644 point_->edge()->Accept(this);
1645 }
1646
1647 void AtValue(Value* e) override {
1648 // The liveness kind of a path from the point to this value
1649 // is given by the innermost place that is non-strong.
1650 Edge::LivenessKind kind = Edge::kStrong;
1651 if (Config::IsIgnoreCycleAnnotated(point_->field())) {
1652 kind = Edge::kWeak;
1653 } else {
1654 for (Context::iterator it = context().begin();
1655 it != context().end();
1656 ++it) {
1657 Edge::LivenessKind pointer_kind = (*it)->Kind();
1658 if (pointer_kind != Edge::kStrong) {
1659 kind = pointer_kind;
1660 break;
1661 }
1662 }
1663 }
1664 DumpEdge(
1665 src_, e->value(), point_->field()->getNameAsString(), kind, loc_);
1666 }
1667
1668 private:
1669 JsonWriter* json_;
1670 RecordInfo* src_;
1671 FieldPoint* point_;
1672 string loc_;
1673 };
1674
1675 DumpEdgeVisitor visitor(json_);
1676
1677 RecordInfo::Bases& bases = info->GetBases();
1678 for (RecordInfo::Bases::iterator it = bases.begin();
1679 it != bases.end();
1680 ++it) {
1681 visitor.DumpEdge(info,
1682 it->second.info(),
1683 "<super>",
1684 Edge::kStrong,
1685 GetLocString(it->second.spec().getLocStart()));
1686 }
1687
1688 RecordInfo::Fields& fields = info->GetFields();
1689 for (RecordInfo::Fields::iterator it = fields.begin();
1690 it != fields.end();
1691 ++it) {
1692 visitor.DumpField(info,
1693 &it->second,
1694 GetLocString(it->second.field()->getLocStart()));
1695 }
1696 }
1697
1698   // Returns either a warning or an error level, based on the current handling of -Werror.
1699 DiagnosticsEngine::Level getErrorLevel() {
1700 return diagnostic_.getWarningsAsErrors() ? DiagnosticsEngine::Error
1701 : DiagnosticsEngine::Warning;
1702 }
1703
1704 const string GetLocString(SourceLocation loc) {
1705 const SourceManager& source_manager = instance_.getSourceManager();
1706 PresumedLoc ploc = source_manager.getPresumedLoc(loc);
1707 if (ploc.isInvalid())
1708 return "";
1709 string loc_str;
1710 llvm::raw_string_ostream OS(loc_str);
1711 OS << ploc.getFilename()
1712 << ":" << ploc.getLine()
1713 << ":" << ploc.getColumn();
1714 return OS.str();
1715 }
1716
1717 bool IsIgnored(RecordInfo* record) {
1718 return !record ||
1719 !InCheckedNamespace(record) ||
1720 IsIgnoredClass(record) ||
1721 InIgnoredDirectory(record);
1722 }
1723
1724 bool IsIgnoredClass(RecordInfo* info) {
1725 // Ignore any class prefixed by SameSizeAs. These are used in
1726 // Blink to verify class sizes and don't need checking.
1727 const string SameSizeAs = "SameSizeAs";
1728 if (info->name().compare(0, SameSizeAs.size(), SameSizeAs) == 0)
1729 return true;
1730 return options_.ignored_classes.find(info->name()) !=
1731 options_.ignored_classes.end();
1732 }
1733
1734 bool InIgnoredDirectory(RecordInfo* info) {
1735 string filename;
1736 if (!GetFilename(info->record()->getLocStart(), &filename))
1737 return false; // TODO: should we ignore non-existing file locations?
1738 #if defined(LLVM_ON_WIN32)
1739 std::replace(filename.begin(), filename.end(), '\\', '/');
1740 #endif
1741 std::vector<string>::iterator it = options_.ignored_directories.begin();
1742 for (; it != options_.ignored_directories.end(); ++it)
1743 if (filename.find(*it) != string::npos)
1744 return true;
1745 return false;
1746 }
1747
1748 bool InCheckedNamespace(RecordInfo* info) {
1749 if (!info)
1750 return false;
1751 for (DeclContext* context = info->record()->getDeclContext();
1752 !context->isTranslationUnit();
1753 context = context->getParent()) {
1754 if (NamespaceDecl* decl = dyn_cast<NamespaceDecl>(context)) {
1755 if (options_.checked_namespaces.find(decl->getNameAsString()) !=
1756 options_.checked_namespaces.end()) {
1757 return true;
1758 }
1759 }
1760 }
1761 return false;
1762 }
1763
1764 bool GetFilename(SourceLocation loc, string* filename) {
1765 const SourceManager& source_manager = instance_.getSourceManager();
1766 SourceLocation spelling_location = source_manager.getSpellingLoc(loc);
1767 PresumedLoc ploc = source_manager.getPresumedLoc(spelling_location);
1768 if (ploc.isInvalid()) {
1769 // If we're in an invalid location, we're looking at things that aren't
1770 // actually stated in the source.
1771 return false;
1772 }
1773 *filename = ploc.getFilename();
1774 return true;
1775 }
1776
1777 void ReportClassMustLeftMostlyDeriveGC(RecordInfo* info) {
1778 SourceLocation loc = info->record()->getInnerLocStart();
1779 SourceManager& manager = instance_.getSourceManager();
1780 FullSourceLoc full_loc(loc, manager);
1781 diagnostic_.Report(full_loc, diag_class_must_left_mostly_derive_gc_)
1782 << info->record();
1783 }
1784
1785 void ReportClassRequiresTraceMethod(RecordInfo* info) {
1786 SourceLocation loc = info->record()->getInnerLocStart();
1787 SourceManager& manager = instance_.getSourceManager();
1788 FullSourceLoc full_loc(loc, manager);
1789 diagnostic_.Report(full_loc, diag_class_requires_trace_method_)
1790 << info->record();
1791
1792 for (RecordInfo::Bases::iterator it = info->GetBases().begin();
1793 it != info->GetBases().end();
1794 ++it) {
1795 if (it->second.NeedsTracing().IsNeeded())
1796 NoteBaseRequiresTracing(&it->second);
1797 }
1798
1799 for (RecordInfo::Fields::iterator it = info->GetFields().begin();
1800 it != info->GetFields().end();
1801 ++it) {
1802 if (!it->second.IsProperlyTraced())
1803 NoteFieldRequiresTracing(info, it->first);
1804 }
1805 }
1806
1807 void ReportBaseRequiresTracing(RecordInfo* derived,
1808 CXXMethodDecl* trace,
1809 CXXRecordDecl* base) {
1810 SourceLocation loc = trace->getLocStart();
1811 SourceManager& manager = instance_.getSourceManager();
1812 FullSourceLoc full_loc(loc, manager);
1813 diagnostic_.Report(full_loc, diag_base_requires_tracing_)
1814 << base << derived->record();
1815 }
1816
1817 void ReportFieldsRequireTracing(RecordInfo* info, CXXMethodDecl* trace) {
1818 SourceLocation loc = trace->getLocStart();
1819 SourceManager& manager = instance_.getSourceManager();
1820 FullSourceLoc full_loc(loc, manager);
1821 diagnostic_.Report(full_loc, diag_fields_require_tracing_)
1822 << info->record();
1823 for (RecordInfo::Fields::iterator it = info->GetFields().begin();
1824 it != info->GetFields().end();
1825 ++it) {
1826 if (!it->second.IsProperlyTraced())
1827 NoteFieldRequiresTracing(info, it->first);
1828 }
1829 }
1830
1831 void ReportClassContainsInvalidFields(RecordInfo* info,
1832 CheckFieldsVisitor::Errors* errors) {
1833 SourceLocation loc = info->record()->getLocStart();
1834 SourceManager& manager = instance_.getSourceManager();
1835 FullSourceLoc full_loc(loc, manager);
1836 bool only_warnings = options_.warn_raw_ptr;
1837 for (CheckFieldsVisitor::Errors::iterator it = errors->begin();
1838 only_warnings && it != errors->end();
1839 ++it) {
1840 if (it->second != CheckFieldsVisitor::kRawPtrToGCManagedWarning)
1841 only_warnings = false;
1842 }
1843 diagnostic_.Report(full_loc, only_warnings ?
1844 diag_class_contains_invalid_fields_warning_ :
1845 diag_class_contains_invalid_fields_)
1846 << info->record();
1847 for (CheckFieldsVisitor::Errors::iterator it = errors->begin();
1848 it != errors->end();
1849 ++it) {
1850 unsigned error;
1851 if (it->second == CheckFieldsVisitor::kRawPtrToGCManaged ||
1852 it->second == CheckFieldsVisitor::kRawPtrToGCManagedWarning) {
1853 error = diag_raw_ptr_to_gc_managed_class_note_;
1854 } else if (it->second == CheckFieldsVisitor::kRefPtrToGCManaged) {
1855 error = diag_ref_ptr_to_gc_managed_class_note_;
1856 } else if (it->second == CheckFieldsVisitor::kOwnPtrToGCManaged) {
1857 error = diag_own_ptr_to_gc_managed_class_note_;
1858 } else if (it->second == CheckFieldsVisitor::kMemberToGCUnmanaged) {
1859 error = diag_member_to_gc_unmanaged_class_note_;
1860 } else if (it->second == CheckFieldsVisitor::kMemberInUnmanaged) {
1861 error = diag_member_in_unmanaged_class_note_;
1862 } else if (it->second == CheckFieldsVisitor::kPtrFromHeapToStack) {
1863 error = diag_stack_allocated_field_note_;
1864 } else if (it->second == CheckFieldsVisitor::kGCDerivedPartObject) {
1865 error = diag_part_object_to_gc_derived_class_note_;
1866 } else {
1867 assert(false && "Unknown field error");
1868 }
1869 NoteField(it->first, error);
1870 }
1871 }
1872
1873 void ReportClassContainsGCRoots(RecordInfo* info,
1874 CheckGCRootsVisitor::Errors* errors) {
1875 SourceLocation loc = info->record()->getLocStart();
1876 SourceManager& manager = instance_.getSourceManager();
1877 FullSourceLoc full_loc(loc, manager);
1878 for (CheckGCRootsVisitor::Errors::iterator it = errors->begin();
1879 it != errors->end();
1880 ++it) {
1881 CheckGCRootsVisitor::RootPath::iterator path = it->begin();
1882 FieldPoint* point = *path;
1883 diagnostic_.Report(full_loc, diag_class_contains_gc_root_)
1884 << info->record() << point->field();
1885 while (++path != it->end()) {
1886 NotePartObjectContainsGCRoot(point);
1887 point = *path;
1888 }
1889 NoteFieldContainsGCRoot(point);
1890 }
1891 }
1892
1893 void ReportFinalizerAccessesFinalizedFields(
1894 CXXMethodDecl* dtor,
1895 CheckFinalizerVisitor::Errors* fields) {
1896 for (CheckFinalizerVisitor::Errors::iterator it = fields->begin();
1897 it != fields->end();
1898 ++it) {
1899 SourceLocation loc = it->member_->getLocStart();
1900 SourceManager& manager = instance_.getSourceManager();
1901 bool as_eagerly_finalized = it->as_eagerly_finalized_;
1902 unsigned diag_error = as_eagerly_finalized ?
1903 diag_finalizer_eagerly_finalized_field_ :
1904 diag_finalizer_accesses_finalized_field_;
1905 unsigned diag_note = as_eagerly_finalized ?
1906 diag_eagerly_finalized_field_note_ :
1907 diag_finalized_field_note_;
1908 FullSourceLoc full_loc(loc, manager);
1909 diagnostic_.Report(full_loc, diag_error)
1910 << dtor << it->field_->field();
1911 NoteField(it->field_, diag_note);
1912 }
1913 }
1914
1915 void ReportClassRequiresFinalization(RecordInfo* info) {
1916 SourceLocation loc = info->record()->getInnerLocStart();
1917 SourceManager& manager = instance_.getSourceManager();
1918 FullSourceLoc full_loc(loc, manager);
1919 diagnostic_.Report(full_loc, diag_class_requires_finalization_)
1920 << info->record();
1921 }
1922
1923 void ReportClassDoesNotRequireFinalization(RecordInfo* info) {
1924 SourceLocation loc = info->record()->getInnerLocStart();
1925 SourceManager& manager = instance_.getSourceManager();
1926 FullSourceLoc full_loc(loc, manager);
1927 diagnostic_.Report(full_loc, diag_class_does_not_require_finalization_)
1928 << info->record();
1929 }
1930
1931 void ReportClassMustDeclareGCMixinTraceMethod(RecordInfo* info) {
1932 SourceLocation loc = info->record()->getInnerLocStart();
1933 SourceManager& manager = instance_.getSourceManager();
1934 FullSourceLoc full_loc(loc, manager);
1935 diagnostic_.Report(
1936 full_loc, diag_class_must_declare_gc_mixin_trace_method_)
1937 << info->record();
1938 }
1939
1940 void ReportOverriddenNonVirtualTrace(RecordInfo* info,
1941 CXXMethodDecl* trace,
1942 CXXMethodDecl* overridden) {
1943 SourceLocation loc = trace->getLocStart();
1944 SourceManager& manager = instance_.getSourceManager();
1945 FullSourceLoc full_loc(loc, manager);
1946 diagnostic_.Report(full_loc, diag_overridden_non_virtual_trace_)
1947 << info->record() << overridden->getParent();
1948 NoteOverriddenNonVirtualTrace(overridden);
1949 }
1950
1951 void ReportMissingTraceDispatchMethod(RecordInfo* info) {
1952 ReportMissingDispatchMethod(info, diag_missing_trace_dispatch_method_);
1953 }
1954
1955 void ReportMissingFinalizeDispatchMethod(RecordInfo* info) {
1956 ReportMissingDispatchMethod(info, diag_missing_finalize_dispatch_method_);
1957 }
1958
1959 void ReportMissingDispatchMethod(RecordInfo* info, unsigned error) {
1960 SourceLocation loc = info->record()->getInnerLocStart();
1961 SourceManager& manager = instance_.getSourceManager();
1962 FullSourceLoc full_loc(loc, manager);
1963 diagnostic_.Report(full_loc, error) << info->record();
1964 }
1965
1966 void ReportVirtualAndManualDispatch(RecordInfo* info,
1967 CXXMethodDecl* dispatch) {
1968 SourceLocation loc = info->record()->getInnerLocStart();
1969 SourceManager& manager = instance_.getSourceManager();
1970 FullSourceLoc full_loc(loc, manager);
1971 diagnostic_.Report(full_loc, diag_virtual_and_manual_dispatch_)
1972 << info->record();
1973 NoteManualDispatchMethod(dispatch);
1974 }
1975
1976 void ReportMissingTraceDispatch(const FunctionDecl* dispatch,
1977 RecordInfo* receiver) {
1978 ReportMissingDispatch(dispatch, receiver, diag_missing_trace_dispatch_);
1979 }
1980
1981 void ReportMissingFinalizeDispatch(const FunctionDecl* dispatch,
1982 RecordInfo* receiver) {
1983 ReportMissingDispatch(dispatch, receiver, diag_missing_finalize_dispatch_);
1984 }
1985
1986 void ReportMissingDispatch(const FunctionDecl* dispatch,
1987 RecordInfo* receiver,
1988 unsigned error) {
1989 SourceLocation loc = dispatch->getLocStart();
1990 SourceManager& manager = instance_.getSourceManager();
1991 FullSourceLoc full_loc(loc, manager);
1992 diagnostic_.Report(full_loc, error) << receiver->record();
1993 }
1994
1995 void ReportDerivesNonStackAllocated(RecordInfo* info, BasePoint* base) {
1996 SourceLocation loc = base->spec().getLocStart();
1997 SourceManager& manager = instance_.getSourceManager();
1998 FullSourceLoc full_loc(loc, manager);
1999 diagnostic_.Report(full_loc, diag_derives_non_stack_allocated_)
2000 << info->record() << base->info()->record();
2001 }
2002
2003 void ReportClassOverridesNew(RecordInfo* info, CXXMethodDecl* newop) {
2004 SourceLocation loc = newop->getLocStart();
2005 SourceManager& manager = instance_.getSourceManager();
2006 FullSourceLoc full_loc(loc, manager);
2007 diagnostic_.Report(full_loc, diag_class_overrides_new_) << info->record();
2008 }
2009
2010 void ReportClassDeclaresPureVirtualTrace(RecordInfo* info,
2011 CXXMethodDecl* trace) {
2012 SourceLocation loc = trace->getLocStart();
2013 SourceManager& manager = instance_.getSourceManager();
2014 FullSourceLoc full_loc(loc, manager);
2015 diagnostic_.Report(full_loc, diag_class_declares_pure_virtual_trace_)
2016 << info->record();
2017 }
2018
2019 void ReportLeftMostBaseMustBePolymorphic(RecordInfo* derived,
2020 CXXRecordDecl* base) {
2021 SourceLocation loc = base->getLocStart();
2022 SourceManager& manager = instance_.getSourceManager();
2023 FullSourceLoc full_loc(loc, manager);
2024 diagnostic_.Report(full_loc, diag_left_most_base_must_be_polymorphic_)
2025 << base << derived->record();
2026 }
2027
2028 void ReportBaseClassMustDeclareVirtualTrace(RecordInfo* derived,
2029 CXXRecordDecl* base) {
2030 SourceLocation loc = base->getLocStart();
2031 SourceManager& manager = instance_.getSourceManager();
2032 FullSourceLoc full_loc(loc, manager);
2033 diagnostic_.Report(full_loc, diag_base_class_must_declare_virtual_trace_)
2034 << base << derived->record();
2035 }
2036
2037 void NoteManualDispatchMethod(CXXMethodDecl* dispatch) {
2038 SourceLocation loc = dispatch->getLocStart();
2039 SourceManager& manager = instance_.getSourceManager();
2040 FullSourceLoc full_loc(loc, manager);
2041 diagnostic_.Report(full_loc, diag_manual_dispatch_method_note_) << dispatch;
2042 }
2043
2044 void NoteBaseRequiresTracing(BasePoint* base) {
2045 SourceLocation loc = base->spec().getLocStart();
2046 SourceManager& manager = instance_.getSourceManager();
2047 FullSourceLoc full_loc(loc, manager);
2048 diagnostic_.Report(full_loc, diag_base_requires_tracing_note_)
2049 << base->info()->record();
2050 }
2051
2052 void NoteFieldRequiresTracing(RecordInfo* holder, FieldDecl* field) {
2053 NoteField(field, diag_field_requires_tracing_note_);
2054 }
2055
2056 void NotePartObjectContainsGCRoot(FieldPoint* point) {
2057 FieldDecl* field = point->field();
2058 SourceLocation loc = field->getLocStart();
2059 SourceManager& manager = instance_.getSourceManager();
2060 FullSourceLoc full_loc(loc, manager);
2061 diagnostic_.Report(full_loc, diag_part_object_contains_gc_root_note_)
2062 << field << field->getParent();
2063 }
2064
2065 void NoteFieldContainsGCRoot(FieldPoint* point) {
2066 NoteField(point, diag_field_contains_gc_root_note_);
2067 }
2068
2069 void NoteUserDeclaredDestructor(CXXMethodDecl* dtor) {
2070 SourceLocation loc = dtor->getLocStart();
2071 SourceManager& manager = instance_.getSourceManager();
2072 FullSourceLoc full_loc(loc, manager);
2073 diagnostic_.Report(full_loc, diag_user_declared_destructor_note_);
2074 }
2075
2076 void NoteUserDeclaredFinalizer(CXXMethodDecl* dtor) {
2077 SourceLocation loc = dtor->getLocStart();
2078 SourceManager& manager = instance_.getSourceManager();
2079 FullSourceLoc full_loc(loc, manager);
2080 diagnostic_.Report(full_loc, diag_user_declared_finalizer_note_);
2081 }
2082
2083 void NoteBaseRequiresFinalization(BasePoint* base) {
2084 SourceLocation loc = base->spec().getLocStart();
2085 SourceManager& manager = instance_.getSourceManager();
2086 FullSourceLoc full_loc(loc, manager);
2087 diagnostic_.Report(full_loc, diag_base_requires_finalization_note_)
2088 << base->info()->record();
2089 }
2090
2091 void NoteField(FieldPoint* point, unsigned note) {
2092 NoteField(point->field(), note);
2093 }
2094
2095 void NoteField(FieldDecl* field, unsigned note) {
2096 SourceLocation loc = field->getLocStart();
2097 SourceManager& manager = instance_.getSourceManager();
2098 FullSourceLoc full_loc(loc, manager);
2099 diagnostic_.Report(full_loc, note) << field;
2100 }
2101
2102 void NoteOverriddenNonVirtualTrace(CXXMethodDecl* overridden) {
2103 SourceLocation loc = overridden->getLocStart();
2104 SourceManager& manager = instance_.getSourceManager();
2105 FullSourceLoc full_loc(loc, manager);
2106 diagnostic_.Report(full_loc, diag_overridden_non_virtual_trace_note_)
2107 << overridden;
2108 }
2109
2110 unsigned diag_class_must_left_mostly_derive_gc_;
2111 unsigned diag_class_requires_trace_method_;
2112 unsigned diag_base_requires_tracing_;
2113 unsigned diag_fields_require_tracing_;
2114 unsigned diag_class_contains_invalid_fields_;
2115 unsigned diag_class_contains_invalid_fields_warning_;
2116 unsigned diag_class_contains_gc_root_;
2117 unsigned diag_class_requires_finalization_;
2118 unsigned diag_class_does_not_require_finalization_;
2119 unsigned diag_finalizer_accesses_finalized_field_;
2120 unsigned diag_finalizer_eagerly_finalized_field_;
2121 unsigned diag_overridden_non_virtual_trace_;
2122 unsigned diag_missing_trace_dispatch_method_;
2123 unsigned diag_missing_finalize_dispatch_method_;
2124 unsigned diag_virtual_and_manual_dispatch_;
2125 unsigned diag_missing_trace_dispatch_;
2126 unsigned diag_missing_finalize_dispatch_;
2127 unsigned diag_derives_non_stack_allocated_;
2128 unsigned diag_class_overrides_new_;
2129 unsigned diag_class_declares_pure_virtual_trace_;
2130 unsigned diag_left_most_base_must_be_polymorphic_;
2131 unsigned diag_base_class_must_declare_virtual_trace_;
2132 unsigned diag_class_must_declare_gc_mixin_trace_method_;
2133
2134 unsigned diag_base_requires_tracing_note_;
2135 unsigned diag_field_requires_tracing_note_;
2136 unsigned diag_raw_ptr_to_gc_managed_class_note_;
2137 unsigned diag_ref_ptr_to_gc_managed_class_note_;
2138 unsigned diag_own_ptr_to_gc_managed_class_note_;
2139 unsigned diag_member_to_gc_unmanaged_class_note_;
2140 unsigned diag_stack_allocated_field_note_;
2141 unsigned diag_member_in_unmanaged_class_note_;
2142 unsigned diag_part_object_to_gc_derived_class_note_;
2143 unsigned diag_part_object_contains_gc_root_note_;
2144 unsigned diag_field_contains_gc_root_note_;
2145 unsigned diag_finalized_field_note_;
2146 unsigned diag_eagerly_finalized_field_note_;
2147 unsigned diag_user_declared_destructor_note_;
2148 unsigned diag_user_declared_finalizer_note_;
2149 unsigned diag_base_requires_finalization_note_;
2150 unsigned diag_field_requires_finalization_note_;
2151 unsigned diag_overridden_non_virtual_trace_note_;
2152 unsigned diag_manual_dispatch_method_note_;
2153
2154 CompilerInstance& instance_;
2155 DiagnosticsEngine& diagnostic_;
2156 BlinkGCPluginOptions options_;
2157 RecordCache cache_;
2158 JsonWriter* json_;
2159 };
2160 19
2161 class BlinkGCPluginAction : public PluginASTAction { 20 class BlinkGCPluginAction : public PluginASTAction {
2162 public: 21 public:
2163 BlinkGCPluginAction() {} 22 BlinkGCPluginAction() {}
2164 23
2165 protected: 24 protected:
2166 // Overridden from PluginASTAction: 25 // Overridden from PluginASTAction:
2167 virtual std::unique_ptr<ASTConsumer> CreateASTConsumer( 26 virtual std::unique_ptr<ASTConsumer> CreateASTConsumer(
2168 CompilerInstance& instance, 27 CompilerInstance& instance,
2169 llvm::StringRef ref) { 28 llvm::StringRef ref) {
2170 return llvm::make_unique<BlinkGCPluginConsumer>(instance, options_); 29 return llvm::make_unique<BlinkGCPluginConsumer>(instance, options_);
2171 } 30 }
2172 31
2173 virtual bool ParseArgs(const CompilerInstance& instance, 32 virtual bool ParseArgs(const CompilerInstance& instance,
2174 const std::vector<string>& args) { 33 const std::vector<std::string>& args) {
2175 bool parsed = true; 34 bool parsed = true;
2176 35
2177 for (size_t i = 0; i < args.size() && parsed; ++i) { 36 for (size_t i = 0; i < args.size() && parsed; ++i) {
2178 if (args[i] == "enable-oilpan") { 37 if (args[i] == "enable-oilpan") {
2179 options_.enable_oilpan = true; 38 options_.enable_oilpan = true;
2180 } else if (args[i] == "dump-graph") { 39 } else if (args[i] == "dump-graph") {
2181 options_.dump_graph = true; 40 options_.dump_graph = true;
2182 } else if (args[i] == "warn-raw-ptr") { 41 } else if (args[i] == "warn-raw-ptr") {
2183 options_.warn_raw_ptr = true; 42 options_.warn_raw_ptr = true;
2184 } else if (args[i] == "warn-unneeded-finalizer") { 43 } else if (args[i] == "warn-unneeded-finalizer") {
2185 options_.warn_unneeded_finalizer = true; 44 options_.warn_unneeded_finalizer = true;
2186 } else { 45 } else {
2187 parsed = false; 46 parsed = false;
2188 llvm::errs() << "Unknown blink-gc-plugin argument: " << args[i] << "\n"; 47 llvm::errs() << "Unknown blink-gc-plugin argument: " << args[i] << "\n";
2189 } 48 }
2190 } 49 }
2191 50
2192 return parsed; 51 return parsed;
2193 } 52 }
2194 53
2195 private: 54 private:
2196 BlinkGCPluginOptions options_; 55 BlinkGCPluginOptions options_;
2197 }; 56 };
2198 57
2199 } // namespace
2200
2201 static FrontendPluginRegistry::Add<BlinkGCPluginAction> X( 58 static FrontendPluginRegistry::Add<BlinkGCPluginAction> X(
2202 "blink-gc-plugin", 59 "blink-gc-plugin",
2203 "Check Blink GC invariants"); 60 "Check Blink GC invariants");
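
For reference, ParseArgs above receives its values through clang's generic plugin-argument mechanism. A minimal illustrative invocation (a sketch only, not the flag set the build actually uses, and assuming the plugin was built as a separate libBlinkGCPlugin.so with foo.cpp standing in for an arbitrary source file) might look like:

  clang++ -c foo.cpp \
    -Xclang -load -Xclang libBlinkGCPlugin.so \
    -Xclang -add-plugin -Xclang blink-gc-plugin \
    -Xclang -plugin-arg-blink-gc-plugin -Xclang enable-oilpan \
    -Xclang -plugin-arg-blink-gc-plugin -Xclang warn-raw-ptr

Each -plugin-arg-blink-gc-plugin value arrives as one element of the args vector, so anything other than enable-oilpan, dump-graph, warn-raw-ptr, or warn-unneeded-finalizer reaches the "Unknown blink-gc-plugin argument" branch and makes ParseArgs return false.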