OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 // This clang plugin checks various invariants of the Blink garbage | 5 // This clang plugin checks various invariants of the Blink garbage |
6 // collection infrastructure. | 6 // collection infrastructure. |
7 // | 7 // |
8 // Errors are described at: | 8 // Errors are described at: |
9 // http://www.chromium.org/developers/blink-gc-plugin-errors | 9 // http://www.chromium.org/developers/blink-gc-plugin-errors |
10 | 10 |
| 11 #include "BlinkGCPluginConsumer.h" |
| 12 #include "BlinkGCPluginOptions.h" |
11 #include "Config.h" | 13 #include "Config.h" |
12 #include "JsonWriter.h" | |
13 #include "RecordInfo.h" | |
14 | 14 |
15 #include "clang/AST/AST.h" | |
16 #include "clang/AST/ASTConsumer.h" | |
17 #include "clang/AST/RecursiveASTVisitor.h" | |
18 #include "clang/Frontend/CompilerInstance.h" | 15 #include "clang/Frontend/CompilerInstance.h" |
19 #include "clang/Frontend/FrontendPluginRegistry.h" | 16 #include "clang/Frontend/FrontendPluginRegistry.h" |
20 | 17 |
21 using namespace clang; | 18 using namespace clang; |
22 using std::string; | |
23 | |
24 namespace { | |
25 | |
26 const char kClassMustLeftMostlyDeriveGC[] = | |
27 "[blink-gc] Class %0 must derive its GC base in the left-most position."; | |
28 | |
29 const char kClassRequiresTraceMethod[] = | |
30 "[blink-gc] Class %0 requires a trace method."; | |
31 | |
32 const char kBaseRequiresTracing[] = | |
33 "[blink-gc] Base class %0 of derived class %1 requires tracing."; | |
34 | |
35 const char kBaseRequiresTracingNote[] = | |
36 "[blink-gc] Untraced base class %0 declared here:"; | |
37 | |
38 const char kFieldsRequireTracing[] = | |
39 "[blink-gc] Class %0 has untraced fields that require tracing."; | |
40 | |
41 const char kFieldRequiresTracingNote[] = | |
42 "[blink-gc] Untraced field %0 declared here:"; | |
43 | |
44 const char kClassContainsInvalidFields[] = | |
45 "[blink-gc] Class %0 contains invalid fields."; | |
46 | |
47 const char kClassContainsGCRoot[] = | |
48 "[blink-gc] Class %0 contains GC root in field %1."; | |
49 | |
50 const char kClassRequiresFinalization[] = | |
51 "[blink-gc] Class %0 requires finalization."; | |
52 | |
53 const char kClassDoesNotRequireFinalization[] = | |
54 "[blink-gc] Class %0 may not require finalization."; | |
55 | |
56 const char kFinalizerAccessesFinalizedField[] = | |
57 "[blink-gc] Finalizer %0 accesses potentially finalized field %1."; | |
58 | |
59 const char kRawPtrToGCManagedClassNote[] = | |
60 "[blink-gc] Raw pointer field %0 to a GC managed class declared here:"; | |
61 | |
62 const char kRefPtrToGCManagedClassNote[] = | |
63 "[blink-gc] RefPtr field %0 to a GC managed class declared here:"; | |
64 | |
65 const char kOwnPtrToGCManagedClassNote[] = | |
66 "[blink-gc] OwnPtr field %0 to a GC managed class declared here:"; | |
67 | |
68 const char kStackAllocatedFieldNote[] = | |
69 "[blink-gc] Stack-allocated field %0 declared here:"; | |
70 | |
71 const char kMemberInUnmanagedClassNote[] = | |
72 "[blink-gc] Member field %0 in unmanaged class declared here:"; | |
73 | |
74 const char kPartObjectToGCDerivedClassNote[] = | |
75 "[blink-gc] Part-object field %0 to a GC derived class declared here:"; | |
76 | |
77 const char kPartObjectContainsGCRootNote[] = | |
78 "[blink-gc] Field %0 with embedded GC root in %1 declared here:"; | |
79 | |
80 const char kFieldContainsGCRootNote[] = | |
81 "[blink-gc] Field %0 defining a GC root declared here:"; | |
82 | |
83 const char kOverriddenNonVirtualTrace[] = | |
84 "[blink-gc] Class %0 overrides non-virtual trace of base class %1."; | |
85 | |
86 const char kOverriddenNonVirtualTraceNote[] = | |
87 "[blink-gc] Non-virtual trace method declared here:"; | |
88 | |
89 const char kMissingTraceDispatchMethod[] = | |
90 "[blink-gc] Class %0 is missing manual trace dispatch."; | |
91 | |
92 const char kMissingFinalizeDispatchMethod[] = | |
93 "[blink-gc] Class %0 is missing manual finalize dispatch."; | |
94 | |
95 const char kVirtualAndManualDispatch[] = | |
96 "[blink-gc] Class %0 contains or inherits virtual methods" | |
97 " but implements manual dispatching."; | |
98 | |
99 const char kMissingTraceDispatch[] = | |
100 "[blink-gc] Missing dispatch to class %0 in manual trace dispatch."; | |
101 | |
102 const char kMissingFinalizeDispatch[] = | |
103 "[blink-gc] Missing dispatch to class %0 in manual finalize dispatch."; | |
104 | |
105 const char kFinalizedFieldNote[] = | |
106 "[blink-gc] Potentially finalized field %0 declared here:"; | |
107 | |
108 const char kUserDeclaredDestructorNote[] = | |
109 "[blink-gc] User-declared destructor declared here:"; | |
110 | |
111 const char kUserDeclaredFinalizerNote[] = | |
112 "[blink-gc] User-declared finalizer declared here:"; | |
113 | |
114 const char kBaseRequiresFinalizationNote[] = | |
115 "[blink-gc] Base class %0 requiring finalization declared here:"; | |
116 | |
117 const char kFieldRequiresFinalizationNote[] = | |
118 "[blink-gc] Field %0 requiring finalization declared here:"; | |
119 | |
120 const char kManualDispatchMethodNote[] = | |
121 "[blink-gc] Manual dispatch %0 declared here:"; | |
122 | |
123 const char kDerivesNonStackAllocated[] = | |
124 "[blink-gc] Stack-allocated class %0 derives class %1" | |
125 " which is not stack allocated."; | |
126 | |
127 const char kClassOverridesNew[] = | |
128 "[blink-gc] Garbage collected class %0" | |
129 " is not permitted to override its new operator."; | |
130 | |
131 const char kClassDeclaresPureVirtualTrace[] = | |
132 "[blink-gc] Garbage collected class %0" | |
133 " is not permitted to declare a pure-virtual trace method."; | |
134 | |
135 const char kLeftMostBaseMustBePolymorphic[] = | |
136 "[blink-gc] Left-most base class %0 of derived class %1" | |
137 " must be polymorphic."; | |
138 | |
139 const char kBaseClassMustDeclareVirtualTrace[] = | |
140 "[blink-gc] Left-most base class %0 of derived class %1" | |
141 " must define a virtual trace method."; | |
142 | |
143 const char kClassMustDeclareGCMixinTraceMethod[] = | |
144 "[blink-gc] Class %0 which inherits from GarbageCollectedMixin must" | |
145 " locally declare and override trace(Visitor*)"; | |
146 | |
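For orientation, a minimal sketch of the kind of class the tracing diagnostics above are aimed at, assuming Blink's usual Oilpan idiom (GarbageCollected<T>, Member<T>, and a trace(Visitor*) method); the types come from Blink's heap headers, not from this plugin, and the class names are illustrative only:

  class Child : public GarbageCollected<Child> {
   public:
    void trace(Visitor*) {}
  };

  class Parent : public GarbageCollected<Parent> {
   public:
    // Without this method the plugin reports kClassRequiresTraceMethod.
    void trace(Visitor* visitor) { visitor->trace(m_child); }

   private:
    // If left out of trace(), the plugin reports kFieldsRequireTracing.
    Member<Child> m_child;
  };

  class Derived : public Parent {
   public:
    // Omitting the Parent::trace(visitor) call triggers kBaseRequiresTracing.
    void trace(Visitor* visitor) { Parent::trace(visitor); }
  };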
147 struct BlinkGCPluginOptions { | |
148 BlinkGCPluginOptions() | |
149 : enable_oilpan(false) | |
150 , dump_graph(false) | |
151 , warn_raw_ptr(false) | |
152 , warn_unneeded_finalizer(false) {} | |
153 bool enable_oilpan; | |
154 bool dump_graph; | |
155 bool warn_raw_ptr; | |
156 bool warn_unneeded_finalizer; | |
157 std::set<std::string> ignored_classes; | |
158 std::set<std::string> checked_namespaces; | |
159 std::vector<std::string> ignored_directories; | |
160 }; | |
161 | |
162 typedef std::vector<CXXRecordDecl*> RecordVector; | |
163 typedef std::vector<CXXMethodDecl*> MethodVector; | |
164 | |
165 // Test if a template specialization is an instantiation. | |
166 static bool IsTemplateInstantiation(CXXRecordDecl* record) { | |
167 ClassTemplateSpecializationDecl* spec = | |
168 dyn_cast<ClassTemplateSpecializationDecl>(record); | |
169 if (!spec) | |
170 return false; | |
171 switch (spec->getTemplateSpecializationKind()) { | |
172 case TSK_ImplicitInstantiation: | |
173 case TSK_ExplicitInstantiationDefinition: | |
174 return true; | |
175 case TSK_Undeclared: | |
176 case TSK_ExplicitSpecialization: | |
177 return false; | |
178 // TODO: unsupported cases. | |
179 case TSK_ExplicitInstantiationDeclaration: | |
180 return false; | |
181 } | |
182 assert(false && "Unknown template specialization kind"); | |
183 } | |
184 | |
185 // This visitor collects the entry points for the checker. | |
186 class CollectVisitor : public RecursiveASTVisitor<CollectVisitor> { | |
187 public: | |
188 CollectVisitor() {} | |
189 | |
190 RecordVector& record_decls() { return record_decls_; } | |
191 MethodVector& trace_decls() { return trace_decls_; } | |
192 | |
193 bool shouldVisitTemplateInstantiations() { return false; } | |
194 | |
195 // Collect record declarations, including nested declarations. | |
196 bool VisitCXXRecordDecl(CXXRecordDecl* record) { | |
197 if (record->hasDefinition() && record->isCompleteDefinition()) | |
198 record_decls_.push_back(record); | |
199 return true; | |
200 } | |
201 | |
202 // Collect tracing method definitions, but don't traverse method bodies. | |
203 bool TraverseCXXMethodDecl(CXXMethodDecl* method) { | |
204 if (method->isThisDeclarationADefinition() && Config::IsTraceMethod(method)) | |
205 trace_decls_.push_back(method); | |
206 return true; | |
207 } | |
208 | |
209 private: | |
210 RecordVector record_decls_; | |
211 MethodVector trace_decls_; | |
212 }; | |
213 | |
214 // This visitor checks that a finalizer method does not have invalid access to | |
215 // fields that are potentially finalized. A potentially finalized field is | |
216 // either a Member, a heap-allocated collection or an off-heap collection that | |
217 // contains Members. Invalid uses are currently identified as passing the field | |
218 // as the argument of a procedure call or using the -> or [] operators on it. | |
219 class CheckFinalizerVisitor | |
220 : public RecursiveASTVisitor<CheckFinalizerVisitor> { | |
221 private: | |
222 // Simple visitor to determine if the content of a field might be collected | |
223 // during finalization. | |
224 class MightBeCollectedVisitor : public EdgeVisitor { | |
225 public: | |
226 MightBeCollectedVisitor() : might_be_collected_(false) {} | |
227 bool might_be_collected() { return might_be_collected_; } | |
228 void VisitMember(Member* edge) override { might_be_collected_ = true; } | |
229 void VisitCollection(Collection* edge) override { | |
230 if (edge->on_heap()) { | |
231 might_be_collected_ = !edge->is_root(); | |
232 } else { | |
233 edge->AcceptMembers(this); | |
234 } | |
235 } | |
236 | |
237 private: | |
238 bool might_be_collected_; | |
239 }; | |
240 | |
241 public: | |
242 typedef std::vector<std::pair<MemberExpr*, FieldPoint*> > Errors; | |
243 | |
244 CheckFinalizerVisitor(RecordCache* cache) | |
245 : blacklist_context_(false), cache_(cache) {} | |
246 | |
247 Errors& finalized_fields() { return finalized_fields_; } | |
248 | |
249 bool WalkUpFromCXXOperatorCallExpr(CXXOperatorCallExpr* expr) { | |
250 // Only continue the walk-up if the operator is a blacklisted one. | |
251 switch (expr->getOperator()) { | |
252 case OO_Arrow: | |
253 case OO_Subscript: | |
254 this->WalkUpFromCallExpr(expr); | |
255 default: | |
256 return true; | |
257 } | |
258 } | |
259 | |
260 // We consider all non-operator calls to be blacklisted contexts. | |
261 bool WalkUpFromCallExpr(CallExpr* expr) { | |
262 bool prev_blacklist_context = blacklist_context_; | |
263 blacklist_context_ = true; | |
264 for (size_t i = 0; i < expr->getNumArgs(); ++i) | |
265 this->TraverseStmt(expr->getArg(i)); | |
266 blacklist_context_ = prev_blacklist_context; | |
267 return true; | |
268 } | |
269 | |
270 bool VisitMemberExpr(MemberExpr* member) { | |
271 FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl()); | |
272 if (!field) | |
273 return true; | |
274 | |
275 RecordInfo* info = cache_->Lookup(field->getParent()); | |
276 if (!info) | |
277 return true; | |
278 | |
279 RecordInfo::Fields::iterator it = info->GetFields().find(field); | |
280 if (it == info->GetFields().end()) | |
281 return true; | |
282 | |
283 if (blacklist_context_ && MightBeCollected(&it->second)) | |
284 finalized_fields_.push_back(std::make_pair(member, &it->second)); | |
285 return true; | |
286 } | |
287 | |
288 bool MightBeCollected(FieldPoint* point) { | |
289 MightBeCollectedVisitor visitor; | |
290 point->edge()->Accept(&visitor); | |
291 return visitor.might_be_collected(); | |
292 } | |
293 | |
294 private: | |
295 bool blacklist_context_; | |
296 Errors finalized_fields_; | |
297 RecordCache* cache_; | |
298 }; | |
299 | |
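A hedged sketch of the pattern CheckFinalizerVisitor flags, again assuming Blink's heap types rather than anything defined in this file:

  class Other : public GarbageCollectedFinalized<Other> {
   public:
    void close() {}
    void trace(Visitor*) {}
  };

  class Holder : public GarbageCollectedFinalized<Holder> {
   public:
    // |m_other| may already have been swept when this finalizer runs, so the
    // plugin reports kFinalizerAccessesFinalizedField for the call below.
    ~Holder() { m_other->close(); }

    void trace(Visitor* visitor) { visitor->trace(m_other); }

   private:
    Member<Other> m_other;
  };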
300 // This visitor checks that a method contains, within its body, a call to | |
301 // a method on the provided receiver class. This is used to check manual | |
302 // dispatching for trace and finalize methods. | |
303 class CheckDispatchVisitor : public RecursiveASTVisitor<CheckDispatchVisitor> { | |
304 public: | |
305 CheckDispatchVisitor(RecordInfo* receiver) | |
306 : receiver_(receiver), dispatched_to_receiver_(false) {} | |
307 | |
308 bool dispatched_to_receiver() { return dispatched_to_receiver_; } | |
309 | |
310 bool VisitMemberExpr(MemberExpr* member) { | |
311 if (CXXMethodDecl* fn = dyn_cast<CXXMethodDecl>(member->getMemberDecl())) { | |
312 if (fn->getParent() == receiver_->record()) | |
313 dispatched_to_receiver_ = true; | |
314 } | |
315 return true; | |
316 } | |
317 | |
318 bool VisitUnresolvedMemberExpr(UnresolvedMemberExpr* member) { | |
319 for (Decl* decl : member->decls()) { | |
320 if (CXXMethodDecl* method = dyn_cast<CXXMethodDecl>(decl)) { | |
321 if (method->getParent() == receiver_->record() && | |
322 Config::GetTraceMethodType(method) == | |
323 Config::TRACE_AFTER_DISPATCH_METHOD) { | |
324 dispatched_to_receiver_ = true; | |
325 return true; | |
326 } | |
327 } | |
328 } | |
329 return true; | |
330 } | |
331 | |
332 private: | |
333 RecordInfo* receiver_; | |
334 bool dispatched_to_receiver_; | |
335 }; | |
336 | |
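For reference, a rough sketch of the manual dispatch pattern that CheckDispatchVisitor inspects (the heap types are assumed and the class names are illustrative):

  class Circle;

  class Shape : public GarbageCollected<Shape> {
   public:
    enum Type { kCircle };
    void trace(Visitor*);  // Manual trace dispatch, defined below.
    void traceAfterDispatch(Visitor*) {}

   protected:
    explicit Shape(Type type) : m_type(type) {}

   private:
    Type m_type;
  };

  class Circle : public Shape {
   public:
    Circle() : Shape(kCircle) {}
    void traceAfterDispatch(Visitor* visitor) { Shape::traceAfterDispatch(visitor); }
  };

  // If the dispatch body never reaches a given concrete subclass, the plugin
  // reports kMissingTraceDispatch for that subclass.
  inline void Shape::trace(Visitor* visitor) {
    if (m_type == kCircle)
      static_cast<Circle*>(this)->traceAfterDispatch(visitor);
  }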
337 // This visitor checks a tracing method by traversing its body. | |
338 // - A member field is considered traced if it is referenced in the body. | |
339 // - A base is traced if a base-qualified call to a trace method is found. | |
340 class CheckTraceVisitor : public RecursiveASTVisitor<CheckTraceVisitor> { | |
341 public: | |
342 CheckTraceVisitor(CXXMethodDecl* trace, RecordInfo* info, RecordCache* cache) | |
343 : trace_(trace), | |
344 info_(info), | |
345 cache_(cache), | |
346 delegates_to_traceimpl_(false) { | |
347 } | |
348 | |
349 bool delegates_to_traceimpl() const { return delegates_to_traceimpl_; } | |
350 | |
351 bool VisitMemberExpr(MemberExpr* member) { | |
352 // In weak callbacks, consider any occurrence as a correct usage. | |
353 // TODO: We really want to require that isAlive is checked on manually | |
354 // processed weak fields. | |
355 if (IsWeakCallback()) { | |
356 if (FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl())) | |
357 FoundField(field); | |
358 } | |
359 return true; | |
360 } | |
361 | |
362 bool VisitCallExpr(CallExpr* call) { | |
363 // In weak callbacks we don't check calls (see VisitMemberExpr). | |
364 if (IsWeakCallback()) | |
365 return true; | |
366 | |
367 Expr* callee = call->getCallee(); | |
368 | |
369 // Trace calls from a templated derived class result in a | |
370 // DependentScopeMemberExpr because the concrete trace call depends on the | |
371 // instantiation of any shared template parameters. In this case the call is | |
372 // "unresolved" and we resort to comparing the syntactic type names. | |
373 if (CXXDependentScopeMemberExpr* expr = | |
374 dyn_cast<CXXDependentScopeMemberExpr>(callee)) { | |
375 CheckCXXDependentScopeMemberExpr(call, expr); | |
376 return true; | |
377 } | |
378 | |
379 // A tracing call will have either a |visitor| or a |m_field| argument. | |
380 // A registerWeakMembers call will have a |this| argument. | |
381 if (call->getNumArgs() != 1) | |
382 return true; | |
383 Expr* arg = call->getArg(0); | |
384 | |
385 if (UnresolvedMemberExpr* expr = dyn_cast<UnresolvedMemberExpr>(callee)) { | |
386 // This could be a trace call of a base class, as explained in the | |
387 // comments of CheckTraceBaseCall(). | |
388 if (CheckTraceBaseCall(call)) | |
389 return true; | |
390 | |
391 if (expr->getMemberName().getAsString() == kRegisterWeakMembersName) | |
392 MarkAllWeakMembersTraced(); | |
393 | |
394 QualType base = expr->getBaseType(); | |
395 if (!base->isPointerType()) | |
396 return true; | |
397 CXXRecordDecl* decl = base->getPointeeType()->getAsCXXRecordDecl(); | |
398 if (decl) | |
399 CheckTraceFieldCall(expr->getMemberName().getAsString(), decl, arg); | |
400 if (Config::IsTraceImplName(expr->getMemberName().getAsString())) | |
401 delegates_to_traceimpl_ = true; | |
402 return true; | |
403 } | |
404 | |
405 if (CXXMemberCallExpr* expr = dyn_cast<CXXMemberCallExpr>(call)) { | |
406 if (CheckTraceFieldCall(expr) || CheckRegisterWeakMembers(expr)) | |
407 return true; | |
408 | |
409 if (Config::IsTraceImplName(expr->getMethodDecl()->getNameAsString())) { | |
410 delegates_to_traceimpl_ = true; | |
411 return true; | |
412 } | |
413 } | |
414 | |
415 CheckTraceBaseCall(call); | |
416 return true; | |
417 } | |
418 | |
419 private: | |
420 bool IsTraceCallName(const std::string& name) { | |
421 if (trace_->getName() == kTraceImplName) | |
422 return name == kTraceName; | |
423 if (trace_->getName() == kTraceAfterDispatchImplName) | |
424 return name == kTraceAfterDispatchName; | |
425 // Currently, a manually dispatched class cannot have mixin bases (having | |
426 // one would add a vtable which we explicitly check against). This means | |
427 // that we can only make calls to a trace method of the same name. Revisit | |
428 // this if our mixin/vtable assumption changes. | |
429 return name == trace_->getName(); | |
430 } | |
431 | |
432 CXXRecordDecl* GetDependentTemplatedDecl(CXXDependentScopeMemberExpr* expr) { | |
433 NestedNameSpecifier* qual = expr->getQualifier(); | |
434 if (!qual) | |
435 return 0; | |
436 | |
437 const Type* type = qual->getAsType(); | |
438 if (!type) | |
439 return 0; | |
440 | |
441 return RecordInfo::GetDependentTemplatedDecl(*type); | |
442 } | |
443 | |
444 void CheckCXXDependentScopeMemberExpr(CallExpr* call, | |
445 CXXDependentScopeMemberExpr* expr) { | |
446 string fn_name = expr->getMember().getAsString(); | |
447 | |
448 // Check for VisitorDispatcher::trace(field) and | |
449 // VisitorDispatcher::registerWeakMembers. | |
450 if (!expr->isImplicitAccess()) { | |
451 if (clang::DeclRefExpr* base_decl = | |
452 clang::dyn_cast<clang::DeclRefExpr>(expr->getBase())) { | |
453 if (Config::IsVisitorDispatcherType(base_decl->getType())) { | |
454 if (call->getNumArgs() == 1 && fn_name == kTraceName) { | |
455 FindFieldVisitor finder; | |
456 finder.TraverseStmt(call->getArg(0)); | |
457 if (finder.field()) | |
458 FoundField(finder.field()); | |
459 | |
460 return; | |
461 } else if (call->getNumArgs() == 1 && | |
462 fn_name == kRegisterWeakMembersName) { | |
463 MarkAllWeakMembersTraced(); | |
464 } | |
465 } | |
466 } | |
467 } | |
468 | |
469 CXXRecordDecl* tmpl = GetDependentTemplatedDecl(expr); | |
470 if (!tmpl) | |
471 return; | |
472 | |
473 // Check for Super<T>::trace(visitor) | |
474 if (call->getNumArgs() == 1 && IsTraceCallName(fn_name)) { | |
475 RecordInfo::Bases::iterator it = info_->GetBases().begin(); | |
476 for (; it != info_->GetBases().end(); ++it) { | |
477 if (it->first->getName() == tmpl->getName()) | |
478 it->second.MarkTraced(); | |
479 } | |
480 } | |
481 | |
482 // Check for TraceIfNeeded<T>::trace(visitor, &field) | |
483 if (call->getNumArgs() == 2 && fn_name == kTraceName && | |
484 tmpl->getName() == kTraceIfNeededName) { | |
485 FindFieldVisitor finder; | |
486 finder.TraverseStmt(call->getArg(1)); | |
487 if (finder.field()) | |
488 FoundField(finder.field()); | |
489 } | |
490 } | |
491 | |
492 bool CheckTraceBaseCall(CallExpr* call) { | |
493 // Checks for "Base::trace(visitor)"-like calls. | |
494 | |
495 // Checking code for these two variables is shared between the MemberExpr* | |
496 // case and the UnresolvedMemberExpr* case below. | |
497 // | |
498 // For example, if we've got "Base::trace(visitor)" as |call|, | |
499 // callee_record will be "Base", and func_name will be "trace". | |
500 CXXRecordDecl* callee_record = nullptr; | |
501 std::string func_name; | |
502 | |
503 if (MemberExpr* callee = dyn_cast<MemberExpr>(call->getCallee())) { | |
504 if (!callee->hasQualifier()) | |
505 return false; | |
506 | |
507 FunctionDecl* trace_decl = | |
508 dyn_cast<FunctionDecl>(callee->getMemberDecl()); | |
509 if (!trace_decl || !Config::IsTraceMethod(trace_decl)) | |
510 return false; | |
511 | |
512 const Type* type = callee->getQualifier()->getAsType(); | |
513 if (!type) | |
514 return false; | |
515 | |
516 callee_record = type->getAsCXXRecordDecl(); | |
517 func_name = trace_decl->getName(); | |
518 } else if (UnresolvedMemberExpr* callee = | |
519 dyn_cast<UnresolvedMemberExpr>(call->getCallee())) { | |
520 // Callee part may become unresolved if the type of the argument | |
521 // ("visitor") is a template parameter and the called function is | |
522 // overloaded (i.e. trace(Visitor*) and | |
523 // trace(InlinedGlobalMarkingVisitor)). | |
524 // | |
525 // Here, we try to find a function that looks like trace() from the | |
526 // candidate overloaded functions, and if we find one, we assume it is | |
527 // called here. | |
528 | |
529 CXXMethodDecl* trace_decl = nullptr; | |
530 for (NamedDecl* named_decl : callee->decls()) { | |
531 if (CXXMethodDecl* method_decl = dyn_cast<CXXMethodDecl>(named_decl)) { | |
532 if (Config::IsTraceMethod(method_decl)) { | |
533 trace_decl = method_decl; | |
534 break; | |
535 } | |
536 } | |
537 } | |
538 if (!trace_decl) | |
539 return false; | |
540 | |
541 // Check if the passed argument is named "visitor". | |
542 if (call->getNumArgs() != 1) | |
543 return false; | |
544 DeclRefExpr* arg = dyn_cast<DeclRefExpr>(call->getArg(0)); | |
545 if (!arg || arg->getNameInfo().getAsString() != kVisitorVarName) | |
546 return false; | |
547 | |
548 callee_record = trace_decl->getParent(); | |
549 func_name = callee->getMemberName().getAsString(); | |
550 } | |
551 | |
552 if (!callee_record) | |
553 return false; | |
554 | |
555 if (!IsTraceCallName(func_name)) | |
556 return false; | |
557 | |
558 for (auto& base : info_->GetBases()) { | |
559 // We want to deal with an omitted trace() function in an intermediate | |
560 // class in the class hierarchy, e.g.: | |
561 // class A : public GarbageCollected<A> { trace() { ... } }; | |
562 // class B : public A { /* No trace(); have nothing to trace. */ }; | |
563 // class C : public B { trace() { B::trace(visitor); } } | |
564 // where B::trace() is actually A::trace(), and in some cases we get | |
565 // A as |callee_record| instead of B. We somehow need to mark B as | |
566 // traced if we find an A::trace() call. | |
567 // | |
568 // To solve this, here we keep going up the class hierarchy as long as | |
569 // they are not required to have a trace method. The implementation is | |
570 // a simple DFS, where |base_records| represents the set of base classes | |
571 // we need to visit. | |
572 | |
573 std::vector<CXXRecordDecl*> base_records; | |
574 base_records.push_back(base.first); | |
575 | |
576 while (!base_records.empty()) { | |
577 CXXRecordDecl* base_record = base_records.back(); | |
578 base_records.pop_back(); | |
579 | |
580 if (base_record == callee_record) { | |
581 // If we find a matching trace method, pretend the user has written | |
582 // a correct trace() method of the base; in the example above, we | |
583 // find A::trace() here and mark B as correctly traced. | |
584 base.second.MarkTraced(); | |
585 return true; | |
586 } | |
587 | |
588 if (RecordInfo* base_info = cache_->Lookup(base_record)) { | |
589 if (!base_info->RequiresTraceMethod()) { | |
590 // If this base class is not required to have a trace method, then | |
591 // the actual trace method may be defined in an ancestor. | |
592 for (auto& inner_base : base_info->GetBases()) | |
593 base_records.push_back(inner_base.first); | |
594 } | |
595 } | |
596 } | |
597 } | |
598 | |
599 return false; | |
600 } | |
601 | |
602 bool CheckTraceFieldCall(CXXMemberCallExpr* call) { | |
603 return CheckTraceFieldCall(call->getMethodDecl()->getNameAsString(), | |
604 call->getRecordDecl(), | |
605 call->getArg(0)); | |
606 } | |
607 | |
608 bool CheckTraceFieldCall(string name, CXXRecordDecl* callee, Expr* arg) { | |
609 if (name != kTraceName || !Config::IsVisitor(callee->getName())) | |
610 return false; | |
611 | |
612 FindFieldVisitor finder; | |
613 finder.TraverseStmt(arg); | |
614 if (finder.field()) | |
615 FoundField(finder.field()); | |
616 | |
617 return true; | |
618 } | |
619 | |
620 bool CheckRegisterWeakMembers(CXXMemberCallExpr* call) { | |
621 CXXMethodDecl* fn = call->getMethodDecl(); | |
622 if (fn->getName() != kRegisterWeakMembersName) | |
623 return false; | |
624 | |
625 if (fn->isTemplateInstantiation()) { | |
626 const TemplateArgumentList& args = | |
627 *fn->getTemplateSpecializationInfo()->TemplateArguments; | |
628 // The second template argument is the callback method. | |
629 if (args.size() > 1 && | |
630 args[1].getKind() == TemplateArgument::Declaration) { | |
631 if (FunctionDecl* callback = | |
632 dyn_cast<FunctionDecl>(args[1].getAsDecl())) { | |
633 if (callback->hasBody()) { | |
634 CheckTraceVisitor nested_visitor(info_); | |
635 nested_visitor.TraverseStmt(callback->getBody()); | |
636 } | |
637 } | |
638 } | |
639 } | |
640 return true; | |
641 } | |
642 | |
643 class FindFieldVisitor : public RecursiveASTVisitor<FindFieldVisitor> { | |
644 public: | |
645 FindFieldVisitor() : member_(0), field_(0) {} | |
646 MemberExpr* member() const { return member_; } | |
647 FieldDecl* field() const { return field_; } | |
648 bool TraverseMemberExpr(MemberExpr* member) { | |
649 if (FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl())) { | |
650 member_ = member; | |
651 field_ = field; | |
652 return false; | |
653 } | |
654 return true; | |
655 } | |
656 private: | |
657 MemberExpr* member_; | |
658 FieldDecl* field_; | |
659 }; | |
660 | |
661 // Nested checking for weak callbacks. | |
662 CheckTraceVisitor(RecordInfo* info) | |
663 : trace_(nullptr), info_(info), cache_(nullptr) {} | |
664 | |
665 bool IsWeakCallback() { return !trace_; } | |
666 | |
667 void MarkTraced(RecordInfo::Fields::iterator it) { | |
668 // In a weak callback we can't mark strong fields as traced. | |
669 if (IsWeakCallback() && !it->second.edge()->IsWeakMember()) | |
670 return; | |
671 it->second.MarkTraced(); | |
672 } | |
673 | |
674 void FoundField(FieldDecl* field) { | |
675 if (IsTemplateInstantiation(info_->record())) { | |
676 // Pointer equality on fields does not work for template instantiations. | |
677 // The trace method refers to fields of the template definition which | |
678 // are different from the instantiated fields that need to be traced. | |
679 const string& name = field->getNameAsString(); | |
680 for (RecordInfo::Fields::iterator it = info_->GetFields().begin(); | |
681 it != info_->GetFields().end(); | |
682 ++it) { | |
683 if (it->first->getNameAsString() == name) { | |
684 MarkTraced(it); | |
685 break; | |
686 } | |
687 } | |
688 } else { | |
689 RecordInfo::Fields::iterator it = info_->GetFields().find(field); | |
690 if (it != info_->GetFields().end()) | |
691 MarkTraced(it); | |
692 } | |
693 } | |
694 | |
695 void MarkAllWeakMembersTraced() { | |
696 // If we find a call to registerWeakMembers which is unresolved we | |
697 // unsoundly consider all weak members as traced. | |
698 // TODO: Find out how to validate weak member tracing for unresolved call. | |
699 for (auto& field : info_->GetFields()) { | |
700 if (field.second.edge()->IsWeakMember()) | |
701 field.second.MarkTraced(); | |
702 } | |
703 } | |
704 | |
705 CXXMethodDecl* trace_; | |
706 RecordInfo* info_; | |
707 RecordCache* cache_; | |
708 bool delegates_to_traceimpl_; | |
709 }; | |
710 | |
711 // This visitor checks that the fields of a class and the fields of | |
712 // its part objects don't define GC roots. | |
713 class CheckGCRootsVisitor : public RecursiveEdgeVisitor { | |
714 public: | |
715 typedef std::vector<FieldPoint*> RootPath; | |
716 typedef std::set<RecordInfo*> VisitingSet; | |
717 typedef std::vector<RootPath> Errors; | |
718 | |
719 CheckGCRootsVisitor() {} | |
720 | |
721 Errors& gc_roots() { return gc_roots_; } | |
722 | |
723 bool ContainsGCRoots(RecordInfo* info) { | |
724 for (RecordInfo::Fields::iterator it = info->GetFields().begin(); | |
725 it != info->GetFields().end(); | |
726 ++it) { | |
727 current_.push_back(&it->second); | |
728 it->second.edge()->Accept(this); | |
729 current_.pop_back(); | |
730 } | |
731 return !gc_roots_.empty(); | |
732 } | |
733 | |
734 void VisitValue(Value* edge) override { | |
735 // TODO: what should we do to check unions? | |
736 if (edge->value()->record()->isUnion()) | |
737 return; | |
738 | |
739 // Prevent infinite regress for cyclic part objects. | |
740 if (visiting_set_.find(edge->value()) != visiting_set_.end()) | |
741 return; | |
742 | |
743 visiting_set_.insert(edge->value()); | |
744 // If the value is a part object, then continue checking for roots. | |
745 for (Context::iterator it = context().begin(); | |
746 it != context().end(); | |
747 ++it) { | |
748 if (!(*it)->IsCollection()) | |
749 return; | |
750 } | |
751 ContainsGCRoots(edge->value()); | |
752 visiting_set_.erase(edge->value()); | |
753 } | |
754 | |
755 void VisitPersistent(Persistent* edge) override { | |
756 gc_roots_.push_back(current_); | |
757 } | |
758 | |
759 void AtCollection(Collection* edge) override { | |
760 if (edge->is_root()) | |
761 gc_roots_.push_back(current_); | |
762 } | |
763 | |
764 protected: | |
765 RootPath current_; | |
766 VisitingSet visiting_set_; | |
767 Errors gc_roots_; | |
768 }; | |
769 | |
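A hedged sketch of what the GC-roots check above rejects (Persistent<T> is Blink's root handle; it is assumed here, not defined in this file):

  class Node : public GarbageCollected<Node> {
   public:
    void trace(Visitor*) {}
  };

  class OnHeapCache : public GarbageCollected<OnHeapCache> {
   public:
    void trace(Visitor* visitor) { visitor->trace(m_node); }

   private:
    Member<Node> m_node;        // Fine: an ordinary traced heap reference.
    Persistent<Node> m_rooted;  // A GC root embedded in a GC managed class;
                                // reported as kClassContainsGCRoot.
  };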
770 // This visitor checks that the fields of a class are "well formed". | |
771 // - OwnPtr, RefPtr and RawPtr must not point to GC derived types. | |
772 // - Part objects must not be GC derived types. | |
773 // - An on-heap class must never contain GC roots. | |
774 // - Only stack-allocated types may point to stack-allocated types. | |
775 class CheckFieldsVisitor : public RecursiveEdgeVisitor { | |
776 public: | |
777 | |
778 enum Error { | |
779 kRawPtrToGCManaged, | |
780 kRawPtrToGCManagedWarning, | |
781 kRefPtrToGCManaged, | |
782 kOwnPtrToGCManaged, | |
783 kMemberInUnmanaged, | |
784 kPtrFromHeapToStack, | |
785 kGCDerivedPartObject | |
786 }; | |
787 | |
788 typedef std::vector<std::pair<FieldPoint*, Error> > Errors; | |
789 | |
790 CheckFieldsVisitor(const BlinkGCPluginOptions& options) | |
791 : options_(options), current_(0), stack_allocated_host_(false) {} | |
792 | |
793 Errors& invalid_fields() { return invalid_fields_; } | |
794 | |
795 bool ContainsInvalidFields(RecordInfo* info) { | |
796 stack_allocated_host_ = info->IsStackAllocated(); | |
797 managed_host_ = stack_allocated_host_ || | |
798 info->IsGCAllocated() || | |
799 info->IsNonNewable() || | |
800 info->IsOnlyPlacementNewable(); | |
801 for (RecordInfo::Fields::iterator it = info->GetFields().begin(); | |
802 it != info->GetFields().end(); | |
803 ++it) { | |
804 context().clear(); | |
805 current_ = &it->second; | |
806 current_->edge()->Accept(this); | |
807 } | |
808 return !invalid_fields_.empty(); | |
809 } | |
810 | |
811 void AtMember(Member* edge) override { | |
812 if (managed_host_) | |
813 return; | |
814 // A member is allowed to appear in the context of a root. | |
815 for (Context::iterator it = context().begin(); | |
816 it != context().end(); | |
817 ++it) { | |
818 if ((*it)->Kind() == Edge::kRoot) | |
819 return; | |
820 } | |
821 invalid_fields_.push_back(std::make_pair(current_, kMemberInUnmanaged)); | |
822 } | |
823 | |
824 void AtValue(Value* edge) override { | |
825 // TODO: what should we do to check unions? | |
826 if (edge->value()->record()->isUnion()) | |
827 return; | |
828 | |
829 if (!stack_allocated_host_ && edge->value()->IsStackAllocated()) { | |
830 invalid_fields_.push_back(std::make_pair(current_, kPtrFromHeapToStack)); | |
831 return; | |
832 } | |
833 | |
834 if (!Parent() && | |
835 edge->value()->IsGCDerived() && | |
836 !edge->value()->IsGCMixin()) { | |
837 invalid_fields_.push_back(std::make_pair(current_, kGCDerivedPartObject)); | |
838 return; | |
839 } | |
840 | |
841 if (!Parent() || !edge->value()->IsGCAllocated()) | |
842 return; | |
843 | |
844 // In transition mode, disallow OwnPtr<T> and RawPtr<T> to GC allocated T's; | |
845 // also disallow T* in stack-allocated types. | |
846 if (options_.enable_oilpan) { | |
847 if (Parent()->IsOwnPtr() || | |
848 Parent()->IsRawPtrClass() || | |
849 (stack_allocated_host_ && Parent()->IsRawPtr())) { | |
850 invalid_fields_.push_back(std::make_pair( | |
851 current_, InvalidSmartPtr(Parent()))); | |
852 return; | |
853 } | |
854 if (options_.warn_raw_ptr && Parent()->IsRawPtr()) { | |
855 invalid_fields_.push_back(std::make_pair( | |
856 current_, kRawPtrToGCManagedWarning)); | |
857 } | |
858 return; | |
859 } | |
860 | |
861 if (Parent()->IsRawPtr() || Parent()->IsRefPtr() || Parent()->IsOwnPtr()) { | |
862 invalid_fields_.push_back(std::make_pair( | |
863 current_, InvalidSmartPtr(Parent()))); | |
864 return; | |
865 } | |
866 } | |
867 | |
868 void AtCollection(Collection* edge) override { | |
869 if (edge->on_heap() && Parent() && Parent()->IsOwnPtr()) | |
870 invalid_fields_.push_back(std::make_pair(current_, kOwnPtrToGCManaged)); | |
871 } | |
872 | |
873 private: | |
874 Error InvalidSmartPtr(Edge* ptr) { | |
875 if (ptr->IsRawPtr()) | |
876 return kRawPtrToGCManaged; | |
877 if (ptr->IsRefPtr()) | |
878 return kRefPtrToGCManaged; | |
879 if (ptr->IsOwnPtr()) | |
880 return kOwnPtrToGCManaged; | |
881 assert(false && "Unknown smart pointer kind"); | |
882 } | |
883 | |
884 const BlinkGCPluginOptions& options_; | |
885 FieldPoint* current_; | |
886 bool stack_allocated_host_; | |
887 bool managed_host_; | |
888 Errors invalid_fields_; | |
889 }; | |
890 | |
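To make the field rules above concrete, a short sketch; the smart pointer types named in the diagnostics (RawPtr, RefPtr, OwnPtr) are WTF types assumed by the plugin, so only the plain cases are shown:

  class Managed : public GarbageCollected<Managed> {
   public:
    void trace(Visitor*) {}
  };

  class Unmanaged {
   private:
    Managed* m_raw;            // Raw pointer to a GC managed class
                               // (kRawPtrToGCManaged, or its warning variant).
    Member<Managed> m_member;  // Member outside the managed heap ->
                               // kMemberInUnmanaged.
    Managed m_part;            // GC derived part object -> kGCDerivedPartObject.
  };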
891 class EmptyStmtVisitor | |
892 : public RecursiveASTVisitor<EmptyStmtVisitor> { | |
893 public: | |
894 static bool isEmpty(Stmt* stmt) { | |
895 EmptyStmtVisitor visitor; | |
896 visitor.TraverseStmt(stmt); | |
897 return visitor.empty_; | |
898 } | |
899 | |
900 bool WalkUpFromCompoundStmt(CompoundStmt* stmt) { | |
901 empty_ = stmt->body_empty(); | |
902 return false; | |
903 } | |
904 bool VisitStmt(Stmt*) { | |
905 empty_ = false; | |
906 return false; | |
907 } | |
908 private: | |
909 EmptyStmtVisitor() : empty_(true) {} | |
910 bool empty_; | |
911 }; | |
912 | |
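The empty-statement helper above backs the unneeded-finalizer warning; a hedged sketch of the case it detects:

  class NoOpFinalizer : public GarbageCollectedFinalized<NoOpFinalizer> {
   public:
    // Empty destructor and no finalizable bases or fields: with
    // options_.warn_unneeded_finalizer set, the plugin reports
    // kClassDoesNotRequireFinalization.
    ~NoOpFinalizer() {}
    void trace(Visitor*) {}
  };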
913 // Main class containing checks for various invariants of the Blink | |
914 // garbage collection infrastructure. | |
915 class BlinkGCPluginConsumer : public ASTConsumer { | |
916 public: | |
917 BlinkGCPluginConsumer(CompilerInstance& instance, | |
918 const BlinkGCPluginOptions& options) | |
919 : instance_(instance), | |
920 diagnostic_(instance.getDiagnostics()), | |
921 options_(options), | |
922 json_(0) { | |
923 | |
924 // Only check structures in the blink and WebKit namespaces. | |
925 options_.checked_namespaces.insert("blink"); | |
926 options_.checked_namespaces.insert("WebKit"); | |
927 | |
928 // Ignore GC implementation files. | |
929 options_.ignored_directories.push_back("/heap/"); | |
930 | |
931 // Register warning/error messages. | |
932 diag_class_must_left_mostly_derive_gc_ = diagnostic_.getCustomDiagID( | |
933 getErrorLevel(), kClassMustLeftMostlyDeriveGC); | |
934 diag_class_requires_trace_method_ = | |
935 diagnostic_.getCustomDiagID(getErrorLevel(), kClassRequiresTraceMethod); | |
936 diag_base_requires_tracing_ = | |
937 diagnostic_.getCustomDiagID(getErrorLevel(), kBaseRequiresTracing); | |
938 diag_fields_require_tracing_ = | |
939 diagnostic_.getCustomDiagID(getErrorLevel(), kFieldsRequireTracing); | |
940 diag_class_contains_invalid_fields_ = diagnostic_.getCustomDiagID( | |
941 getErrorLevel(), kClassContainsInvalidFields); | |
942 diag_class_contains_invalid_fields_warning_ = diagnostic_.getCustomDiagID( | |
943 DiagnosticsEngine::Warning, kClassContainsInvalidFields); | |
944 diag_class_contains_gc_root_ = | |
945 diagnostic_.getCustomDiagID(getErrorLevel(), kClassContainsGCRoot); | |
946 diag_class_requires_finalization_ = diagnostic_.getCustomDiagID( | |
947 getErrorLevel(), kClassRequiresFinalization); | |
948 diag_class_does_not_require_finalization_ = diagnostic_.getCustomDiagID( | |
949 DiagnosticsEngine::Warning, kClassDoesNotRequireFinalization); | |
950 diag_finalizer_accesses_finalized_field_ = diagnostic_.getCustomDiagID( | |
951 getErrorLevel(), kFinalizerAccessesFinalizedField); | |
952 diag_overridden_non_virtual_trace_ = diagnostic_.getCustomDiagID( | |
953 getErrorLevel(), kOverriddenNonVirtualTrace); | |
954 diag_missing_trace_dispatch_method_ = diagnostic_.getCustomDiagID( | |
955 getErrorLevel(), kMissingTraceDispatchMethod); | |
956 diag_missing_finalize_dispatch_method_ = diagnostic_.getCustomDiagID( | |
957 getErrorLevel(), kMissingFinalizeDispatchMethod); | |
958 diag_virtual_and_manual_dispatch_ = | |
959 diagnostic_.getCustomDiagID(getErrorLevel(), kVirtualAndManualDispatch); | |
960 diag_missing_trace_dispatch_ = | |
961 diagnostic_.getCustomDiagID(getErrorLevel(), kMissingTraceDispatch); | |
962 diag_missing_finalize_dispatch_ = | |
963 diagnostic_.getCustomDiagID(getErrorLevel(), kMissingFinalizeDispatch); | |
964 diag_derives_non_stack_allocated_ = | |
965 diagnostic_.getCustomDiagID(getErrorLevel(), kDerivesNonStackAllocated); | |
966 diag_class_overrides_new_ = | |
967 diagnostic_.getCustomDiagID(getErrorLevel(), kClassOverridesNew); | |
968 diag_class_declares_pure_virtual_trace_ = diagnostic_.getCustomDiagID( | |
969 getErrorLevel(), kClassDeclaresPureVirtualTrace); | |
970 diag_left_most_base_must_be_polymorphic_ = diagnostic_.getCustomDiagID( | |
971 getErrorLevel(), kLeftMostBaseMustBePolymorphic); | |
972 diag_base_class_must_declare_virtual_trace_ = diagnostic_.getCustomDiagID( | |
973 getErrorLevel(), kBaseClassMustDeclareVirtualTrace); | |
974 diag_class_must_declare_gc_mixin_trace_method_ = | |
975 diagnostic_.getCustomDiagID(getErrorLevel(), | |
976 kClassMustDeclareGCMixinTraceMethod); | |
977 | |
978 // Register note messages. | |
979 diag_base_requires_tracing_note_ = diagnostic_.getCustomDiagID( | |
980 DiagnosticsEngine::Note, kBaseRequiresTracingNote); | |
981 diag_field_requires_tracing_note_ = diagnostic_.getCustomDiagID( | |
982 DiagnosticsEngine::Note, kFieldRequiresTracingNote); | |
983 diag_raw_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID( | |
984 DiagnosticsEngine::Note, kRawPtrToGCManagedClassNote); | |
985 diag_ref_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID( | |
986 DiagnosticsEngine::Note, kRefPtrToGCManagedClassNote); | |
987 diag_own_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID( | |
988 DiagnosticsEngine::Note, kOwnPtrToGCManagedClassNote); | |
989 diag_stack_allocated_field_note_ = diagnostic_.getCustomDiagID( | |
990 DiagnosticsEngine::Note, kStackAllocatedFieldNote); | |
991 diag_member_in_unmanaged_class_note_ = diagnostic_.getCustomDiagID( | |
992 DiagnosticsEngine::Note, kMemberInUnmanagedClassNote); | |
993 diag_part_object_to_gc_derived_class_note_ = diagnostic_.getCustomDiagID( | |
994 DiagnosticsEngine::Note, kPartObjectToGCDerivedClassNote); | |
995 diag_part_object_contains_gc_root_note_ = diagnostic_.getCustomDiagID( | |
996 DiagnosticsEngine::Note, kPartObjectContainsGCRootNote); | |
997 diag_field_contains_gc_root_note_ = diagnostic_.getCustomDiagID( | |
998 DiagnosticsEngine::Note, kFieldContainsGCRootNote); | |
999 diag_finalized_field_note_ = diagnostic_.getCustomDiagID( | |
1000 DiagnosticsEngine::Note, kFinalizedFieldNote); | |
1001 diag_user_declared_destructor_note_ = diagnostic_.getCustomDiagID( | |
1002 DiagnosticsEngine::Note, kUserDeclaredDestructorNote); | |
1003 diag_user_declared_finalizer_note_ = diagnostic_.getCustomDiagID( | |
1004 DiagnosticsEngine::Note, kUserDeclaredFinalizerNote); | |
1005 diag_base_requires_finalization_note_ = diagnostic_.getCustomDiagID( | |
1006 DiagnosticsEngine::Note, kBaseRequiresFinalizationNote); | |
1007 diag_field_requires_finalization_note_ = diagnostic_.getCustomDiagID( | |
1008 DiagnosticsEngine::Note, kFieldRequiresFinalizationNote); | |
1009 diag_overridden_non_virtual_trace_note_ = diagnostic_.getCustomDiagID( | |
1010 DiagnosticsEngine::Note, kOverriddenNonVirtualTraceNote); | |
1011 diag_manual_dispatch_method_note_ = diagnostic_.getCustomDiagID( | |
1012 DiagnosticsEngine::Note, kManualDispatchMethodNote); | |
1013 } | |
1014 | |
1015 void HandleTranslationUnit(ASTContext& context) override { | |
1016 // Don't run the plugin if the compilation unit is already invalid. | |
1017 if (diagnostic_.hasErrorOccurred()) | |
1018 return; | |
1019 | |
1020 CollectVisitor visitor; | |
1021 visitor.TraverseDecl(context.getTranslationUnitDecl()); | |
1022 | |
1023 if (options_.dump_graph) { | |
1024 std::error_code err; | |
1025 // TODO: Make createDefaultOutputFile or a shorter createOutputFile work. | |
1026 json_ = JsonWriter::from(instance_.createOutputFile( | |
1027 "", // OutputPath | |
1028 err, // Errors | |
1029 true, // Binary | |
1030 true, // RemoveFileOnSignal | |
1031 instance_.getFrontendOpts().OutputFile, // BaseInput | |
1032 "graph.json", // Extension | |
1033 false, // UseTemporary | |
1034 false, // CreateMissingDirectories | |
1035 0, // ResultPathName | |
1036 0)); // TempPathName | |
1037 if (!err && json_) { | |
1038 json_->OpenList(); | |
1039 } else { | |
1040 json_ = 0; | |
1041 llvm::errs() | |
1042 << "[blink-gc] " | |
1043 << "Failed to create an output file for the object graph.\n"; | |
1044 } | |
1045 } | |
1046 | |
1047 for (RecordVector::iterator it = visitor.record_decls().begin(); | |
1048 it != visitor.record_decls().end(); | |
1049 ++it) { | |
1050 CheckRecord(cache_.Lookup(*it)); | |
1051 } | |
1052 | |
1053 for (MethodVector::iterator it = visitor.trace_decls().begin(); | |
1054 it != visitor.trace_decls().end(); | |
1055 ++it) { | |
1056 CheckTracingMethod(*it); | |
1057 } | |
1058 | |
1059 if (json_) { | |
1060 json_->CloseList(); | |
1061 delete json_; | |
1062 json_ = 0; | |
1063 } | |
1064 } | |
1065 | |
1066 // Main entry for checking a record declaration. | |
1067 void CheckRecord(RecordInfo* info) { | |
1068 if (IsIgnored(info)) | |
1069 return; | |
1070 | |
1071 CXXRecordDecl* record = info->record(); | |
1072 | |
1073 // TODO: what should we do to check unions? | |
1074 if (record->isUnion()) | |
1075 return; | |
1076 | |
1077 // If this is the primary template declaration, check its specializations. | |
1078 if (record->isThisDeclarationADefinition() && | |
1079 record->getDescribedClassTemplate()) { | |
1080 ClassTemplateDecl* tmpl = record->getDescribedClassTemplate(); | |
1081 for (ClassTemplateDecl::spec_iterator it = tmpl->spec_begin(); | |
1082 it != tmpl->spec_end(); | |
1083 ++it) { | |
1084 CheckClass(cache_.Lookup(*it)); | |
1085 } | |
1086 return; | |
1087 } | |
1088 | |
1089 CheckClass(info); | |
1090 } | |
1091 | |
1092 // Check a class-like object (eg, class, specialization, instantiation). | |
1093 void CheckClass(RecordInfo* info) { | |
1094 if (!info) | |
1095 return; | |
1096 | |
1097 // Check consistency of stack-allocated hierarchies. | |
1098 if (info->IsStackAllocated()) { | |
1099 for (RecordInfo::Bases::iterator it = info->GetBases().begin(); | |
1100 it != info->GetBases().end(); | |
1101 ++it) { | |
1102 if (!it->second.info()->IsStackAllocated()) | |
1103 ReportDerivesNonStackAllocated(info, &it->second); | |
1104 } | |
1105 } | |
1106 | |
1107 if (CXXMethodDecl* trace = info->GetTraceMethod()) { | |
1108 if (trace->isPure()) | |
1109 ReportClassDeclaresPureVirtualTrace(info, trace); | |
1110 } else if (info->RequiresTraceMethod()) { | |
1111 ReportClassRequiresTraceMethod(info); | |
1112 } | |
1113 | |
1114 // Check polymorphic classes that are GC-derived or have a trace method. | |
1115 if (info->record()->hasDefinition() && info->record()->isPolymorphic()) { | |
1116 // TODO: Check classes that inherit a trace method. | |
1117 CXXMethodDecl* trace = info->GetTraceMethod(); | |
1118 if (trace || info->IsGCDerived()) | |
1119 CheckPolymorphicClass(info, trace); | |
1120 } | |
1121 | |
1122 { | |
1123 CheckFieldsVisitor visitor(options_); | |
1124 if (visitor.ContainsInvalidFields(info)) | |
1125 ReportClassContainsInvalidFields(info, &visitor.invalid_fields()); | |
1126 } | |
1127 | |
1128 if (info->IsGCDerived()) { | |
1129 | |
1130 if (!info->IsGCMixin()) { | |
1131 CheckLeftMostDerived(info); | |
1132 CheckDispatch(info); | |
1133 if (CXXMethodDecl* newop = info->DeclaresNewOperator()) | |
1134 if (!Config::IsIgnoreAnnotated(newop)) | |
1135 ReportClassOverridesNew(info, newop); | |
1136 if (info->IsGCMixinInstance()) { | |
1137 // Require that declared GCMixin implementations | |
1138 // also provide a trace() override. | |
1139 if (info->DeclaresGCMixinMethods() | |
1140 && !info->DeclaresLocalTraceMethod()) | |
1141 ReportClassMustDeclareGCMixinTraceMethod(info); | |
1142 } | |
1143 } | |
1144 | |
1145 { | |
1146 CheckGCRootsVisitor visitor; | |
1147 if (visitor.ContainsGCRoots(info)) | |
1148 ReportClassContainsGCRoots(info, &visitor.gc_roots()); | |
1149 } | |
1150 | |
1151 if (info->NeedsFinalization()) | |
1152 CheckFinalization(info); | |
1153 | |
1154 if (options_.warn_unneeded_finalizer && info->IsGCFinalized()) | |
1155 CheckUnneededFinalization(info); | |
1156 } | |
1157 | |
1158 DumpClass(info); | |
1159 } | |
1160 | |
1161 CXXRecordDecl* GetDependentTemplatedDecl(const Type& type) { | |
1162 const TemplateSpecializationType* tmpl_type = | |
1163 type.getAs<TemplateSpecializationType>(); | |
1164 if (!tmpl_type) | |
1165 return 0; | |
1166 | |
1167 TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl(); | |
1168 if (!tmpl_decl) | |
1169 return 0; | |
1170 | |
1171 return dyn_cast<CXXRecordDecl>(tmpl_decl->getTemplatedDecl()); | |
1172 } | |
1173 | |
1174 // The GC infrastructure assumes that if the vtable of a polymorphic | |
1175 // base-class is not initialized for a given object (ie, it is partially | |
1176 // initialized) then the object does not need to be traced. Thus, we must | |
1177 // ensure that any polymorphic class with a trace method does not have any | |
1178 // traceable fields that are initialized before we are sure that the vtable | |
1179 // and the trace method are both defined. There are two cases that need to | |
1180 // hold to satisfy that assumption: | |
1181 // | |
1182 // 1. If trace is virtual, then it must be defined in the left-most base. | |
1183 // This ensures that if the vtable is initialized then it contains a pointer | |
1184 // to the trace method. | |
1185 // | |
1186 // 2. If trace is non-virtual, then the trace method is defined and we must | |
1187 // ensure that the left-most base defines a vtable. This ensures that the | |
1188 // first thing to be initialized when constructing the object is the vtable | |
1189 // itself. | |
1190 void CheckPolymorphicClass(RecordInfo* info, CXXMethodDecl* trace) { | |
1191 CXXRecordDecl* left_most = info->record(); | |
1192 CXXRecordDecl::base_class_iterator it = left_most->bases_begin(); | |
1193 CXXRecordDecl* left_most_base = 0; | |
1194 while (it != left_most->bases_end()) { | |
1195 left_most_base = it->getType()->getAsCXXRecordDecl(); | |
1196 if (!left_most_base && it->getType()->isDependentType()) | |
1197 left_most_base = RecordInfo::GetDependentTemplatedDecl(*it->getType()); | |
1198 | |
1199 // TODO: Find a way to correctly check actual instantiations | |
1200 // for dependent types. The escape below will be hit, eg, when | |
1201 // we have a primary template with no definition and | |
1202 // specializations for each case (such as SupplementBase) in | |
1203 // which case we don't succeed in checking the required | |
1204 // properties. | |
1205 if (!left_most_base || !left_most_base->hasDefinition()) | |
1206 return; | |
1207 | |
1208 StringRef name = left_most_base->getName(); | |
1209 // We know GCMixin base defines virtual trace. | |
1210 if (Config::IsGCMixinBase(name)) | |
1211 return; | |
1212 | |
1213 // Stop with the left-most prior to a safe polymorphic base (a safe base | |
1214 // is non-polymorphic and contains no fields). | |
1215 if (Config::IsSafePolymorphicBase(name)) | |
1216 break; | |
1217 | |
1218 left_most = left_most_base; | |
1219 it = left_most->bases_begin(); | |
1220 } | |
1221 | |
1222 if (RecordInfo* left_most_info = cache_.Lookup(left_most)) { | |
1223 | |
1224 // Check condition (1): | |
1225 if (trace && trace->isVirtual()) { | |
1226 if (CXXMethodDecl* trace = left_most_info->GetTraceMethod()) { | |
1227 if (trace->isVirtual()) | |
1228 return; | |
1229 } | |
1230 ReportBaseClassMustDeclareVirtualTrace(info, left_most); | |
1231 return; | |
1232 } | |
1233 | |
1234 // Check condition (2): | |
1235 if (DeclaresVirtualMethods(left_most)) | |
1236 return; | |
1237 if (left_most_base) { | |
1238 // Get the base next to the "safe polymorphic base" | |
1239 if (it != left_most->bases_end()) | |
1240 ++it; | |
1241 if (it != left_most->bases_end()) { | |
1242 if (CXXRecordDecl* next_base = it->getType()->getAsCXXRecordDecl()) { | |
1243 if (CXXRecordDecl* next_left_most = GetLeftMostBase(next_base)) { | |
1244 if (DeclaresVirtualMethods(next_left_most)) | |
1245 return; | |
1246 ReportLeftMostBaseMustBePolymorphic(info, next_left_most); | |
1247 return; | |
1248 } | |
1249 } | |
1250 } | |
1251 } | |
1252 ReportLeftMostBaseMustBePolymorphic(info, left_most); | |
1253 } | |
1254 } | |
1255 | |
1256 CXXRecordDecl* GetLeftMostBase(CXXRecordDecl* left_most) { | |
1257 CXXRecordDecl::base_class_iterator it = left_most->bases_begin(); | |
1258 while (it != left_most->bases_end()) { | |
1259 if (it->getType()->isDependentType()) | |
1260 left_most = RecordInfo::GetDependentTemplatedDecl(*it->getType()); | |
1261 else | |
1262 left_most = it->getType()->getAsCXXRecordDecl(); | |
1263 if (!left_most || !left_most->hasDefinition()) | |
1264 return 0; | |
1265 it = left_most->bases_begin(); | |
1266 } | |
1267 return left_most; | |
1268 } | |
1269 | |
1270 bool DeclaresVirtualMethods(CXXRecordDecl* decl) { | |
1271 CXXRecordDecl::method_iterator it = decl->method_begin(); | |
1272 for (; it != decl->method_end(); ++it) | |
1273 if (it->isVirtual() && !it->isPure()) | |
1274 return true; | |
1275 return false; | |
1276 } | |
1277 | |
1278 void CheckLeftMostDerived(RecordInfo* info) { | |
1279 CXXRecordDecl* left_most = GetLeftMostBase(info->record()); | |
1280 if (!left_most) | |
1281 return; | |
1282 if (!Config::IsGCBase(left_most->getName())) | |
1283 ReportClassMustLeftMostlyDeriveGC(info); | |
1284 } | |
1285 | |
1286 void CheckDispatch(RecordInfo* info) { | |
1287 bool finalized = info->IsGCFinalized(); | |
1288 CXXMethodDecl* trace_dispatch = info->GetTraceDispatchMethod(); | |
1289 CXXMethodDecl* finalize_dispatch = info->GetFinalizeDispatchMethod(); | |
1290 if (!trace_dispatch && !finalize_dispatch) | |
1291 return; | |
1292 | |
1293 CXXRecordDecl* base = trace_dispatch ? trace_dispatch->getParent() | |
1294 : finalize_dispatch->getParent(); | |
1295 | |
1296 // Check that dispatch methods are defined at the base. | |
1297 if (base == info->record()) { | |
1298 if (!trace_dispatch) | |
1299 ReportMissingTraceDispatchMethod(info); | |
1300 if (finalized && !finalize_dispatch) | |
1301 ReportMissingFinalizeDispatchMethod(info); | |
1302 if (!finalized && finalize_dispatch) { | |
1303 ReportClassRequiresFinalization(info); | |
1304 NoteUserDeclaredFinalizer(finalize_dispatch); | |
1305 } | |
1306 } | |
1307 | |
1308 // Check that classes implementing manual dispatch do not have vtables. | |
1309 if (info->record()->isPolymorphic()) | |
1310 ReportVirtualAndManualDispatch( | |
1311 info, trace_dispatch ? trace_dispatch : finalize_dispatch); | |
1312 | |
1313 // If this is a non-abstract class, check that it is dispatched to. | |
1314 // TODO: Create a global variant of this local check. We can only check if | |
1315 // the dispatch body is known in this compilation unit. | |
1316 if (info->IsConsideredAbstract()) | |
1317 return; | |
1318 | |
1319 const FunctionDecl* defn; | |
1320 | |
1321 if (trace_dispatch && trace_dispatch->isDefined(defn)) { | |
1322 CheckDispatchVisitor visitor(info); | |
1323 visitor.TraverseStmt(defn->getBody()); | |
1324 if (!visitor.dispatched_to_receiver()) | |
1325 ReportMissingTraceDispatch(defn, info); | |
1326 } | |
1327 | |
1328 if (finalized && finalize_dispatch && finalize_dispatch->isDefined(defn)) { | |
1329 CheckDispatchVisitor visitor(info); | |
1330 visitor.TraverseStmt(defn->getBody()); | |
1331 if (!visitor.dispatched_to_receiver()) | |
1332 ReportMissingFinalizeDispatch(defn, info); | |
1333 } | |
1334 } | |
1335 | |
1336 // TODO: Should we collect destructors similar to trace methods? | |
1337 void CheckFinalization(RecordInfo* info) { | |
1338 CXXDestructorDecl* dtor = info->record()->getDestructor(); | |
1339 | |
1340 // For finalized classes, check the finalization method if possible. | |
1341 if (info->IsGCFinalized()) { | |
1342 if (dtor && dtor->hasBody()) { | |
1343 CheckFinalizerVisitor visitor(&cache_); | |
1344 visitor.TraverseCXXMethodDecl(dtor); | |
1345 if (!visitor.finalized_fields().empty()) { | |
1346 ReportFinalizerAccessesFinalizedFields( | |
1347 dtor, &visitor.finalized_fields()); | |
1348 } | |
1349 } | |
1350 return; | |
1351 } | |
1352 | |
1353 // Don't require finalization of a mixin that has not yet been "mixed in". | |
1354 if (info->IsGCMixin()) | |
1355 return; | |
1356 | |
1357 // Report the finalization error, and proceed to print possible causes for | |
1358 // the finalization requirement. | |
1359 ReportClassRequiresFinalization(info); | |
1360 | |
1361 if (dtor && dtor->isUserProvided()) | |
1362 NoteUserDeclaredDestructor(dtor); | |
1363 | |
1364 for (RecordInfo::Bases::iterator it = info->GetBases().begin(); | |
1365 it != info->GetBases().end(); | |
1366 ++it) { | |
1367 if (it->second.info()->NeedsFinalization()) | |
1368 NoteBaseRequiresFinalization(&it->second); | |
1369 } | |
1370 | |
1371 for (RecordInfo::Fields::iterator it = info->GetFields().begin(); | |
1372 it != info->GetFields().end(); | |
1373 ++it) { | |
1374 if (it->second.edge()->NeedsFinalization()) | |
1375 NoteField(&it->second, diag_field_requires_finalization_note_); | |
1376 } | |
1377 } | |
1378 | |
1379 void CheckUnneededFinalization(RecordInfo* info) { | |
1380 if (!HasNonEmptyFinalizer(info)) | |
1381 ReportClassDoesNotRequireFinalization(info); | |
1382 } | |
1383 | |
1384 bool HasNonEmptyFinalizer(RecordInfo* info) { | |
1385 CXXDestructorDecl* dtor = info->record()->getDestructor(); | |
1386 if (dtor && dtor->isUserProvided()) { | |
1387 if (!dtor->hasBody() || !EmptyStmtVisitor::isEmpty(dtor->getBody())) | |
1388 return true; | |
1389 } | |
1390 for (RecordInfo::Bases::iterator it = info->GetBases().begin(); | |
1391 it != info->GetBases().end(); | |
1392 ++it) { | |
1393 if (HasNonEmptyFinalizer(it->second.info())) | |
1394 return true; | |
1395 } | |
1396 for (RecordInfo::Fields::iterator it = info->GetFields().begin(); | |
1397 it != info->GetFields().end(); | |
1398 ++it) { | |
1399 if (it->second.edge()->NeedsFinalization()) | |
1400 return true; | |
1401 } | |
1402 return false; | |
1403 } | |
1404 | |
1405 // This is the main entry for tracing method definitions. | |
1406 void CheckTracingMethod(CXXMethodDecl* method) { | |
1407 RecordInfo* parent = cache_.Lookup(method->getParent()); | |
1408 if (IsIgnored(parent)) | |
1409 return; | |
1410 | |
1411 // Check templated tracing methods by checking the template instantiations. | |
1412 // Specialized templates are handled as ordinary classes. | |
1413 if (ClassTemplateDecl* tmpl = | |
1414 parent->record()->getDescribedClassTemplate()) { | |
1415 for (ClassTemplateDecl::spec_iterator it = tmpl->spec_begin(); | |
1416 it != tmpl->spec_end(); | |
1417 ++it) { | |
1418 // Check trace using each template instantiation as the holder. | |
1419 if (IsTemplateInstantiation(*it)) | |
1420 CheckTraceOrDispatchMethod(cache_.Lookup(*it), method); | |
1421 } | |
1422 return; | |
1423 } | |
1424 | |
1425 CheckTraceOrDispatchMethod(parent, method); | |
1426 } | |
1427 | |
1428 // Determine what type of tracing method this is (dispatch or trace). | |
1429 void CheckTraceOrDispatchMethod(RecordInfo* parent, CXXMethodDecl* method) { | |
1430 Config::TraceMethodType trace_type = Config::GetTraceMethodType(method); | |
1431 if (trace_type == Config::TRACE_AFTER_DISPATCH_METHOD || | |
1432 trace_type == Config::TRACE_AFTER_DISPATCH_IMPL_METHOD || | |
1433 !parent->GetTraceDispatchMethod()) { | |
1434 CheckTraceMethod(parent, method, trace_type); | |
1435 } | |
1436 // Dispatch methods are checked when we identify subclasses. | |
1437 } | |
1438 | |
1439 // Check an actual trace method. | |
1440 void CheckTraceMethod(RecordInfo* parent, | |
1441 CXXMethodDecl* trace, | |
1442 Config::TraceMethodType trace_type) { | |
1443 // A trace method must not override any non-virtual trace methods. | |
1444 if (trace_type == Config::TRACE_METHOD) { | |
1445 for (RecordInfo::Bases::iterator it = parent->GetBases().begin(); | |
1446 it != parent->GetBases().end(); | |
1447 ++it) { | |
1448 RecordInfo* base = it->second.info(); | |
1449 if (CXXMethodDecl* other = base->InheritsNonVirtualTrace()) | |
1450 ReportOverriddenNonVirtualTrace(parent, trace, other); | |
1451 } | |
1452 } | |
1453 | |
1454 CheckTraceVisitor visitor(trace, parent, &cache_); | |
1455 visitor.TraverseCXXMethodDecl(trace); | |
1456 | |
1457 // Skip reporting if this trace method just delegates to the traceImpl | |
1458 // (or traceAfterDispatchImpl) method; any problems will be reported when | |
1459 // CheckTraceMethod is called on the traceImpl method itself. | |
1460 if (visitor.delegates_to_traceimpl()) | |
1461 return; | |
1462 | |
1463 for (RecordInfo::Bases::iterator it = parent->GetBases().begin(); | |
1464 it != parent->GetBases().end(); | |
1465 ++it) { | |
1466 if (!it->second.IsProperlyTraced()) | |
1467 ReportBaseRequiresTracing(parent, trace, it->first); | |
1468 } | |
1469 | |
1470 for (RecordInfo::Fields::iterator it = parent->GetFields().begin(); | |
1471 it != parent->GetFields().end(); | |
1472 ++it) { | |
1473 if (!it->second.IsProperlyTraced()) { | |
1474 // Discontinue once an untraced-field error is found. | |
1475 ReportFieldsRequireTracing(parent, trace); | |
1476 break; | |
1477 } | |
1478 } | |
1479 } | |
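 | |
  // [Editor's note: illustrative sketch, not part of this file. Assumes | |
  // Blink's GarbageCollected<>, Member<> and Visitor types.] A trace | |
  // method that fails the checks above might look like: | |
  // | |
  //   class Base : public GarbageCollected<Base> { /* ... */ }; | |
  //   class Derived : public Base { | |
  //    public: | |
  //     void trace(Visitor* visitor) { | |
  //       visitor->trace(m_first); | |
  //       // Missing Base::trace(visitor) -> ReportBaseRequiresTracing. | |
  //     } | |
  //    private: | |
  //     Member<Other> m_first; | |
  //     Member<Other> m_second;  // untraced -> ReportFieldsRequireTracing | |
  //   }; | |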
1480 | |
1481 void DumpClass(RecordInfo* info) { | |
1482 if (!json_) | |
1483 return; | |
1484 | |
1485 json_->OpenObject(); | |
1486 json_->Write("name", info->record()->getQualifiedNameAsString()); | |
1487 json_->Write("loc", GetLocString(info->record()->getLocStart())); | |
1488 json_->CloseObject(); | |
1489 | |
1490 class DumpEdgeVisitor : public RecursiveEdgeVisitor { | |
1491 public: | |
1492 DumpEdgeVisitor(JsonWriter* json) : json_(json) {} | |
1493 void DumpEdge(RecordInfo* src, | |
1494 RecordInfo* dst, | |
1495 const string& lbl, | |
1496 const Edge::LivenessKind& kind, | |
1497 const string& loc) { | |
1498 json_->OpenObject(); | |
1499 json_->Write("src", src->record()->getQualifiedNameAsString()); | |
1500 json_->Write("dst", dst->record()->getQualifiedNameAsString()); | |
1501 json_->Write("lbl", lbl); | |
1502 json_->Write("kind", kind); | |
1503 json_->Write("loc", loc); | |
1504 json_->Write("ptr", | |
1505 !Parent() ? "val" : | |
1506 Parent()->IsRawPtr() ? "raw" : | |
1507 Parent()->IsRefPtr() ? "ref" : | |
1508 Parent()->IsOwnPtr() ? "own" : | |
1509 (Parent()->IsMember() || | |
1510 Parent()->IsWeakMember()) ? "mem" : | |
1511 "val"); | |
1512 json_->CloseObject(); | |
1513 } | |
1514 | |
1515 void DumpField(RecordInfo* src, FieldPoint* point, const string& loc) { | |
1516 src_ = src; | |
1517 point_ = point; | |
1518 loc_ = loc; | |
1519 point_->edge()->Accept(this); | |
1520 } | |
1521 | |
1522 void AtValue(Value* e) override { | |
1523 // The liveness kind of a path from the point to this value | |
1524 // is given by the innermost pointer edge that is not strong. | |
1525 Edge::LivenessKind kind = Edge::kStrong; | |
1526 if (Config::IsIgnoreCycleAnnotated(point_->field())) { | |
1527 kind = Edge::kWeak; | |
1528 } else { | |
1529 for (Context::iterator it = context().begin(); | |
1530 it != context().end(); | |
1531 ++it) { | |
1532 Edge::LivenessKind pointer_kind = (*it)->Kind(); | |
1533 if (pointer_kind != Edge::kStrong) { | |
1534 kind = pointer_kind; | |
1535 break; | |
1536 } | |
1537 } | |
1538 } | |
1539 DumpEdge( | |
1540 src_, e->value(), point_->field()->getNameAsString(), kind, loc_); | |
1541 } | |
1542 | |
1543 private: | |
1544 JsonWriter* json_; | |
1545 RecordInfo* src_; | |
1546 FieldPoint* point_; | |
1547 string loc_; | |
1548 }; | |
1549 | |
1550 DumpEdgeVisitor visitor(json_); | |
1551 | |
1552 RecordInfo::Bases& bases = info->GetBases(); | |
1553 for (RecordInfo::Bases::iterator it = bases.begin(); | |
1554 it != bases.end(); | |
1555 ++it) { | |
1556 visitor.DumpEdge(info, | |
1557 it->second.info(), | |
1558 "<super>", | |
1559 Edge::kStrong, | |
1560 GetLocString(it->second.spec().getLocStart())); | |
1561 } | |
1562 | |
1563 RecordInfo::Fields& fields = info->GetFields(); | |
1564 for (RecordInfo::Fields::iterator it = fields.begin(); | |
1565 it != fields.end(); | |
1566 ++it) { | |
1567 visitor.DumpField(info, | |
1568 &it->second, | |
1569 GetLocString(it->second.field()->getLocStart())); | |
1570 } | |
1571 } | |
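 | |
  // [Editor's note: illustrative only; the key names are taken from the | |
  // Write() calls above, the values are hypothetical.] With dump-graph | |
  // enabled, each class and edge is emitted as a small JSON object, roughly: | |
  // | |
  //   { "name": "blink::Derived", "loc": "Derived.h:12:3" } | |
  //   { "src": "blink::Derived", "dst": "blink::Other", "lbl": "m_first", | |
  //     "kind": <liveness kind>, "loc": "Derived.h:20:5", "ptr": "mem" } | |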
1572 | |
1573 // Returns the level (warning or error) to report at, based on the current | |
1574 // handling of -Werror (warnings treated as errors). | |
1574 DiagnosticsEngine::Level getErrorLevel() { | |
1575 return diagnostic_.getWarningsAsErrors() ? DiagnosticsEngine::Error | |
1576 : DiagnosticsEngine::Warning; | |
1577 } | |
1578 | |
1579 const string GetLocString(SourceLocation loc) { | |
1580 const SourceManager& source_manager = instance_.getSourceManager(); | |
1581 PresumedLoc ploc = source_manager.getPresumedLoc(loc); | |
1582 if (ploc.isInvalid()) | |
1583 return ""; | |
1584 string loc_str; | |
1585 llvm::raw_string_ostream OS(loc_str); | |
1586 OS << ploc.getFilename() | |
1587 << ":" << ploc.getLine() | |
1588 << ":" << ploc.getColumn(); | |
1589 return OS.str(); | |
1590 } | |
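 | |
  // [Editor's note: illustrative only.] GetLocString renders a presumed | |
  // location as "<file>:<line>:<column>", e.g. "Derived.h:12:3". | |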
1591 | |
1592 bool IsIgnored(RecordInfo* record) { | |
1593 return !record || | |
1594 !InCheckedNamespace(record) || | |
1595 IsIgnoredClass(record) || | |
1596 InIgnoredDirectory(record); | |
1597 } | |
1598 | |
1599 bool IsIgnoredClass(RecordInfo* info) { | |
1600 // Ignore any class prefixed by SameSizeAs. These are used in | |
1601 // Blink to verify class sizes and don't need checking. | |
1602 const string SameSizeAs = "SameSizeAs"; | |
1603 if (info->name().compare(0, SameSizeAs.size(), SameSizeAs) == 0) | |
1604 return true; | |
1605 return options_.ignored_classes.find(info->name()) != | |
1606 options_.ignored_classes.end(); | |
1607 } | |
1608 | |
1609 bool InIgnoredDirectory(RecordInfo* info) { | |
1610 string filename; | |
1611 if (!GetFilename(info->record()->getLocStart(), &filename)) | |
1612 return false; // TODO: should we ignore non-existing file locations? | |
1613 std::vector<string>::iterator it = options_.ignored_directories.begin(); | |
1614 for (; it != options_.ignored_directories.end(); ++it) | |
1615 if (filename.find(*it) != string::npos) | |
1616 return true; | |
1617 return false; | |
1618 } | |
1619 | |
1620 bool InCheckedNamespace(RecordInfo* info) { | |
1621 if (!info) | |
1622 return false; | |
1623 for (DeclContext* context = info->record()->getDeclContext(); | |
1624 !context->isTranslationUnit(); | |
1625 context = context->getParent()) { | |
1626 if (NamespaceDecl* decl = dyn_cast<NamespaceDecl>(context)) { | |
1627 if (options_.checked_namespaces.find(decl->getNameAsString()) != | |
1628 options_.checked_namespaces.end()) { | |
1629 return true; | |
1630 } | |
1631 } | |
1632 } | |
1633 return false; | |
1634 } | |
1635 | |
1636 bool GetFilename(SourceLocation loc, string* filename) { | |
1637 const SourceManager& source_manager = instance_.getSourceManager(); | |
1638 SourceLocation spelling_location = source_manager.getSpellingLoc(loc); | |
1639 PresumedLoc ploc = source_manager.getPresumedLoc(spelling_location); | |
1640 if (ploc.isInvalid()) { | |
1641 // If we're in an invalid location, we're looking at things that aren't | |
1642 // actually stated in the source. | |
1643 return false; | |
1644 } | |
1645 *filename = ploc.getFilename(); | |
1646 return true; | |
1647 } | |
1648 | |
1649 void ReportClassMustLeftMostlyDeriveGC(RecordInfo* info) { | |
1650 SourceLocation loc = info->record()->getInnerLocStart(); | |
1651 SourceManager& manager = instance_.getSourceManager(); | |
1652 FullSourceLoc full_loc(loc, manager); | |
1653 diagnostic_.Report(full_loc, diag_class_must_left_mostly_derive_gc_) | |
1654 << info->record(); | |
1655 } | |
1656 | |
1657 void ReportClassRequiresTraceMethod(RecordInfo* info) { | |
1658 SourceLocation loc = info->record()->getInnerLocStart(); | |
1659 SourceManager& manager = instance_.getSourceManager(); | |
1660 FullSourceLoc full_loc(loc, manager); | |
1661 diagnostic_.Report(full_loc, diag_class_requires_trace_method_) | |
1662 << info->record(); | |
1663 | |
1664 for (RecordInfo::Bases::iterator it = info->GetBases().begin(); | |
1665 it != info->GetBases().end(); | |
1666 ++it) { | |
1667 if (it->second.NeedsTracing().IsNeeded()) | |
1668 NoteBaseRequiresTracing(&it->second); | |
1669 } | |
1670 | |
1671 for (RecordInfo::Fields::iterator it = info->GetFields().begin(); | |
1672 it != info->GetFields().end(); | |
1673 ++it) { | |
1674 if (!it->second.IsProperlyTraced()) | |
1675 NoteFieldRequiresTracing(info, it->first); | |
1676 } | |
1677 } | |
1678 | |
1679 void ReportBaseRequiresTracing(RecordInfo* derived, | |
1680 CXXMethodDecl* trace, | |
1681 CXXRecordDecl* base) { | |
1682 SourceLocation loc = trace->getLocStart(); | |
1683 SourceManager& manager = instance_.getSourceManager(); | |
1684 FullSourceLoc full_loc(loc, manager); | |
1685 diagnostic_.Report(full_loc, diag_base_requires_tracing_) | |
1686 << base << derived->record(); | |
1687 } | |
1688 | |
1689 void ReportFieldsRequireTracing(RecordInfo* info, CXXMethodDecl* trace) { | |
1690 SourceLocation loc = trace->getLocStart(); | |
1691 SourceManager& manager = instance_.getSourceManager(); | |
1692 FullSourceLoc full_loc(loc, manager); | |
1693 diagnostic_.Report(full_loc, diag_fields_require_tracing_) | |
1694 << info->record(); | |
1695 for (RecordInfo::Fields::iterator it = info->GetFields().begin(); | |
1696 it != info->GetFields().end(); | |
1697 ++it) { | |
1698 if (!it->second.IsProperlyTraced()) | |
1699 NoteFieldRequiresTracing(info, it->first); | |
1700 } | |
1701 } | |
1702 | |
1703 void ReportClassContainsInvalidFields(RecordInfo* info, | |
1704 CheckFieldsVisitor::Errors* errors) { | |
1705 SourceLocation loc = info->record()->getLocStart(); | |
1706 SourceManager& manager = instance_.getSourceManager(); | |
1707 FullSourceLoc full_loc(loc, manager); | |
1708 bool only_warnings = options_.warn_raw_ptr; | |
1709 for (CheckFieldsVisitor::Errors::iterator it = errors->begin(); | |
1710 only_warnings && it != errors->end(); | |
1711 ++it) { | |
1712 if (it->second != CheckFieldsVisitor::kRawPtrToGCManagedWarning) | |
1713 only_warnings = false; | |
1714 } | |
1715 diagnostic_.Report(full_loc, only_warnings ? | |
1716 diag_class_contains_invalid_fields_warning_ : | |
1717 diag_class_contains_invalid_fields_) | |
1718 << info->record(); | |
1719 for (CheckFieldsVisitor::Errors::iterator it = errors->begin(); | |
1720 it != errors->end(); | |
1721 ++it) { | |
1722 unsigned error; | |
1723 if (it->second == CheckFieldsVisitor::kRawPtrToGCManaged || | |
1724 it->second == CheckFieldsVisitor::kRawPtrToGCManagedWarning) { | |
1725 error = diag_raw_ptr_to_gc_managed_class_note_; | |
1726 } else if (it->second == CheckFieldsVisitor::kRefPtrToGCManaged) { | |
1727 error = diag_ref_ptr_to_gc_managed_class_note_; | |
1728 } else if (it->second == CheckFieldsVisitor::kOwnPtrToGCManaged) { | |
1729 error = diag_own_ptr_to_gc_managed_class_note_; | |
1730 } else if (it->second == CheckFieldsVisitor::kMemberInUnmanaged) { | |
1731 error = diag_member_in_unmanaged_class_note_; | |
1732 } else if (it->second == CheckFieldsVisitor::kPtrFromHeapToStack) { | |
1733 error = diag_stack_allocated_field_note_; | |
1734 } else if (it->second == CheckFieldsVisitor::kGCDerivedPartObject) { | |
1735 error = diag_part_object_to_gc_derived_class_note_; | |
1736 } else { | |
1737 assert(false && "Unknown field error"); | |
1738 } | |
1739 NoteField(it->first, error); | |
1740 } | |
1741 } | |
1742 | |
1743 void ReportClassContainsGCRoots(RecordInfo* info, | |
1744 CheckGCRootsVisitor::Errors* errors) { | |
1745 SourceLocation loc = info->record()->getLocStart(); | |
1746 SourceManager& manager = instance_.getSourceManager(); | |
1747 FullSourceLoc full_loc(loc, manager); | |
1748 for (CheckGCRootsVisitor::Errors::iterator it = errors->begin(); | |
1749 it != errors->end(); | |
1750 ++it) { | |
1751 CheckGCRootsVisitor::RootPath::iterator path = it->begin(); | |
1752 FieldPoint* point = *path; | |
1753 diagnostic_.Report(full_loc, diag_class_contains_gc_root_) | |
1754 << info->record() << point->field(); | |
1755 while (++path != it->end()) { | |
1756 NotePartObjectContainsGCRoot(point); | |
1757 point = *path; | |
1758 } | |
1759 NoteFieldContainsGCRoot(point); | |
1760 } | |
1761 } | |
1762 | |
1763 void ReportFinalizerAccessesFinalizedFields( | |
1764 CXXMethodDecl* dtor, | |
1765 CheckFinalizerVisitor::Errors* fields) { | |
1766 for (CheckFinalizerVisitor::Errors::iterator it = fields->begin(); | |
1767 it != fields->end(); | |
1768 ++it) { | |
1769 SourceLocation loc = it->first->getLocStart(); | |
1770 SourceManager& manager = instance_.getSourceManager(); | |
1771 FullSourceLoc full_loc(loc, manager); | |
1772 diagnostic_.Report(full_loc, diag_finalizer_accesses_finalized_field_) | |
1773 << dtor << it->second->field(); | |
1774 NoteField(it->second, diag_finalized_field_note_); | |
1775 } | |
1776 } | |
1777 | |
1778 void ReportClassRequiresFinalization(RecordInfo* info) { | |
1779 SourceLocation loc = info->record()->getInnerLocStart(); | |
1780 SourceManager& manager = instance_.getSourceManager(); | |
1781 FullSourceLoc full_loc(loc, manager); | |
1782 diagnostic_.Report(full_loc, diag_class_requires_finalization_) | |
1783 << info->record(); | |
1784 } | |
1785 | |
1786 void ReportClassDoesNotRequireFinalization(RecordInfo* info) { | |
1787 SourceLocation loc = info->record()->getInnerLocStart(); | |
1788 SourceManager& manager = instance_.getSourceManager(); | |
1789 FullSourceLoc full_loc(loc, manager); | |
1790 diagnostic_.Report(full_loc, diag_class_does_not_require_finalization_) | |
1791 << info->record(); | |
1792 } | |
1793 | |
1794 void ReportClassMustDeclareGCMixinTraceMethod(RecordInfo* info) { | |
1795 SourceLocation loc = info->record()->getInnerLocStart(); | |
1796 SourceManager& manager = instance_.getSourceManager(); | |
1797 FullSourceLoc full_loc(loc, manager); | |
1798 diagnostic_.Report( | |
1799 full_loc, diag_class_must_declare_gc_mixin_trace_method_) | |
1800 << info->record(); | |
1801 } | |
1802 | |
1803 void ReportOverriddenNonVirtualTrace(RecordInfo* info, | |
1804 CXXMethodDecl* trace, | |
1805 CXXMethodDecl* overridden) { | |
1806 SourceLocation loc = trace->getLocStart(); | |
1807 SourceManager& manager = instance_.getSourceManager(); | |
1808 FullSourceLoc full_loc(loc, manager); | |
1809 diagnostic_.Report(full_loc, diag_overridden_non_virtual_trace_) | |
1810 << info->record() << overridden->getParent(); | |
1811 NoteOverriddenNonVirtualTrace(overridden); | |
1812 } | |
1813 | |
1814 void ReportMissingTraceDispatchMethod(RecordInfo* info) { | |
1815 ReportMissingDispatchMethod(info, diag_missing_trace_dispatch_method_); | |
1816 } | |
1817 | |
1818 void ReportMissingFinalizeDispatchMethod(RecordInfo* info) { | |
1819 ReportMissingDispatchMethod(info, diag_missing_finalize_dispatch_method_); | |
1820 } | |
1821 | |
1822 void ReportMissingDispatchMethod(RecordInfo* info, unsigned error) { | |
1823 SourceLocation loc = info->record()->getInnerLocStart(); | |
1824 SourceManager& manager = instance_.getSourceManager(); | |
1825 FullSourceLoc full_loc(loc, manager); | |
1826 diagnostic_.Report(full_loc, error) << info->record(); | |
1827 } | |
1828 | |
1829 void ReportVirtualAndManualDispatch(RecordInfo* info, | |
1830 CXXMethodDecl* dispatch) { | |
1831 SourceLocation loc = info->record()->getInnerLocStart(); | |
1832 SourceManager& manager = instance_.getSourceManager(); | |
1833 FullSourceLoc full_loc(loc, manager); | |
1834 diagnostic_.Report(full_loc, diag_virtual_and_manual_dispatch_) | |
1835 << info->record(); | |
1836 NoteManualDispatchMethod(dispatch); | |
1837 } | |
1838 | |
1839 void ReportMissingTraceDispatch(const FunctionDecl* dispatch, | |
1840 RecordInfo* receiver) { | |
1841 ReportMissingDispatch(dispatch, receiver, diag_missing_trace_dispatch_); | |
1842 } | |
1843 | |
1844 void ReportMissingFinalizeDispatch(const FunctionDecl* dispatch, | |
1845 RecordInfo* receiver) { | |
1846 ReportMissingDispatch(dispatch, receiver, diag_missing_finalize_dispatch_); | |
1847 } | |
1848 | |
1849 void ReportMissingDispatch(const FunctionDecl* dispatch, | |
1850 RecordInfo* receiver, | |
1851 unsigned error) { | |
1852 SourceLocation loc = dispatch->getLocStart(); | |
1853 SourceManager& manager = instance_.getSourceManager(); | |
1854 FullSourceLoc full_loc(loc, manager); | |
1855 diagnostic_.Report(full_loc, error) << receiver->record(); | |
1856 } | |
1857 | |
1858 void ReportDerivesNonStackAllocated(RecordInfo* info, BasePoint* base) { | |
1859 SourceLocation loc = base->spec().getLocStart(); | |
1860 SourceManager& manager = instance_.getSourceManager(); | |
1861 FullSourceLoc full_loc(loc, manager); | |
1862 diagnostic_.Report(full_loc, diag_derives_non_stack_allocated_) | |
1863 << info->record() << base->info()->record(); | |
1864 } | |
1865 | |
1866 void ReportClassOverridesNew(RecordInfo* info, CXXMethodDecl* newop) { | |
1867 SourceLocation loc = newop->getLocStart(); | |
1868 SourceManager& manager = instance_.getSourceManager(); | |
1869 FullSourceLoc full_loc(loc, manager); | |
1870 diagnostic_.Report(full_loc, diag_class_overrides_new_) << info->record(); | |
1871 } | |
1872 | |
1873 void ReportClassDeclaresPureVirtualTrace(RecordInfo* info, | |
1874 CXXMethodDecl* trace) { | |
1875 SourceLocation loc = trace->getLocStart(); | |
1876 SourceManager& manager = instance_.getSourceManager(); | |
1877 FullSourceLoc full_loc(loc, manager); | |
1878 diagnostic_.Report(full_loc, diag_class_declares_pure_virtual_trace_) | |
1879 << info->record(); | |
1880 } | |
1881 | |
1882 void ReportLeftMostBaseMustBePolymorphic(RecordInfo* derived, | |
1883 CXXRecordDecl* base) { | |
1884 SourceLocation loc = base->getLocStart(); | |
1885 SourceManager& manager = instance_.getSourceManager(); | |
1886 FullSourceLoc full_loc(loc, manager); | |
1887 diagnostic_.Report(full_loc, diag_left_most_base_must_be_polymorphic_) | |
1888 << base << derived->record(); | |
1889 } | |
1890 | |
1891 void ReportBaseClassMustDeclareVirtualTrace(RecordInfo* derived, | |
1892 CXXRecordDecl* base) { | |
1893 SourceLocation loc = base->getLocStart(); | |
1894 SourceManager& manager = instance_.getSourceManager(); | |
1895 FullSourceLoc full_loc(loc, manager); | |
1896 diagnostic_.Report(full_loc, diag_base_class_must_declare_virtual_trace_) | |
1897 << base << derived->record(); | |
1898 } | |
1899 | |
1900 void NoteManualDispatchMethod(CXXMethodDecl* dispatch) { | |
1901 SourceLocation loc = dispatch->getLocStart(); | |
1902 SourceManager& manager = instance_.getSourceManager(); | |
1903 FullSourceLoc full_loc(loc, manager); | |
1904 diagnostic_.Report(full_loc, diag_manual_dispatch_method_note_) << dispatch; | |
1905 } | |
1906 | |
1907 void NoteBaseRequiresTracing(BasePoint* base) { | |
1908 SourceLocation loc = base->spec().getLocStart(); | |
1909 SourceManager& manager = instance_.getSourceManager(); | |
1910 FullSourceLoc full_loc(loc, manager); | |
1911 diagnostic_.Report(full_loc, diag_base_requires_tracing_note_) | |
1912 << base->info()->record(); | |
1913 } | |
1914 | |
1915 void NoteFieldRequiresTracing(RecordInfo* holder, FieldDecl* field) { | |
1916 NoteField(field, diag_field_requires_tracing_note_); | |
1917 } | |
1918 | |
1919 void NotePartObjectContainsGCRoot(FieldPoint* point) { | |
1920 FieldDecl* field = point->field(); | |
1921 SourceLocation loc = field->getLocStart(); | |
1922 SourceManager& manager = instance_.getSourceManager(); | |
1923 FullSourceLoc full_loc(loc, manager); | |
1924 diagnostic_.Report(full_loc, diag_part_object_contains_gc_root_note_) | |
1925 << field << field->getParent(); | |
1926 } | |
1927 | |
1928 void NoteFieldContainsGCRoot(FieldPoint* point) { | |
1929 NoteField(point, diag_field_contains_gc_root_note_); | |
1930 } | |
1931 | |
1932 void NoteUserDeclaredDestructor(CXXMethodDecl* dtor) { | |
1933 SourceLocation loc = dtor->getLocStart(); | |
1934 SourceManager& manager = instance_.getSourceManager(); | |
1935 FullSourceLoc full_loc(loc, manager); | |
1936 diagnostic_.Report(full_loc, diag_user_declared_destructor_note_); | |
1937 } | |
1938 | |
1939 void NoteUserDeclaredFinalizer(CXXMethodDecl* dtor) { | |
1940 SourceLocation loc = dtor->getLocStart(); | |
1941 SourceManager& manager = instance_.getSourceManager(); | |
1942 FullSourceLoc full_loc(loc, manager); | |
1943 diagnostic_.Report(full_loc, diag_user_declared_finalizer_note_); | |
1944 } | |
1945 | |
1946 void NoteBaseRequiresFinalization(BasePoint* base) { | |
1947 SourceLocation loc = base->spec().getLocStart(); | |
1948 SourceManager& manager = instance_.getSourceManager(); | |
1949 FullSourceLoc full_loc(loc, manager); | |
1950 diagnostic_.Report(full_loc, diag_base_requires_finalization_note_) | |
1951 << base->info()->record(); | |
1952 } | |
1953 | |
1954 void NoteField(FieldPoint* point, unsigned note) { | |
1955 NoteField(point->field(), note); | |
1956 } | |
1957 | |
1958 void NoteField(FieldDecl* field, unsigned note) { | |
1959 SourceLocation loc = field->getLocStart(); | |
1960 SourceManager& manager = instance_.getSourceManager(); | |
1961 FullSourceLoc full_loc(loc, manager); | |
1962 diagnostic_.Report(full_loc, note) << field; | |
1963 } | |
1964 | |
1965 void NoteOverriddenNonVirtualTrace(CXXMethodDecl* overridden) { | |
1966 SourceLocation loc = overridden->getLocStart(); | |
1967 SourceManager& manager = instance_.getSourceManager(); | |
1968 FullSourceLoc full_loc(loc, manager); | |
1969 diagnostic_.Report(full_loc, diag_overridden_non_virtual_trace_note_) | |
1970 << overridden; | |
1971 } | |
1972 | |
1973 unsigned diag_class_must_left_mostly_derive_gc_; | |
1974 unsigned diag_class_requires_trace_method_; | |
1975 unsigned diag_base_requires_tracing_; | |
1976 unsigned diag_fields_require_tracing_; | |
1977 unsigned diag_class_contains_invalid_fields_; | |
1978 unsigned diag_class_contains_invalid_fields_warning_; | |
1979 unsigned diag_class_contains_gc_root_; | |
1980 unsigned diag_class_requires_finalization_; | |
1981 unsigned diag_class_does_not_require_finalization_; | |
1982 unsigned diag_finalizer_accesses_finalized_field_; | |
1983 unsigned diag_overridden_non_virtual_trace_; | |
1984 unsigned diag_missing_trace_dispatch_method_; | |
1985 unsigned diag_missing_finalize_dispatch_method_; | |
1986 unsigned diag_virtual_and_manual_dispatch_; | |
1987 unsigned diag_missing_trace_dispatch_; | |
1988 unsigned diag_missing_finalize_dispatch_; | |
1989 unsigned diag_derives_non_stack_allocated_; | |
1990 unsigned diag_class_overrides_new_; | |
1991 unsigned diag_class_declares_pure_virtual_trace_; | |
1992 unsigned diag_left_most_base_must_be_polymorphic_; | |
1993 unsigned diag_base_class_must_declare_virtual_trace_; | |
1994 unsigned diag_class_must_declare_gc_mixin_trace_method_; | |
1995 | |
1996 unsigned diag_base_requires_tracing_note_; | |
1997 unsigned diag_field_requires_tracing_note_; | |
1998 unsigned diag_raw_ptr_to_gc_managed_class_note_; | |
1999 unsigned diag_ref_ptr_to_gc_managed_class_note_; | |
2000 unsigned diag_own_ptr_to_gc_managed_class_note_; | |
2001 unsigned diag_stack_allocated_field_note_; | |
2002 unsigned diag_member_in_unmanaged_class_note_; | |
2003 unsigned diag_part_object_to_gc_derived_class_note_; | |
2004 unsigned diag_part_object_contains_gc_root_note_; | |
2005 unsigned diag_field_contains_gc_root_note_; | |
2006 unsigned diag_finalized_field_note_; | |
2007 unsigned diag_user_declared_destructor_note_; | |
2008 unsigned diag_user_declared_finalizer_note_; | |
2009 unsigned diag_base_requires_finalization_note_; | |
2010 unsigned diag_field_requires_finalization_note_; | |
2011 unsigned diag_overridden_non_virtual_trace_note_; | |
2012 unsigned diag_manual_dispatch_method_note_; | |
2013 | |
2014 CompilerInstance& instance_; | |
2015 DiagnosticsEngine& diagnostic_; | |
2016 BlinkGCPluginOptions options_; | |
2017 RecordCache cache_; | |
2018 JsonWriter* json_; | |
2019 }; | |
2020 | 19 |
2021 class BlinkGCPluginAction : public PluginASTAction { | 20 class BlinkGCPluginAction : public PluginASTAction { |
2022 public: | 21 public: |
2023 BlinkGCPluginAction() {} | 22 BlinkGCPluginAction() {} |
2024 | 23 |
2025 protected: | 24 protected: |
2026 // Overridden from PluginASTAction: | 25 // Overridden from PluginASTAction: |
2027 virtual std::unique_ptr<ASTConsumer> CreateASTConsumer( | 26 virtual std::unique_ptr<ASTConsumer> CreateASTConsumer( |
2028 CompilerInstance& instance, | 27 CompilerInstance& instance, |
2029 llvm::StringRef ref) { | 28 llvm::StringRef ref) { |
2030 return llvm::make_unique<BlinkGCPluginConsumer>(instance, options_); | 29 return llvm::make_unique<BlinkGCPluginConsumer>(instance, options_); |
2031 } | 30 } |
2032 | 31 |
2033 virtual bool ParseArgs(const CompilerInstance& instance, | 32 virtual bool ParseArgs(const CompilerInstance& instance, |
2034 const std::vector<string>& args) { | 33 const std::vector<std::string>& args) { |
2035 bool parsed = true; | 34 bool parsed = true; |
2036 | 35 |
2037 for (size_t i = 0; i < args.size() && parsed; ++i) { | 36 for (size_t i = 0; i < args.size() && parsed; ++i) { |
2038 if (args[i] == "enable-oilpan") { | 37 if (args[i] == "enable-oilpan") { |
2039 options_.enable_oilpan = true; | 38 options_.enable_oilpan = true; |
2040 } else if (args[i] == "dump-graph") { | 39 } else if (args[i] == "dump-graph") { |
2041 options_.dump_graph = true; | 40 options_.dump_graph = true; |
2042 } else if (args[i] == "warn-raw-ptr") { | 41 } else if (args[i] == "warn-raw-ptr") { |
2043 options_.warn_raw_ptr = true; | 42 options_.warn_raw_ptr = true; |
2044 } else if (args[i] == "warn-unneeded-finalizer") { | 43 } else if (args[i] == "warn-unneeded-finalizer") { |
2045 options_.warn_unneeded_finalizer = true; | 44 options_.warn_unneeded_finalizer = true; |
2046 } else { | 45 } else { |
2047 parsed = false; | 46 parsed = false; |
2048 llvm::errs() << "Unknown blink-gc-plugin argument: " << args[i] << "\n"; | 47 llvm::errs() << "Unknown blink-gc-plugin argument: " << args[i] << "\n"; |
2049 } | 48 } |
2050 } | 49 } |
2051 | 50 |
2052 return parsed; | 51 return parsed; |
2053 } | 52 } |
2054 | 53 |
2055 private: | 54 private: |
2056 BlinkGCPluginOptions options_; | 55 BlinkGCPluginOptions options_; |
2057 }; | 56 }; |
2058 | 57 |
2059 } // namespace | |
2060 | |
2061 static FrontendPluginRegistry::Add<BlinkGCPluginAction> X( | 58 static FrontendPluginRegistry::Add<BlinkGCPluginAction> X( |
2062 "blink-gc-plugin", | 59 "blink-gc-plugin", |
2063 "Check Blink GC invariants"); | 60 "Check Blink GC invariants"); |