Chromium Code Reviews
Diff: tools/clang/blink_gc_plugin/RecordInfo.cpp

Issue 2588943002: Disallow heap objects containing unsafe on-heap iterators. (Closed)
Patch Set: formatting Created 3 years, 12 months ago
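
As background for the change: the plugin builds an "edge" graph over the fields of garbage-collected classes, and this CL adds edges for fields declared through a heap collection's iterator typedef so that the unsafe ones can be rejected. A hedged sketch of the kind of class the new check targets (class and field names are illustrative; which collections count as having unsafe iterators is decided by Config::IsGCCollectionWithUnsafeIterator):

  // Hypothetical Blink-style class: a garbage-collected object keeping an
  // on-heap collection's iterator as a field. Iterators like this are what
  // the new check classifies, and the unsafe ones get disallowed.
  class BadExample : public blink::GarbageCollected<BadExample> {
    blink::HeapVector<blink::Member<Node>> vector_;
    blink::HeapVector<blink::Member<Node>>::iterator it_;  // now classified
  };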
 // Copyright 2014 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "Config.h"
 #include "RecordInfo.h"
 #include "clang/Sema/Sema.h"
 
 using namespace clang;
 using std::string;
(...skipping 95 matching lines...)
 
   return dyn_cast_or_null<CXXRecordDecl>(tmpl_decl->getTemplatedDecl());
 }
 
 void RecordInfo::walkBases() {
   // This traversal is akin to CXXRecordDecl::forallBases()'s,
   // but without stepping over dependent bases -- these might also
   // have a "GC base name", so are to be included and considered.
   SmallVector<const CXXRecordDecl*, 8> queue;
 
-  const CXXRecordDecl *base_record = record();
+  const CXXRecordDecl* base_record = record();
   while (true) {
     for (const auto& it : base_record->bases()) {
-      const RecordType *type = it.getType()->getAs<RecordType>();
+      const RecordType* type = it.getType()->getAs<RecordType>();
       CXXRecordDecl* base;
       if (!type)
         base = GetDependentTemplatedDecl(*it.getType());
       else {
         base = cast_or_null<CXXRecordDecl>(type->getDecl()->getDefinition());
         if (base)
           queue.push_back(base);
       }
       if (!base)
         continue;
(...skipping 34 matching lines...)
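
A note on the dependent-base comment above: in a class template, a GC base is itself a dependent type, so a traversal that steps over dependent bases would miss it entirely. A minimal sketch (hypothetical class; the plugin recognizes such a base through its templated declaration via GetDependentTemplatedDecl()):

  // GarbageCollected<Holder<T>> is a dependent base: it has no definition
  // until instantiation, but its name already marks Holder as a GC class,
  // which is why walkBases() must not skip it.
  template <typename T>
  class Holder : public blink::GarbageCollected<Holder<T>> {
    blink::Member<T> value_;
  };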
   // This is a mixin if all GC bases are mixins.
   return true;
 }
 
 // Test if a record is allocated on the managed heap.
 bool RecordInfo::IsGCAllocated() {
   return IsGCDerived() || IsHeapAllocatedCollection();
 }
 
 bool RecordInfo::IsEagerlyFinalized() {
-  if (is_eagerly_finalized_ == kNotComputed) {
-    is_eagerly_finalized_ = kFalse;
-    if (IsGCFinalized()) {
-      for (Decl* decl : record_->decls()) {
-        if (TypedefDecl* typedef_decl = dyn_cast<TypedefDecl>(decl)) {
-          if (typedef_decl->getNameAsString() == kIsEagerlyFinalizedName) {
-            is_eagerly_finalized_ = kTrue;
-            break;
-          }
-        }
-      }
+  if (is_eagerly_finalized_ != kNotComputed)
+    return is_eagerly_finalized_;
+
+  is_eagerly_finalized_ = kFalse;
+  if (!IsGCFinalized())
+    return is_eagerly_finalized_;
+
+  for (Decl* decl : record_->decls()) {
+    if (TypedefDecl* typedef_decl = dyn_cast<TypedefDecl>(decl)) {
+      if (typedef_decl->getNameAsString() != kIsEagerlyFinalizedName)
+        continue;
+      is_eagerly_finalized_ = kTrue;
+      break;
     }
   }
   return is_eagerly_finalized_;
 }
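
The loop above scans the class body for a marker typedef named kIsEagerlyFinalizedName. In Blink this marker comes from the EAGERLY_FINALIZE() macro; a sketch of the pattern being matched, assuming the marker name is "IsEagerlyFinalizedMarker" (the macro's expansion in this era of the codebase):

  class EagerClient : public blink::GarbageCollectedFinalized<EagerClient> {
   public:
    // What EAGERLY_FINALIZE() expands to; finding this typedef is what
    // makes IsEagerlyFinalized() return kTrue for the record.
    typedef int IsEagerlyFinalizedMarker;
    virtual ~EagerClient() {}
  };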
 
 bool RecordInfo::HasDefinition() {
   return record_->hasDefinition();
 }
 
 RecordInfo* RecordCache::Lookup(CXXRecordDecl* record) {
(...skipping 212 matching lines...)
   if (!record_->hasDefinition())
     return fields;
   TracingStatus fields_status = TracingStatus::Unneeded();
   for (RecordDecl::field_iterator it = record_->field_begin();
        it != record_->field_end();
        ++it) {
     FieldDecl* field = *it;
     // Ignore fields annotated with the GC_PLUGIN_IGNORE macro.
     if (Config::IsIgnoreAnnotated(field))
       continue;
-    if (Edge* edge = CreateEdge(field->getType().getTypePtrOrNull())) {
+    // Check if the unexpanded type should be recorded; needed
+    // to track iterator aliases only.
+    const Type* unexpandedType = field->getType().getSplitUnqualifiedType().Ty;
+    Edge* edge = CreateEdgeFromOriginalType(unexpandedType);
+    if (!edge)
+      edge = CreateEdge(field->getType().getTypePtrOrNull());
+    if (edge) {
       fields_status = fields_status.LUB(edge->NeedsTracing(Edge::kRecursive));
       fields->insert(std::make_pair(field, FieldPoint(field, edge)));
     }
   }
   fields_need_tracing_ = fields_status;
   return fields;
 }
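
The unexpanded ("original") type matters because the iterator typedef is only visible in the type as written; once desugared, the field's type is the underlying iterator class with no mention of the owning collection. A hedged example of the two paths through the loop above (names illustrative):

  class Example : public blink::GarbageCollected<Example> {
    // Written type is HeapVector<Member<Node>>::iterator: the unexpanded
    // type keeps the qualifier, so CreateEdgeFromOriginalType() can match it.
    blink::HeapVector<blink::Member<Node>>::iterator it_;
    // Not an iterator typedef: CreateEdgeFromOriginalType() returns null and
    // the field falls through to the ordinary CreateEdge() path.
    blink::Member<Node> node_;
  };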
 
 void RecordInfo::DetermineTracingMethods() {
   if (determined_trace_methods_)
(...skipping 132 matching lines...)
 static bool isInStdNamespace(clang::Sema& sema, NamespaceDecl* ns)
 {
   while (ns) {
     if (sema.getStdNamespace()->InEnclosingNamespaceSetOf(ns))
       return true;
     ns = dyn_cast<NamespaceDecl>(ns->getParent());
   }
   return false;
 }
 
+Edge* RecordInfo::CreateEdgeFromOriginalType(const Type* type) {
+  if (!type)
+    return nullptr;
+
+  // Look for "typedef ... iterator;".
+  if (!isa<ElaboratedType>(type))
+    return nullptr;
+  const ElaboratedType* elaboratedType = cast<ElaboratedType>(type);
+  if (!isa<TypedefType>(elaboratedType->getNamedType()))
+    return nullptr;
+  const TypedefType* typedefType =
+      cast<TypedefType>(elaboratedType->getNamedType());
+  std::string typeName = typedefType->getDecl()->getNameAsString();
+  if (!Config::IsIterator(typeName))
+    return nullptr;
+  RecordInfo* info =
+      cache_->Lookup(elaboratedType->getQualifier()->getAsType());
+
+  bool on_heap = false;
+  bool is_unsafe = false;
+  // Silently handle unknown types; the on-heap collection types will
+  // have to be in scope for the declaration to compile, though.
+  if (info) {
+    is_unsafe = Config::IsGCCollectionWithUnsafeIterator(info->name());
+    // Don't mark the iterator as being on the heap if it is not supported.
+    on_heap = !is_unsafe && Config::IsGCCollection(info->name());
+  }
+  return new Iterator(info, on_heap, is_unsafe);
+}
+
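For reference, the type shape the new function matches, based on how Clang models written types (the concrete collection and element types here are illustrative): a field declared as HeapVector<Member<Node>>::iterator is an ElaboratedType whose qualifier names the collection and whose named type is the typedef "iterator":

  // ElaboratedType for "HeapVector<Member<Node>>::iterator"
  //   qualifier:  HeapVector<Member<Node>>  <- getQualifier()->getAsType(),
  //               looked up in the RecordCache to classify the collection
  //   named type: TypedefType "iterator"    <- getNamedType(), matched by
  //               Config::IsIterator()
  //
  // The resulting Iterator edge is on_heap for safe GC collections,
  // is_unsafe for collections whose iterators must not live on the heap,
  // and neither for off-heap (e.g. WTF) collections.
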
 Edge* RecordInfo::CreateEdge(const Type* type) {
   if (!type) {
     return 0;
   }
 
   if (type->isPointerType() || type->isReferenceType()) {
     if (Edge* ptr = CreateEdge(type->getPointeeType().getTypePtrOrNull()))
       return new RawPtr(ptr, type->isReferenceType());
     return 0;
   }
(...skipping 75 matching lines...)
         edge->members().push_back(member);
       }
       // TODO: Handle the case where we fail to create an edge (eg, if the
       // argument is a primitive type or just not fully known yet).
     }
     return edge;
   }
 
   return new Value(info);
 }