Chromium Code Reviews

Unified Diff: base/trace_event/trace_buffer.cc

Issue 1459143002: Remove ScopedVector from trace_buffer (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: fix (created 5 years ago)
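For context on the mechanical change in the diff below, here is a minimal sketch of the ownership idioms involved when a ScopedVector<T> is replaced by a std::vector of owning smart pointers: borrow an element with .get(), take ownership out of a slot with .release(), and hand ownership back with std::move(). The sketch uses std::unique_ptr as a stand-in for base's scoped_ptr, and the Chunk type is hypothetical; it is not the code under review.

// Sketch only: std::unique_ptr stands in for scoped_ptr; Chunk is hypothetical.
#include <memory>
#include <utility>
#include <vector>

struct Chunk { int seq = 0; };

int main() {
  std::vector<std::unique_ptr<Chunk>> chunks;
  chunks.resize(4);
  chunks[0].reset(new Chunk);

  // Borrow without transferring ownership (replaces ScopedVector's raw access).
  Chunk* borrowed = chunks[0].get();
  (void)borrowed;

  // Take ownership out of the slot, leaving it null (as GetChunk() does).
  std::unique_ptr<Chunk> in_flight(chunks[0].release());

  // Hand ownership back into the slot (as ReturnChunk() does).
  chunks[0] = std::move(in_flight);
  return 0;
}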
 // Copyright 2015 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "base/trace_event/trace_buffer.h"

 #include <utility>
+#include <vector>

-#include "base/memory/scoped_vector.h"
+#include "base/memory/scoped_ptr.h"
 #include "base/trace_event/trace_event_impl.h"

 namespace base {
 namespace trace_event {

 namespace {

 class TraceBufferRingBuffer : public TraceBuffer {
  public:
   TraceBufferRingBuffer(size_t max_chunks)
(...skipping 13 matching lines...)
     // the queue should never be empty.
     DCHECK(!QueueIsEmpty());

     *index = recyclable_chunks_queue_[queue_head_];
     queue_head_ = NextQueueIndex(queue_head_);
     current_iteration_index_ = queue_head_;

     if (*index >= chunks_.size())
       chunks_.resize(*index + 1);

-    TraceBufferChunk* chunk = chunks_[*index];
+    TraceBufferChunk* chunk = chunks_[*index].release();
     chunks_[*index] = NULL;  // Put NULL in the slot of a in-flight chunk.
     if (chunk)
       chunk->Reset(current_chunk_seq_++);
     else
       chunk = new TraceBufferChunk(current_chunk_seq_++);

     return scoped_ptr<TraceBufferChunk>(chunk);
   }

   void ReturnChunk(size_t index, scoped_ptr<TraceBufferChunk> chunk) override {
     // When this method is called, the queue should not be full because it
     // can contain all chunks including the one to be returned.
     DCHECK(!QueueIsFull());
     DCHECK(chunk);
     DCHECK_LT(index, chunks_.size());
     DCHECK(!chunks_[index]);
-    chunks_[index] = chunk.release();
+    chunks_[index] = std::move(chunk);
     recyclable_chunks_queue_[queue_tail_] = index;
     queue_tail_ = NextQueueIndex(queue_tail_);
   }

   bool IsFull() const override { return false; }

   size_t Size() const override {
     // This is approximate because not all of the chunks are full.
     return chunks_.size() * TraceBufferChunk::kTraceBufferChunkSize;
   }

   size_t Capacity() const override {
     return max_chunks_ * TraceBufferChunk::kTraceBufferChunkSize;
   }

   TraceEvent* GetEventByHandle(TraceEventHandle handle) override {
     if (handle.chunk_index >= chunks_.size())
       return NULL;
-    TraceBufferChunk* chunk = chunks_[handle.chunk_index];
+    TraceBufferChunk* chunk = chunks_[handle.chunk_index].get();
     if (!chunk || chunk->seq() != handle.chunk_seq)
       return NULL;
     return chunk->GetEventAt(handle.event_index);
   }

   const TraceBufferChunk* NextChunk() override {
     if (chunks_.empty())
       return NULL;

     while (current_iteration_index_ != queue_tail_) {
       size_t chunk_index = recyclable_chunks_queue_[current_iteration_index_];
       current_iteration_index_ = NextQueueIndex(current_iteration_index_);
       if (chunk_index >= chunks_.size())  // Skip uninitialized chunks.
         continue;
       DCHECK(chunks_[chunk_index]);
-      return chunks_[chunk_index];
+      return chunks_[chunk_index].get();
     }
     return NULL;
   }

   scoped_ptr<TraceBuffer> CloneForIteration() const override {
     scoped_ptr<ClonedTraceBuffer> cloned_buffer(new ClonedTraceBuffer());
     for (size_t queue_index = queue_head_; queue_index != queue_tail_;
          queue_index = NextQueueIndex(queue_index)) {
       size_t chunk_index = recyclable_chunks_queue_[queue_index];
       if (chunk_index >= chunks_.size())  // Skip uninitialized chunks.
         continue;
-      TraceBufferChunk* chunk = chunks_[chunk_index];
-      cloned_buffer->chunks_.push_back(chunk ? chunk->Clone().release() : NULL);
+      TraceBufferChunk* chunk = chunks_[chunk_index].get();
+      cloned_buffer->chunks_.push_back(chunk ? chunk->Clone() : NULL);
     }
     return std::move(cloned_buffer);
   }

   void EstimateTraceMemoryOverhead(
       TraceEventMemoryOverhead* overhead) override {
     overhead->Add("TraceBufferRingBuffer", sizeof(*this));
     for (size_t queue_index = queue_head_; queue_index != queue_tail_;
          queue_index = NextQueueIndex(queue_index)) {
       size_t chunk_index = recyclable_chunks_queue_[queue_index];
       if (chunk_index >= chunks_.size())  // Skip uninitialized chunks.
         continue;
       chunks_[chunk_index]->EstimateTraceMemoryOverhead(overhead);
     }
   }

  private:
   class ClonedTraceBuffer : public TraceBuffer {
    public:
     ClonedTraceBuffer() : current_iteration_index_(0) {}

     // The only implemented method.
     const TraceBufferChunk* NextChunk() override {
       return current_iteration_index_ < chunks_.size()
-                 ? chunks_[current_iteration_index_++]
+                 ? chunks_[current_iteration_index_++].get()
                  : NULL;
     }

     scoped_ptr<TraceBufferChunk> GetChunk(size_t* index) override {
       NOTIMPLEMENTED();
       return scoped_ptr<TraceBufferChunk>();
     }
     void ReturnChunk(size_t index, scoped_ptr<TraceBufferChunk>) override {
       NOTIMPLEMENTED();
     }
     bool IsFull() const override { return false; }
     size_t Size() const override { return 0; }
     size_t Capacity() const override { return 0; }
     TraceEvent* GetEventByHandle(TraceEventHandle handle) override {
       return NULL;
     }
     scoped_ptr<TraceBuffer> CloneForIteration() const override {
       NOTIMPLEMENTED();
       return scoped_ptr<TraceBuffer>();
     }
     void EstimateTraceMemoryOverhead(
         TraceEventMemoryOverhead* overhead) override {
       NOTIMPLEMENTED();
     }

     size_t current_iteration_index_;
-    ScopedVector<TraceBufferChunk> chunks_;
+    std::vector<scoped_ptr<TraceBufferChunk>> chunks_;
   };

   bool QueueIsEmpty() const { return queue_head_ == queue_tail_; }

   size_t QueueSize() const {
     return queue_tail_ > queue_head_
                ? queue_tail_ - queue_head_
                : queue_tail_ + queue_capacity() - queue_head_;
   }

   bool QueueIsFull() const { return QueueSize() == queue_capacity() - 1; }

   size_t queue_capacity() const {
     // One extra space to help distinguish full state and empty state.
     return max_chunks_ + 1;
   }

   size_t NextQueueIndex(size_t index) const {
     index++;
     if (index >= queue_capacity())
       index = 0;
     return index;
   }

   size_t max_chunks_;
-  ScopedVector<TraceBufferChunk> chunks_;
+  std::vector<scoped_ptr<TraceBufferChunk>> chunks_;

   scoped_ptr<size_t[]> recyclable_chunks_queue_;
   size_t queue_head_;
   size_t queue_tail_;

   size_t current_iteration_index_;
   uint32 current_chunk_seq_;

   DISALLOW_COPY_AND_ASSIGN(TraceBufferRingBuffer);
 };
(...skipping 192 matching lines...)
 TraceBuffer* TraceBuffer::CreateTraceBufferRingBuffer(size_t max_chunks) {
   return new TraceBufferRingBuffer(max_chunks);
 }

 TraceBuffer* TraceBuffer::CreateTraceBufferVectorOfSize(size_t max_chunks) {
   return new TraceBufferVector(max_chunks);
 }

 }  // namespace trace_event
 }  // namespace base