Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(423)

Side by Side Diff: runtime/vm/profiler.cc

Issue 2966593002: Updated native memory allocation profiling to use its own sample buffer instead of sharing a sample… (Closed)
Patch Set: Updated native memory allocation profiling to use its own sample buffer instead of sharing a sample… Created 3 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « runtime/vm/profiler.h ('k') | runtime/vm/profiler_service.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "platform/address_sanitizer.h" 5 #include "platform/address_sanitizer.h"
6 #include "platform/memory_sanitizer.h" 6 #include "platform/memory_sanitizer.h"
7 #include "platform/utils.h" 7 #include "platform/utils.h"
8 8
9 #include "vm/allocation.h" 9 #include "vm/allocation.h"
10 #include "vm/atomic.h" 10 #include "vm/atomic.h"
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after
// profile_vm defaults to true on simulators and false elsewhere (see the
// #if/#else below).
50 #if defined(USING_SIMULATOR) 50 #if defined(USING_SIMULATOR)
51 DEFINE_FLAG(bool, profile_vm, true, "Always collect native stack traces."); 51 DEFINE_FLAG(bool, profile_vm, true, "Always collect native stack traces.");
52 #else 52 #else
53 DEFINE_FLAG(bool, profile_vm, false, "Always collect native stack traces."); 53 DEFINE_FLAG(bool, profile_vm, false, "Always collect native stack traces.");
54 #endif 54 #endif
55 55
56 #ifndef PRODUCT 56 #ifndef PRODUCT
57 57
// Static storage for the Profiler. allocation_sample_buffer_ is new in this
// patch: native allocation samples now live in their own buffer instead of
// sharing sample_buffer_.
58 bool Profiler::initialized_ = false; 58 bool Profiler::initialized_ = false;
59 SampleBuffer* Profiler::sample_buffer_ = NULL; 59 SampleBuffer* Profiler::sample_buffer_ = NULL;
60 AllocationSampleBuffer* Profiler::allocation_sample_buffer_ = NULL;
60 ProfilerCounters Profiler::counters_; 61 ProfilerCounters Profiler::counters_;
61 62
62 void Profiler::InitOnce() { 63 void Profiler::InitOnce() {
63 // Place some sane restrictions on user controlled flags. 64 // Place some sane restrictions on user controlled flags.
64 SetSamplePeriod(FLAG_profile_period); 65 SetSamplePeriod(FLAG_profile_period);
65 SetSampleDepth(FLAG_max_profile_depth); 66 SetSampleDepth(FLAG_max_profile_depth);
66 Sample::InitOnce(); 67 Sample::InitOnce();
67 if (!FLAG_profiler) { 68 if (!FLAG_profiler) {
68 return; 69 return;
69 } 70 }
70 ASSERT(!initialized_); 71 ASSERT(!initialized_);
71 sample_buffer_ = new SampleBuffer(); 72 sample_buffer_ = new SampleBuffer();
73 Profiler::InitAllocationSampleBuffer();
72 // Zero counters. 74 // Zero counters.
73 memset(&counters_, 0, sizeof(counters_)); 75 memset(&counters_, 0, sizeof(counters_));
74 NativeSymbolResolver::InitOnce(); 76 NativeSymbolResolver::InitOnce();
75 ThreadInterrupter::SetInterruptPeriod(FLAG_profile_period); 77 ThreadInterrupter::SetInterruptPeriod(FLAG_profile_period);
76 ThreadInterrupter::Startup(); 78 ThreadInterrupter::Startup();
77 initialized_ = true; 79 initialized_ = true;
78 } 80 }
79 81
80 82
83 void Profiler::InitAllocationSampleBuffer() {
84 if (FLAG_profiler_native_memory &&
85 (Profiler::allocation_sample_buffer_ == NULL)) {
86 Profiler::allocation_sample_buffer_ = new AllocationSampleBuffer();
87 }
88 }
89
90
81 void Profiler::Shutdown() { 91 void Profiler::Shutdown() {
82 if (!FLAG_profiler) { 92 if (!FLAG_profiler) {
83 return; 93 return;
84 } 94 }
85 ASSERT(initialized_); 95 ASSERT(initialized_);
86 ThreadInterrupter::Shutdown(); 96 ThreadInterrupter::Shutdown();
87 NativeSymbolResolver::ShutdownOnce(); 97 NativeSymbolResolver::ShutdownOnce();
88 } 98 }
89 99
90 100
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
143 cursor_ = 0; 153 cursor_ = 0;
144 154
145 if (FLAG_trace_profiler) { 155 if (FLAG_trace_profiler) {
146 OS::Print("Profiler holds %" Pd " samples\n", capacity); 156 OS::Print("Profiler holds %" Pd " samples\n", capacity);
147 OS::Print("Profiler sample is %" Pd " bytes\n", Sample::instance_size()); 157 OS::Print("Profiler sample is %" Pd " bytes\n", Sample::instance_size());
148 OS::Print("Profiler memory usage = %" Pd " bytes\n", size); 158 OS::Print("Profiler memory usage = %" Pd " bytes\n", size);
149 } 159 }
150 } 160 }
151 161
152 162
163 AllocationSampleBuffer::AllocationSampleBuffer(intptr_t capacity)
164 : SampleBuffer(capacity), mutex_(new Mutex()) {}
165
166
153 SampleBuffer::~SampleBuffer() { 167 SampleBuffer::~SampleBuffer() {
154 delete memory_; 168 delete memory_;
155 } 169 }
156 170
157 171
172 AllocationSampleBuffer::~AllocationSampleBuffer() {
173 delete mutex_;
174 }
175
176
158 Sample* SampleBuffer::At(intptr_t idx) const { 177 Sample* SampleBuffer::At(intptr_t idx) const {
159 ASSERT(idx >= 0); 178 ASSERT(idx >= 0);
160 ASSERT(idx < capacity_); 179 ASSERT(idx < capacity_);
161 intptr_t offset = idx * Sample::instance_size(); 180 intptr_t offset = idx * Sample::instance_size();
162 uint8_t* samples = reinterpret_cast<uint8_t*>(samples_); 181 uint8_t* samples = reinterpret_cast<uint8_t*>(samples_);
163 return reinterpret_cast<Sample*>(samples + offset); 182 return reinterpret_cast<Sample*>(samples + offset);
164 } 183 }
165 184
166 185
167 intptr_t SampleBuffer::ReserveSampleSlot() { 186 intptr_t SampleBuffer::ReserveSampleSlot() {
168 ASSERT(samples_ != NULL); 187 ASSERT(samples_ != NULL);
169 uintptr_t cursor = AtomicOperations::FetchAndIncrement(&cursor_); 188 uintptr_t cursor = AtomicOperations::FetchAndIncrement(&cursor_);
170 // Map back into sample buffer range. 189 // Map back into sample buffer range.
171 cursor = cursor % capacity_; 190 cursor = cursor % capacity_;
172 return cursor; 191 return cursor;
173 } 192 }
174 193
194
175 Sample* SampleBuffer::ReserveSample() { 195 Sample* SampleBuffer::ReserveSample() {
176 return At(ReserveSampleSlot()); 196 return At(ReserveSampleSlot());
177 } 197 }
178 198
179 199
180 Sample* SampleBuffer::ReserveSampleAndLink(Sample* previous) { 200 Sample* SampleBuffer::ReserveSampleAndLink(Sample* previous) {
181 ASSERT(previous != NULL); 201 ASSERT(previous != NULL);
182 intptr_t next_index = ReserveSampleSlot(); 202 intptr_t next_index = ReserveSampleSlot();
183 Sample* next = At(next_index); 203 Sample* next = At(next_index);
184 next->Init(previous->port(), previous->timestamp(), previous->tid()); 204 next->Init(previous->port(), previous->timestamp(), previous->tid());
185 next->set_head_sample(false); 205 next->set_head_sample(false);
186 // Mark that previous continues at next. 206 // Mark that previous continues at next.
187 previous->SetContinuationIndex(next_index); 207 previous->SetContinuationIndex(next_index);
188 return next; 208 return next;
189 } 209 }
190 210
191 211
212 void AllocationSampleBuffer::FreeAllocationSample(Sample* sample) {
213 MutexLocker ml(mutex_);
214 while (sample != NULL) {
215 intptr_t continuation_index = -1;
216 if (sample->is_continuation_sample()) {
217 continuation_index = sample->continuation_index();
218 }
219 sample->Clear();
220 sample->set_next_free(free_sample_list_);
221 free_sample_list_ = sample;
222
223 if (continuation_index != -1) {
224 sample = At(continuation_index);
225 } else {
226 sample = NULL;
227 }
228 }
229 }
230
231
232 intptr_t AllocationSampleBuffer::ReserveSampleSlotLocked() {
233 if (free_sample_list_ != NULL) {
234 Sample* free_sample = free_sample_list_;
235 free_sample_list_ = free_sample->next_free();
236 free_sample->set_next_free(NULL);
237 uint8_t* samples_array_ptr = reinterpret_cast<uint8_t*>(samples_);
238 uint8_t* free_sample_ptr = reinterpret_cast<uint8_t*>(free_sample);
239 return static_cast<intptr_t>((free_sample_ptr - samples_array_ptr) /
240 Sample::instance_size());
241 } else if (cursor_ < static_cast<uintptr_t>(capacity_ - 1)) {
242 return cursor_++;
243 } else {
244 return -1;
245 }
246 }
247
248
249 Sample* AllocationSampleBuffer::ReserveSampleAndLink(Sample* previous) {
250 MutexLocker ml(mutex_);
251 ASSERT(previous != NULL);
252 intptr_t next_index = ReserveSampleSlotLocked();
253 if (next_index < 0) {
254 // Could not find a free sample.
255 return NULL;
256 }
257 Sample* next = At(next_index);
258 next->Init(previous->port(), previous->timestamp(), previous->tid());
259 next->set_native_allocation_address(previous->native_allocation_address());
260 next->set_native_allocation_size_bytes(
261 previous->native_allocation_size_bytes());
262 next->set_head_sample(false);
263 // Mark that previous continues at next.
264 previous->SetContinuationIndex(next_index);
265 return next;
266 }
267
268
269 Sample* AllocationSampleBuffer::ReserveSample() {
270 MutexLocker ml(mutex_);
271 intptr_t index = ReserveSampleSlotLocked();
272 if (index < 0) {
273 return NULL;
274 }
275 return At(index);
276 }
277
278
192 // Attempts to find the true return address when a Dart frame is being setup 279 // Attempts to find the true return address when a Dart frame is being setup
193 // or torn down. 280 // or torn down.
194 // NOTE: Architecture specific implementations below. 281 // NOTE: Architecture specific implementations below.
195 class ReturnAddressLocator : public ValueObject { 282 class ReturnAddressLocator : public ValueObject {
196 public: 283 public:
197 ReturnAddressLocator(Sample* sample, const Code& code) 284 ReturnAddressLocator(Sample* sample, const Code& code)
198 : stack_buffer_(sample->GetStackBuffer()), 285 : stack_buffer_(sample->GetStackBuffer()),
199 pc_(sample->pc()), 286 pc_(sample->pc()),
200 code_(Code::ZoneHandle(code.raw())) { 287 code_(Code::ZoneHandle(code.raw())) {
201 ASSERT(!code_.IsNull()); 288 ASSERT(!code_.IsNull());
(...skipping 747 matching lines...) Expand 10 before | Expand all | Expand 10 after
949 #endif 1036 #endif
950 sample->set_vm_tag(vm_tag); 1037 sample->set_vm_tag(vm_tag);
951 sample->set_user_tag(isolate->user_tag()); 1038 sample->set_user_tag(isolate->user_tag());
952 sample->set_thread_task(thread->task_kind()); 1039 sample->set_thread_task(thread->task_kind());
953 return sample; 1040 return sample;
954 } 1041 }
955 1042
956 1043
957 static Sample* SetupSampleNative(SampleBuffer* sample_buffer, ThreadId tid) { 1044 static Sample* SetupSampleNative(SampleBuffer* sample_buffer, ThreadId tid) {
958 Sample* sample = sample_buffer->ReserveSample(); 1045 Sample* sample = sample_buffer->ReserveSample();
1046 if (sample == NULL) {
1047 return NULL;
1048 }
959 sample->Init(ILLEGAL_PORT, OS::GetCurrentMonotonicMicros(), tid); 1049 sample->Init(ILLEGAL_PORT, OS::GetCurrentMonotonicMicros(), tid);
960 sample->set_is_native_allocation_sample(true);
961 Thread* thread = Thread::Current(); 1050 Thread* thread = Thread::Current();
962 1051
963 // Note: setting thread task in order to be consistent with other samples. The 1052 // Note: setting thread task in order to be consistent with other samples. The
964 // task kind is not used by NativeAllocationSampleFilter for filtering 1053 // task kind is not used by NativeAllocationSampleFilter for filtering
965 // purposes as some samples may be collected when no thread exists. 1054 // purposes as some samples may be collected when no thread exists.
966 if (thread != NULL) { 1055 if (thread != NULL) {
967 sample->set_thread_task(thread->task_kind()); 1056 sample->set_thread_task(thread->task_kind());
968 } 1057 }
969 return sample; 1058 return sample;
970 } 1059 }
(...skipping 138 matching lines...) Expand 10 before | Expand all | Expand 10 after
1109 Sample* sample = SetupSample(thread, sample_buffer, os_thread->trace_id()); 1198 Sample* sample = SetupSample(thread, sample_buffer, os_thread->trace_id());
1110 sample->SetAllocationCid(cid); 1199 sample->SetAllocationCid(cid);
1111 sample->SetAt(0, pc); 1200 sample->SetAt(0, pc);
1112 } 1201 }
1113 } 1202 }
1114 1203
1115 1204
1116 Sample* Profiler::SampleNativeAllocation(intptr_t skip_count, 1205 Sample* Profiler::SampleNativeAllocation(intptr_t skip_count,
1117 uword address, 1206 uword address,
1118 uintptr_t allocation_size) { 1207 uintptr_t allocation_size) {
1119 SampleBuffer* sample_buffer = Profiler::sample_buffer(); 1208 AllocationSampleBuffer* sample_buffer = Profiler::allocation_sample_buffer();
1120 if (sample_buffer == NULL) { 1209 if (sample_buffer == NULL) {
1121 return NULL; 1210 return NULL;
1122 } 1211 }
1123 1212
1124 uintptr_t sp = Thread::GetCurrentStackPointer(); 1213 uintptr_t sp = Thread::GetCurrentStackPointer();
1125 uintptr_t fp = 0; 1214 uintptr_t fp = 0;
1126 uintptr_t pc = OS::GetProgramCounter(); 1215 uintptr_t pc = OS::GetProgramCounter();
1127 1216
1128 COPY_FP_REGISTER(fp); 1217 COPY_FP_REGISTER(fp);
1129 1218
1130 uword stack_lower = 0; 1219 uword stack_lower = 0;
1131 uword stack_upper = 0; 1220 uword stack_upper = 0;
1132 if (!InitialRegisterCheck(pc, fp, sp)) { 1221 if (!InitialRegisterCheck(pc, fp, sp)) {
1133 AtomicOperations::IncrementInt64By( 1222 AtomicOperations::IncrementInt64By(
1134 &counters_.failure_native_allocation_sample, 1); 1223 &counters_.failure_native_allocation_sample, 1);
1135 return NULL; 1224 return NULL;
1136 } 1225 }
1137 1226
1138 if (!(OSThread::GetCurrentStackBounds(&stack_lower, &stack_upper) && 1227 if (!(OSThread::GetCurrentStackBounds(&stack_lower, &stack_upper) &&
1139 ValidateThreadStackBounds(fp, sp, stack_lower, stack_upper))) { 1228 ValidateThreadStackBounds(fp, sp, stack_lower, stack_upper))) {
1140 // Could not get stack boundary. 1229 // Could not get stack boundary.
1141 AtomicOperations::IncrementInt64By( 1230 AtomicOperations::IncrementInt64By(
1142 &counters_.failure_native_allocation_sample, 1); 1231 &counters_.failure_native_allocation_sample, 1);
1143 return NULL; 1232 return NULL;
1144 } 1233 }
1145 1234
1146 OSThread* os_thread = OSThread::Current(); 1235 OSThread* os_thread = OSThread::Current();
1147 Sample* sample = SetupSampleNative(sample_buffer, os_thread->trace_id()); 1236 Sample* sample = SetupSampleNative(sample_buffer, os_thread->trace_id());
1237 if (sample == NULL) {
1238 OS::PrintErr(
1239 "Native memory profile sample buffer is full because there are more "
1240 "than %" Pd
1241 " outstanding allocations. Not recording allocation "
1242 "0x%" Px " with size: %" Pu " bytes.\n",
1243 sample_buffer->capacity(), address, allocation_size);
1244 return NULL;
1245 }
1246
1148 sample->set_native_allocation_address(address); 1247 sample->set_native_allocation_address(address);
1149 sample->set_native_allocation_size_bytes(allocation_size); 1248 sample->set_native_allocation_size_bytes(allocation_size);
1150 1249
1151 ProfilerNativeStackWalker native_stack_walker( 1250 ProfilerNativeStackWalker native_stack_walker(
1152 ILLEGAL_PORT, sample, sample_buffer, stack_lower, stack_upper, pc, fp, sp, 1251 ILLEGAL_PORT, sample, sample_buffer, stack_lower, stack_upper, pc, fp, sp,
1153 skip_count); 1252 skip_count);
1154 1253
1155 native_stack_walker.walk(); 1254 native_stack_walker.walk();
1255
1156 return sample; 1256 return sample;
1157 } 1257 }
1158 1258
1159 1259
1160 void Profiler::SampleThreadSingleFrame(Thread* thread, uintptr_t pc) { 1260 void Profiler::SampleThreadSingleFrame(Thread* thread, uintptr_t pc) {
1161 ASSERT(thread != NULL); 1261 ASSERT(thread != NULL);
1162 OSThread* os_thread = thread->os_thread(); 1262 OSThread* os_thread = thread->os_thread();
1163 ASSERT(os_thread != NULL); 1263 ASSERT(os_thread != NULL);
1164 Isolate* isolate = thread->isolate(); 1264 Isolate* isolate = thread->isolate();
1165 1265
(...skipping 455 matching lines...) Expand 10 before | Expand all | Expand 10 after
1621 1721
1622 1722
1623 ProcessedSampleBuffer::ProcessedSampleBuffer() 1723 ProcessedSampleBuffer::ProcessedSampleBuffer()
1624 : code_lookup_table_(new CodeLookupTable(Thread::Current())) { 1724 : code_lookup_table_(new CodeLookupTable(Thread::Current())) {
1625 ASSERT(code_lookup_table_ != NULL); 1725 ASSERT(code_lookup_table_ != NULL);
1626 } 1726 }
1627 1727
1628 #endif // !PRODUCT 1728 #endif // !PRODUCT
1629 1729
1630 } // namespace dart 1730 } // namespace dart
OLDNEW
« no previous file with comments | « runtime/vm/profiler.h ('k') | runtime/vm/profiler_service.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698