Chromium Code Reviews

Side by Side Diff: runtime/vm/class_table.cc

Issue 2974233002: VM: Re-format to use at most one newline between functions (Closed)
Patch Set: Rebase and merge (created 3 years, 5 months ago)
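
As the issue title says, this CL is a mechanical whitespace cleanup: each visible hunk drops one of two consecutive blank lines so that at most one blank line separates top-level definitions. A minimal sketch of the rule follows; the names (example_before, example_after, Foo, Bar) are illustrative only and do not come from the Dart sources.

// Before this CL: top-level definitions separated by two blank lines.
namespace example_before {
void Foo() {}


void Bar() {}
}  // namespace example_before

// After this CL: at most one blank line between top-level definitions.
namespace example_after {
void Foo() {}

void Bar() {}
}  // namespace example_after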
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file.
4
5 #include "vm/class_table.h"
6
7 #include "vm/atomic.h"
8 #include "vm/flags.h"
9 #include "vm/freelist.h"
10 #include "vm/growable_array.h"
(...skipping 41 matching lines...)
52 }
53 #ifndef PRODUCT
54 predefined_class_heap_stats_table_ = reinterpret_cast<ClassHeapStats*>(
55 calloc(kNumPredefinedCids, sizeof(ClassHeapStats))); // NOLINT
56 for (intptr_t i = 0; i < kNumPredefinedCids; i++) {
57 predefined_class_heap_stats_table_[i].Initialize();
58 }
59 #endif // !PRODUCT
60 }
61
-62
63 ClassTable::ClassTable(ClassTable* original)
64 : top_(original->top_),
65 capacity_(original->top_),
66 table_(original->table_),
67 old_tables_(NULL) {
68 NOT_IN_PRODUCT(class_heap_stats_table_ = NULL);
69 NOT_IN_PRODUCT(predefined_class_heap_stats_table_ = NULL);
70 }
71
-72
73 ClassTable::~ClassTable() {
74 if (old_tables_ != NULL) {
75 FreeOldTables();
76 delete old_tables_;
77 free(table_);
78 NOT_IN_PRODUCT(free(predefined_class_heap_stats_table_));
79 NOT_IN_PRODUCT(free(class_heap_stats_table_));
80 } else {
81 // This instance was a shallow copy. It doesn't own any memory.
82 NOT_IN_PRODUCT(ASSERT(predefined_class_heap_stats_table_ == NULL));
83 NOT_IN_PRODUCT(ASSERT(class_heap_stats_table_ == NULL));
84 }
85 }
86
-87
88 void ClassTable::AddOldTable(RawClass** old_table) {
89 ASSERT(Thread::Current()->IsMutatorThread());
90 old_tables_->Add(old_table);
91 }
92
-93
94 void ClassTable::FreeOldTables() {
95 while (old_tables_->length() > 0) {
96 free(old_tables_->RemoveLast());
97 }
98 }
99
-100
101 #ifndef PRODUCT
102 void ClassTable::SetTraceAllocationFor(intptr_t cid, bool trace) {
103 ClassHeapStats* stats = PreliminaryStatsAt(cid);
104 stats->set_trace_allocation(trace);
105 }
106
-107
108 bool ClassTable::TraceAllocationFor(intptr_t cid) {
109 ClassHeapStats* stats = PreliminaryStatsAt(cid);
110 return stats->trace_allocation();
111 }
112 #endif // !PRODUCT
113
-114
115 void ClassTable::Register(const Class& cls) {
116 ASSERT(Thread::Current()->IsMutatorThread());
117 intptr_t index = cls.id();
118 if (index != kIllegalCid) {
119 ASSERT(index > 0);
120 ASSERT(index < kNumPredefinedCids);
121 ASSERT(table_[index] == 0);
122 ASSERT(index < capacity_);
123 table_[index] = cls.raw();
124 // Add the vtable for this predefined class into the static vtable registry
(...skipping 30 matching lines...)
155 if (!Class::is_valid_id(top_)) {
156 FATAL1("Fatal error in ClassTable::Register: invalid index %" Pd "\n",
157 top_);
158 }
159 cls.set_id(top_);
160 table_[top_] = cls.raw();
161 top_++; // Increment next index.
162 }
163 }
164
-165
166 void ClassTable::AllocateIndex(intptr_t index) {
167 if (index >= capacity_) {
168 // Grow the capacity of the class table.
169 // TODO(koda): Add ClassTable::Grow to share code.
170 intptr_t new_capacity = index + capacity_increment_;
171 if (!Class::is_valid_id(index) || new_capacity < capacity_) {
172 FATAL1("Fatal error in ClassTable::Register: invalid index %" Pd "\n",
173 index);
174 }
175 RawClass** new_table = reinterpret_cast<RawClass**>(
(...skipping 14 matching lines...)
190 NOT_IN_PRODUCT(class_heap_stats_table_ = new_stats_table);
191 ASSERT(capacity_increment_ >= 1);
192 }
193
194 ASSERT(table_[index] == 0);
195 if (index >= top_) {
196 top_ = index + 1;
197 }
198 }
199
-200
201 void ClassTable::RegisterAt(intptr_t index, const Class& cls) {
202 ASSERT(Thread::Current()->IsMutatorThread());
203 ASSERT(index != kIllegalCid);
204 ASSERT(index >= kNumPredefinedCids);
205 AllocateIndex(index);
206 cls.set_id(index);
207 table_[index] = cls.raw();
208 }
209
-210
211 #if defined(DEBUG)
212 void ClassTable::Unregister(intptr_t index) {
213 table_[index] = 0;
214 }
215 #endif
216
-217
218 void ClassTable::Remap(intptr_t* old_to_new_cid) {
219 ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
220 intptr_t num_cids = NumCids();
221 RawClass** cls_by_old_cid = new RawClass*[num_cids];
222 for (intptr_t i = 0; i < num_cids; i++) {
223 cls_by_old_cid[i] = table_[i];
224 }
225 for (intptr_t i = 0; i < num_cids; i++) {
226 table_[old_to_new_cid[i]] = cls_by_old_cid[i];
227 }
228 delete[] cls_by_old_cid;
229 }
230
-231
232 void ClassTable::VisitObjectPointers(ObjectPointerVisitor* visitor) {
233 ASSERT(visitor != NULL);
234 visitor->VisitPointers(reinterpret_cast<RawObject**>(&table_[0]), top_);
235 }
236
-237
238 void ClassTable::Validate() {
239 Class& cls = Class::Handle();
240 for (intptr_t cid = kNumPredefinedCids; cid < top_; cid++) {
241 // Some of the class table entries maybe NULL as we create some
242 // top level classes but do not add them to the list of anonymous
243 // classes in a library if there are no top level fields or functions.
244 // Since there are no references to these top level classes they are
245 // not written into a full snapshot and will not be recreated when
246 // we read back the full snapshot. These class slots end up with NULL
247 // entries.
248 if (HasValidClassAt(cid)) {
249 cls = At(cid);
250 ASSERT(cls.IsClass());
251 ASSERT(cls.id() == cid);
252 }
253 }
254 }
255
-256
257 void ClassTable::Print() {
258 Class& cls = Class::Handle();
259 String& name = String::Handle();
260
261 for (intptr_t i = 1; i < top_; i++) {
262 if (!HasValidClassAt(i)) {
263 continue;
264 }
265 cls = At(i);
266 if (cls.raw() != reinterpret_cast<RawClass*>(0)) {
267 name = cls.Name();
268 OS::Print("%" Pd ": %s\n", i, name.ToCString());
269 }
270 }
271 }
272
-273
274 #ifndef PRODUCT
275 void ClassTable::PrintToJSONObject(JSONObject* object) {
276 if (!FLAG_support_service) {
277 return;
278 }
279 Class& cls = Class::Handle();
280 object->AddProperty("type", "ClassList");
281 {
282 JSONArray members(object, "classes");
283 for (intptr_t i = 1; i < top_; i++) {
284 if (HasValidClassAt(i)) {
285 cls = At(i);
286 members.AddValue(cls);
287 }
288 }
289 }
290 }
291
-292
293 void ClassHeapStats::Initialize() {
294 pre_gc.Reset();
295 post_gc.Reset();
296 recent.Reset();
297 accumulated.Reset();
298 last_reset.Reset();
299 promoted_count = 0;
300 promoted_size = 0;
301 state_ = 0;
302 USE(align_);
303 }
304
-305
306 void ClassHeapStats::ResetAtNewGC() {
307 Verify();
308 pre_gc.new_count = post_gc.new_count + recent.new_count;
309 pre_gc.new_size = post_gc.new_size + recent.new_size;
310 // Accumulate allocations.
311 accumulated.new_count += recent.new_count - last_reset.new_count;
312 accumulated.new_size += recent.new_size - last_reset.new_size;
313 last_reset.ResetNew();
314 post_gc.ResetNew();
315 recent.ResetNew();
316 old_pre_new_gc_count_ = recent.old_count;
317 old_pre_new_gc_size_ = recent.old_size;
318 }
319
-320
321 void ClassHeapStats::ResetAtOldGC() {
322 Verify();
323 pre_gc.old_count = post_gc.old_count + recent.old_count;
324 pre_gc.old_size = post_gc.old_size + recent.old_size;
325 // Accumulate allocations.
326 accumulated.old_count += recent.old_count - last_reset.old_count;
327 accumulated.old_size += recent.old_size - last_reset.old_size;
328 last_reset.ResetOld();
329 post_gc.ResetOld();
330 recent.ResetOld();
331 }
332
-333
334 void ClassHeapStats::Verify() {
335 pre_gc.Verify();
336 post_gc.Verify();
337 recent.Verify();
338 accumulated.Verify();
339 last_reset.Verify();
340 }
341
-342
343 void ClassHeapStats::UpdateSize(intptr_t instance_size) {
344 pre_gc.UpdateSize(instance_size);
345 post_gc.UpdateSize(instance_size);
346 recent.UpdateSize(instance_size);
347 accumulated.UpdateSize(instance_size);
348 last_reset.UpdateSize(instance_size);
349 promoted_size = promoted_count * instance_size;
350 old_pre_new_gc_size_ = old_pre_new_gc_count_ * instance_size;
351 }
352
-353
354 void ClassHeapStats::ResetAccumulator() {
355 // Remember how much was allocated so we can subtract this from the result
356 // when printing.
357 last_reset.new_count = recent.new_count;
358 last_reset.new_size = recent.new_size;
359 last_reset.old_count = recent.old_count;
360 last_reset.old_size = recent.old_size;
361 accumulated.Reset();
362 }
363
-364
365 void ClassHeapStats::UpdatePromotedAfterNewGC() {
366 promoted_count = recent.old_count - old_pre_new_gc_count_;
367 promoted_size = recent.old_size - old_pre_new_gc_size_;
368 }
369
-370
371 void ClassHeapStats::PrintToJSONObject(const Class& cls,
372 JSONObject* obj) const {
373 if (!FLAG_support_service) {
374 return;
375 }
376 obj->AddProperty("type", "ClassHeapStats");
377 obj->AddProperty("class", cls);
378 {
379 JSONArray new_stats(obj, "new");
380 new_stats.AddValue(pre_gc.new_count);
(...skipping 17 matching lines...)
398 old_stats.AddValue(recent.old_size);
399 old_stats.AddValue64(accumulated.old_count + recent.old_count -
400 last_reset.old_count);
401 old_stats.AddValue64(accumulated.old_size + recent.old_size -
402 last_reset.old_size);
403 }
404 obj->AddProperty("promotedInstances", promoted_count);
405 obj->AddProperty("promotedBytes", promoted_size);
406 }
407
-408
409 void ClassTable::UpdateAllocatedNew(intptr_t cid, intptr_t size) {
410 ClassHeapStats* stats = PreliminaryStatsAt(cid);
411 ASSERT(stats != NULL);
412 ASSERT(size != 0);
413 stats->recent.AddNew(size);
414 }
415
-416
417 void ClassTable::UpdateAllocatedOld(intptr_t cid, intptr_t size) {
418 ClassHeapStats* stats = PreliminaryStatsAt(cid);
419 ASSERT(stats != NULL);
420 ASSERT(size != 0);
421 stats->recent.AddOld(size);
422 }
423
-424
425 bool ClassTable::ShouldUpdateSizeForClassId(intptr_t cid) {
426 return !RawObject::IsVariableSizeClassId(cid);
427 }
428
-429
430 ClassHeapStats* ClassTable::PreliminaryStatsAt(intptr_t cid) {
431 ASSERT(cid > 0);
432 if (cid < kNumPredefinedCids) {
433 return &predefined_class_heap_stats_table_[cid];
434 }
435 ASSERT(cid < top_);
436 return &class_heap_stats_table_[cid];
437 }
438
-439
440 ClassHeapStats* ClassTable::StatsWithUpdatedSize(intptr_t cid) {
441 if (!HasValidClassAt(cid) || (cid == kFreeListElement) ||
442 (cid == kForwardingCorpse) || (cid == kSmiCid)) {
443 return NULL;
444 }
445 Class& cls = Class::Handle(At(cid));
446 if (!(cls.is_finalized() || cls.is_prefinalized())) {
447 // Not finalized.
448 return NULL;
449 }
450 ClassHeapStats* stats = PreliminaryStatsAt(cid);
451 if (ShouldUpdateSizeForClassId(cid)) {
452 stats->UpdateSize(cls.instance_size());
453 }
454 stats->Verify();
455 return stats;
456 }
457
-458
459 void ClassTable::ResetCountersOld() {
460 for (intptr_t i = 0; i < kNumPredefinedCids; i++) {
461 predefined_class_heap_stats_table_[i].ResetAtOldGC();
462 }
463 for (intptr_t i = kNumPredefinedCids; i < top_; i++) {
464 class_heap_stats_table_[i].ResetAtOldGC();
465 }
466 }
467
-468
469 void ClassTable::ResetCountersNew() {
470 for (intptr_t i = 0; i < kNumPredefinedCids; i++) {
471 predefined_class_heap_stats_table_[i].ResetAtNewGC();
472 }
473 for (intptr_t i = kNumPredefinedCids; i < top_; i++) {
474 class_heap_stats_table_[i].ResetAtNewGC();
475 }
476 }
477
-478
479 void ClassTable::UpdatePromoted() {
480 for (intptr_t i = 0; i < kNumPredefinedCids; i++) {
481 predefined_class_heap_stats_table_[i].UpdatePromotedAfterNewGC();
482 }
483 for (intptr_t i = kNumPredefinedCids; i < top_; i++) {
484 class_heap_stats_table_[i].UpdatePromotedAfterNewGC();
485 }
486 }
487
-488
489 ClassHeapStats** ClassTable::TableAddressFor(intptr_t cid) {
490 return (cid < kNumPredefinedCids) ? &predefined_class_heap_stats_table_
491 : &class_heap_stats_table_;
492 }
493
-494
495 intptr_t ClassTable::TableOffsetFor(intptr_t cid) {
496 return (cid < kNumPredefinedCids)
497 ? OFFSET_OF(ClassTable, predefined_class_heap_stats_table_)
498 : OFFSET_OF(ClassTable, class_heap_stats_table_);
499 }
500
-501
502 intptr_t ClassTable::ClassOffsetFor(intptr_t cid) {
503 return cid * sizeof(ClassHeapStats); // NOLINT
504 }
505
-506
507 intptr_t ClassTable::CounterOffsetFor(intptr_t cid, bool is_new_space) {
508 const intptr_t class_offset = ClassOffsetFor(cid);
509 const intptr_t count_field_offset =
510 is_new_space ? ClassHeapStats::allocated_since_gc_new_space_offset()
511 : ClassHeapStats::allocated_since_gc_old_space_offset();
512 return class_offset + count_field_offset;
513 }
514
-515
516 intptr_t ClassTable::StateOffsetFor(intptr_t cid) {
517 return ClassOffsetFor(cid) + ClassHeapStats::state_offset();
518 }
519
-520
521 intptr_t ClassTable::SizeOffsetFor(intptr_t cid, bool is_new_space) {
522 const uword class_offset = ClassOffsetFor(cid);
523 const uword size_field_offset =
524 is_new_space ? ClassHeapStats::allocated_size_since_gc_new_space_offset()
525 : ClassHeapStats::allocated_size_since_gc_old_space_offset();
526 return class_offset + size_field_offset;
527 }
528
-529
530 void ClassTable::AllocationProfilePrintJSON(JSONStream* stream) {
531 if (!FLAG_support_service) {
532 return;
533 }
534 Isolate* isolate = Isolate::Current();
535 ASSERT(isolate != NULL);
536 Heap* heap = isolate->heap();
537 ASSERT(heap != NULL);
538 JSONObject obj(stream);
539 obj.AddProperty("type", "AllocationProfile");
(...skipping 19 matching lines...)
559 const ClassHeapStats* stats = StatsWithUpdatedSize(i);
560 if (stats != NULL) {
561 JSONObject obj(&arr);
562 cls = At(i);
563 stats->PrintToJSONObject(cls, &obj);
564 }
565 }
566 }
567 }
568
-569
570 void ClassTable::ResetAllocationAccumulators() {
571 for (intptr_t i = 1; i < top_; i++) {
572 ClassHeapStats* stats = StatsWithUpdatedSize(i);
573 if (stats != NULL) {
574 stats->ResetAccumulator();
575 }
576 }
577 }
578
-579
580 void ClassTable::UpdateLiveOld(intptr_t cid, intptr_t size, intptr_t count) {
581 ClassHeapStats* stats = PreliminaryStatsAt(cid);
582 ASSERT(stats != NULL);
583 ASSERT(size >= 0);
584 ASSERT(count >= 0);
585 stats->post_gc.AddOld(size, count);
586 }
587
-588
589 void ClassTable::UpdateLiveNew(intptr_t cid, intptr_t size) {
590 ClassHeapStats* stats = PreliminaryStatsAt(cid);
591 ASSERT(stats != NULL);
592 ASSERT(size >= 0);
593 stats->post_gc.AddNew(size);
594 }
595 #endif // !PRODUCT
596
-597
598 } // namespace dart