OLD | NEW |
(Empty) | |
| 1 // Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. |
| 4 |
| 5 #include "vm/code_statistics.h" |
| 6 |
| 7 namespace dart { |
| 8 |
CombinedCodeStatistics::CombinedCodeStatistics() {
  // Aggregate counters start at zero; they are accumulated across many
  // per-function CodeStatistics instances via CodeStatistics::AppendTo.
  unaccounted_bytes_ = 0;
  alignment_bytes_ = 0;
  object_header_bytes_ = 0;
  wasteful_function_count_ = 0;
  return_const_count_ = 0;
  return_const_with_load_field_count_ = 0;
  intptr_t i = 0;

// One table entry per IR instruction kind, named via stringization of the
// instruction type; `i` advances through the leading slots of entries_.
#define DO(type)                                                               \
  entries_[i].name = #type;                                                    \
  entries_[i].bytes = 0;                                                       \
  entries_[i++].count = 0;

  FOR_EACH_INSTRUCTION(DO)

#undef DO

// Additional entries for code regions not tied to a single instruction
// (slow paths, prologues, stub code, ...). Each tag indexes entries_
// directly — presumably into slots past the per-instruction ones filled
// above; verify against the tag enum declaration.
#define INIT_SPECIAL_ENTRY(tag, str)                                           \
  entries_[tag].name = str;                                                    \
  entries_[tag].bytes = 0;                                                     \
  entries_[tag].count = 0;

  INIT_SPECIAL_ENTRY(kPolymorphicInstanceCallAsStaticCall,
                     "PolymorphicInstanceCall -> StaticCall")

  INIT_SPECIAL_ENTRY(kTagCheckedSmiSlowPath, "<slow-path:checked-smi>")
  INIT_SPECIAL_ENTRY(kTagCheckedSmiCmpSlowPath, "<slow-path:checked-smi-compare>")
  INIT_SPECIAL_ENTRY(kTagBoxAllocationSlowPath, "<slow-path:box-allocation>")
  INIT_SPECIAL_ENTRY(kTagAllocateContextSlowPath,
                     "<slow-path:allocate-context>")
  INIT_SPECIAL_ENTRY(kTagCheckStackOverflowSlowPath,
                     "<slow-path:stack-overflow>")
  INIT_SPECIAL_ENTRY(kTagMegamorphicSlowPath, "<slow-path:megamorphic>")

  INIT_SPECIAL_ENTRY(kTagCheckArgumentCount, "<check argument count>");
  INIT_SPECIAL_ENTRY(kTagCopyParameters, "<copy parameters>");
  INIT_SPECIAL_ENTRY(kTagStubCode, "<stub-code>");
  INIT_SPECIAL_ENTRY(kTagCheckedEntry, "<checked-entry-prologue>");
  INIT_SPECIAL_ENTRY(kTagFrameEntry, "<frame-entry>");
  INIT_SPECIAL_ENTRY(kTagLoadClosureContext, "<load-closure-context>");
  INIT_SPECIAL_ENTRY(kTagIntrinsics, "<intrinsics>");
  INIT_SPECIAL_ENTRY(kDebugAfterBody, "<debug-after-body>");

  INIT_SPECIAL_ENTRY(kTagTrySyncSpilling, "<try-sync-spilling-code>");

#undef INIT_SPECIAL_ENTRY
}
| 57 |
// Prints the accumulated code-size statistics to stderr: a per-entry
// breakdown (percentage, bytes, count, average bytes per occurrence)
// followed by overall totals and derived function counts.
void CombinedCodeStatistics::DumpStatistics() {
  ASSERT(unaccounted_bytes_ >= 0);

  // Sort entries_ ascending by bytes so the largest contributors print last.
  SlowSort();

  intptr_t instruction_bytes = 0;
  for (intptr_t i = 0; i < kNumEntries; i++) {
    instruction_bytes += entries_[i].bytes;
  }
  intptr_t total = object_header_bytes_ +
                   instruction_bytes +
                   unaccounted_bytes_ +
                   alignment_bytes_;
  // Pre-divided by 100 so that `bytes / ftotal` yields a percentage.
  float ftotal = static_cast<float>(total) / 100.0;

  fprintf(stderr, "--------------------\n");

  for (intptr_t i = 0; i < kNumEntries; i++) {
    const char* name = entries_[i].name;
    intptr_t bytes = entries_[i].bytes;
    intptr_t count = entries_[i].count;
    float percent = bytes / ftotal;
    // NOTE(review): presumably bytes > 0 implies count > 0 (SpecialEnd bumps
    // both together), so avg is finite for every entry that is printed.
    float avg = static_cast<float>(bytes) / count;
    if (bytes > 0) {
      fprintf(
          stderr,
          "%5.2f %% "
          "% 8" Pd " bytes "
          "% 8" Pd " count "
          "%8.2f avg bytes/entry "
          "- %s\n",
          percent,
          bytes,
          count,
          avg,
          name);
    }
  }

  fprintf(stderr, "--------------------\n");

  // Totals section: the four components summed into `total` above.
  fprintf(stderr, "%5.2f %% % 8" Pd " bytes unaccounted\n",
          unaccounted_bytes_ / ftotal, unaccounted_bytes_);
  fprintf(stderr, "%5.2f %% % 8" Pd " bytes alignment\n",
          alignment_bytes_ / ftotal, alignment_bytes_);
  fprintf(stderr, "%5.2f %% % 8" Pd " bytes instruction object header\n",
          object_header_bytes_ / ftotal, object_header_bytes_);
  fprintf(stderr, "%5.2f %% % 8" Pd " bytes instructions\n",
          instruction_bytes / ftotal, instruction_bytes);
  fprintf(stderr, "--------------------\n");
  fprintf(stderr, "%5.2f %% % 8" Pd " bytes in total\n",
          total / ftotal, total);
  fprintf(stderr, "--------------------\n");
  // Function classifications accumulated by CodeStatistics::AppendTo.
  fprintf(stderr, "% 8" Pd " return-constant functions\n", return_const_count_);
  fprintf(stderr, "% 8" Pd " return-constant-with-load-field functions\n",
          return_const_with_load_field_count_);
  fprintf(stderr, "% 8" Pd " wasteful functions (body < 2 * frame overhead)\n",
          wasteful_function_count_);
  fprintf(stderr, "--------------------\n");
}
| 118 |
| 119 |
| 120 void CombinedCodeStatistics::SlowSort() { |
| 121 for (intptr_t upper = kNumEntries - 1; upper >= 0; upper--) { |
| 122 intptr_t largest_index = 0; |
| 123 intptr_t largest_value = entries_[largest_index].bytes; |
| 124 for (intptr_t i = 1; i <= upper; i++) { |
| 125 intptr_t bytes = entries_[i].bytes; |
| 126 if (largest_value < bytes) { |
| 127 largest_index = i; |
| 128 largest_value = bytes; |
| 129 } |
| 130 } |
| 131 if (largest_index != upper) Swap(largest_index, upper); |
| 132 } |
| 133 } |
| 134 |
| 135 void CombinedCodeStatistics::Swap(intptr_t a, intptr_t b) { |
| 136 const char* a_name = entries_[a].name; |
| 137 intptr_t a_bytes = entries_[a].bytes; |
| 138 intptr_t a_count = entries_[a].count; |
| 139 |
| 140 entries_[a].name = entries_[b].name; |
| 141 entries_[a].bytes = entries_[b].bytes; |
| 142 entries_[a].count = entries_[b].count; |
| 143 |
| 144 entries_[b].name = a_name; |
| 145 entries_[b].bytes = a_bytes; |
| 146 entries_[b].count = a_count; |
| 147 } |
| 148 |
| 149 CodeStatistics::CodeStatistics(Assembler* assembler) |
| 150 : assembler_(assembler) { |
| 151 memset(entries_, 0, CombinedCodeStatistics::kNumEntries * sizeof(Entry)); |
| 152 instruction_bytes_ = 0; |
| 153 unaccounted_bytes_ = 0; |
| 154 alignment_bytes_ = 0; |
| 155 |
| 156 stack_index_ = -1; |
| 157 for (intptr_t i = 0; i < kStackSize; i++) stack_[i] = -1; |
| 158 } |
| 159 |
| 160 void CodeStatistics::Begin(Instruction* instruction) { |
| 161 SpecialBegin(static_cast<intptr_t>(instruction->tag())); |
| 162 } |
| 163 |
| 164 void CodeStatistics::End(Instruction* instruction) { |
| 165 SpecialEnd(static_cast<intptr_t>(instruction->tag())); |
| 166 } |
| 167 |
// Opens a code region attributed to `tag` by pushing the current assembler
// offset onto the begin-offset stack. Must be balanced by a SpecialEnd
// call with the same tag.
void CodeStatistics::SpecialBegin(intptr_t tag) {
  stack_index_++;
  ASSERT(stack_index_ < kStackSize);
  // The slot must be free (-1); an unbalanced Begin/End pairing or
  // nesting deeper than kStackSize trips these asserts.
  ASSERT(stack_[stack_index_] == -1);
  ASSERT(tag < CombinedCodeStatistics::kNumEntries);
  stack_[stack_index_] = assembler_->CodeSize();
  ASSERT(stack_[stack_index_] >= 0);
}
| 176 |
| 177 void CodeStatistics::SpecialEnd(intptr_t tag) { |
| 178 ASSERT(stack_index_ > 0 || stack_[stack_index_] >= 0); |
| 179 ASSERT(tag < CombinedCodeStatistics::kNumEntries); |
| 180 |
| 181 intptr_t diff = assembler_->CodeSize() - stack_[stack_index_]; |
| 182 ASSERT(diff >= 0); |
| 183 ASSERT(entries_[tag].bytes >= 0); |
| 184 ASSERT(entries_[tag].count >= 0); |
| 185 entries_[tag].bytes += diff; |
| 186 entries_[tag].count++; |
| 187 instruction_bytes_ += diff; |
| 188 stack_[stack_index_] = -1; |
| 189 stack_index_--; |
| 190 |
| 191 // By adding to the current stack index we will increase the 'assembler_->Code
Size()' offset |
| 192 // when the parent instruction started, thereby reducing it's accounted size |
| 193 // by `diff`. |
| 194 if (tag == CombinedCodeStatistics::kTagTrySyncSpilling) { |
| 195 // We make try-sync-spilling code be subtracted from the calls. |
| 196 ASSERT(stack_index_ >= 0); |
| 197 stack_[stack_index_] += diff; |
| 198 } else if (tag == CombinedCodeStatistics::kPolymorphicInstanceCallAsStaticCall
) { |
| 199 // We make polymorphic-as-static-calls be subtraced from the polymorphic |
| 200 // calls. |
| 201 ASSERT(stack_index_ >= 0); |
| 202 stack_[stack_index_] += diff; |
| 203 } |
| 204 } |
| 205 |
| 206 void CodeStatistics::Finalize() { |
| 207 intptr_t function_size = assembler_->CodeSize(); |
| 208 unaccounted_bytes_ = function_size - instruction_bytes_; |
| 209 ASSERT(unaccounted_bytes_ >= 0); |
| 210 alignment_bytes_ = |
| 211 Utils::RoundUp(function_size, OS::PreferredCodeAlignment()) - |
| 212 function_size; |
| 213 assembler_ = NULL; |
| 214 } |
| 215 |
| 216 void CodeStatistics::AppendTo(CombinedCodeStatistics* stat) { |
| 217 intptr_t sum = 0; |
| 218 bool returns_constant = true; |
| 219 bool returns_const_with_load_field_ = true; |
| 220 |
| 221 for (intptr_t i = 0; i < CombinedCodeStatistics::kNumEntries; i++) { |
| 222 intptr_t bytes = entries_[i].bytes; |
| 223 stat->entries_[i].count += entries_[i].count; |
| 224 if (bytes > 0) { |
| 225 sum += bytes; |
| 226 stat->entries_[i].bytes += bytes; |
| 227 if (i != CombinedCodeStatistics::kTagFrameEntry && |
| 228 i != CombinedCodeStatistics::kTagParallelMove && |
| 229 i != CombinedCodeStatistics::kTagReturn && |
| 230 i != CombinedCodeStatistics::kTagCheckStackOverflow && |
| 231 i != CombinedCodeStatistics::kTagCheckStackOverflowSlowPath) { |
| 232 returns_constant = false; |
| 233 if (i != CombinedCodeStatistics::kTagLoadField && |
| 234 i != CombinedCodeStatistics::kTagTargetEntry && |
| 235 i != CombinedCodeStatistics::kTagJoinEntry) { |
| 236 returns_const_with_load_field_ = false; |
| 237 } |
| 238 } |
| 239 } |
| 240 } |
| 241 stat->unaccounted_bytes_ += unaccounted_bytes_; |
| 242 ASSERT(stat->unaccounted_bytes_ >= 0); |
| 243 stat->alignment_bytes_ += alignment_bytes_; |
| 244 stat->object_header_bytes_ += Instructions::HeaderSize(); |
| 245 |
| 246 intptr_t frame_overhead = |
| 247 entries_[CombinedCodeStatistics::kTagFrameEntry].bytes + |
| 248 entries_[CombinedCodeStatistics::kTagReturn].bytes; |
| 249 |
| 250 bool is_wasteful = sum < (2 * frame_overhead); |
| 251 if (is_wasteful) stat->wasteful_function_count_++; |
| 252 if (returns_constant) stat->return_const_count_++; |
| 253 if (returns_const_with_load_field_) { |
| 254 stat->return_const_with_load_field_count_++; |
| 255 } |
| 256 } |
| 257 |
| 258 } // namespace dart |
OLD | NEW |