Chromium Code Reviews

Unified diff: src/heap-snapshot-generator.cc

Issue 304553002: Replace STATIC_CHECK with STATIC_ASSERT. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 6 months ago
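The change is mechanical: every use of the compile-time assertion macro STATIC_CHECK in this file becomes STATIC_ASSERT, re-wrapped where the longer macro name pushes a line past 80 columns. As a rough illustration of what such a compile-time check provides, here is a minimal sketch using standard C++11 static_assert rather than V8's actual macro; CountDigits() is a hypothetical helper mirroring the guard in utoa_impl() and is not taken from the patch. The diff below shows each renamed call site in context.

    // Illustrative sketch only: standard C++11 static_assert modelling the kind
    // of compile-time check this patch renames. Not V8's macro definition.
    #include <cstdio>

    template <typename T>
    int CountDigits(T value) {
      // Reject signed types at compile time; the digit loop assumes no negatives.
      static_assert(static_cast<T>(-1) > 0, "T must be an unsigned type");
      int digits = 0;
      do {
        ++digits;
      } while (value /= 10);
      return digits;
    }

    int main() {
      std::printf("%d\n", CountDigits(12345u));  // prints 5
      return 0;
    }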
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "v8.h"

 #include "heap-snapshot-generator-inl.h"

 #include "allocation-tracker.h"
 #include "code-stubs.h"
(...skipping 64 matching lines...)
                                     int index,
                                     HeapEntry* entry) {
   HeapGraphEdge edge(type, index, this->index(), entry->index());
   snapshot_->edges().Add(edge);
   ++children_count_;
 }


 void HeapEntry::Print(
     const char* prefix, const char* edge_name, int max_depth, int indent) {
-  STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
+  STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
   OS::Print("%6" V8PRIuPTR " @%6u %*c %s%s: ",
             self_size(), id(), indent, ' ', prefix, edge_name);
   if (type() != kString) {
     OS::Print("%s %.40s\n", TypeAsString(), name_);
   } else {
     OS::Print("\"");
     const char* c = name_;
     while (*c && (c - name_) <= 40) {
       if (*c != '\n')
         OS::Print("%c", *c);
(...skipping 87 matching lines...)
 HeapSnapshot::HeapSnapshot(HeapProfiler* profiler,
                            const char* title,
                            unsigned uid)
     : profiler_(profiler),
       title_(title),
       uid_(uid),
       root_index_(HeapEntry::kNoEntry),
       gc_roots_index_(HeapEntry::kNoEntry),
       natives_root_index_(HeapEntry::kNoEntry),
       max_snapshot_js_object_id_(0) {
-  STATIC_CHECK(
+  STATIC_ASSERT(
       sizeof(HeapGraphEdge) ==
       SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
-  STATIC_CHECK(
+  STATIC_ASSERT(
       sizeof(HeapEntry) ==
       SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
   USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
   USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
   USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
   USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
   for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
     gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
   }
 }
(...skipping 986 matching lines...)
     SetInternalReference(js_fun, entry,
                          "shared", shared_info,
                          JSFunction::kSharedFunctionInfoOffset);
     TagObject(js_fun->context(), "(context)");
     SetInternalReference(js_fun, entry,
                          "context", js_fun->context(),
                          JSFunction::kContextOffset);
     SetWeakReference(js_fun, entry,
                      "next_function_link", js_fun->next_function_link(),
                      JSFunction::kNextFunctionLinkOffset);
-    STATIC_CHECK(JSFunction::kNextFunctionLinkOffset
-                 == JSFunction::kNonWeakFieldsEndOffset);
-    STATIC_CHECK(JSFunction::kNextFunctionLinkOffset + kPointerSize
-                 == JSFunction::kSize);
+    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset
+                  == JSFunction::kNonWeakFieldsEndOffset);
+    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset + kPointerSize
+                  == JSFunction::kSize);
   } else if (obj->IsGlobalObject()) {
     GlobalObject* global_obj = GlobalObject::cast(obj);
     SetInternalReference(global_obj, entry,
                          "builtins", global_obj->builtins(),
                          GlobalObject::kBuiltinsOffset);
     SetInternalReference(global_obj, entry,
                          "native_context", global_obj->native_context(),
                          GlobalObject::kNativeContextOffset);
     SetInternalReference(global_obj, entry,
                          "global_context", global_obj->global_context(),
                          GlobalObject::kGlobalContextOffset);
     SetInternalReference(global_obj, entry,
                          "global_receiver", global_obj->global_receiver(),
                          GlobalObject::kGlobalReceiverOffset);
-    STATIC_CHECK(GlobalObject::kHeaderSize - JSObject::kHeaderSize ==
-                 4 * kPointerSize);
+    STATIC_ASSERT(GlobalObject::kHeaderSize - JSObject::kHeaderSize ==
+                  4 * kPointerSize);
   } else if (obj->IsJSArrayBufferView()) {
     JSArrayBufferView* view = JSArrayBufferView::cast(obj);
     SetInternalReference(view, entry, "buffer", view->buffer(),
                          JSArrayBufferView::kBufferOffset);
     SetWeakReference(view, entry, "weak_next", view->weak_next(),
                      JSArrayBufferView::kWeakNextOffset);
   }
   TagObject(js_obj->properties(), "(object properties)");
   SetInternalReference(obj, entry,
(...skipping 78 matching lines...)
     TagObject(context->normalized_map_cache(), "(context norm. map cache)");
     TagObject(context->runtime_context(), "(runtime context)");
     TagObject(context->embedder_data(), "(context data)");
     NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
     EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
                           optimized_functions_list);
     EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
     EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
     EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
 #undef EXTRACT_CONTEXT_FIELD
-    STATIC_CHECK(Context::OPTIMIZED_FUNCTIONS_LIST == Context::FIRST_WEAK_SLOT);
-    STATIC_CHECK(Context::NEXT_CONTEXT_LINK + 1
-                 == Context::NATIVE_CONTEXT_SLOTS);
-    STATIC_CHECK(Context::FIRST_WEAK_SLOT + 5 == Context::NATIVE_CONTEXT_SLOTS);
+    STATIC_ASSERT(Context::OPTIMIZED_FUNCTIONS_LIST ==
+                  Context::FIRST_WEAK_SLOT);
+    STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
+                  Context::NATIVE_CONTEXT_SLOTS);
+    STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 5 ==
+                  Context::NATIVE_CONTEXT_SLOTS);
   }
 }


 void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
   if (map->HasTransitionArray()) {
     TransitionArray* transitions = map->transitions();
     int transitions_entry = GetEntry(transitions)->index();
     Object* back_pointer = transitions->back_pointer_storage();
     TagObject(back_pointer, "(back pointer)");
(...skipping 214 matching lines...)
                                                       AllocationSite* site) {
   SetInternalReference(site, entry, "transition_info", site->transition_info(),
                        AllocationSite::kTransitionInfoOffset);
   SetInternalReference(site, entry, "nested_site", site->nested_site(),
                        AllocationSite::kNestedSiteOffset);
   MarkAsWeakContainer(site->dependent_code());
   SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
                        AllocationSite::kDependentCodeOffset);
   // Do not visit weak_next as it is not visited by the StaticVisitor,
   // and we're not very interested in weak_next field here.
-  STATIC_CHECK(AllocationSite::kWeakNextOffset >=
-               AllocationSite::BodyDescriptor::kEndOffset);
+  STATIC_ASSERT(AllocationSite::kWeakNextOffset >=
+                AllocationSite::BodyDescriptor::kEndOffset);
 }


 class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
  public:
   JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
       : size_(size)
       , explorer_(explorer) {
   }
(...skipping 1246 matching lines...)

 template<> struct ToUnsigned<8> {
   typedef uint64_t Type;
 };

 }  // namespace


 template<typename T>
 static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
-  STATIC_CHECK(static_cast<T>(-1) > 0);  // Check that T is unsigned
+  STATIC_ASSERT(static_cast<T>(-1) > 0);  // Check that T is unsigned
   int number_of_digits = 0;
   T t = value;
   do {
     ++number_of_digits;
   } while (t /= 10);

   buffer_pos += number_of_digits;
   int result = buffer_pos;
   do {
     int last_digit = static_cast<int>(value % 10);
     buffer[--buffer_pos] = '0' + last_digit;
     value /= 10;
   } while (value);
   return result;
 }


 template<typename T>
 static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
   typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
-  STATIC_CHECK(sizeof(value) == sizeof(unsigned_value));
+  STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
   return utoa_impl(unsigned_value, buffer, buffer_pos);
 }


 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
                                                bool first_edge) {
   // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
   static const int kBufferSize =
       MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;  // NOLINT
   EmbeddedVector<char, kBufferSize> buffer;
(...skipping 310 matching lines...)
   writer_->AddString("\"<dummy>\"");
   for (int i = 1; i < sorted_strings.length(); ++i) {
     writer_->AddCharacter(',');
     SerializeString(sorted_strings[i]);
     if (writer_->aborted()) return;
   }
 }


 } }  // namespace v8::internal
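For reference, the buffer-size comment in SerializeEdge() above works out as follows when unsigned is 32 bits wide. That width is an assumption made here for illustration; the real code derives the digit count from MaxDecimalDigitsIn<sizeof(unsigned)>, and this sketch is not part of the patch.

    // Back-of-the-envelope check of kBufferSize in SerializeEdge(), assuming a
    // 32-bit unsigned (at most 10 decimal digits, since UINT32_MAX is 4294967295).
    #include <cstdio>

    int main() {
      const int kMaxDigits = 10;                       // digits in 4294967295
      const int kBufferSize = kMaxDigits * 3 + 3 + 2;  // 3 numbers + 3 commas + '\n' + '\0'
      std::printf("kBufferSize = %d\n", kBufferSize);  // prints 35
      return 0;
    }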