Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(78)

Side by Side Diff: base/trace_event/heap_profiler_allocation_context.cc

Issue 1907593002: Revert of [tracing] Turn StackFrame into struct. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "base/trace_event/heap_profiler_allocation_context.h" 5 #include "base/trace_event/heap_profiler_allocation_context.h"
6 6
7 #include <cstring> 7 #include <cstring>
8 8
9 #include "base/hash.h" 9 #include "base/hash.h"
10 #include "base/macros.h" 10 #include "base/macros.h"
11 11
12 namespace base { 12 namespace base {
13 namespace trace_event { 13 namespace trace_event {
14 14
15 bool operator < (const StackFrame& lhs, const StackFrame& rhs) { 15 // Constructor that does not initialize members.
16 return lhs.value < rhs.value; 16 AllocationContext::AllocationContext() {}
17
18 // static
19 AllocationContext AllocationContext::Empty() {
20 AllocationContext ctx;
21
22 for (size_t i = 0; i < arraysize(ctx.backtrace.frames); i++)
23 ctx.backtrace.frames[i] = nullptr;
24
25 ctx.type_name = nullptr;
26
27 return ctx;
17 } 28 }
18 29
19 bool operator == (const StackFrame& lhs, const StackFrame& rhs) { 30 bool operator==(const Backtrace& lhs, const Backtrace& rhs) {
20 return lhs.value == rhs.value; 31 // Pointer equality of the stack frames is assumed, so instead of doing a deep
32 // string comparison on all of the frames, a |memcmp| suffices.
33 return std::memcmp(lhs.frames, rhs.frames, sizeof(lhs.frames)) == 0;
21 } 34 }
22 35
// NOTE(review): the lines below are the left (pre-revert) diff column only —
// the StackFrame-as-struct implementations that this CL deletes. They are
// retained verbatim as residue of the scraped side-by-side review page and
// are not part of the post-revert file.
23 bool operator != (const StackFrame& lhs, const StackFrame& rhs) {
24 return !(lhs.value == rhs.value);
25 }
26
27 Backtrace::Backtrace(): frame_count(0) {}
28
29 bool operator==(const Backtrace& lhs, const Backtrace& rhs) {
30 if (lhs.frame_count != rhs.frame_count) return false;
31 return std::equal(lhs.frames, lhs.frames + lhs.frame_count, rhs.frames);
32 }
33
34 AllocationContext::AllocationContext(): type_name(nullptr) {}
35
36 bool operator==(const AllocationContext& lhs, const AllocationContext& rhs) { 36 bool operator==(const AllocationContext& lhs, const AllocationContext& rhs) {
37 return (lhs.backtrace == rhs.backtrace) && (lhs.type_name == rhs.type_name); 37 return (lhs.backtrace == rhs.backtrace) && (lhs.type_name == rhs.type_name);
38 } 38 }
39 39
40 } // namespace trace_event 40 } // namespace trace_event
41 } // namespace base 41 } // namespace base
42 42
43 namespace BASE_HASH_NAMESPACE { 43 namespace BASE_HASH_NAMESPACE {
44 using base::trace_event::AllocationContext; 44 using base::trace_event::AllocationContext;
45 using base::trace_event::Backtrace; 45 using base::trace_event::Backtrace;
// NOTE(review): left (pre-revert) diff column only — the hash<StackFrame>
// specialization deleted by this CL (StackFrame reverts from a struct to a
// plain pointer, so no dedicated hash specialization remains). Retained
// verbatim as residue of the scraped side-by-side review page.
46 using base::trace_event::StackFrame;
47
48 size_t hash<StackFrame>::operator()(const StackFrame& frame) const {
49 return hash<const void*>()(frame.value);
50 }
51 46
52 size_t hash<Backtrace>::operator()(const Backtrace& backtrace) const { 47 size_t hash<Backtrace>::operator()(const Backtrace& backtrace) const {
53 const void* values[Backtrace::kMaxFrameCount]; 48 return base::SuperFastHash(reinterpret_cast<const char*>(backtrace.frames),
54 for (size_t i = 0; i != backtrace.frame_count; ++i) { 49 sizeof(backtrace.frames));
55 values[i] = backtrace.frames[i].value;
56 }
57 return base::SuperFastHash(reinterpret_cast<const char*>(values),
58 backtrace.frame_count * sizeof(*values));
59 } 50 }
60 51
61 size_t hash<AllocationContext>::operator()(const AllocationContext& ctx) const { 52 size_t hash<AllocationContext>::operator()(const AllocationContext& ctx) const {
62 size_t backtrace_hash = hash<Backtrace>()(ctx.backtrace); 53 size_t backtrace_hash = hash<Backtrace>()(ctx.backtrace);
63 54
64 // Multiplicative hash from [Knuth 1998]. Works best if |size_t| is 32 bits, 55 // Multiplicative hash from [Knuth 1998]. Works best if |size_t| is 32 bits,
65 // because the magic number is a prime very close to 2^32 / golden ratio, but 56 // because the magic number is a prime very close to 2^32 / golden ratio, but
66 // will still redistribute keys bijectively on 64-bit architectures because 57 // will still redistribute keys bijectively on 64-bit architectures because
67 // the magic number is coprime to 2^64. 58 // the magic number is coprime to 2^64.
68 size_t type_hash = reinterpret_cast<size_t>(ctx.type_name) * 2654435761; 59 size_t type_hash = reinterpret_cast<size_t>(ctx.type_name) * 2654435761;
69 60
70 // Multiply one side to break the commutativity of +. Multiplication with a 61 // Multiply one side to break the commutativity of +. Multiplication with a
71 // number coprime to |numeric_limits<size_t>::max() + 1| is bijective so 62 // number coprime to |numeric_limits<size_t>::max() + 1| is bijective so
72 // randomness is preserved. 63 // randomness is preserved.
73 return (backtrace_hash * 3) + type_hash; 64 return (backtrace_hash * 3) + type_hash;
74 } 65 }
75 66
76 } // BASE_HASH_NAMESPACE 67 } // BASE_HASH_NAMESPACE
OLDNEW
« no previous file with comments | « base/trace_event/heap_profiler_allocation_context.h ('k') | base/trace_event/heap_profiler_allocation_context_tracker.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698