OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "base/trace_event/heap_profiler_allocation_context.h" | 5 #include "base/trace_event/heap_profiler_allocation_context.h" |
6 | 6 |
7 #include <cstring> | 7 #include <cstring> |
8 | 8 |
9 #include "base/hash.h" | 9 #include "base/hash.h" |
10 #include "base/macros.h" | 10 #include "base/macros.h" |
11 | 11 |
12 namespace base { | 12 namespace base { |
13 namespace trace_event { | 13 namespace trace_event { |
14 | 14 |
15 bool operator < (const StackFrame& lhs, const StackFrame& rhs) { | |
16 return lhs.value < rhs.value; | |
17 } | |
18 | |
19 bool operator == (const StackFrame& lhs, const StackFrame& rhs) { | |
20 return lhs.value == rhs.value; | |
21 } | |
22 | |
23 bool operator != (const StackFrame& lhs, const StackFrame& rhs) { | |
24 return lhs.value != rhs.value; | |
Primiano Tucci (use gerrit)
2016/04/19 19:45:06
small thing. I'd probably just implement this as !(lhs == rhs).
Dmitry Skiba
2016/04/19 22:14:14
Done.
| |
25 } | |
26 | |
15 // Constructor that does not initialize members. | 27 // Constructor that does not initialize members. |
16 AllocationContext::AllocationContext() {} | 28 AllocationContext::AllocationContext() {} |
17 | 29 |
18 // static | 30 // static |
19 AllocationContext AllocationContext::Empty() { | 31 AllocationContext AllocationContext::Empty() { |
20 AllocationContext ctx; | 32 AllocationContext ctx; |
Primiano Tucci (use gerrit)
2016/04/19 19:45:06
At this point can we remove this and make this initialization happen in the constructor? [comment truncated in review export — original wording uncertain]
Dmitry Skiba
2016/04/19 22:14:14
Done.
| |
21 | 33 |
22 for (size_t i = 0; i < arraysize(ctx.backtrace.frames); i++) | 34 ctx.backtrace.frame_count = 0; |
23 ctx.backtrace.frames[i] = nullptr; | |
24 | |
25 ctx.type_name = nullptr; | 35 ctx.type_name = nullptr; |
26 | 36 |
27 return ctx; | 37 return ctx; |
28 } | 38 } |
29 | 39 |
30 bool operator==(const Backtrace& lhs, const Backtrace& rhs) { | 40 bool operator==(const Backtrace& lhs, const Backtrace& rhs) { |
31 // Pointer equality of the stack frames is assumed, so instead of doing a deep | 41 if (lhs.frame_count != rhs.frame_count) return false; |
32 // string comparison on all of the frames, a |memcmp| suffices. | 42 return std::equal(lhs.frames, lhs.frames + lhs.frame_count, rhs.frames); |
33 return std::memcmp(lhs.frames, rhs.frames, sizeof(lhs.frames)) == 0; | |
34 } | 43 } |
35 | 44 |
36 bool operator==(const AllocationContext& lhs, const AllocationContext& rhs) { | 45 bool operator==(const AllocationContext& lhs, const AllocationContext& rhs) { |
37 return (lhs.backtrace == rhs.backtrace) && (lhs.type_name == rhs.type_name); | 46 return (lhs.backtrace == rhs.backtrace) && (lhs.type_name == rhs.type_name); |
38 } | 47 } |
39 | 48 |
40 } // namespace trace_event | 49 } // namespace trace_event |
41 } // namespace base | 50 } // namespace base |
42 | 51 |
43 namespace BASE_HASH_NAMESPACE { | 52 namespace BASE_HASH_NAMESPACE { |
44 using base::trace_event::AllocationContext; | 53 using base::trace_event::AllocationContext; |
45 using base::trace_event::Backtrace; | 54 using base::trace_event::Backtrace; |
55 using base::trace_event::StackFrame; | |
56 | |
57 size_t hash<StackFrame>::operator()(const StackFrame& frame) const { | |
58 return hash<const void*>()(frame.value); | |
59 } | |
46 | 60 |
47 size_t hash<Backtrace>::operator()(const Backtrace& backtrace) const { | 61 size_t hash<Backtrace>::operator()(const Backtrace& backtrace) const { |
48 return base::SuperFastHash(reinterpret_cast<const char*>(backtrace.frames), | 62 const void* values[Backtrace::kMaxFrameCount]; |
49 sizeof(backtrace.frames)); | 63 for (size_t i = 0; i != backtrace.frame_count; ++i) { |
64 values[i] = backtrace.frames[i].value; | |
65 } | |
66 return base::SuperFastHash(reinterpret_cast<const char*>(values), | |
67 backtrace.frame_count * sizeof(*values)); | |
50 } | 68 } |
51 | 69 |
52 size_t hash<AllocationContext>::operator()(const AllocationContext& ctx) const { | 70 size_t hash<AllocationContext>::operator()(const AllocationContext& ctx) const { |
53 size_t backtrace_hash = hash<Backtrace>()(ctx.backtrace); | 71 size_t backtrace_hash = hash<Backtrace>()(ctx.backtrace); |
54 | 72 |
55 // Multiplicative hash from [Knuth 1998]. Works best if |size_t| is 32 bits, | 73 // Multiplicative hash from [Knuth 1998]. Works best if |size_t| is 32 bits, |
56 // because the magic number is a prime very close to 2^32 / golden ratio, but | 74 // because the magic number is a prime very close to 2^32 / golden ratio, but |
57 // will still redistribute keys bijectively on 64-bit architectures because | 75 // will still redistribute keys bijectively on 64-bit architectures because |
58 // the magic number is coprime to 2^64. | 76 // the magic number is coprime to 2^64. |
59 size_t type_hash = reinterpret_cast<size_t>(ctx.type_name) * 2654435761; | 77 size_t type_hash = reinterpret_cast<size_t>(ctx.type_name) * 2654435761; |
60 | 78 |
61 // Multiply one side to break the commutativity of +. Multiplication with a | 79 // Multiply one side to break the commutativity of +. Multiplication with a |
62 // number coprime to |numeric_limits<size_t>::max() + 1| is bijective so | 80 // number coprime to |numeric_limits<size_t>::max() + 1| is bijective so |
63 // randomness is preserved. | 81 // randomness is preserved. |
64 return (backtrace_hash * 3) + type_hash; | 82 return (backtrace_hash * 3) + type_hash; |
65 } | 83 } |
66 | 84 |
67 } // BASE_HASH_NAMESPACE | 85 } // BASE_HASH_NAMESPACE |
OLD | NEW |