OLD | NEW |
1 // Copyright 2012 Google Inc. All Rights Reserved. | 1 // Copyright 2012 Google Inc. All Rights Reserved. |
2 // | 2 // |
3 // Licensed under the Apache License, Version 2.0 (the "License"); | 3 // Licensed under the Apache License, Version 2.0 (the "License"); |
4 // you may not use this file except in compliance with the License. | 4 // you may not use this file except in compliance with the License. |
5 // You may obtain a copy of the License at | 5 // You may obtain a copy of the License at |
6 // | 6 // |
7 // http://www.apache.org/licenses/LICENSE-2.0 | 7 // http://www.apache.org/licenses/LICENSE-2.0 |
8 // | 8 // |
9 // Unless required by applicable law or agreed to in writing, software | 9 // Unless required by applicable law or agreed to in writing, software |
10 // distributed under the License is distributed on an "AS IS" BASIS, | 10 // distributed under the License is distributed on an "AS IS" BASIS, |
(...skipping 15 matching lines...) |
26 | 26 |
27 namespace { | 27 namespace { |
28 | 28 |
29 #ifdef _WIN64 | 29 #ifdef _WIN64 |
30 // A lock under which the shadow instance is modified. | 30 // A lock under which the shadow instance is modified. |
31 base::Lock shadow_instance_lock; | 31 base::Lock shadow_instance_lock; |
32 | 32 |
33 // The pointer that tells the exception handler which shadow object is | 33 // The pointer that tells the exception handler which shadow object is |
34 // currently in use. Under shadow_instance_lock. | 34 // currently in use. Under shadow_instance_lock. |
35 // TODO(loskutov): eliminate this by enforcing Shadow to be a singleton. | 35 // TODO(loskutov): eliminate this by enforcing Shadow to be a singleton. |
36 const Shadow* shadow_instance; | 36 const Shadow* shadow_instance = nullptr; |
37 | 37 |
38 // The exception handler, intended to map the pages for shadow and page_bits | 38 // The exception handler, intended to map the pages for shadow and page_bits |
39 // on demand. When a page fault happens, the operating system calls | 39 // on demand. When a page fault happens, the operating system calls |
40 // this handler, and if the page is inside shadow or page_bits, it gets | 40 // this handler, and if the page is inside shadow or page_bits, it gets |
41 // committed seamlessly for the caller, and then execution continues. | 41 // committed seamlessly for the caller, and then execution continues. |
42 // Otherwise, the OS keeps searching for an appropriate handler. | 42 // Otherwise, the OS keeps searching for an appropriate handler. |
43 LONG NTAPI ShadowExceptionHandler(PEXCEPTION_POINTERS exception_pointers) { | 43 LONG NTAPI ShadowExceptionHandler(PEXCEPTION_POINTERS exception_pointers) { |
| 44 DCHECK_NE(static_cast<const Shadow*>(nullptr), shadow_instance); |
44 // Only handle access violations. | 45 // Only handle access violations. |
45 if (exception_pointers->ExceptionRecord->ExceptionCode != | 46 if (exception_pointers->ExceptionRecord->ExceptionCode != |
46 EXCEPTION_ACCESS_VIOLATION) { | 47 EXCEPTION_ACCESS_VIOLATION) { |
47 return EXCEPTION_CONTINUE_SEARCH; | 48 return EXCEPTION_CONTINUE_SEARCH; |
48 } | 49 } |
49 | 50 |
50 // Only handle access violations that land within the shadow memory | 51 // Only handle access violations that land within the shadow memory |
51 // or the page bits. | 52 // or the page bits. |
52 | 53 |
53 void* addr = reinterpret_cast<void*>( | 54 void* addr = reinterpret_cast<void*>( |
(...skipping 31 matching lines...) |
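The remainder of the handler, the code that actually commits the faulting page, is elided in this excerpt. For orientation only, a minimal sketch of the general demand-commit pattern could look like the following; this is not the patch's code, DemandCommitSketch is a hypothetical name, and it assumes <windows.h> plus a prior check that the faulting address lies inside the shadow or page_bits ranges:

    // Sketch only: commit the page containing the faulting address and
    // resume execution at the faulting instruction.
    LONG NTAPI DemandCommitSketch(PEXCEPTION_POINTERS exception_pointers) {
      const EXCEPTION_RECORD* record = exception_pointers->ExceptionRecord;
      if (record->ExceptionCode != EXCEPTION_ACCESS_VIOLATION)
        return EXCEPTION_CONTINUE_SEARCH;

      // For access violations, ExceptionInformation[1] holds the faulting
      // virtual address.
      void* addr = reinterpret_cast<void*>(record->ExceptionInformation[1]);

      // VirtualAlloc rounds the address down to its page boundary for
      // MEM_COMMIT requests, so committing a single byte commits the page.
      if (::VirtualAlloc(addr, 1, MEM_COMMIT, PAGE_READWRITE) == nullptr)
        return EXCEPTION_CONTINUE_SEARCH;

      return EXCEPTION_CONTINUE_EXECUTION;
    }

    // Such a handler would typically be registered up front, e.g.:
    //   ::AddVectoredExceptionHandler(1, &DemandCommitSketch);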
85 size_t* index, | 86 size_t* index, |
86 uint8_t* mask) { | 87 uint8_t* mask) { |
87 DCHECK_NE(static_cast<size_t*>(nullptr), index); | 88 DCHECK_NE(static_cast<size_t*>(nullptr), index); |
88 DCHECK_NE(static_cast<uint8_t*>(nullptr), mask); | 89 DCHECK_NE(static_cast<uint8_t*>(nullptr), mask); |
89 | 90 |
90 size_t i = reinterpret_cast<uintptr_t>(address) / kPageSize; | 91 size_t i = reinterpret_cast<uintptr_t>(address) / kPageSize; |
91 *index = i / 8; | 92 *index = i / 8; |
92 *mask = 1 << (i % 8); | 93 *mask = 1 << (i % 8); |
93 } | 94 } |
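To make the index/mask arithmetic above concrete, here is a small worked example; kPageSize's actual value is defined elsewhere in the file, and the usual 4 KiB page size is assumed purely for illustration:

    // Hypothetical values, assuming kPageSize == 4096 (4 KiB).
    // address = 0x0000A000             ->  i = 0xA000 / 4096 = 10   (page 10)
    // *index  = 10 / 8         = 1     ->  second byte of page_bits
    // *mask   = 1 << (10 % 8)  = 0x04  ->  bit 2 within that byte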
94 | 95 |
| 96 // Check if |addr| is in a memory region that has been committed. |
| 97 bool AddressIsInCommittedMemory(const void* addr) { |
| 98 MEMORY_BASIC_INFORMATION memory_info = {}; |
| 99 SIZE_T mem_status = ::VirtualQuery(addr, &memory_info, sizeof(memory_info)); |
| 100 DCHECK_GT(mem_status, 0u); |
| 101 if (memory_info.State != MEM_COMMIT) |
| 102 return false; |
| 103 return true; |
| 104 } |
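This helper is used further below to guard reads of the demand-committed shadow; a sketch of that guard pattern, mirroring the calls added later in this patch:

    // A shadow page that was never committed cannot contain block markers,
    // so scanning can simply give up instead of faulting.
    if (!AddressIsInCommittedMemory(&shadow_[left]))
      return false;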
| 105 |
95 } // namespace | 106 } // namespace |
96 | 107 |
97 Shadow::Shadow() : own_memory_(false), shadow_(nullptr), length_(0) { | 108 Shadow::Shadow() : own_memory_(false), shadow_(nullptr), length_(0) { |
98 Init(RequiredLength()); | 109 Init(RequiredLength()); |
99 } | 110 } |
100 | 111 |
101 Shadow::Shadow(size_t length) | 112 Shadow::Shadow(size_t length) |
102 : own_memory_(false), shadow_(nullptr), length_(0) { | 113 : own_memory_(false), shadow_(nullptr), length_(0) { |
103 Init(length); | 114 Init(length); |
104 } | 115 } |
(...skipping 103 matching lines...) |
208 } | 219 } |
209 | 220 |
210 auto cursor = shadow_ + i; | 221 auto cursor = shadow_ + i; |
211 MEMORY_BASIC_INFORMATION info = {}; | 222 MEMORY_BASIC_INFORMATION info = {}; |
212 while (i < length_) { | 223 while (i < length_) { |
213 auto next_cursor = cursor; | 224 auto next_cursor = cursor; |
214 while (cursor < shadow_ + length_) { | 225 while (cursor < shadow_ + length_) { |
215 auto ret = ::VirtualQuery(cursor, &info, sizeof(info)); | 226 auto ret = ::VirtualQuery(cursor, &info, sizeof(info)); |
216 DCHECK_GT(ret, 0u); | 227 DCHECK_GT(ret, 0u); |
217 next_cursor = static_cast<uint8_t*>(info.BaseAddress) + info.RegionSize; | 228 next_cursor = static_cast<uint8_t*>(info.BaseAddress) + info.RegionSize; |
218 if (info.Type == MEM_COMMIT) | 229 if (info.State == MEM_COMMIT) |
219 break; | 230 break; |
220 cursor = next_cursor; | 231 cursor = next_cursor; |
221 } | 232 } |
222 i = cursor - shadow_; | 233 i = cursor - shadow_; |
223 next_cursor = std::min(next_cursor, shadow_ + length_); | 234 next_cursor = std::min(next_cursor, shadow_ + length_); |
224 auto next_i = next_cursor - shadow_; | 235 auto next_i = next_cursor - shadow_; |
225 for (; i < next_i; ++i) { | 236 for (; i < next_i; ++i) { |
226 if ((i >= shadow_begin && i < shadow_end) || | 237 if ((i >= shadow_begin && i < shadow_end) || |
227 (i >= page_bits_begin && i < page_bits_end) || | 238 (i >= page_bits_begin && i < page_bits_end) || |
228 (i >= this_begin && i < this_end)) { | 239 (i >= this_begin && i < this_end)) { |
(...skipping 648 matching lines...) |
877 } | 888 } |
878 | 889 |
879 bool Shadow::ScanLeftForBracketingBlockStart( | 890 bool Shadow::ScanLeftForBracketingBlockStart( |
880 size_t initial_nesting_depth, size_t cursor, size_t* location) const { | 891 size_t initial_nesting_depth, size_t cursor, size_t* location) const { |
881 DCHECK_NE(static_cast<size_t*>(NULL), location); | 892 DCHECK_NE(static_cast<size_t*>(NULL), location); |
882 | 893 |
883 static const size_t kLowerBound = kAddressLowerBound / kShadowRatio; | 894 static const size_t kLowerBound = kAddressLowerBound / kShadowRatio; |
884 | 895 |
885 size_t left = cursor; | 896 size_t left = cursor; |
886 int nesting_depth = static_cast<int>(initial_nesting_depth); | 897 int nesting_depth = static_cast<int>(initial_nesting_depth); |
| 898 if (!AddressIsInCommittedMemory(&shadow_[left])) |
| 899 return false; |
887 if (ShadowMarkerHelper::IsBlockEnd(shadow_[left])) | 900 if (ShadowMarkerHelper::IsBlockEnd(shadow_[left])) |
888 --nesting_depth; | 901 --nesting_depth; |
889 while (true) { | 902 while (true) { |
| 903 if (!AddressIsInCommittedMemory(&shadow_[left])) |
| 904 return false; |
890 if (ShadowMarkerHelper::IsBlockStart(shadow_[left])) { | 905 if (ShadowMarkerHelper::IsBlockStart(shadow_[left])) { |
891 if (nesting_depth == 0) { | 906 if (nesting_depth == 0) { |
892 *location = left; | 907 *location = left; |
893 return true; | 908 return true; |
894 } | 909 } |
895 // If this is not a nested block then there's no hope of finding a | 910 // If this is not a nested block then there's no hope of finding a |
896 // block containing the original cursor. | 911 // block containing the original cursor. |
897 if (!ShadowMarkerHelper::IsNestedBlockStart(shadow_[left])) | 912 if (!ShadowMarkerHelper::IsNestedBlockStart(shadow_[left])) |
898 return false; | 913 return false; |
899 --nesting_depth; | 914 --nesting_depth; |
(...skipping 142 matching lines...) |
1042 size_t initial_nesting_depth, | 1057 size_t initial_nesting_depth, |
1043 const void* addr, | 1058 const void* addr, |
1044 CompactBlockInfo* info) const { | 1059 CompactBlockInfo* info) const { |
1045 DCHECK_NE(static_cast<void*>(NULL), addr); | 1060 DCHECK_NE(static_cast<void*>(NULL), addr); |
1046 DCHECK_NE(static_cast<CompactBlockInfo*>(NULL), info); | 1061 DCHECK_NE(static_cast<CompactBlockInfo*>(NULL), info); |
1047 | 1062 |
1048 // Convert the address to an offset in the shadow memory. | 1063 // Convert the address to an offset in the shadow memory. |
1049 size_t left = reinterpret_cast<uintptr_t>(addr) / kShadowRatio; | 1064 size_t left = reinterpret_cast<uintptr_t>(addr) / kShadowRatio; |
1050 size_t right = left; | 1065 size_t right = left; |
1051 | 1066 |
| 1067 if (!AddressIsInCommittedMemory(&shadow_[left])) |
| 1068 return false; |
| 1069 |
1052 if (!ScanLeftForBracketingBlockStart(initial_nesting_depth, left, &left)) | 1070 if (!ScanLeftForBracketingBlockStart(initial_nesting_depth, left, &left)) |
1053 return false; | 1071 return false; |
1054 if (!ScanRightForBracketingBlockEnd(initial_nesting_depth, right, &right)) | 1072 if (!ScanRightForBracketingBlockEnd(initial_nesting_depth, right, &right)) |
1055 return false; | 1073 return false; |
1056 ++right; | 1074 ++right; |
1057 | 1075 |
1058 uint8_t* block = reinterpret_cast<uint8_t*>(left * kShadowRatio); | 1076 uint8_t* block = reinterpret_cast<uint8_t*>(left * kShadowRatio); |
1059 info->header = reinterpret_cast<BlockHeader*>(block); | 1077 info->header = reinterpret_cast<BlockHeader*>(block); |
1060 info->block_size = static_cast<uint32_t>((right - left) * kShadowRatio); | 1078 info->block_size = static_cast<uint32_t>((right - left) * kShadowRatio); |
1061 | 1079 |
(...skipping 153 matching lines...) |
1215 continue; | 1233 continue; |
1216 } | 1234 } |
1217 } | 1235 } |
1218 } | 1236 } |
1219 | 1237 |
1220 return false; | 1238 return false; |
1221 } | 1239 } |
1222 | 1240 |
1223 } // namespace asan | 1241 } // namespace asan |
1224 } // namespace agent | 1242 } // namespace agent |