OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
11 // with the distribution. | 11 // with the distribution. |
12 // * Neither the name of Google Inc. nor the names of its | 12 // * Neither the name of Google Inc. nor the names of its |
13 // contributors may be used to endorse or promote products derived | 13 // contributors may be used to endorse or promote products derived |
14 // from this software without specific prior written permission. | 14 // from this software without specific prior written permission. |
15 // | 15 // |
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
| 28 #include "store-buffer.h" |
| 29 |
| 30 #include <algorithm> |
| 31 |
28 #include "v8.h" | 32 #include "v8.h" |
29 | |
30 #include "store-buffer.h" | |
31 #include "store-buffer-inl.h" | 33 #include "store-buffer-inl.h" |
32 #include "v8-counters.h" | 34 #include "v8-counters.h" |
33 | 35 |
34 namespace v8 { | 36 namespace v8 { |
35 namespace internal { | 37 namespace internal { |
36 | 38 |
37 StoreBuffer::StoreBuffer(Heap* heap) | 39 StoreBuffer::StoreBuffer(Heap* heap) |
38 : heap_(heap), | 40 : heap_(heap), |
39 start_(NULL), | 41 start_(NULL), |
40 limit_(NULL), | 42 limit_(NULL), |
(...skipping 74 matching lines...)
115 start_ = limit_ = NULL; | 117 start_ = limit_ = NULL; |
116 heap_->public_set_store_buffer_top(start_); | 118 heap_->public_set_store_buffer_top(start_); |
117 } | 119 } |
118 | 120 |
119 | 121 |
120 void StoreBuffer::StoreBufferOverflow(Isolate* isolate) { | 122 void StoreBuffer::StoreBufferOverflow(Isolate* isolate) { |
121 isolate->heap()->store_buffer()->Compact(); | 123 isolate->heap()->store_buffer()->Compact(); |
122 } | 124 } |
123 | 125 |
124 | 126 |
125 #if V8_TARGET_ARCH_X64 | |
126 static int CompareAddresses(const void* void_a, const void* void_b) { | |
127 intptr_t a = | |
128 reinterpret_cast<intptr_t>(*reinterpret_cast<const Address*>(void_a)); | |
129 intptr_t b = | |
130 reinterpret_cast<intptr_t>(*reinterpret_cast<const Address*>(void_b)); | |
131 // Unfortunately if int is smaller than intptr_t there is no branch-free | |
132 // way to return a number with the same sign as the difference between the | |
133 // pointers. | |
134 if (a == b) return 0; | |
135 if (a < b) return -1; | |
136 ASSERT(a > b); | |
137 return 1; | |
138 } | |
139 #else | |
140 static int CompareAddresses(const void* void_a, const void* void_b) { | |
141 intptr_t a = | |
142 reinterpret_cast<intptr_t>(*reinterpret_cast<const Address*>(void_a)); | |
143 intptr_t b = | |
144 reinterpret_cast<intptr_t>(*reinterpret_cast<const Address*>(void_b)); | |
145 ASSERT(sizeof(1) == sizeof(a)); | |
146 // Shift down to avoid wraparound. | |
147 return (a >> kPointerSizeLog2) - (b >> kPointerSizeLog2); | |
148 } | |
149 #endif | |
150 | |
151 | |
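Why the deleted comparator could not simply return a - b: qsort demands an int three-way result, and when int is narrower than intptr_t, truncating the difference can lose the sign, which is what the x64 comment above is pointing at. A minimal standalone sketch of that hazard, with hypothetical values, assuming a 64-bit target where int is 32 bits:

    #include <cstdint>
    #include <cstdio>

    static int TruncatingCompare(intptr_t a, intptr_t b) {
      return static_cast<int>(a - b);  // keeps only the low 32 bits
    }

    int main() {
      intptr_t a = static_cast<intptr_t>(INT64_C(1) << 32);  // differ by 2^32
      intptr_t b = 0;
      // a > b, but the low 32 bits of (a - b) are all zero, so the
      // comparator reports "equal" and qsort may order the entries
      // arbitrarily.
      std::printf("%d\n", TruncatingCompare(a, b));  // prints 0
      return 0;
    }

The non-x64 variant avoided overflow differently: shifting both values right by kPointerSizeLog2 before subtracting keeps the difference of two word-aligned addresses within int range.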
152 void StoreBuffer::Uniq() { | 127 void StoreBuffer::Uniq() { |
153 // Remove adjacent duplicates and cells that do not point at new space. | 128 // Remove adjacent duplicates and cells that do not point at new space. |
154 Address previous = NULL; | 129 Address previous = NULL; |
155 Address* write = old_start_; | 130 Address* write = old_start_; |
156 ASSERT(may_move_store_buffer_entries_); | 131 ASSERT(may_move_store_buffer_entries_); |
157 for (Address* read = old_start_; read < old_top_; read++) { | 132 for (Address* read = old_start_; read < old_top_; read++) { |
158 Address current = *read; | 133 Address current = *read; |
159 if (current != previous) { | 134 if (current != previous) { |
160 if (heap_->InNewSpace(*reinterpret_cast<Object**>(current))) { | 135 if (heap_->InNewSpace(*reinterpret_cast<Object**>(current))) { |
161 *write++ = current; | 136 *write++ = current; |
(...skipping 114 matching lines...)
276 | 251 |
277 // Filtering hash sets are inconsistent with the store buffer after this | 252 // Filtering hash sets are inconsistent with the store buffer after this |
278 // operation. | 253 // operation. |
279 ClearFilteringHashSets(); | 254 ClearFilteringHashSets(); |
280 } | 255 } |
281 | 256 |
282 | 257 |
283 void StoreBuffer::SortUniq() { | 258 void StoreBuffer::SortUniq() { |
284 Compact(); | 259 Compact(); |
285 if (old_buffer_is_sorted_) return; | 260 if (old_buffer_is_sorted_) return; |
286 qsort(reinterpret_cast<void*>(old_start_), | 261 std::sort(old_start_, old_top_); |
287 old_top_ - old_start_, | |
288 sizeof(*old_top_), | |
289 &CompareAddresses); | |
290 Uniq(); | 262 Uniq(); |
291 | 263 |
292 old_buffer_is_sorted_ = true; | 264 old_buffer_is_sorted_ = true; |
293 | 265 |
294 // Filtering hash sets are inconsistent with the store buffer after this | 266 // Filtering hash sets are inconsistent with the store buffer after this |
295 // operation. | 267 // operation. |
296 ClearFilteringHashSets(); | 268 ClearFilteringHashSets(); |
297 } | 269 } |
298 | 270 |
299 | 271 |
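The std::sort call that replaces qsort needs no comparator at all: the buffer's element type Address is an ordinary pointer, and std::sort's default operator< ordering on pointers into the same array is exact, so the truncation concern above disappears. A standalone sketch of the idea, with a hypothetical local array standing in for the store buffer:

    #include <algorithm>
    #include <cstdio>

    typedef unsigned char* Address;  // stand-in for V8's Address typedef

    int main() {
      unsigned char backing[4];
      Address slots[4] = { backing + 3, backing + 1, backing + 2, backing + 0 };
      std::sort(slots, slots + 4);  // orders the pointer values ascending
      for (int i = 0; i < 4; i++) {
        std::printf("%d\n", static_cast<int>(slots[i] - backing));  // 0 1 2 3
      }
      return 0;
    }

This is also why <algorithm> is added to the includes at the top of the file.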
(...skipping 417 matching lines...)
717 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 689 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
718 CheckForFullBuffer(); | 690 CheckForFullBuffer(); |
719 } | 691 } |
720 | 692 |
721 | 693 |
722 void StoreBuffer::CheckForFullBuffer() { | 694 void StoreBuffer::CheckForFullBuffer() { |
723 EnsureSpace(kStoreBufferSize * 2); | 695 EnsureSpace(kStoreBufferSize * 2); |
724 } | 696 } |
725 | 697 |
726 } } // namespace v8::internal | 698 } } // namespace v8::internal |