| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 670 matching lines...) | (...skipping 670 matching lines...) |
| 681 for (Address* current = start_; current < top; current++) { | 681 for (Address* current = start_; current < top; current++) { |
| 682 ASSERT(!heap_->cell_space()->Contains(*current)); | 682 ASSERT(!heap_->cell_space()->Contains(*current)); |
| 683 ASSERT(!heap_->code_space()->Contains(*current)); | 683 ASSERT(!heap_->code_space()->Contains(*current)); |
| 684 ASSERT(!heap_->old_data_space()->Contains(*current)); | 684 ASSERT(!heap_->old_data_space()->Contains(*current)); |
| 685 uintptr_t int_addr = reinterpret_cast<uintptr_t>(*current); | 685 uintptr_t int_addr = reinterpret_cast<uintptr_t>(*current); |
| 686 // Shift out the last bits including any tags. | 686 // Shift out the last bits including any tags. |
| 687 int_addr >>= kPointerSizeLog2; | 687 int_addr >>= kPointerSizeLog2; |
| 688 int hash1 = | 688 int hash1 = |
| 689 ((int_addr ^ (int_addr >> kHashMapLengthLog2)) & (kHashMapLength - 1)); | 689 ((int_addr ^ (int_addr >> kHashMapLengthLog2)) & (kHashMapLength - 1)); |
| 690 if (hash_map_1_[hash1] == int_addr) continue; | 690 if (hash_map_1_[hash1] == int_addr) continue; |
| 691 int hash2 = | 691 uintptr_t hash2 = (int_addr - (int_addr >> kHashMapLengthLog2)); |
| 692 ((int_addr - (int_addr >> kHashMapLengthLog2)) & (kHashMapLength - 1)); | |
| 693 hash2 ^= hash2 >> (kHashMapLengthLog2 * 2); | 692 hash2 ^= hash2 >> (kHashMapLengthLog2 * 2); |
| | 693 hash2 &= (kHashMapLength - 1); |
| 694 if (hash_map_2_[hash2] == int_addr) continue; | 694 if (hash_map_2_[hash2] == int_addr) continue; |
| 695 if (hash_map_1_[hash1] == 0) { | 695 if (hash_map_1_[hash1] == 0) { |
| 696 hash_map_1_[hash1] = int_addr; | 696 hash_map_1_[hash1] = int_addr; |
| 697 } else if (hash_map_2_[hash2] == 0) { | 697 } else if (hash_map_2_[hash2] == 0) { |
| 698 hash_map_2_[hash2] = int_addr; | 698 hash_map_2_[hash2] = int_addr; |
| 699 } else { | 699 } else { |
| 700 // Rather than slowing down we just throw away some entries. This will | 700 // Rather than slowing down we just throw away some entries. This will |
| 701 // cause some duplicates to remain undetected. | 701 // cause some duplicates to remain undetected. |
| 702 hash_map_1_[hash1] = int_addr; | 702 hash_map_1_[hash1] = int_addr; |
| 703 hash_map_2_[hash2] = 0; | 703 hash_map_2_[hash2] = 0; |
| 704 } | 704 } |
| 705 old_buffer_is_sorted_ = false; | 705 old_buffer_is_sorted_ = false; |
| 706 old_buffer_is_filtered_ = false; | 706 old_buffer_is_filtered_ = false; |
| 707 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 707 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
| 708 ASSERT(old_top_ <= old_limit_); | 708 ASSERT(old_top_ <= old_limit_); |
| 709 } | 709 } |
| 710 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 710 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
| 711 CheckForFullBuffer(); | 711 CheckForFullBuffer(); |
| 712 } | 712 } |
| 713 | 713 |
| 714 | 714 |
| 715 void StoreBuffer::CheckForFullBuffer() { | 715 void StoreBuffer::CheckForFullBuffer() { |
| 716 EnsureSpace(kStoreBufferSize * 2); | 716 EnsureSpace(kStoreBufferSize * 2); |
| 717 } | 717 } |
| 718 | 718 |
| 719 } } // namespace v8::internal | 719 } } // namespace v8::internal |
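The hunk above changes how the second hash index is computed in the store buffer's duplicate filter: in the old code the value was masked to `kHashMapLength - 1` before the xor-fold, so shifting the already-masked value right by `kHashMapLengthLog2 * 2` cleared it and the fold appeared to do nothing; the new code keeps `hash2` as a full `uintptr_t`, folds in the higher bits first, and masks last. Below is a minimal, self-contained sketch of the two-table filtering scheme with the new ordering. It is not the V8 source: the constants (`kPointerSizeLog2`, `kHashMapLengthLog2`, `kHashMapLength`) are illustrative assumptions, and the `SeenBefore` helper is invented here to stand in for the loop body in `StoreBuffer::Compact`.

```cpp
#include <cstdint>
#include <cstdio>

// Illustrative constants only; the real values are defined in V8's headers.
constexpr int kPointerSizeLog2 = 3;                       // assumes a 64-bit build
constexpr int kHashMapLengthLog2 = 8;                     // assumed: 256-slot tables
constexpr size_t kHashMapLength = size_t{1} << kHashMapLengthLog2;

uintptr_t hash_map_1[kHashMapLength] = {0};
uintptr_t hash_map_2[kHashMapLength] = {0};

// Returns true if the address was already recorded in either table; otherwise
// stores it. Mirrors the filtering logic in the patched loop: hash2 is folded
// on the full uintptr_t value and only masked at the end.
bool SeenBefore(uintptr_t addr) {
  uintptr_t int_addr = addr >> kPointerSizeLog2;          // shift out tag bits

  size_t hash1 =
      (int_addr ^ (int_addr >> kHashMapLengthLog2)) & (kHashMapLength - 1);
  if (hash_map_1[hash1] == int_addr) return true;

  uintptr_t hash2 = int_addr - (int_addr >> kHashMapLengthLog2);
  hash2 ^= hash2 >> (kHashMapLengthLog2 * 2);             // fold in higher bits first
  hash2 &= kHashMapLength - 1;                            // mask last, as in the new code
  if (hash_map_2[hash2] == int_addr) return true;

  // Not a duplicate: record it, preferring an empty slot. If both slots are
  // taken, evict rather than slow down, so some duplicates go undetected.
  if (hash_map_1[hash1] == 0) {
    hash_map_1[hash1] = int_addr;
  } else if (hash_map_2[hash2] == 0) {
    hash_map_2[hash2] = int_addr;
  } else {
    hash_map_1[hash1] = int_addr;
    hash_map_2[hash2] = 0;
  }
  return false;
}

int main() {
  uintptr_t a = 0x12345678;
  std::printf("%d %d\n", SeenBefore(a), SeenBefore(a));   // prints "0 1"
  return 0;
}
```

The point of masking last is that addresses differing only in bits above `kHashMapLengthLog2 * 2` can still land in different `hash_map_2_` slots, which is the apparent motivation for widening `hash2` to `uintptr_t` in this patch.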