| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 37 matching lines...) |
| 48 ASSERT(top == limit_); | 48 ASSERT(top == limit_); |
| 49 Compact(); | 49 Compact(); |
| 50 } else { | 50 } else { |
| 51 ASSERT(top < limit_); | 51 ASSERT(top < limit_); |
| 52 } | 52 } |
| 53 } | 53 } |
| 54 | 54 |
| 55 | 55 |
| 56 void StoreBuffer::EnterDirectlyIntoStoreBuffer(Address addr) { | 56 void StoreBuffer::EnterDirectlyIntoStoreBuffer(Address addr) { |
| 57 if (store_buffer_rebuilding_enabled_) { | 57 if (store_buffer_rebuilding_enabled_) { |
| 58 ASSERT(!heap_->cell_space()->Contains(addr)); | 58 SLOW_ASSERT(!heap_->cell_space()->Contains(addr) && |
| 59 ASSERT(!heap_->code_space()->Contains(addr)); | 59 !heap_->code_space()->Contains(addr) && |
| 60 ASSERT(!heap_->old_data_space()->Contains(addr)); | 60 !heap_->old_data_space()->Contains(addr) && |
| 61 ASSERT(!heap_->new_space()->Contains(addr)); | 61 !heap_->new_space()->Contains(addr)); |
| 62 Address* top = old_top_; | 62 Address* top = old_top_; |
| 63 *top++ = addr; | 63 *top++ = addr; |
| 64 old_top_ = top; | 64 old_top_ = top; |
| 65 old_buffer_is_sorted_ = false; | 65 old_buffer_is_sorted_ = false; |
| 66 old_buffer_is_filtered_ = false; | 66 old_buffer_is_filtered_ = false; |
| 67 if (top >= old_limit_) { | 67 if (top >= old_limit_) { |
| 68 ASSERT(callback_ != NULL); | 68 ASSERT(callback_ != NULL); |
| 69 (*callback_)(heap_, | 69 (*callback_)(heap_, |
| 70 MemoryChunk::FromAnyPointerAddress(addr), | 70 MemoryChunk::FromAnyPointerAddress(addr), |
| 71 kStoreBufferFullEvent); | 71 kStoreBufferFullEvent); |
| 72 } | 72 } |
| 73 } | 73 } |
| 74 } | 74 } |
| 75 | 75 |
| 76 | 76 |
| 77 } } // namespace v8::internal | 77 } } // namespace v8::internal |
| 78 | 78 |
| 79 #endif // V8_STORE_BUFFER_INL_H_ | 79 #endif // V8_STORE_BUFFER_INL_H_ |
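
The change above folds four always-on `ASSERT` containment checks in `StoreBuffer::EnterDirectlyIntoStoreBuffer` into a single `SLOW_ASSERT`, so the expensive `Contains(addr)` walks only run in builds that enable slow checks. The sketch below illustrates that general pattern; the macro names (`ENABLE_SLOW_CHECKS`, `EXAMPLE_SLOW_ASSERT`) and the stand-in containment check are assumptions made for this example, not V8's actual definitions from checks.h.

```cpp
// Minimal sketch of the "slow assert" pattern, under assumed macro names.
#include <cassert>
#include <cstdio>

#ifdef ENABLE_SLOW_CHECKS
// Slow-check builds keep the assertion.
#define EXAMPLE_SLOW_ASSERT(cond) assert(cond)
#else
// Normal builds compile the check away entirely.
#define EXAMPLE_SLOW_ASSERT(cond) ((void)0)
#endif

// Hypothetical stand-in for the space->Contains(addr) calls in the diff:
// the point is only that the check is too costly to run on every store.
bool ContainsExpensiveCheck(const void* addr) {
  return addr == nullptr;
}

void EnterExample(const void* addr) {
  // One combined slow check instead of several always-on asserts.
  EXAMPLE_SLOW_ASSERT(!ContainsExpensiveCheck(addr));
  std::printf("recorded %p\n", addr);
}

int main() {
  int dummy = 0;
  EnterExample(&dummy);
  return 0;
}
```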