| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 340 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 351 | 351 |
| 352 void StoreBuffer::CheckForFullBuffer() { | 352 void StoreBuffer::CheckForFullBuffer() { |
| 353 if (old_limit_ - old_top_ < kStoreBufferSize * 2) { | 353 if (old_limit_ - old_top_ < kStoreBufferSize * 2) { |
| 354 // After compression we don't have enough space that we can be sure that | 354 // After compression we don't have enough space that we can be sure that |
| 355 // the next two compressions will have enough space in the buffer. We | 355 // the next two compressions will have enough space in the buffer. We |
| 356 // start by trying a more aggressive compression. If this frees up at least | 356 // start by trying a more aggressive compression. If this frees up at least |
| 357 // half the space then we can keep going, otherwise it is time to brake. | 357 // half the space then we can keep going, otherwise it is time to brake. |
| 358 if (!during_gc_) { | 358 if (!during_gc_) { |
| 359 SortUniq(); | 359 SortUniq(); |
| 360 } | 360 } |
| 361 if (old_limit_ - old_top_ < old_limit_ - old_top_) { | 361 if (old_limit_ - old_top_ > old_top_ - old_start_) { |
| 362 return; | 362 return; |
| 363 } | 363 } |
| 364 // TODO(gc): Set an interrupt to do a GC on the next back edge. | 364 // TODO(gc): Set an interrupt to do a GC on the next back edge. |
| 365 // TODO(gc): Allocate the rest of new space to force a GC on the next | 365 // TODO(gc): Allocate the rest of new space to force a GC on the next |
| 366 // allocation. | 366 // allocation. |
| 367 if (old_limit_ - old_top_ < kStoreBufferSize) { | 367 // TODO(gc): Make the disabling of the store buffer dependent on |
| 368 // After compression we don't even have enough space for the next | 368 // those two measures failing: |
| 369 // compression to be guaranteed to succeed. | 369 // After compression not enough space was freed up in the store buffer. We |
| 370 // TODO(gc): Set a flag to scan all of memory. | 370 // might as well stop sorting and trying to eliminate duplicates. |
| 371 Counters::store_buffer_overflows.Increment(); | 371 Counters::store_buffer_overflows.Increment(); |
| 372 set_store_buffer_mode(kStoreBufferDisabled); | 372 set_store_buffer_mode(kStoreBufferDisabled); |
| 373 } | |
| 374 } | 373 } |
| 375 } | 374 } |
| 376 | 375 |
| 377 } } // namespace v8::internal | 376 } } // namespace v8::internal |
| OLD | NEW |