OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2351 matching lines...) |
2362 AlwaysAllocateScope always_allocate(isolate); | 2362 AlwaysAllocateScope always_allocate(isolate); |
2363 intptr_t available = new_space->Capacity() - new_space->Size(); | 2363 intptr_t available = new_space->Capacity() - new_space->Size(); |
2364 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1; | 2364 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1; |
2365 for (intptr_t i = 0; i < number_of_fillers; i++) { | 2365 for (intptr_t i = 0; i < number_of_fillers; i++) { |
2366 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED))); | 2366 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED))); |
2367 } | 2367 } |
2368 } | 2368 } |
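A brief editorial note on the filler arithmetic in the fragment above, since it is easy to misread; the byte counts assume a 64-bit build of this era (8-byte kPointerSize, two-word FixedArray header) and are an illustration, not part of the CL:

  // FixedArray::SizeFor(32) == (2 + 32) * kPointerSize == 272 bytes on x64
  // (map pointer + length word + 32 element slots).
  // number_of_fillers == available / 272 - 1, so the loop stops roughly one
  // array short of exhausting new space; every NewFixedArray(32, NOT_TENURED)
  // call can then still be served from new space, which is what the
  // CHECK(heap->InNewSpace(...)) in the loop body asserts.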
2369 | 2369 |
2370 | 2370 |
2371 TEST(GrowAndShrinkNewSpace) { | 2371 TEST(GrowAndShrinkNewSpace) { |
2372 // Avoid shrinking new space in GC epilogue. This can happen if allocation | |
2373 // throughput samples have been taken while executing the benchmark. | |
2374 FLAG_predictable = true; | |
2375 CcTest::InitializeVM(); | 2372 CcTest::InitializeVM(); |
2376 Heap* heap = CcTest::heap(); | 2373 Heap* heap = CcTest::heap(); |
2377 NewSpace* new_space = heap->new_space(); | 2374 NewSpace* new_space = heap->new_space(); |
2378 | 2375 |
2379 if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) { | 2376 if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) { |
2380 return; | 2377 return; |
2381 } | 2378 } |
2382 | 2379 |
2383 // Explicitly growing should double the space capacity. | 2380 // Explicitly growing should double the space capacity. |
2384 intptr_t old_capacity, new_capacity; | 2381 intptr_t old_capacity, new_capacity; |
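To make the comment above concrete while the next 1225 lines are folded, here is a minimal sketch of the doubling check it refers to, assuming the NewSpace Grow()/TotalCapacity() interface of this era; this is an illustration, not the CL's verbatim continuation:

  old_capacity = new_space->TotalCapacity();
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK(2 * old_capacity == new_capacity);  // explicit growth doubles capacity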
(...skipping 1225 matching lines...) |
3610 // memory is distributed. Since this is non-deterministic because of | 3607 // memory is distributed. Since this is non-deterministic because of |
3611 // concurrent sweeping, we disable it for this test. | 3608 // concurrent sweeping, we disable it for this test. |
3612 i::FLAG_parallel_compaction = false; | 3609 i::FLAG_parallel_compaction = false; |
3614 // Concurrent sweeping adds non-determinism, depending on when memory is | 3611 // Concurrent sweeping adds non-determinism, depending on when memory is |
3614 // available for further reuse. | 3611 // available for further reuse. |
3615 i::FLAG_concurrent_sweeping = false; | 3612 i::FLAG_concurrent_sweeping = false; |
3616 // Fast evacuation of pages may result in a different page count in old space. | 3613 // Fast evacuation of pages may result in a different page count in old space. |
3617 i::FLAG_page_promotion = false; | 3614 i::FLAG_page_promotion = false; |
3618 CcTest::InitializeVM(); | 3615 CcTest::InitializeVM(); |
3619 Isolate* isolate = CcTest::i_isolate(); | 3616 Isolate* isolate = CcTest::i_isolate(); |
3620 // If there's snapshot available, we don't know whether 20 small arrays will | |
3621 // fit on the initial pages. | |
3622 if (!isolate->snapshot_available()) return; | |
3623 Factory* factory = isolate->factory(); | 3617 Factory* factory = isolate->factory(); |
3624 Heap* heap = isolate->heap(); | 3618 Heap* heap = isolate->heap(); |
3625 v8::HandleScope scope(CcTest::isolate()); | 3619 v8::HandleScope scope(CcTest::isolate()); |
3626 static const int number_of_test_pages = 20; | 3620 static const int number_of_test_pages = 20; |
3627 | 3621 |
3628 // Prepare many pages with low live-bytes count. | 3622 // Prepare many pages with low live-bytes count. |
3629 PagedSpace* old_space = heap->old_space(); | 3623 PagedSpace* old_space = heap->old_space(); |
3630 const int initial_page_count = old_space->CountTotalPages(); | 3624 const int initial_page_count = old_space->CountTotalPages(); |
3631 const int overall_page_count = number_of_test_pages + initial_page_count; | 3625 const int overall_page_count = number_of_test_pages + initial_page_count; |
3632 for (int i = 0; i < number_of_test_pages; i++) { | 3626 for (int i = 0; i < number_of_test_pages; i++) { |
(...skipping 3473 matching lines...) |
7106 chunk, chunk->area_end() - kPointerSize, chunk->area_end()); | 7100 chunk, chunk->area_end() - kPointerSize, chunk->area_end()); |
7107 slots[chunk->area_end() - kPointerSize] = false; | 7101 slots[chunk->area_end() - kPointerSize] = false; |
7108 RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) { | 7102 RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) { |
7109 CHECK(slots[addr]); | 7103 CHECK(slots[addr]); |
7110 return KEEP_SLOT; | 7104 return KEEP_SLOT; |
7111 }); | 7105 }); |
7112 } | 7106 } |
7113 | 7107 |
7114 } // namespace internal | 7108 } // namespace internal |
7115 } // namespace v8 | 7109 } // namespace v8 |