OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 6514 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6525 while (heap->AllocateByteArray(M, TENURED).To(&byte_array)) { | 6525 while (heap->AllocateByteArray(M, TENURED).To(&byte_array)) { |
6526 for (int j = 0; j < M; j++) { | 6526 for (int j = 0; j < M; j++) { |
6527 byte_array->set(j, 0x31); | 6527 byte_array->set(j, 0x31); |
6528 } | 6528 } |
6529 } | 6529 } |
6530 // Re-enable old space expansion to avoid OOM crash. | 6530 // Re-enable old space expansion to avoid OOM crash. |
6531 heap->set_force_oom(false); | 6531 heap->set_force_oom(false); |
6532 heap->CollectGarbage(NEW_SPACE); | 6532 heap->CollectGarbage(NEW_SPACE); |
6533 } | 6533 } |
6534 | 6534 |
| 6535 HEAP_TEST(Regress589413) { |
| 6536 FLAG_stress_compaction = true; |
| 6537 FLAG_manual_evacuation_candidates_selection = true; |
| 6538 FLAG_parallel_compaction = false; |
| 6539 FLAG_concurrent_sweeping = false; |
| 6540 CcTest::InitializeVM(); |
| 6541 v8::HandleScope scope(CcTest::isolate()); |
| 6542 Heap* heap = CcTest::heap(); |
| 6543 // Get the heap in clean state. |
| 6544 heap->CollectGarbage(OLD_SPACE); |
| 6545 heap->CollectGarbage(OLD_SPACE); |
| 6546 Isolate* isolate = CcTest::i_isolate(); |
| 6547 Factory* factory = isolate->factory(); |
| 6548 // Fill the new space with byte arrays with elements looking like pointers. |
| 6549 const int M = 256; |
| 6550 ByteArray* byte_array; |
| 6551 while (heap->AllocateByteArray(M).To(&byte_array)) { |
| 6552 for (int j = 0; j < M; j++) { |
| 6553 byte_array->set(j, 0x31); |
| 6554 } |
| 6555 // Add the array in root set. |
| 6556 handle(byte_array); |
| 6557 } |
| 6558 // Make sure the byte arrays will be promoted on the next GC. |
| 6559 heap->CollectGarbage(NEW_SPACE); |
 | 6560 // This number is close to the large free list category threshold. |
| 6561 const int N = 0x3eee; |
| 6562 { |
| 6563 std::vector<FixedArray*> arrays; |
| 6564 std::set<Page*> pages; |
| 6565 FixedArray* array; |
| 6566 // Fill all pages with fixed arrays. |
| 6567 heap->set_force_oom(true); |
| 6568 while (heap->AllocateFixedArray(N, TENURED).To(&array)) { |
| 6569 arrays.push_back(array); |
| 6570 pages.insert(Page::FromAddress(array->address())); |
| 6571 // Add the array in root set. |
| 6572 handle(array); |
| 6573 } |
 | 6574 // Expand and fill one complete page with fixed arrays. |
| 6575 heap->set_force_oom(false); |
| 6576 while (heap->AllocateFixedArray(N, TENURED).To(&array)) { |
| 6577 arrays.push_back(array); |
| 6578 pages.insert(Page::FromAddress(array->address())); |
| 6579 // Add the array in root set. |
| 6580 handle(array); |
| 6581 // Do not expand anymore. |
| 6582 heap->set_force_oom(true); |
| 6583 } |
| 6584 // Expand and mark the new page as evacuation candidate. |
| 6585 heap->set_force_oom(false); |
| 6586 { |
| 6587 AlwaysAllocateScope always_allocate(isolate); |
| 6588 Handle<HeapObject> ec_obj = factory->NewFixedArray(5000, TENURED); |
| 6589 Page* ec_page = Page::FromAddress(ec_obj->address()); |
| 6590 ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); |
 | 6591 // Make all arrays point to the evacuation candidate so that |
| 6592 // slots are recorded for them. |
| 6593 for (size_t j = 0; j < arrays.size(); j++) { |
| 6594 array = arrays[j]; |
| 6595 for (int i = 0; i < N; i++) { |
| 6596 array->set(i, *ec_obj); |
| 6597 } |
| 6598 } |
| 6599 } |
| 6600 SimulateIncrementalMarking(heap); |
| 6601 for (size_t j = 0; j < arrays.size(); j++) { |
| 6602 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(arrays[j], N - 1); |
| 6603 } |
| 6604 } |
| 6605 // Force allocation from the free list. |
| 6606 heap->set_force_oom(true); |
| 6607 heap->CollectGarbage(OLD_SPACE); |
| 6608 } |
| 6609 |
6535 } // namespace internal | 6610 } // namespace internal |
6536 } // namespace v8 | 6611 } // namespace v8 |
OLD | NEW |