| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 417 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 428 | 428 |
| 429 | 429 |
| 430 TEST(GarbageCollection) { | 430 TEST(GarbageCollection) { |
| 431 CcTest::InitializeVM(); | 431 CcTest::InitializeVM(); |
| 432 Isolate* isolate = CcTest::i_isolate(); | 432 Isolate* isolate = CcTest::i_isolate(); |
| 433 Heap* heap = isolate->heap(); | 433 Heap* heap = isolate->heap(); |
| 434 Factory* factory = isolate->factory(); | 434 Factory* factory = isolate->factory(); |
| 435 | 435 |
| 436 HandleScope sc(isolate); | 436 HandleScope sc(isolate); |
| 437 // Check GC. | 437 // Check GC. |
| 438 heap->CollectGarbage(NEW_SPACE); | 438 heap->CollectGarbageNewSpace(); |
| 439 | 439 |
| 440 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object()); | 440 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object()); |
| 441 Handle<String> name = factory->InternalizeUtf8String("theFunction"); | 441 Handle<String> name = factory->InternalizeUtf8String("theFunction"); |
| 442 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot"); | 442 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot"); |
| 443 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx"); | 443 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx"); |
| 444 Handle<String> obj_name = factory->InternalizeUtf8String("theObject"); | 444 Handle<String> obj_name = factory->InternalizeUtf8String("theObject"); |
| 445 Handle<Smi> twenty_three(Smi::FromInt(23), isolate); | 445 Handle<Smi> twenty_three(Smi::FromInt(23), isolate); |
| 446 Handle<Smi> twenty_four(Smi::FromInt(24), isolate); | 446 Handle<Smi> twenty_four(Smi::FromInt(24), isolate); |
| 447 | 447 |
| 448 { | 448 { |
| 449 HandleScope inner_scope(isolate); | 449 HandleScope inner_scope(isolate); |
| 450 // Allocate a function and keep it in global object's property. | 450 // Allocate a function and keep it in global object's property. |
| 451 Handle<JSFunction> function = factory->NewFunction(name); | 451 Handle<JSFunction> function = factory->NewFunction(name); |
| 452 JSReceiver::SetProperty(global, name, function, SLOPPY).Check(); | 452 JSReceiver::SetProperty(global, name, function, SLOPPY).Check(); |
| 453 // Allocate an object. Unrooted after leaving the scope. | 453 // Allocate an object. Unrooted after leaving the scope. |
| 454 Handle<JSObject> obj = factory->NewJSObject(function); | 454 Handle<JSObject> obj = factory->NewJSObject(function); |
| 455 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check(); | 455 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check(); |
| 456 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check(); | 456 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check(); |
| 457 | 457 |
| 458 CHECK_EQ(Smi::FromInt(23), | 458 CHECK_EQ(Smi::FromInt(23), |
| 459 *Object::GetProperty(obj, prop_name).ToHandleChecked()); | 459 *Object::GetProperty(obj, prop_name).ToHandleChecked()); |
| 460 CHECK_EQ(Smi::FromInt(24), | 460 CHECK_EQ(Smi::FromInt(24), |
| 461 *Object::GetProperty(obj, prop_namex).ToHandleChecked()); | 461 *Object::GetProperty(obj, prop_namex).ToHandleChecked()); |
| 462 } | 462 } |
| 463 | 463 |
| 464 heap->CollectGarbage(NEW_SPACE); | 464 heap->CollectGarbageNewSpace(); |
| 465 | 465 |
| 466 // Function should be alive. | 466 // Function should be alive. |
| 467 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name)); | 467 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name)); |
| 468 // Check function is retained. | 468 // Check function is retained. |
| 469 Handle<Object> func_value = | 469 Handle<Object> func_value = |
| 470 Object::GetProperty(global, name).ToHandleChecked(); | 470 Object::GetProperty(global, name).ToHandleChecked(); |
| 471 CHECK(func_value->IsJSFunction()); | 471 CHECK(func_value->IsJSFunction()); |
| 472 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); | 472 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); |
| 473 | 473 |
| 474 { | 474 { |
| (...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 544 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); | 544 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); |
| 545 Handle<Object> u = factory->NewNumber(1.12344); | 545 Handle<Object> u = factory->NewNumber(1.12344); |
| 546 | 546 |
| 547 h1 = global_handles->Create(*i); | 547 h1 = global_handles->Create(*i); |
| 548 h2 = global_handles->Create(*u); | 548 h2 = global_handles->Create(*u); |
| 549 h3 = global_handles->Create(*i); | 549 h3 = global_handles->Create(*i); |
| 550 h4 = global_handles->Create(*u); | 550 h4 = global_handles->Create(*u); |
| 551 } | 551 } |
| 552 | 552 |
| 553 // after gc, it should survive | 553 // after gc, it should survive |
| 554 heap->CollectGarbage(NEW_SPACE); | 554 heap->CollectGarbageNewSpace(); |
| 555 | 555 |
| 556 CHECK((*h1)->IsString()); | 556 CHECK((*h1)->IsString()); |
| 557 CHECK((*h2)->IsHeapNumber()); | 557 CHECK((*h2)->IsHeapNumber()); |
| 558 CHECK((*h3)->IsString()); | 558 CHECK((*h3)->IsString()); |
| 559 CHECK((*h4)->IsHeapNumber()); | 559 CHECK((*h4)->IsHeapNumber()); |
| 560 | 560 |
| 561 CHECK_EQ(*h3, *h1); | 561 CHECK_EQ(*h3, *h1); |
| 562 GlobalHandles::Destroy(h1.location()); | 562 GlobalHandles::Destroy(h1.location()); |
| 563 GlobalHandles::Destroy(h3.location()); | 563 GlobalHandles::Destroy(h3.location()); |
| 564 | 564 |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 602 h1 = global_handles->Create(*i); | 602 h1 = global_handles->Create(*i); |
| 603 h2 = global_handles->Create(*u); | 603 h2 = global_handles->Create(*u); |
| 604 } | 604 } |
| 605 | 605 |
| 606 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234); | 606 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234); |
| 607 GlobalHandles::MakeWeak(h2.location(), | 607 GlobalHandles::MakeWeak(h2.location(), |
| 608 reinterpret_cast<void*>(&handle_and_id), | 608 reinterpret_cast<void*>(&handle_and_id), |
| 609 &TestWeakGlobalHandleCallback); | 609 &TestWeakGlobalHandleCallback); |
| 610 | 610 |
| 611 // Scavenge treats weak pointers as normal roots. | 611 // Scavenge treats weak pointers as normal roots. |
| 612 heap->CollectGarbage(NEW_SPACE); | 612 heap->CollectGarbageNewSpace(); |
| 613 | 613 |
| 614 CHECK((*h1)->IsString()); | 614 CHECK((*h1)->IsString()); |
| 615 CHECK((*h2)->IsHeapNumber()); | 615 CHECK((*h2)->IsHeapNumber()); |
| 616 | 616 |
| 617 CHECK(!WeakPointerCleared); | 617 CHECK(!WeakPointerCleared); |
| 618 CHECK(!global_handles->IsNearDeath(h2.location())); | 618 CHECK(!global_handles->IsNearDeath(h2.location())); |
| 619 CHECK(!global_handles->IsNearDeath(h1.location())); | 619 CHECK(!global_handles->IsNearDeath(h1.location())); |
| 620 | 620 |
| 621 GlobalHandles::Destroy(h1.location()); | 621 GlobalHandles::Destroy(h1.location()); |
| 622 GlobalHandles::Destroy(h2.location()); | 622 GlobalHandles::Destroy(h2.location()); |
| (...skipping 17 matching lines...) Expand all Loading... |
| 640 | 640 |
| 641 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); | 641 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); |
| 642 Handle<Object> u = factory->NewNumber(1.12344); | 642 Handle<Object> u = factory->NewNumber(1.12344); |
| 643 | 643 |
| 644 h1 = global_handles->Create(*i); | 644 h1 = global_handles->Create(*i); |
| 645 h2 = global_handles->Create(*u); | 645 h2 = global_handles->Create(*u); |
| 646 } | 646 } |
| 647 | 647 |
| 648 // Make sure the objects are promoted. | 648 // Make sure the objects are promoted. |
| 649 heap->CollectGarbage(OLD_SPACE); | 649 heap->CollectGarbage(OLD_SPACE); |
| 650 heap->CollectGarbage(NEW_SPACE); | 650 heap->CollectGarbageNewSpace(); |
| 651 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2)); | 651 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2)); |
| 652 | 652 |
| 653 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234); | 653 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234); |
| 654 GlobalHandles::MakeWeak(h2.location(), | 654 GlobalHandles::MakeWeak(h2.location(), |
| 655 reinterpret_cast<void*>(&handle_and_id), | 655 reinterpret_cast<void*>(&handle_and_id), |
| 656 &TestWeakGlobalHandleCallback); | 656 &TestWeakGlobalHandleCallback); |
| 657 CHECK(!GlobalHandles::IsNearDeath(h1.location())); | 657 CHECK(!GlobalHandles::IsNearDeath(h1.location())); |
| 658 CHECK(!GlobalHandles::IsNearDeath(h2.location())); | 658 CHECK(!GlobalHandles::IsNearDeath(h2.location())); |
| 659 | 659 |
| 660 // Incremental marking potentially marked handles before they turned weak. | 660 // Incremental marking potentially marked handles before they turned weak. |
| (...skipping 26 matching lines...) Expand all Loading... |
| 687 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); | 687 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); |
| 688 h = global_handles->Create(*i); | 688 h = global_handles->Create(*i); |
| 689 } | 689 } |
| 690 | 690 |
| 691 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234); | 691 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234); |
| 692 GlobalHandles::MakeWeak(h.location(), | 692 GlobalHandles::MakeWeak(h.location(), |
| 693 reinterpret_cast<void*>(&handle_and_id), | 693 reinterpret_cast<void*>(&handle_and_id), |
| 694 &TestWeakGlobalHandleCallback); | 694 &TestWeakGlobalHandleCallback); |
| 695 | 695 |
| 696 // Scanvenge does not recognize weak reference. | 696 // Scavenge does not recognize weak reference. |
| 697 heap->CollectGarbage(NEW_SPACE); | 697 heap->CollectGarbageNewSpace(); |
| 698 | 698 |
| 699 CHECK(!WeakPointerCleared); | 699 CHECK(!WeakPointerCleared); |
| 700 | 700 |
| 701 // Mark-compact treats weak reference properly. | 701 // Mark-compact treats weak reference properly. |
| 702 heap->CollectGarbage(OLD_SPACE); | 702 heap->CollectGarbage(OLD_SPACE); |
| 703 | 703 |
| 704 CHECK(WeakPointerCleared); | 704 CHECK(WeakPointerCleared); |
| 705 } | 705 } |
| 706 | 706 |
| 707 | 707 |
| (...skipping 798 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1506 for (int i = 0; i < kAgingThreshold; i++) { | 1506 for (int i = 0; i < kAgingThreshold; i++) { |
| 1507 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); | 1507 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); |
| 1508 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); | 1508 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); |
| 1509 } | 1509 } |
| 1510 | 1510 |
| 1511 // Simulate incremental marking so that the functions are enqueued as | 1511 // Simulate incremental marking so that the functions are enqueued as |
| 1512 // code flushing candidates. Then kill one of the functions. Finally | 1512 // code flushing candidates. Then kill one of the functions. Finally |
| 1513 // perform a scavenge while incremental marking is still running. | 1513 // perform a scavenge while incremental marking is still running. |
| 1514 SimulateIncrementalMarking(CcTest::heap()); | 1514 SimulateIncrementalMarking(CcTest::heap()); |
| 1515 *function2.location() = NULL; | 1515 *function2.location() = NULL; |
| 1516 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking"); | 1516 CcTest::heap()->CollectGarbageNewSpace("test scavenge while marking"); |
| 1517 | 1517 |
| 1518 // Simulate one final GC to make sure the candidate queue is sane. | 1518 // Simulate one final GC to make sure the candidate queue is sane. |
| 1519 CcTest::heap()->CollectAllGarbage(); | 1519 CcTest::heap()->CollectAllGarbage(); |
| 1520 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); | 1520 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); |
| 1521 CHECK(!function->is_compiled() || function->IsOptimized()); | 1521 CHECK(!function->is_compiled() || function->IsOptimized()); |
| 1522 } | 1522 } |
| 1523 | 1523 |
| 1524 | 1524 |
| 1525 TEST(TestCodeFlushingIncrementalAbort) { | 1525 TEST(TestCodeFlushingIncrementalAbort) { |
| 1526 // If we do not flush code this test is invalid. | 1526 // If we do not flush code this test is invalid. |
| (...skipping 253 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1780 OptimizeEmptyFunction("f4"); | 1780 OptimizeEmptyFunction("f4"); |
| 1781 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); | 1781 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); |
| 1782 OptimizeEmptyFunction("f5"); | 1782 OptimizeEmptyFunction("f5"); |
| 1783 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i])); | 1783 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i])); |
| 1784 | 1784 |
| 1785 // Remove function f1, and | 1785 // Remove function f1, and |
| 1786 CompileRun("f1=null"); | 1786 CompileRun("f1=null"); |
| 1787 | 1787 |
| 1788 // Scavenge treats these references as strong. | 1788 // Scavenge treats these references as strong. |
| 1789 for (int j = 0; j < 10; j++) { | 1789 for (int j = 0; j < 10; j++) { |
| 1790 CcTest::heap()->CollectGarbage(NEW_SPACE); | 1790 CcTest::heap()->CollectGarbageNewSpace(); |
| 1791 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i])); | 1791 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i])); |
| 1792 } | 1792 } |
| 1793 | 1793 |
| 1794 // Mark compact handles the weak references. | 1794 // Mark compact handles the weak references. |
| 1795 isolate->compilation_cache()->Clear(); | 1795 isolate->compilation_cache()->Clear(); |
| 1796 heap->CollectAllGarbage(); | 1796 heap->CollectAllGarbage(); |
| 1797 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); | 1797 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); |
| 1798 | 1798 |
| 1799 // Get rid of f3 and f5 in the same way. | 1799 // Get rid of f3 and f5 in the same way. |
| 1800 CompileRun("f3=null"); | 1800 CompileRun("f3=null"); |
| 1801 for (int j = 0; j < 10; j++) { | 1801 for (int j = 0; j < 10; j++) { |
| 1802 CcTest::heap()->CollectGarbage(NEW_SPACE); | 1802 CcTest::heap()->CollectGarbageNewSpace(); |
| 1803 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); | 1803 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); |
| 1804 } | 1804 } |
| 1805 CcTest::heap()->CollectAllGarbage(); | 1805 CcTest::heap()->CollectAllGarbage(); |
| 1806 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); | 1806 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); |
| 1807 CompileRun("f5=null"); | 1807 CompileRun("f5=null"); |
| 1808 for (int j = 0; j < 10; j++) { | 1808 for (int j = 0; j < 10; j++) { |
| 1809 CcTest::heap()->CollectGarbage(NEW_SPACE); | 1809 CcTest::heap()->CollectGarbageNewSpace(); |
| 1810 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); | 1810 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); |
| 1811 } | 1811 } |
| 1812 CcTest::heap()->CollectAllGarbage(); | 1812 CcTest::heap()->CollectAllGarbage(); |
| (...skipping 562 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2375 new_capacity = new_space->TotalCapacity(); | 2375 new_capacity = new_space->TotalCapacity(); |
| 2376 CHECK(old_capacity == new_capacity); | 2376 CHECK(old_capacity == new_capacity); |
| 2377 | 2377 |
| 2378 // Explicitly shrinking should not affect space capacity. | 2378 // Explicitly shrinking should not affect space capacity. |
| 2379 old_capacity = new_space->TotalCapacity(); | 2379 old_capacity = new_space->TotalCapacity(); |
| 2380 new_space->Shrink(); | 2380 new_space->Shrink(); |
| 2381 new_capacity = new_space->TotalCapacity(); | 2381 new_capacity = new_space->TotalCapacity(); |
| 2382 CHECK(old_capacity == new_capacity); | 2382 CHECK(old_capacity == new_capacity); |
| 2383 | 2383 |
| 2384 // Let the scavenger empty the new space. | 2384 // Let the scavenger empty the new space. |
| 2385 heap->CollectGarbage(NEW_SPACE); | 2385 heap->CollectGarbageNewSpace(); |
| 2386 CHECK_LE(new_space->Size(), old_capacity); | 2386 CHECK_LE(new_space->Size(), old_capacity); |
| 2387 | 2387 |
| 2388 // Explicitly shrinking should halve the space capacity. | 2388 // Explicitly shrinking should halve the space capacity. |
| 2389 old_capacity = new_space->TotalCapacity(); | 2389 old_capacity = new_space->TotalCapacity(); |
| 2390 new_space->Shrink(); | 2390 new_space->Shrink(); |
| 2391 new_capacity = new_space->TotalCapacity(); | 2391 new_capacity = new_space->TotalCapacity(); |
| 2392 CHECK(old_capacity == 2 * new_capacity); | 2392 CHECK(old_capacity == 2 * new_capacity); |
| 2393 | 2393 |
| 2394 // Consecutive shrinking should not affect space capacity. | 2394 // Consecutive shrinking should not affect space capacity. |
| 2395 old_capacity = new_space->TotalCapacity(); | 2395 old_capacity = new_space->TotalCapacity(); |
| (...skipping 434 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2830 | 2830 |
| 2831 HEAP_TEST(GCFlags) { | 2831 HEAP_TEST(GCFlags) { |
| 2832 CcTest::InitializeVM(); | 2832 CcTest::InitializeVM(); |
| 2833 Heap* heap = CcTest::heap(); | 2833 Heap* heap = CcTest::heap(); |
| 2834 | 2834 |
| 2835 heap->set_current_gc_flags(Heap::kNoGCFlags); | 2835 heap->set_current_gc_flags(Heap::kNoGCFlags); |
| 2836 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); | 2836 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); |
| 2837 | 2837 |
| 2838 // Set the flags to check whether we appropriately resets them after the GC. | 2838 // Set the flags to check whether we appropriately reset them after the GC. |
| 2839 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask); | 2839 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask); |
| 2840 heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask); | 2840 heap->CollectAllGarbage("GCFlags", Heap::kReduceMemoryFootprintMask); |
| 2841 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); | 2841 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); |
| 2842 | 2842 |
| 2843 MarkCompactCollector* collector = heap->mark_compact_collector(); | 2843 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 2844 if (collector->sweeping_in_progress()) { | 2844 if (collector->sweeping_in_progress()) { |
| 2845 collector->EnsureSweepingCompleted(); | 2845 collector->EnsureSweepingCompleted(); |
| 2846 } | 2846 } |
| 2847 | 2847 |
| 2848 IncrementalMarking* marking = heap->incremental_marking(); | 2848 IncrementalMarking* marking = heap->incremental_marking(); |
| 2849 marking->Stop(); | 2849 marking->Stop(); |
| 2850 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask); | 2850 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask); |
| 2851 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask); | 2851 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask); |
| 2852 | 2852 |
| 2853 heap->CollectGarbage(NEW_SPACE); | 2853 heap->CollectGarbageNewSpace(); |
| 2854 // NewSpace scavenges should not overwrite the flags. | 2854 // NewSpace scavenges should not overwrite the flags. |
| 2855 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask); | 2855 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask); |
| 2856 | 2856 |
| 2857 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); | 2857 heap->CollectAllGarbage("GCFlags", Heap::kAbortIncrementalMarkingMask); |
| 2858 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); | 2858 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); |
| 2859 } | 2859 } |
| 2860 | 2860 |
| 2861 | 2861 |
| 2862 TEST(IdleNotificationFinishMarking) { | 2862 TEST(IdleNotificationFinishMarking) { |
| 2863 i::FLAG_allow_natives_syntax = true; | 2863 i::FLAG_allow_natives_syntax = true; |
| 2864 CcTest::InitializeVM(); | 2864 CcTest::InitializeVM(); |
| 2865 SimulateFullSpace(CcTest::heap()->old_space()); | 2865 SimulateFullSpace(CcTest::heap()->old_space()); |
| 2866 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); | 2866 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); |
| 2867 marking->Stop(); | 2867 marking->Stop(); |
| (...skipping 865 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3733 CHECK_EQ(1, old_space->CountTotalPages()); | 3733 CHECK_EQ(1, old_space->CountTotalPages()); |
| 3734 for (int i = 0; i < number_of_test_pages; i++) { | 3734 for (int i = 0; i < number_of_test_pages; i++) { |
| 3735 AlwaysAllocateScope always_allocate(isolate); | 3735 AlwaysAllocateScope always_allocate(isolate); |
| 3736 SimulateFullSpace(old_space); | 3736 SimulateFullSpace(old_space); |
| 3737 factory->NewFixedArray(1, TENURED); | 3737 factory->NewFixedArray(1, TENURED); |
| 3738 } | 3738 } |
| 3739 CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages()); | 3739 CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages()); |
| 3740 | 3740 |
| 3741 // Triggering one GC will cause a lot of garbage to be discovered but | 3741 // Triggering one GC will cause a lot of garbage to be discovered but |
| 3742 // even spread across all allocated pages. | 3742 // even spread across all allocated pages. |
| 3743 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, | 3743 heap->CollectAllGarbage("triggered for preparation", |
| 3744 "triggered for preparation"); | 3744 Heap::kFinalizeIncrementalMarkingMask); |
| 3745 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages()); | 3745 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages()); |
| 3746 | 3746 |
| 3747 // Triggering subsequent GCs should cause at least half of the pages | 3747 // Triggering subsequent GCs should cause at least half of the pages |
| 3748 // to be released to the OS after at most two cycles. | 3748 // to be released to the OS after at most two cycles. |
| 3749 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, | 3749 heap->CollectAllGarbage("triggered by test 1", |
| 3750 "triggered by test 1"); | 3750 Heap::kFinalizeIncrementalMarkingMask); |
| 3751 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages()); | 3752 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages()); |
| 3752 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, | 3753 heap->CollectAllGarbage("triggered by test 2", |
| 3753 "triggered by test 2"); | 3754 Heap::kFinalizeIncrementalMarkingMask); |
| 3754 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2); | 3755 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2); |
| 3755 | 3756 |
| 3756 // Triggering a last-resort GC should cause all pages to be released to the | 3757 // Triggering a last-resort GC should cause all pages to be released to the |
| 3757 // OS so that other processes can seize the memory. If we get a failure here | 3758 // OS so that other processes can seize the memory. If we get a failure here |
| 3758 // where there are 2 pages left instead of 1, then we should increase the | 3759 // where there are 2 pages left instead of 1, then we should increase the |
| 3759 // size of the first page a little in SizeOfFirstPage in spaces.cc. The | 3760 // size of the first page a little in SizeOfFirstPage in spaces.cc. The |
| 3760 // first page should be small in order to reduce memory used when the VM | 3761 // first page should be small in order to reduce memory used when the VM |
| 3761 // boots, but if the 20 small arrays don't fit on the first page then that's | 3762 // boots, but if the 20 small arrays don't fit on the first page then that's |
| 3762 // an indication that it is too small. | 3763 // an indication that it is too small. |
| 3763 heap->CollectAllAvailableGarbage("triggered really hard"); | 3764 heap->CollectAllAvailableGarbage("triggered really hard"); |
| (...skipping 677 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4441 "obj = fastliteralcase(get_standard_literal(), 2);"); | 4442 "obj = fastliteralcase(get_standard_literal(), 2);"); |
| 4442 | 4443 |
| 4443 // prepare the heap | 4444 // prepare the heap |
| 4444 v8::Local<v8::String> mote_code_string = | 4445 v8::Local<v8::String> mote_code_string = |
| 4445 v8_str("fastliteralcase(mote, 2.5);"); | 4446 v8_str("fastliteralcase(mote, 2.5);"); |
| 4446 | 4447 |
| 4447 v8::Local<v8::String> array_name = v8_str("mote"); | 4448 v8::Local<v8::String> array_name = v8_str("mote"); |
| 4448 CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0)); | 4449 CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0)); |
| 4449 | 4450 |
| 4450 // First make sure we flip spaces | 4451 // First make sure we flip spaces |
| 4451 CcTest::heap()->CollectGarbage(NEW_SPACE); | 4452 CcTest::heap()->CollectGarbageNewSpace(); |
| 4452 | 4453 |
| 4453 // Allocate the object. | 4454 // Allocate the object. |
| 4454 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED); | 4455 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED); |
| 4455 array_data->set(0, Smi::FromInt(1)); | 4456 array_data->set(0, Smi::FromInt(1)); |
| 4456 array_data->set(1, Smi::FromInt(2)); | 4457 array_data->set(1, Smi::FromInt(2)); |
| 4457 | 4458 |
| 4458 AllocateAllButNBytes(CcTest::heap()->new_space(), | 4459 AllocateAllButNBytes(CcTest::heap()->new_space(), |
| 4459 JSArray::kSize + AllocationMemento::kSize + | 4460 JSArray::kSize + AllocationMemento::kSize + |
| 4460 kPointerSize); | 4461 kPointerSize); |
| 4461 | 4462 |
| (...skipping 1049 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5511 } | 5512 } |
| 5512 | 5513 |
| 5513 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED); | 5514 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED); |
| 5514 Handle<WeakCell> weak_cell2; | 5515 Handle<WeakCell> weak_cell2; |
| 5515 { | 5516 { |
| 5516 HandleScope inner_scope(isolate); | 5517 HandleScope inner_scope(isolate); |
| 5517 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor)); | 5518 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor)); |
| 5518 } | 5519 } |
| 5519 CHECK(weak_cell1->value()->IsFixedArray()); | 5520 CHECK(weak_cell1->value()->IsFixedArray()); |
| 5520 CHECK_EQ(*survivor, weak_cell2->value()); | 5521 CHECK_EQ(*survivor, weak_cell2->value()); |
| 5521 heap->CollectGarbage(NEW_SPACE); | 5522 heap->CollectGarbageNewSpace(); |
| 5522 CHECK(weak_cell1->value()->IsFixedArray()); | 5523 CHECK(weak_cell1->value()->IsFixedArray()); |
| 5523 CHECK_EQ(*survivor, weak_cell2->value()); | 5524 CHECK_EQ(*survivor, weak_cell2->value()); |
| 5524 heap->CollectGarbage(NEW_SPACE); | 5525 heap->CollectGarbageNewSpace(); |
| 5525 CHECK(weak_cell1->value()->IsFixedArray()); | 5526 CHECK(weak_cell1->value()->IsFixedArray()); |
| 5526 CHECK_EQ(*survivor, weak_cell2->value()); | 5527 CHECK_EQ(*survivor, weak_cell2->value()); |
| 5527 heap->CollectAllAvailableGarbage(); | 5528 heap->CollectAllAvailableGarbage(); |
| 5528 CHECK(weak_cell1->cleared()); | 5529 CHECK(weak_cell1->cleared()); |
| 5529 CHECK_EQ(*survivor, weak_cell2->value()); | 5530 CHECK_EQ(*survivor, weak_cell2->value()); |
| 5530 } | 5531 } |
| 5531 | 5532 |
| 5532 | 5533 |
| 5533 TEST(WeakCellsWithIncrementalMarking) { | 5534 TEST(WeakCellsWithIncrementalMarking) { |
| 5534 CcTest::InitializeVM(); | 5535 CcTest::InitializeVM(); |
| (...skipping 10 matching lines...) Expand all Loading... |
| 5545 HandleScope inner_scope(isolate); | 5546 HandleScope inner_scope(isolate); |
| 5546 Handle<HeapObject> value = | 5547 Handle<HeapObject> value = |
| 5547 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED); | 5548 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED); |
| 5548 Handle<WeakCell> weak_cell = factory->NewWeakCell(value); | 5549 Handle<WeakCell> weak_cell = factory->NewWeakCell(value); |
| 5549 CHECK(weak_cell->value()->IsFixedArray()); | 5550 CHECK(weak_cell->value()->IsFixedArray()); |
| 5550 IncrementalMarking* marking = heap->incremental_marking(); | 5551 IncrementalMarking* marking = heap->incremental_marking(); |
| 5551 if (marking->IsStopped()) { | 5552 if (marking->IsStopped()) { |
| 5552 heap->StartIncrementalMarking(); | 5553 heap->StartIncrementalMarking(); |
| 5553 } | 5554 } |
| 5554 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD); | 5555 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD); |
| 5555 heap->CollectGarbage(NEW_SPACE); | 5556 heap->CollectGarbageNewSpace(); |
| 5556 CHECK(weak_cell->value()->IsFixedArray()); | 5557 CHECK(weak_cell->value()->IsFixedArray()); |
| 5557 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell); | 5558 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell); |
| 5558 } | 5559 } |
| 5559 heap->CollectAllGarbage(); | 5560 heap->CollectAllGarbage(); |
| 5560 CHECK_EQ(*survivor, weak_cells[0]->value()); | 5561 CHECK_EQ(*survivor, weak_cells[0]->value()); |
| 5561 for (int i = 1; i < N; i++) { | 5562 for (int i = 1; i < N; i++) { |
| 5562 CHECK(weak_cells[i]->cleared()); | 5563 CHECK(weak_cells[i]->cleared()); |
| 5563 } | 5564 } |
| 5564 } | 5565 } |
| 5565 | 5566 |
| (...skipping 185 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5751 // If we are in a low memory config, we can't grow to two pages and we can't | 5752 // If we are in a low memory config, we can't grow to two pages and we can't |
| 5752 // run this test. This also means the issue we are testing cannot arise, as | 5753 // run this test. This also means the issue we are testing cannot arise, as |
| 5753 // there is no fragmentation. | 5754 // there is no fragmentation. |
| 5754 if (new_space->IsAtMaximumCapacity()) return; | 5755 if (new_space->IsAtMaximumCapacity()) return; |
| 5755 | 5756 |
| 5756 new_space->Grow(); | 5757 new_space->Grow(); |
| 5757 CHECK(new_space->IsAtMaximumCapacity()); | 5758 CHECK(new_space->IsAtMaximumCapacity()); |
| 5758 CHECK(2 * old_capacity == new_space->TotalCapacity()); | 5759 CHECK(2 * old_capacity == new_space->TotalCapacity()); |
| 5759 | 5760 |
| 5760 // Call the scavenger two times to get an empty new space | 5761 // Call the scavenger two times to get an empty new space |
| 5761 heap->CollectGarbage(NEW_SPACE); | 5762 heap->CollectGarbageNewSpace(); |
| 5762 heap->CollectGarbage(NEW_SPACE); | 5763 heap->CollectGarbageNewSpace(); |
| 5763 | 5764 |
| 5764 // First create a few objects which will survive a scavenge, and will get | 5765 // First create a few objects which will survive a scavenge, and will get |
| 5765 // promoted to the old generation later on. These objects will create | 5766 // promoted to the old generation later on. These objects will create |
| 5766 // promotion queue entries at the end of the second semi-space page. | 5767 // promotion queue entries at the end of the second semi-space page. |
| 5767 const int number_handles = 12; | 5768 const int number_handles = 12; |
| 5768 Handle<FixedArray> handles[number_handles]; | 5769 Handle<FixedArray> handles[number_handles]; |
| 5769 for (int i = 0; i < number_handles; i++) { | 5770 for (int i = 0; i < number_handles; i++) { |
| 5770 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED); | 5771 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED); |
| 5771 } | 5772 } |
| 5772 heap->CollectGarbage(NEW_SPACE); | 5773 heap->CollectGarbageNewSpace(); |
| 5773 | 5774 |
| 5774 // Create the first huge object which will exactly fit the first semi-space | 5775 // Create the first huge object which will exactly fit the first semi-space |
| 5775 // page. | 5776 // page. |
| 5776 int new_linear_size = | 5777 int new_linear_size = |
| 5777 static_cast<int>(*heap->new_space()->allocation_limit_address() - | 5778 static_cast<int>(*heap->new_space()->allocation_limit_address() - |
| 5778 *heap->new_space()->allocation_top_address()); | 5779 *heap->new_space()->allocation_top_address()); |
| 5779 int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize; | 5780 int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize; |
| 5780 Handle<FixedArray> first = | 5781 Handle<FixedArray> first = |
| 5781 i_isolate->factory()->NewFixedArray(length, NOT_TENURED); | 5782 i_isolate->factory()->NewFixedArray(length, NOT_TENURED); |
| 5782 CHECK(heap->InNewSpace(*first)); | 5783 CHECK(heap->InNewSpace(*first)); |
| 5783 | 5784 |
| 5784 // Create the second huge object of maximum allocatable second semi-space | 5785 // Create the second huge object of maximum allocatable second semi-space |
| 5785 // page size. | 5786 // page size. |
| 5786 new_linear_size = | 5787 new_linear_size = |
| 5787 static_cast<int>(*heap->new_space()->allocation_limit_address() - | 5788 static_cast<int>(*heap->new_space()->allocation_limit_address() - |
| 5788 *heap->new_space()->allocation_top_address()); | 5789 *heap->new_space()->allocation_top_address()); |
| 5789 length = Page::kMaxRegularHeapObjectSize / kPointerSize - | 5790 length = Page::kMaxRegularHeapObjectSize / kPointerSize - |
| 5790 FixedArray::kHeaderSize; | 5791 FixedArray::kHeaderSize; |
| 5791 Handle<FixedArray> second = | 5792 Handle<FixedArray> second = |
| 5792 i_isolate->factory()->NewFixedArray(length, NOT_TENURED); | 5793 i_isolate->factory()->NewFixedArray(length, NOT_TENURED); |
| 5793 CHECK(heap->InNewSpace(*second)); | 5794 CHECK(heap->InNewSpace(*second)); |
| 5794 | 5795 |
| 5795 // This scavenge will corrupt memory if the promotion queue is not | 5796 // This scavenge will corrupt memory if the promotion queue is not |
| 5796 // evacuated. | 5797 // evacuated. |
| 5797 heap->CollectGarbage(NEW_SPACE); | 5798 heap->CollectGarbageNewSpace(); |
| 5798 } | 5799 } |
| 5799 isolate->Dispose(); | 5800 isolate->Dispose(); |
| 5800 } | 5801 } |
| 5801 | 5802 |
| 5802 | 5803 |
| 5803 TEST(Regress388880) { | 5804 TEST(Regress388880) { |
| 5804 i::FLAG_expose_gc = true; | 5805 i::FLAG_expose_gc = true; |
| 5805 CcTest::InitializeVM(); | 5806 CcTest::InitializeVM(); |
| 5806 v8::HandleScope scope(CcTest::isolate()); | 5807 v8::HandleScope scope(CcTest::isolate()); |
| 5807 Isolate* isolate = CcTest::i_isolate(); | 5808 Isolate* isolate = CcTest::i_isolate(); |
| (...skipping 404 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 6212 CHECK_EQ(bytes, static_cast<size_t>(array->Size())); | 6213 CHECK_EQ(bytes, static_cast<size_t>(array->Size())); |
| 6213 } | 6214 } |
| 6214 | 6215 |
| 6215 | 6216 |
| 6216 TEST(NewSpaceAllocationCounter) { | 6217 TEST(NewSpaceAllocationCounter) { |
| 6217 CcTest::InitializeVM(); | 6218 CcTest::InitializeVM(); |
| 6218 v8::HandleScope scope(CcTest::isolate()); | 6219 v8::HandleScope scope(CcTest::isolate()); |
| 6219 Isolate* isolate = CcTest::i_isolate(); | 6220 Isolate* isolate = CcTest::i_isolate(); |
| 6220 Heap* heap = isolate->heap(); | 6221 Heap* heap = isolate->heap(); |
| 6221 size_t counter1 = heap->NewSpaceAllocationCounter(); | 6222 size_t counter1 = heap->NewSpaceAllocationCounter(); |
| 6222 heap->CollectGarbage(NEW_SPACE); | 6223 heap->CollectGarbageNewSpace(); |
| 6223 const size_t kSize = 1024; | 6224 const size_t kSize = 1024; |
| 6224 AllocateInSpace(isolate, kSize, NEW_SPACE); | 6225 AllocateInSpace(isolate, kSize, NEW_SPACE); |
| 6225 size_t counter2 = heap->NewSpaceAllocationCounter(); | 6226 size_t counter2 = heap->NewSpaceAllocationCounter(); |
| 6226 CHECK_EQ(kSize, counter2 - counter1); | 6227 CHECK_EQ(kSize, counter2 - counter1); |
| 6227 heap->CollectGarbage(NEW_SPACE); | 6228 heap->CollectGarbageNewSpace(); |
| 6228 size_t counter3 = heap->NewSpaceAllocationCounter(); | 6229 size_t counter3 = heap->NewSpaceAllocationCounter(); |
| 6229 CHECK_EQ(0U, counter3 - counter2); | 6230 CHECK_EQ(0U, counter3 - counter2); |
| 6230 // Test counter overflow. | 6231 // Test counter overflow. |
| 6231 size_t max_counter = -1; | 6232 size_t max_counter = -1; |
| 6232 heap->set_new_space_allocation_counter(max_counter - 10 * kSize); | 6233 heap->set_new_space_allocation_counter(max_counter - 10 * kSize); |
| 6233 size_t start = heap->NewSpaceAllocationCounter(); | 6234 size_t start = heap->NewSpaceAllocationCounter(); |
| 6234 for (int i = 0; i < 20; i++) { | 6235 for (int i = 0; i < 20; i++) { |
| 6235 AllocateInSpace(isolate, kSize, NEW_SPACE); | 6236 AllocateInSpace(isolate, kSize, NEW_SPACE); |
| 6236 size_t counter = heap->NewSpaceAllocationCounter(); | 6237 size_t counter = heap->NewSpaceAllocationCounter(); |
| 6237 CHECK_EQ(kSize, counter - start); | 6238 CHECK_EQ(kSize, counter - start); |
| 6238 start = counter; | 6239 start = counter; |
| 6239 } | 6240 } |
| 6240 } | 6241 } |
| 6241 | 6242 |
| 6242 | 6243 |
| 6243 TEST(OldSpaceAllocationCounter) { | 6244 TEST(OldSpaceAllocationCounter) { |
| 6244 CcTest::InitializeVM(); | 6245 CcTest::InitializeVM(); |
| 6245 v8::HandleScope scope(CcTest::isolate()); | 6246 v8::HandleScope scope(CcTest::isolate()); |
| 6246 Isolate* isolate = CcTest::i_isolate(); | 6247 Isolate* isolate = CcTest::i_isolate(); |
| 6247 Heap* heap = isolate->heap(); | 6248 Heap* heap = isolate->heap(); |
| 6248 size_t counter1 = heap->OldGenerationAllocationCounter(); | 6249 size_t counter1 = heap->OldGenerationAllocationCounter(); |
| 6249 heap->CollectGarbage(NEW_SPACE); | 6250 heap->CollectGarbageNewSpace(); |
| 6250 heap->CollectGarbage(NEW_SPACE); | 6251 heap->CollectGarbageNewSpace(); |
| 6251 const size_t kSize = 1024; | 6252 const size_t kSize = 1024; |
| 6252 AllocateInSpace(isolate, kSize, OLD_SPACE); | 6253 AllocateInSpace(isolate, kSize, OLD_SPACE); |
| 6253 size_t counter2 = heap->OldGenerationAllocationCounter(); | 6254 size_t counter2 = heap->OldGenerationAllocationCounter(); |
| 6254 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed. | 6255 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed. |
| 6255 CHECK_LE(kSize, counter2 - counter1); | 6256 CHECK_LE(kSize, counter2 - counter1); |
| 6256 heap->CollectGarbage(NEW_SPACE); | 6257 heap->CollectGarbageNewSpace(); |
| 6257 size_t counter3 = heap->OldGenerationAllocationCounter(); | 6258 size_t counter3 = heap->OldGenerationAllocationCounter(); |
| 6258 CHECK_EQ(0u, counter3 - counter2); | 6259 CHECK_EQ(0u, counter3 - counter2); |
| 6259 AllocateInSpace(isolate, kSize, OLD_SPACE); | 6260 AllocateInSpace(isolate, kSize, OLD_SPACE); |
| 6260 heap->CollectGarbage(OLD_SPACE); | 6261 heap->CollectGarbage(OLD_SPACE); |
| 6261 size_t counter4 = heap->OldGenerationAllocationCounter(); | 6262 size_t counter4 = heap->OldGenerationAllocationCounter(); |
| 6262 CHECK_LE(kSize, counter4 - counter3); | 6263 CHECK_LE(kSize, counter4 - counter3); |
| 6263 // Test counter overflow. | 6264 // Test counter overflow. |
| 6264 size_t max_counter = -1; | 6265 size_t max_counter = -1; |
| 6265 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize); | 6266 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize); |
| 6266 size_t start = heap->OldGenerationAllocationCounter(); | 6267 size_t start = heap->OldGenerationAllocationCounter(); |
| (...skipping 391 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 6658 { | 6659 { |
| 6659 SharedFunctionInfo::Iterator iterator(isolate); | 6660 SharedFunctionInfo::Iterator iterator(isolate); |
| 6660 while (iterator.Next()) sfi_count--; | 6661 while (iterator.Next()) sfi_count--; |
| 6661 } | 6662 } |
| 6662 | 6663 |
| 6663 CHECK_EQ(0, sfi_count); | 6664 CHECK_EQ(0, sfi_count); |
| 6664 } | 6665 } |
| 6665 | 6666 |
| 6666 } // namespace internal | 6667 } // namespace internal |
| 6667 } // namespace v8 | 6668 } // namespace v8 |
| OLD | NEW |