Chromium Code Reviews

Side by Side Diff: test/cctest/heap/test-heap.cc

Issue 1999753002: [heap] Harden heap-related cctests (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix for win | Created 4 years, 7 months ago
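The diff below hardens the heap cctests in two recurring ways: test helpers such as SimulateIncrementalMarking, SimulateFullSpace and CreatePadding move from test/cctest/heap/utils-inl.h into the heap:: namespace of test/cctest/heap/heap-utils.h, and assertions about absolute heap state (for example CHECK_EQ(CcTest::heap()->gc_count(), 0) or CHECK_EQ(1, old_space->CountTotalPages())) are rewritten against a recorded baseline so the tests no longer assume a pristine heap. A minimal standalone sketch of that baseline/delta pattern follows; MockHeap and its methods are hypothetical stand-ins, not V8 API.

#include <cassert>

// Hypothetical stand-in for v8::internal::Heap; gc_count() mirrors the
// accessor the patch reads to take its baseline.
struct MockHeap {
  int gc_count_ = 0;
  int gc_count() const { return gc_count_; }
  void CollectAllGarbage() { ++gc_count_; }  // pretend this runs a full GC
};

int main() {
  MockHeap heap;
  heap.CollectAllGarbage();  // earlier tests or flags already perturbed state

  // Brittle form the patch removes: assert(heap.gc_count() == 0);
  const int initial_gc_count = heap.gc_count();  // hardened: record a baseline
  heap.CollectAllGarbage();
  assert(heap.gc_count() == initial_gc_count + 1);  // check the delta instead
  return 0;
}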
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 26 matching lines...)
37 #include "src/field-type.h" 37 #include "src/field-type.h"
38 #include "src/global-handles.h" 38 #include "src/global-handles.h"
39 #include "src/heap/gc-tracer.h" 39 #include "src/heap/gc-tracer.h"
40 #include "src/heap/memory-reducer.h" 40 #include "src/heap/memory-reducer.h"
41 #include "src/ic/ic.h" 41 #include "src/ic/ic.h"
42 #include "src/macro-assembler.h" 42 #include "src/macro-assembler.h"
43 #include "src/regexp/jsregexp.h" 43 #include "src/regexp/jsregexp.h"
44 #include "src/snapshot/snapshot.h" 44 #include "src/snapshot/snapshot.h"
45 #include "test/cctest/cctest.h" 45 #include "test/cctest/cctest.h"
46 #include "test/cctest/heap/heap-tester.h" 46 #include "test/cctest/heap/heap-tester.h"
47 #include "test/cctest/heap/utils-inl.h" 47 #include "test/cctest/heap/heap-utils.h"
48 #include "test/cctest/test-feedback-vector.h" 48 #include "test/cctest/test-feedback-vector.h"
49 49
50 50
51 namespace v8 { 51 namespace v8 {
52 namespace internal { 52 namespace internal {
53 53
54 static void CheckMap(Map* map, int type, int instance_size) { 54 static void CheckMap(Map* map, int type, int instance_size) {
55 CHECK(map->IsHeapObject()); 55 CHECK(map->IsHeapObject());
56 #ifdef DEBUG 56 #ifdef DEBUG
57 CHECK(CcTest::heap()->Contains(map)); 57 CHECK(CcTest::heap()->Contains(map));
(...skipping 660 matching lines...)
718 static const int kFrameSize = 32; 718 static const int kFrameSize = 32;
719 static const int kParameterCount = 2; 719 static const int kParameterCount = 2;
720 720
721 i::FLAG_manual_evacuation_candidates_selection = true; 721 i::FLAG_manual_evacuation_candidates_selection = true;
722 CcTest::InitializeVM(); 722 CcTest::InitializeVM();
723 Isolate* isolate = CcTest::i_isolate(); 723 Isolate* isolate = CcTest::i_isolate();
724 Heap* heap = isolate->heap(); 724 Heap* heap = isolate->heap();
725 Factory* factory = isolate->factory(); 725 Factory* factory = isolate->factory();
726 HandleScope scope(isolate); 726 HandleScope scope(isolate);
727 727
728 SimulateFullSpace(heap->old_space()); 728 heap::SimulateFullSpace(heap->old_space());
729 Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED); 729 Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
730 for (int i = 0; i < 5; i++) { 730 for (int i = 0; i < 5; i++) {
731 Handle<Object> number = factory->NewHeapNumber(i); 731 Handle<Object> number = factory->NewHeapNumber(i);
732 constant_pool->set(i, *number); 732 constant_pool->set(i, *number);
733 } 733 }
734 734
735 // Allocate and initialize BytecodeArray 735 // Allocate and initialize BytecodeArray
736 Handle<BytecodeArray> array = factory->NewBytecodeArray( 736 Handle<BytecodeArray> array = factory->NewBytecodeArray(
737 kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool); 737 kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);
738 738
(...skipping 604 matching lines...)
1343 CHECK(function->shared()->is_compiled()); 1343 CHECK(function->shared()->is_compiled());
1344 1344
1345 // The code will survive at least two GCs. 1345 // The code will survive at least two GCs.
1346 CcTest::heap()->CollectAllGarbage(); 1346 CcTest::heap()->CollectAllGarbage();
1347 CcTest::heap()->CollectAllGarbage(); 1347 CcTest::heap()->CollectAllGarbage();
1348 CHECK(function->shared()->is_compiled()); 1348 CHECK(function->shared()->is_compiled());
1349 1349
1350 // Simulate several GCs that use incremental marking. 1350 // Simulate several GCs that use incremental marking.
1351 const int kAgingThreshold = 6; 1351 const int kAgingThreshold = 6;
1352 for (int i = 0; i < kAgingThreshold; i++) { 1352 for (int i = 0; i < kAgingThreshold; i++) {
1353 SimulateIncrementalMarking(CcTest::heap()); 1353 heap::SimulateIncrementalMarking(CcTest::heap());
1354 CcTest::heap()->CollectAllGarbage(); 1354 CcTest::heap()->CollectAllGarbage();
1355 } 1355 }
1356 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1356 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1357 CHECK(!function->is_compiled() || function->IsOptimized()); 1357 CHECK(!function->is_compiled() || function->IsOptimized());
1358 1358
1359 // This compile will compile the function again. 1359 // This compile will compile the function again.
1360 { v8::HandleScope scope(CcTest::isolate()); 1360 { v8::HandleScope scope(CcTest::isolate());
1361 CompileRun("foo();"); 1361 CompileRun("foo();");
1362 } 1362 }
1363 1363
1364 // Simulate several GCs that use incremental marking but make sure 1364 // Simulate several GCs that use incremental marking but make sure
1365 // the loop breaks once the function is enqueued as a candidate. 1365 // the loop breaks once the function is enqueued as a candidate.
1366 for (int i = 0; i < kAgingThreshold; i++) { 1366 for (int i = 0; i < kAgingThreshold; i++) {
1367 SimulateIncrementalMarking(CcTest::heap()); 1367 heap::SimulateIncrementalMarking(CcTest::heap());
1368 if (!function->next_function_link()->IsUndefined()) break; 1368 if (!function->next_function_link()->IsUndefined()) break;
1369 CcTest::heap()->CollectAllGarbage(); 1369 CcTest::heap()->CollectAllGarbage();
1370 } 1370 }
1371 1371
1372 // Force optimization while incremental marking is active and while 1372 // Force optimization while incremental marking is active and while
1373 // the function is enqueued as a candidate. 1373 // the function is enqueued as a candidate.
1374 { v8::HandleScope scope(CcTest::isolate()); 1374 { v8::HandleScope scope(CcTest::isolate());
1375 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();"); 1375 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1376 } 1376 }
1377 1377
(...skipping 55 matching lines...)
1433 // object is still located in new-space. 1433 // object is still located in new-space.
1434 const int kAgingThreshold = 6; 1434 const int kAgingThreshold = 6;
1435 for (int i = 0; i < kAgingThreshold; i++) { 1435 for (int i = 0; i < kAgingThreshold; i++) {
1436 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 1436 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1437 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 1437 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1438 } 1438 }
1439 1439
1440 // Simulate incremental marking so that the functions are enqueued as 1440 // Simulate incremental marking so that the functions are enqueued as
1441 // code flushing candidates. Then kill one of the functions. Finally 1441 // code flushing candidates. Then kill one of the functions. Finally
1442 // perform a scavenge while incremental marking is still running. 1442 // perform a scavenge while incremental marking is still running.
1443 SimulateIncrementalMarking(CcTest::heap()); 1443 heap::SimulateIncrementalMarking(CcTest::heap());
1444 *function2.location() = NULL; 1444 *function2.location() = NULL;
1445 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking"); 1445 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
1446 1446
1447 // Simulate one final GC to make sure the candidate queue is sane. 1447 // Simulate one final GC to make sure the candidate queue is sane.
1448 CcTest::heap()->CollectAllGarbage(); 1448 CcTest::heap()->CollectAllGarbage();
1449 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1449 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1450 CHECK(!function->is_compiled() || function->IsOptimized()); 1450 CHECK(!function->is_compiled() || function->IsOptimized());
1451 } 1451 }
1452 1452
1453 1453
(...skipping 33 matching lines...)
1487 CHECK(function->shared()->is_compiled()); 1487 CHECK(function->shared()->is_compiled());
1488 1488
1489 // Bump the code age so that flushing is triggered. 1489 // Bump the code age so that flushing is triggered.
1490 const int kAgingThreshold = 6; 1490 const int kAgingThreshold = 6;
1491 for (int i = 0; i < kAgingThreshold; i++) { 1491 for (int i = 0; i < kAgingThreshold; i++) {
1492 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 1492 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1493 } 1493 }
1494 1494
1495 // Simulate incremental marking so that the function is enqueued as 1495 // Simulate incremental marking so that the function is enqueued as
1496 // code flushing candidate. 1496 // code flushing candidate.
1497 SimulateIncrementalMarking(heap); 1497 heap::SimulateIncrementalMarking(heap);
1498 1498
1499 // Enable the debugger and add a breakpoint while incremental marking 1499 // Enable the debugger and add a breakpoint while incremental marking
1500 // is running so that incremental marking aborts and code flushing is 1500 // is running so that incremental marking aborts and code flushing is
1501 // disabled. 1501 // disabled.
1502 int position = 0; 1502 int position = 0;
1503 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate); 1503 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
1504 EnableDebugger(CcTest::isolate()); 1504 EnableDebugger(CcTest::isolate());
1505 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position); 1505 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
1506 isolate->debug()->ClearAllBreakPoints(); 1506 isolate->debug()->ClearAllBreakPoints();
1507 DisableDebugger(CcTest::isolate()); 1507 DisableDebugger(CcTest::isolate());
(...skipping 34 matching lines...)
1542 Handle<JSFunction> f_function = Handle<JSFunction>::cast(f_value); 1542 Handle<JSFunction> f_function = Handle<JSFunction>::cast(f_value);
1543 CHECK(f_function->is_compiled()); 1543 CHECK(f_function->is_compiled());
1544 1544
1545 // Check g is not compiled. 1545 // Check g is not compiled.
1546 Handle<String> g_name = factory->InternalizeUtf8String("g"); 1546 Handle<String> g_name = factory->InternalizeUtf8String("g");
1547 Handle<Object> g_value = 1547 Handle<Object> g_value =
1548 Object::GetProperty(isolate->global_object(), g_name).ToHandleChecked(); 1548 Object::GetProperty(isolate->global_object(), g_name).ToHandleChecked();
1549 Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value); 1549 Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value);
1550 CHECK(!g_function->is_compiled()); 1550 CHECK(!g_function->is_compiled());
1551 1551
1552 SimulateIncrementalMarking(heap); 1552 heap::SimulateIncrementalMarking(heap);
1553 CompileRun("%OptimizeFunctionOnNextCall(f); f();"); 1553 CompileRun("%OptimizeFunctionOnNextCall(f); f();");
1554 1554
1555 // g should now have available an optimized function, unmarked by gc. The 1555 // g should now have available an optimized function, unmarked by gc. The
1556 // CompileLazy built-in will discover it and install it in the closure, and 1556 // CompileLazy built-in will discover it and install it in the closure, and
1557 // the incremental write barrier should be used. 1557 // the incremental write barrier should be used.
1558 CompileRun("g();"); 1558 CompileRun("g();");
1559 CHECK(g_function->is_compiled()); 1559 CHECK(g_function->is_compiled());
1560 } 1560 }
1561 1561
1562 TEST(CompilationCacheCachingBehavior) { 1562 TEST(CompilationCacheCachingBehavior) {
(...skipping 1088 matching lines...)
2651 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 2651 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2652 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 2652 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2653 CHECK(f->IsOptimized()); 2653 CHECK(f->IsOptimized());
2654 2654
2655 // Make sure incremental marking is not running. 2655 // Make sure incremental marking is not running.
2656 CcTest::heap()->incremental_marking()->Stop(); 2656 CcTest::heap()->incremental_marking()->Stop();
2657 2657
2658 CcTest::heap()->StartIncrementalMarking(); 2658 CcTest::heap()->StartIncrementalMarking();
2659 // The following calls will increment CcTest::heap()->global_ic_age(). 2659 // The following calls will increment CcTest::heap()->global_ic_age().
2660 CcTest::isolate()->ContextDisposedNotification(); 2660 CcTest::isolate()->ContextDisposedNotification();
2661 SimulateIncrementalMarking(CcTest::heap()); 2661 heap::SimulateIncrementalMarking(CcTest::heap());
2662 CcTest::heap()->CollectAllGarbage(); 2662 CcTest::heap()->CollectAllGarbage();
2663 2663
2664 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age()); 2664 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2665 CHECK_EQ(0, f->shared()->opt_count()); 2665 CHECK_EQ(0, f->shared()->opt_count());
2666 CHECK_EQ(0, GetProfilerTicks(f->shared())); 2666 CHECK_EQ(0, GetProfilerTicks(f->shared()));
2667 } 2667 }
2668 2668
2669 2669
2670 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) { 2670 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2671 i::FLAG_stress_compaction = false; 2671 i::FLAG_stress_compaction = false;
(...skipping 64 matching lines...)
2736 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask); 2736 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2737 2737
2738 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); 2738 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2739 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); 2739 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2740 } 2740 }
2741 2741
2742 2742
2743 TEST(IdleNotificationFinishMarking) { 2743 TEST(IdleNotificationFinishMarking) {
2744 i::FLAG_allow_natives_syntax = true; 2744 i::FLAG_allow_natives_syntax = true;
2745 CcTest::InitializeVM(); 2745 CcTest::InitializeVM();
2746 SimulateFullSpace(CcTest::heap()->old_space()); 2746 const int initial_gc_count = CcTest::heap()->gc_count();
2747 heap::SimulateFullSpace(CcTest::heap()->old_space());
2747 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); 2748 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2748 marking->Stop(); 2749 marking->Stop();
2749 CcTest::heap()->StartIncrementalMarking(); 2750 CcTest::heap()->StartIncrementalMarking();
2750 2751
2751 CHECK_EQ(CcTest::heap()->gc_count(), 0); 2752 CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);
2752 2753
2753 // TODO(hpayer): We cannot write proper unit test right now for heap. 2754 // TODO(hpayer): We cannot write proper unit test right now for heap.
2754 // The ideal test would call kMaxIdleMarkingDelayCounter to test the 2755 // The ideal test would call kMaxIdleMarkingDelayCounter to test the
2755 // marking delay counter. 2756 // marking delay counter.
2756 2757
2757 // Perform a huge incremental marking step but don't complete marking. 2758 // Perform a huge incremental marking step but don't complete marking.
2758 intptr_t bytes_processed = 0; 2759 intptr_t bytes_processed = 0;
2759 do { 2760 do {
2760 bytes_processed = 2761 bytes_processed =
2761 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 2762 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
(...skipping 14 matching lines...)
2776 } 2777 }
2777 2778
2778 marking->SetWeakClosureWasOverApproximatedForTesting(true); 2779 marking->SetWeakClosureWasOverApproximatedForTesting(true);
2779 2780
2780 // The next idle notification has to finish incremental marking. 2781 // The next idle notification has to finish incremental marking.
2781 const double kLongIdleTime = 1000.0; 2782 const double kLongIdleTime = 1000.0;
2782 CcTest::isolate()->IdleNotificationDeadline( 2783 CcTest::isolate()->IdleNotificationDeadline(
2783 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() / 2784 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2784 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) + 2785 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
2785 kLongIdleTime); 2786 kLongIdleTime);
2786 CHECK_EQ(CcTest::heap()->gc_count(), 1); 2787 CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count + 1);
2787 } 2788 }
2788 2789
2789 2790
2790 // Test that HAllocateObject will always return an object in new-space. 2791 // Test that HAllocateObject will always return an object in new-space.
2791 TEST(OptimizedAllocationAlwaysInNewSpace) { 2792 TEST(OptimizedAllocationAlwaysInNewSpace) {
2792 i::FLAG_allow_natives_syntax = true; 2793 i::FLAG_allow_natives_syntax = true;
2793 CcTest::InitializeVM(); 2794 CcTest::InitializeVM();
2794 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return; 2795 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2795 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return; 2796 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2796 v8::HandleScope scope(CcTest::isolate()); 2797 v8::HandleScope scope(CcTest::isolate());
2797 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext(); 2798 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2798 SimulateFullSpace(CcTest::heap()->new_space()); 2799 heap::SimulateFullSpace(CcTest::heap()->new_space());
2799 AlwaysAllocateScope always_allocate(CcTest::i_isolate()); 2800 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2800 v8::Local<v8::Value> res = CompileRun( 2801 v8::Local<v8::Value> res = CompileRun(
2801 "function c(x) {" 2802 "function c(x) {"
2802 " this.x = x;" 2803 " this.x = x;"
2803 " for (var i = 0; i < 32; i++) {" 2804 " for (var i = 0; i < 32; i++) {"
2804 " this['x' + i] = x;" 2805 " this['x' + i] = x;"
2805 " }" 2806 " }"
2806 "}" 2807 "}"
2807 "function f(x) { return new c(x); };" 2808 "function f(x) { return new c(x); };"
2808 "f(1); f(2); f(3);" 2809 "f(1); f(2); f(3);"
(...skipping 454 matching lines...)
3263 3264
3264 i::Handle<JSReceiver> root = 3265 i::Handle<JSReceiver> root =
3265 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast( 3266 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3266 CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked())); 3267 CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));
3267 3268
3268 // Count number of live transitions before marking. 3269 // Count number of live transitions before marking.
3269 int transitions_before = CountMapTransitions(root->map()); 3270 int transitions_before = CountMapTransitions(root->map());
3270 CompileRun("%DebugPrint(root);"); 3271 CompileRun("%DebugPrint(root);");
3271 CHECK_EQ(transitions_count, transitions_before); 3272 CHECK_EQ(transitions_count, transitions_before);
3272 3273
3273 SimulateIncrementalMarking(CcTest::heap()); 3274 heap::SimulateIncrementalMarking(CcTest::heap());
3274 CcTest::heap()->CollectAllGarbage(); 3275 CcTest::heap()->CollectAllGarbage();
3275 3276
3276 // Count number of live transitions after marking. Note that one transition 3277 // Count number of live transitions after marking. Note that one transition
3277 // is left, because 'o' still holds an instance of one transition target. 3278 // is left, because 'o' still holds an instance of one transition target.
3278 int transitions_after = CountMapTransitions(root->map()); 3279 int transitions_after = CountMapTransitions(root->map());
3279 CompileRun("%DebugPrint(root);"); 3280 CompileRun("%DebugPrint(root);");
3280 CHECK_EQ(1, transitions_after); 3281 CHECK_EQ(1, transitions_after);
3281 } 3282 }
3282 3283
3283 3284
(...skipping 149 matching lines...)
3433 i::FLAG_incremental_marking = true; 3434 i::FLAG_incremental_marking = true;
3434 CcTest::InitializeVM(); 3435 CcTest::InitializeVM();
3435 v8::HandleScope scope(CcTest::isolate()); 3436 v8::HandleScope scope(CcTest::isolate());
3436 3437
3437 // Prepare a map transition from the root object together with a yet 3438 // Prepare a map transition from the root object together with a yet
3438 // untransitioned root object. 3439 // untransitioned root object.
3439 CompileRun("var root = new Object;" 3440 CompileRun("var root = new Object;"
3440 "root.foo = 0;" 3441 "root.foo = 0;"
3441 "root = new Object;"); 3442 "root = new Object;");
3442 3443
3443 SimulateIncrementalMarking(CcTest::heap()); 3444 heap::SimulateIncrementalMarking(CcTest::heap());
3444 3445
3445 // Compile a StoreIC that performs the prepared map transition. This 3446 // Compile a StoreIC that performs the prepared map transition. This
3446 // will restart incremental marking and should make sure the root is 3447 // will restart incremental marking and should make sure the root is
3447 // marked grey again. 3448 // marked grey again.
3448 CompileRun("function f(o) {" 3449 CompileRun("function f(o) {"
3449 " o.foo = 0;" 3450 " o.foo = 0;"
3450 "}" 3451 "}"
3451 "f(new Object);" 3452 "f(new Object);"
3452 "f(root);"); 3453 "f(root);");
3453 3454
(...skipping 19 matching lines...)
3473 i::FLAG_allow_natives_syntax = true; 3474 i::FLAG_allow_natives_syntax = true;
3474 CcTest::InitializeVM(); 3475 CcTest::InitializeVM();
3475 v8::HandleScope scope(CcTest::isolate()); 3476 v8::HandleScope scope(CcTest::isolate());
3476 3477
3477 // Prepare a map transition from the root object together with a yet 3478 // Prepare a map transition from the root object together with a yet
3478 // untransitioned root object. 3479 // untransitioned root object.
3479 CompileRun("var root = new Object;" 3480 CompileRun("var root = new Object;"
3480 "root.foo = 0;" 3481 "root.foo = 0;"
3481 "root = new Object;"); 3482 "root = new Object;");
3482 3483
3483 SimulateIncrementalMarking(CcTest::heap()); 3484 heap::SimulateIncrementalMarking(CcTest::heap());
3484 3485
3485 // Compile an optimized LStoreNamedField that performs the prepared 3486 // Compile an optimized LStoreNamedField that performs the prepared
3486 // map transition. This will restart incremental marking and should 3487 // map transition. This will restart incremental marking and should
3487 // make sure the root is marked grey again. 3488 // make sure the root is marked grey again.
3488 CompileRun("function f(o) {" 3489 CompileRun("function f(o) {"
3489 " o.foo = 0;" 3490 " o.foo = 0;"
3490 "}" 3491 "}"
3491 "f(new Object);" 3492 "f(new Object);"
3492 "f(new Object);" 3493 "f(new Object);"
3493 "%OptimizeFunctionOnNextCall(f);" 3494 "%OptimizeFunctionOnNextCall(f);"
(...skipping 34 matching lines...)
3528 i::FLAG_page_promotion = false; 3529 i::FLAG_page_promotion = false;
3529 CcTest::InitializeVM(); 3530 CcTest::InitializeVM();
3530 Isolate* isolate = CcTest::i_isolate(); 3531 Isolate* isolate = CcTest::i_isolate();
3531 Factory* factory = isolate->factory(); 3532 Factory* factory = isolate->factory();
3532 Heap* heap = isolate->heap(); 3533 Heap* heap = isolate->heap();
3533 v8::HandleScope scope(CcTest::isolate()); 3534 v8::HandleScope scope(CcTest::isolate());
3534 static const int number_of_test_pages = 20; 3535 static const int number_of_test_pages = 20;
3535 3536
3536 // Prepare many pages with low live-bytes count. 3537 // Prepare many pages with low live-bytes count.
3537 PagedSpace* old_space = heap->old_space(); 3538 PagedSpace* old_space = heap->old_space();
3538 CHECK_EQ(1, old_space->CountTotalPages()); 3539 const int initial_page_count = old_space->CountTotalPages();
3540 const int overall_page_count = number_of_test_pages + initial_page_count;
3539 for (int i = 0; i < number_of_test_pages; i++) { 3541 for (int i = 0; i < number_of_test_pages; i++) {
3540 AlwaysAllocateScope always_allocate(isolate); 3542 AlwaysAllocateScope always_allocate(isolate);
3541 SimulateFullSpace(old_space); 3543 heap::SimulateFullSpace(old_space);
3542 factory->NewFixedArray(1, TENURED); 3544 factory->NewFixedArray(1, TENURED);
3543 } 3545 }
3544 CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages()); 3546 CHECK_EQ(overall_page_count, old_space->CountTotalPages());
3545 3547
3546 // Triggering one GC will cause a lot of garbage to be discovered but 3548 // Triggering one GC will cause a lot of garbage to be discovered but
3547 // evenly spread across all allocated pages. 3549 // evenly spread across all allocated pages.
3548 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, 3550 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3549 "triggered for preparation"); 3551 "triggered for preparation");
3550 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages()); 3552 CHECK_GE(overall_page_count, old_space->CountTotalPages());
3551 3553
3552 // Triggering subsequent GCs should cause at least half of the pages 3554 // Triggering subsequent GCs should cause at least half of the pages
3553 // to be released to the OS after at most two cycles. 3555 // to be released to the OS after at most two cycles.
3554 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, 3556 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3555 "triggered by test 1"); 3557 "triggered by test 1");
3556 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages()); 3558 CHECK_GE(overall_page_count, old_space->CountTotalPages());
3557 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, 3559 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3558 "triggered by test 2"); 3560 "triggered by test 2");
3559 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2); 3561 CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2);
3560 3562
3561 // Triggering a last-resort GC should cause all pages to be released to the 3563 // Triggering a last-resort GC should cause all pages to be released to the
3562 // OS so that other processes can seize the memory. If we get a failure here 3564 // OS so that other processes can seize the memory. If we get a failure here
3563 // where there are 2 pages left instead of 1, then we should increase the 3565 // where there are 2 pages left instead of 1, then we should increase the
3564 // size of the first page a little in SizeOfFirstPage in spaces.cc. The 3566 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3565 // first page should be small in order to reduce memory used when the VM 3567 // first page should be small in order to reduce memory used when the VM
3566 // boots, but if the 20 small arrays don't fit on the first page then that's 3568 // boots, but if the 20 small arrays don't fit on the first page then that's
3567 // an indication that it is too small. 3569 // an indication that it is too small.
3568 heap->CollectAllAvailableGarbage("triggered really hard"); 3570 heap->CollectAllAvailableGarbage("triggered really hard");
3569 CHECK_EQ(1, old_space->CountTotalPages()); 3571 CHECK_EQ(initial_page_count, old_space->CountTotalPages());
3570 } 3572 }
3571 3573
3572 static int forced_gc_counter = 0; 3574 static int forced_gc_counter = 0;
3573 3575
3574 void MockUseCounterCallback(v8::Isolate* isolate, 3576 void MockUseCounterCallback(v8::Isolate* isolate,
3575 v8::Isolate::UseCounterFeature feature) { 3577 v8::Isolate::UseCounterFeature feature) {
3576 isolate->GetCurrentContext(); 3578 isolate->GetCurrentContext();
3577 if (feature == v8::Isolate::kForcedGC) { 3579 if (feature == v8::Isolate::kForcedGC) {
3578 forced_gc_counter++; 3580 forced_gc_counter++;
3579 } 3581 }
(...skipping 62 matching lines...)
3642 Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector()); 3644 Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
3643 FeedbackVectorHelper feedback_helper(feedback_vector); 3645 FeedbackVectorHelper feedback_helper(feedback_vector);
3644 3646
3645 int expected_slots = 2; 3647 int expected_slots = 2;
3646 CHECK_EQ(expected_slots, feedback_helper.slot_count()); 3648 CHECK_EQ(expected_slots, feedback_helper.slot_count());
3647 int slot1 = 0; 3649 int slot1 = 0;
3648 int slot2 = 1; 3650 int slot2 = 1;
3649 CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell()); 3651 CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell());
3650 CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell()); 3652 CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell());
3651 3653
3652 SimulateIncrementalMarking(CcTest::heap()); 3654 heap::SimulateIncrementalMarking(CcTest::heap());
3653 CcTest::heap()->CollectAllGarbage(); 3655 CcTest::heap()->CollectAllGarbage();
3654 3656
3655 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1))) 3657 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1)))
3656 ->cleared()); 3658 ->cleared());
3657 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2))) 3659 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2)))
3658 ->cleared()); 3660 ->cleared());
3659 } 3661 }
3660 3662
3661 3663
3662 static Code* FindFirstIC(Code* code, Code::Kind kind) { 3664 static Code* FindFirstIC(Code* code, Code::Kind kind) {
(...skipping 46 matching lines...)
3709 CompileRun( 3711 CompileRun(
3710 "function fun() { this.x = 1; };" 3712 "function fun() { this.x = 1; };"
3711 "function f(o) { return new o(); } f(fun); f(fun);"); 3713 "function f(o) { return new o(); } f(fun); f(fun);");
3712 Handle<JSFunction> f = Handle<JSFunction>::cast( 3714 Handle<JSFunction> f = Handle<JSFunction>::cast(
3713 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3715 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3714 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3716 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3715 3717
3716 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector()); 3718 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3717 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell()); 3719 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3718 3720
3719 SimulateIncrementalMarking(CcTest::heap()); 3721 heap::SimulateIncrementalMarking(CcTest::heap());
3720 CcTest::heap()->CollectAllGarbage(); 3722 CcTest::heap()->CollectAllGarbage();
3721 3723
3722 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell()); 3724 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3723 } 3725 }
3724 3726
3725 3727
3726 TEST(IncrementalMarkingClearsMonomorphicConstructor) { 3728 TEST(IncrementalMarkingClearsMonomorphicConstructor) {
3727 if (i::FLAG_always_opt) return; 3729 if (i::FLAG_always_opt) return;
3728 CcTest::InitializeVM(); 3730 CcTest::InitializeVM();
3729 Isolate* isolate = CcTest::i_isolate(); 3731 Isolate* isolate = CcTest::i_isolate();
(...skipping 16 matching lines...)
3746 Handle<JSFunction> f = Handle<JSFunction>::cast( 3748 Handle<JSFunction> f = Handle<JSFunction>::cast(
3747 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3749 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3748 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3750 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3749 3751
3750 3752
3751 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector()); 3753 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3752 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell()); 3754 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3753 3755
3754 // Fire context dispose notification. 3756 // Fire context dispose notification.
3755 CcTest::isolate()->ContextDisposedNotification(); 3757 CcTest::isolate()->ContextDisposedNotification();
3756 SimulateIncrementalMarking(CcTest::heap()); 3758 heap::SimulateIncrementalMarking(CcTest::heap());
3757 CcTest::heap()->CollectAllGarbage(); 3759 CcTest::heap()->CollectAllGarbage();
3758 3760
3759 CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate), 3761 CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
3760 vector->Get(FeedbackVectorSlot(0))); 3762 vector->Get(FeedbackVectorSlot(0)));
3761 } 3763 }
3762 3764
3763 3765
3764 TEST(IncrementalMarkingPreservesMonomorphicIC) { 3766 TEST(IncrementalMarkingPreservesMonomorphicIC) {
3765 if (i::FLAG_always_opt) return; 3767 if (i::FLAG_always_opt) return;
3766 CcTest::InitializeVM(); 3768 CcTest::InitializeVM();
3767 v8::HandleScope scope(CcTest::isolate()); 3769 v8::HandleScope scope(CcTest::isolate());
3768 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext(); 3770 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3769 // Prepare function f that contains a monomorphic IC for object 3771 // Prepare function f that contains a monomorphic IC for object
3770 // originating from the same native context. 3772 // originating from the same native context.
3771 CompileRun("function fun() { this.x = 1; }; var obj = new fun();" 3773 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3772 "function f(o) { return o.x; } f(obj); f(obj);"); 3774 "function f(o) { return o.x; } f(obj); f(obj);");
3773 Handle<JSFunction> f = Handle<JSFunction>::cast( 3775 Handle<JSFunction> f = Handle<JSFunction>::cast(
3774 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3776 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3775 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3777 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3776 3778
3777 CheckVectorIC(f, 0, MONOMORPHIC); 3779 CheckVectorIC(f, 0, MONOMORPHIC);
3778 3780
3779 SimulateIncrementalMarking(CcTest::heap()); 3781 heap::SimulateIncrementalMarking(CcTest::heap());
3780 CcTest::heap()->CollectAllGarbage(); 3782 CcTest::heap()->CollectAllGarbage();
3781 3783
3782 CheckVectorIC(f, 0, MONOMORPHIC); 3784 CheckVectorIC(f, 0, MONOMORPHIC);
3783 } 3785 }
3784 3786
3785 3787
3786 TEST(IncrementalMarkingClearsMonomorphicIC) { 3788 TEST(IncrementalMarkingClearsMonomorphicIC) {
3787 if (i::FLAG_always_opt) return; 3789 if (i::FLAG_always_opt) return;
3788 CcTest::InitializeVM(); 3790 CcTest::InitializeVM();
3789 v8::HandleScope scope(CcTest::isolate()); 3791 v8::HandleScope scope(CcTest::isolate());
(...skipping 11 matching lines...)
3801 CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust()); 3803 CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
3802 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);"); 3804 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
3803 Handle<JSFunction> f = Handle<JSFunction>::cast( 3805 Handle<JSFunction> f = Handle<JSFunction>::cast(
3804 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3806 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3805 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3807 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3806 3808
3807 CheckVectorIC(f, 0, MONOMORPHIC); 3809 CheckVectorIC(f, 0, MONOMORPHIC);
3808 3810
3809 // Fire context dispose notification. 3811 // Fire context dispose notification.
3810 CcTest::isolate()->ContextDisposedNotification(); 3812 CcTest::isolate()->ContextDisposedNotification();
3811 SimulateIncrementalMarking(CcTest::heap()); 3813 heap::SimulateIncrementalMarking(CcTest::heap());
3812 CcTest::heap()->CollectAllGarbage(); 3814 CcTest::heap()->CollectAllGarbage();
3813 3815
3814 CheckVectorICCleared(f, 0); 3816 CheckVectorICCleared(f, 0);
3815 } 3817 }
3816 3818
3817 3819
3818 TEST(IncrementalMarkingPreservesPolymorphicIC) { 3820 TEST(IncrementalMarkingPreservesPolymorphicIC) {
3819 if (i::FLAG_always_opt) return; 3821 if (i::FLAG_always_opt) return;
3820 CcTest::InitializeVM(); 3822 CcTest::InitializeVM();
3821 v8::HandleScope scope(CcTest::isolate()); 3823 v8::HandleScope scope(CcTest::isolate());
(...skipping 17 matching lines...)
3839 CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust()); 3841 CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
3840 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust()); 3842 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
3841 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);"); 3843 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3842 Handle<JSFunction> f = Handle<JSFunction>::cast( 3844 Handle<JSFunction> f = Handle<JSFunction>::cast(
3843 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3845 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3844 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3846 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3845 3847
3846 CheckVectorIC(f, 0, POLYMORPHIC); 3848 CheckVectorIC(f, 0, POLYMORPHIC);
3847 3849
3848 // Fire context dispose notification. 3850 // Fire context dispose notification.
3849 SimulateIncrementalMarking(CcTest::heap()); 3851 heap::SimulateIncrementalMarking(CcTest::heap());
3850 CcTest::heap()->CollectAllGarbage(); 3852 CcTest::heap()->CollectAllGarbage();
3851 3853
3852 CheckVectorIC(f, 0, POLYMORPHIC); 3854 CheckVectorIC(f, 0, POLYMORPHIC);
3853 } 3855 }
3854 3856
3855 3857
3856 TEST(IncrementalMarkingClearsPolymorphicIC) { 3858 TEST(IncrementalMarkingClearsPolymorphicIC) {
3857 if (i::FLAG_always_opt) return; 3859 if (i::FLAG_always_opt) return;
3858 CcTest::InitializeVM(); 3860 CcTest::InitializeVM();
3859 v8::HandleScope scope(CcTest::isolate()); 3861 v8::HandleScope scope(CcTest::isolate());
(...skipping 18 matching lines...)
3878 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust()); 3880 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
3879 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);"); 3881 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3880 Handle<JSFunction> f = Handle<JSFunction>::cast( 3882 Handle<JSFunction> f = Handle<JSFunction>::cast(
3881 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3883 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3882 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3884 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3883 3885
3884 CheckVectorIC(f, 0, POLYMORPHIC); 3886 CheckVectorIC(f, 0, POLYMORPHIC);
3885 3887
3886 // Fire context dispose notification. 3888 // Fire context dispose notification.
3887 CcTest::isolate()->ContextDisposedNotification(); 3889 CcTest::isolate()->ContextDisposedNotification();
3888 SimulateIncrementalMarking(CcTest::heap()); 3890 heap::SimulateIncrementalMarking(CcTest::heap());
3889 CcTest::heap()->CollectAllGarbage(); 3891 CcTest::heap()->CollectAllGarbage();
3890 3892
3891 CheckVectorICCleared(f, 0); 3893 CheckVectorICCleared(f, 0);
3892 } 3894 }
3893 3895
3894 3896
3895 class SourceResource : public v8::String::ExternalOneByteStringResource { 3897 class SourceResource : public v8::String::ExternalOneByteStringResource {
3896 public: 3898 public:
3897 explicit SourceResource(const char* data) 3899 explicit SourceResource(const char* data)
3898 : data_(data), length_(strlen(data)) { } 3900 : data_(data), length_(strlen(data)) { }
(...skipping 151 matching lines...)
4050 for (int i = 0; i < kAgingThreshold; i++) { 4052 for (int i = 0; i < kAgingThreshold; i++) {
4051 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 4053 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4052 } 4054 }
4053 4055
4054 code = inner_scope.CloseAndEscape(Handle<Code>(f->code())); 4056 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
4055 } 4057 }
4056 4058
4057 // Simulate incremental marking so that the functions are enqueued as 4059 // Simulate incremental marking so that the functions are enqueued as
4058 // code flushing candidates. Then optimize one function. Finally 4060 // code flushing candidates. Then optimize one function. Finally
4059 // finish the GC to complete code flushing. 4061 // finish the GC to complete code flushing.
4060 SimulateIncrementalMarking(heap); 4062 heap::SimulateIncrementalMarking(heap);
4061 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);"); 4063 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
4062 heap->CollectAllGarbage(); 4064 heap->CollectAllGarbage();
4063 4065
4064 // Unoptimized code is missing and the deoptimizer will go ballistic. 4066 // Unoptimized code is missing and the deoptimizer will go ballistic.
4065 CompileRun("g('bozo');"); 4067 CompileRun("g('bozo');");
4066 } 4068 }
4067 4069
4068 4070
4069 TEST(Regress165495) { 4071 TEST(Regress165495) {
4070 i::FLAG_allow_natives_syntax = true; 4072 i::FLAG_allow_natives_syntax = true;
(...skipping 25 matching lines...)
4096 const int kAgingThreshold = 6; 4098 const int kAgingThreshold = 6;
4097 for (int i = 0; i < kAgingThreshold; i++) { 4099 for (int i = 0; i < kAgingThreshold; i++) {
4098 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 4100 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4099 } 4101 }
4100 4102
4101 CompileRun("f = null;"); 4103 CompileRun("f = null;");
4102 } 4104 }
4103 4105
4104 // Simulate incremental marking so that unoptimized code is flushed 4106 // Simulate incremental marking so that unoptimized code is flushed
4105 // even though it still is cached in the optimized code map. 4107 // even though it still is cached in the optimized code map.
4106 SimulateIncrementalMarking(heap); 4108 heap::SimulateIncrementalMarking(heap);
4107 heap->CollectAllGarbage(); 4109 heap->CollectAllGarbage();
4108 4110
4109 // Make a new closure that will get code installed from the code map. 4111 // Make a new closure that will get code installed from the code map.
4110 // Unoptimized code is missing and the deoptimizer will go ballistic. 4112 // Unoptimized code is missing and the deoptimizer will go ballistic.
4111 CompileRun("var g = mkClosure(); g('bozo');"); 4113 CompileRun("var g = mkClosure(); g('bozo');");
4112 } 4114 }
4113 4115
4114 4116
4115 TEST(Regress169209) { 4117 TEST(Regress169209) {
4116 i::FLAG_stress_compaction = false; 4118 i::FLAG_stress_compaction = false;
(...skipping 47 matching lines...)
4164 CHECK(f->is_compiled()); 4166 CHECK(f->is_compiled());
4165 const int kAgingThreshold = 6; 4167 const int kAgingThreshold = 6;
4166 for (int i = 0; i < kAgingThreshold; i++) { 4168 for (int i = 0; i < kAgingThreshold; i++) {
4167 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 4169 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4168 } 4170 }
4169 4171
4170 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate)); 4172 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4171 } 4173 }
4172 4174
4173 // Simulate incremental marking and collect code flushing candidates. 4175 // Simulate incremental marking and collect code flushing candidates.
4174 SimulateIncrementalMarking(heap); 4176 heap::SimulateIncrementalMarking(heap);
4175 CHECK(shared1->code()->gc_metadata() != NULL); 4177 CHECK(shared1->code()->gc_metadata() != NULL);
4176 4178
4177 // Optimize function and make sure the unoptimized code is replaced. 4179 // Optimize function and make sure the unoptimized code is replaced.
4178 CompileRun("%OptimizeFunctionOnNextCall(g);" 4180 CompileRun("%OptimizeFunctionOnNextCall(g);"
4179 "g(false);"); 4181 "g(false);");
4180 4182
4181 // Finish garbage collection cycle. 4183 // Finish garbage collection cycle.
4182 heap->CollectAllGarbage(); 4184 heap->CollectAllGarbage();
4183 CHECK(shared1->code()->gc_metadata() == NULL); 4185 CHECK(shared1->code()->gc_metadata() == NULL);
4184 } 4186 }
(...skipping 35 matching lines...)
4220 .FromJust()); 4222 .FromJust());
4221 4223
4222 // First make sure we flip spaces 4224 // First make sure we flip spaces
4223 CcTest::heap()->CollectGarbage(NEW_SPACE); 4225 CcTest::heap()->CollectGarbage(NEW_SPACE);
4224 4226
4225 // Allocate the object. 4227 // Allocate the object.
4226 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED); 4228 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
4227 array_data->set(0, Smi::FromInt(1)); 4229 array_data->set(0, Smi::FromInt(1));
4228 array_data->set(1, Smi::FromInt(2)); 4230 array_data->set(1, Smi::FromInt(2));
4229 4231
4230 AllocateAllButNBytes(CcTest::heap()->new_space(), 4232 heap::AllocateAllButNBytes(
4231 JSArray::kSize + AllocationMemento::kSize + 4233 CcTest::heap()->new_space(),
4232 kPointerSize); 4234 JSArray::kSize + AllocationMemento::kSize + kPointerSize);
4233 4235
4234 Handle<JSArray> array = 4236 Handle<JSArray> array =
4235 factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS); 4237 factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);
4236 4238
4237 CHECK_EQ(Smi::FromInt(2), array->length()); 4239 CHECK_EQ(Smi::FromInt(2), array->length());
4238 CHECK(array->HasFastSmiOrObjectElements()); 4240 CHECK(array->HasFastSmiOrObjectElements());
4239 4241
4240 // We need a filler the size of an AllocationMemento object, plus an extra 4242 // We need a filler the size of an AllocationMemento object, plus an extra
4241 // fill pointer value. 4243 // fill pointer value.
4242 HeapObject* obj = NULL; 4244 HeapObject* obj = NULL;
(...skipping 128 matching lines...)
4371 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id); 4373 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4372 } 4374 }
4373 shared->optimized_code_map()->Print(); 4375 shared->optimized_code_map()->Print();
4374 4376
4375 // Add the code with a literals array to be evacuated. 4377 // Add the code with a literals array to be evacuated.
4376 Page* evac_page; 4378 Page* evac_page;
4377 { 4379 {
4378 HandleScope inner_scope(isolate); 4380 HandleScope inner_scope(isolate);
4379 AlwaysAllocateScope always_allocate(isolate); 4381 AlwaysAllocateScope always_allocate(isolate);
4380 // Make sure literal is placed on an old-space evacuation candidate. 4382 // Make sure literal is placed on an old-space evacuation candidate.
4381 SimulateFullSpace(heap->old_space()); 4383 heap::SimulateFullSpace(heap->old_space());
4382 4384
4383 // Make sure the number of literals is > 0. 4385 // Make sure the number of literals is > 0.
4384 Handle<LiteralsArray> lit = 4386 Handle<LiteralsArray> lit =
4385 LiteralsArray::New(isolate, vector, 23, TENURED); 4387 LiteralsArray::New(isolate, vector, 23, TENURED);
4386 4388
4387 evac_page = Page::FromAddress(lit->address()); 4389 evac_page = Page::FromAddress(lit->address());
4388 BailoutId id = BailoutId(100); 4390 BailoutId id = BailoutId(100);
4389 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id); 4391 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4390 } 4392 }
4391 4393
4392 // Heap is ready, force {lit_page} to become an evacuation candidate and 4394 // Heap is ready, force {lit_page} to become an evacuation candidate and
4393 // simulate incremental marking to enqueue optimized code map. 4395 // simulate incremental marking to enqueue optimized code map.
4394 FLAG_manual_evacuation_candidates_selection = true; 4396 FLAG_manual_evacuation_candidates_selection = true;
4395 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); 4397 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
4396 SimulateIncrementalMarking(heap); 4398 heap::SimulateIncrementalMarking(heap);
4397 4399
4398 // No matter whether reachable or not, {boomer} is doomed. 4400 // No matter whether reachable or not, {boomer} is doomed.
4399 Handle<Object> boomer(shared->optimized_code_map(), isolate); 4401 Handle<Object> boomer(shared->optimized_code_map(), isolate);
4400 4402
4401 // Add the code several times to the optimized code map. This will leave old 4403 // Add the code several times to the optimized code map. This will leave old
4402 // copies of the optimized code map unreachable but still marked. 4404 // copies of the optimized code map unreachable but still marked.
4403 for (int i = 3; i < 6; ++i) { 4405 for (int i = 3; i < 6; ++i) {
4404 HandleScope inner_scope(isolate); 4406 HandleScope inner_scope(isolate);
4405 BailoutId id = BailoutId(i); 4407 BailoutId id = BailoutId(i);
4406 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id); 4408 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
(...skipping 176 matching lines...)
4583 4585
4584 4586
4585 TEST(LargeObjectSlotRecording) { 4587 TEST(LargeObjectSlotRecording) {
4586 FLAG_manual_evacuation_candidates_selection = true; 4588 FLAG_manual_evacuation_candidates_selection = true;
4587 CcTest::InitializeVM(); 4589 CcTest::InitializeVM();
4588 Isolate* isolate = CcTest::i_isolate(); 4590 Isolate* isolate = CcTest::i_isolate();
4589 Heap* heap = isolate->heap(); 4591 Heap* heap = isolate->heap();
4590 HandleScope scope(isolate); 4592 HandleScope scope(isolate);
4591 4593
4592 // Create an object on an evacuation candidate. 4594 // Create an object on an evacuation candidate.
4593 SimulateFullSpace(heap->old_space()); 4595 heap::SimulateFullSpace(heap->old_space());
4594 Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED); 4596 Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
4595 Page* evac_page = Page::FromAddress(lit->address()); 4597 Page* evac_page = Page::FromAddress(lit->address());
4596 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); 4598 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
4597 FixedArray* old_location = *lit; 4599 FixedArray* old_location = *lit;
4598 4600
4599 // Allocate a large object. 4601 // Allocate a large object.
4600 int size = Max(1000000, Page::kMaxRegularHeapObjectSize + KB); 4602 int size = Max(1000000, Page::kMaxRegularHeapObjectSize + KB);
4601 CHECK(size > Page::kMaxRegularHeapObjectSize); 4603 CHECK(size > Page::kMaxRegularHeapObjectSize);
4602 Handle<FixedArray> lo = isolate->factory()->NewFixedArray(size, TENURED); 4604 Handle<FixedArray> lo = isolate->factory()->NewFixedArray(size, TENURED);
4603 CHECK(heap->lo_space()->Contains(*lo)); 4605 CHECK(heap->lo_space()->Contains(*lo));
4604 4606
4605 // Start incremental marking to activate the write barrier. 4607 // Start incremental marking to activate the write barrier.
4606 SimulateIncrementalMarking(heap, false); 4608 heap::SimulateIncrementalMarking(heap, false);
4607 heap->incremental_marking()->AdvanceIncrementalMarking( 4609 heap->incremental_marking()->AdvanceIncrementalMarking(
4608 10000000, IncrementalMarking::IdleStepActions()); 4610 10000000, IncrementalMarking::IdleStepActions());
4609 4611
4610 // Create references from the large object to the object on the evacuation 4612 // Create references from the large object to the object on the evacuation
4611 // candidate. 4613 // candidate.
4612 const int kStep = size / 10; 4614 const int kStep = size / 10;
4613 for (int i = 0; i < size; i += kStep) { 4615 for (int i = 0; i < size; i += kStep) {
4614 lo->set(i, *lit); 4616 lo->set(i, *lit);
4615 CHECK(lo->get(i) == old_location); 4617 CHECK(lo->get(i) == old_location);
4616 } 4618 }
(...skipping 256 matching lines...)
4873 4875
4874 v8::internal::Heap* heap = CcTest::heap(); 4876 v8::internal::Heap* heap = CcTest::heap();
4875 4877
4876 // Get a clean slate regarding optimized functions on the heap. 4878 // Get a clean slate regarding optimized functions on the heap.
4877 i::Deoptimizer::DeoptimizeAll(isolate); 4879 i::Deoptimizer::DeoptimizeAll(isolate);
4878 heap->CollectAllGarbage(); 4880 heap->CollectAllGarbage();
4879 4881
4880 if (!isolate->use_crankshaft()) return; 4882 if (!isolate->use_crankshaft()) return;
4881 HandleScope outer_scope(heap->isolate()); 4883 HandleScope outer_scope(heap->isolate());
4882 for (int i = 0; i < 3; i++) { 4884 for (int i = 0; i < 3; i++) {
4883 SimulateIncrementalMarking(heap); 4885 heap::SimulateIncrementalMarking(heap);
4884 { 4886 {
4885 LocalContext context; 4887 LocalContext context;
4886 HandleScope scope(heap->isolate()); 4888 HandleScope scope(heap->isolate());
4887 EmbeddedVector<char, 256> source; 4889 EmbeddedVector<char, 256> source;
4888 SNPrintF(source, 4890 SNPrintF(source,
4889 "function bar%d() {" 4891 "function bar%d() {"
4890 " return foo%d(1);" 4892 " return foo%d(1);"
4891 "};" 4893 "};"
4892 "function foo%d(x) { with (x) { return 1 + x; } };" 4894 "function foo%d(x) { with (x) { return 1 + x; } };"
4893 "bar%d();" 4895 "bar%d();"
(...skipping 628 matching lines...)
5522 const int kMaxObjects = 10000; 5524 const int kMaxObjects = 10000;
5523 const int kFixedArrayLen = 512; 5525 const int kFixedArrayLen = 512;
5524 Handle<FixedArray> objects[kMaxObjects]; 5526 Handle<FixedArray> objects[kMaxObjects];
5525 for (int i = 0; (i < kMaxObjects) && 5527 for (int i = 0; (i < kMaxObjects) &&
5526 heap->CanExpandOldGeneration(old_space->AreaSize()); 5528 heap->CanExpandOldGeneration(old_space->AreaSize());
5527 i++) { 5529 i++) {
5528 objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED); 5530 objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
5529 Page::FromAddress(objects[i]->address()) 5531 Page::FromAddress(objects[i]->address())
5530 ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); 5532 ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
5531 } 5533 }
5532 SimulateFullSpace(old_space); 5534 heap::SimulateFullSpace(old_space);
5533 heap->CollectGarbage(OLD_SPACE); 5535 heap->CollectGarbage(OLD_SPACE);
5534 // If we get this far, we've successfully aborted compaction. Any further 5536 // If we get this far, we've successfully aborted compaction. Any further
5535 // allocations might trigger OOM. 5537 // allocations might trigger OOM.
5536 } 5538 }
5537 isolate->Exit(); 5539 isolate->Exit();
5538 isolate->Dispose(); 5540 isolate->Dispose();
5539 } 5541 }
5540 5542
5541 5543
5542 TEST(Regress357137) { 5544 TEST(Regress357137) {
(...skipping 92 matching lines...)
5635 const int number_handles = 12; 5637 const int number_handles = 12;
5636 Handle<FixedArray> handles[number_handles]; 5638 Handle<FixedArray> handles[number_handles];
5637 for (int i = 0; i < number_handles; i++) { 5639 for (int i = 0; i < number_handles; i++) {
5638 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED); 5640 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5639 } 5641 }
5640 5642
5641 heap->CollectGarbage(NEW_SPACE); 5643 heap->CollectGarbage(NEW_SPACE);
5642 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity()); 5644 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
5643 5645
5644 // Fill up the first semi-space page. 5646 // Fill up the first semi-space page.
5645 FillUpOnePage(new_space); 5647 heap::FillUpOnePage(new_space);
5646 5648
5647 // Create a small object to initialize the bump pointer on the second 5649 // Create a small object to initialize the bump pointer on the second
5648 // semi-space page. 5650 // semi-space page.
5649 Handle<FixedArray> small = 5651 Handle<FixedArray> small =
5650 i_isolate->factory()->NewFixedArray(1, NOT_TENURED); 5652 i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5651 CHECK(heap->InNewSpace(*small)); 5653 CHECK(heap->InNewSpace(*small));
5652 5654
5653 // Fill up the second semi-space page. 5655 // Fill up the second semi-space page.
5654 FillUpOnePage(new_space); 5656 heap::FillUpOnePage(new_space);
5655 5657
5656 // This scavenge will corrupt memory if the promotion queue is not 5658 // This scavenge will corrupt memory if the promotion queue is not
5657 // evacuated. 5659 // evacuated.
5658 heap->CollectGarbage(NEW_SPACE); 5660 heap->CollectGarbage(NEW_SPACE);
5659 } 5661 }
5660 isolate->Dispose(); 5662 isolate->Dispose();
5661 } 5663 }
5662 5664
5663 5665
5664 TEST(Regress388880) { 5666 TEST(Regress388880) {
5665 i::FLAG_expose_gc = true; 5667 i::FLAG_expose_gc = true;
5666 CcTest::InitializeVM(); 5668 CcTest::InitializeVM();
5667 v8::HandleScope scope(CcTest::isolate()); 5669 v8::HandleScope scope(CcTest::isolate());
5668 Isolate* isolate = CcTest::i_isolate(); 5670 Isolate* isolate = CcTest::i_isolate();
5669 Factory* factory = isolate->factory(); 5671 Factory* factory = isolate->factory();
5670 Heap* heap = isolate->heap(); 5672 Heap* heap = isolate->heap();
5671 5673
5672 Handle<Map> map1 = Map::Create(isolate, 1); 5674 Handle<Map> map1 = Map::Create(isolate, 1);
5673 Handle<String> name = factory->NewStringFromStaticChars("foo"); 5675 Handle<String> name = factory->NewStringFromStaticChars("foo");
5674 name = factory->InternalizeString(name); 5676 name = factory->InternalizeString(name);
5675 Handle<Map> map2 = 5677 Handle<Map> map2 =
5676 Map::CopyWithField(map1, name, FieldType::Any(isolate), NONE, 5678 Map::CopyWithField(map1, name, FieldType::Any(isolate), NONE,
5677 Representation::Tagged(), OMIT_TRANSITION) 5679 Representation::Tagged(), OMIT_TRANSITION)
5678 .ToHandleChecked(); 5680 .ToHandleChecked();
5679 5681
5680 int desired_offset = Page::kPageSize - map1->instance_size(); 5682 int desired_offset = Page::kPageSize - map1->instance_size();
5681 5683
5682 // Allocate padding objects in the old space so that the object allocated 5684 // Allocate padding objects in the old space so that the object allocated
5683 // afterwards ends exactly at the end of the page. 5685 // afterwards ends exactly at the end of the page.
5684 SimulateFullSpace(heap->old_space()); 5686 heap::SimulateFullSpace(heap->old_space());
5685 int padding_size = desired_offset - Page::kObjectStartOffset; 5687 int padding_size = desired_offset - Page::kObjectStartOffset;
5686 CreatePadding(heap, padding_size, TENURED); 5688 heap::CreatePadding(heap, padding_size, TENURED);
5687 5689
5688 Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED); 5690 Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
5689 o->set_properties(*factory->empty_fixed_array()); 5691 o->set_properties(*factory->empty_fixed_array());
5690 5692
5691 // Ensure that the object was allocated where we need it. 5693 // Ensure that the object was allocated where we need it.
5692 Page* page = Page::FromAddress(o->address()); 5694 Page* page = Page::FromAddress(o->address());
5693 CHECK_EQ(desired_offset, page->Offset(o->address())); 5695 CHECK_EQ(desired_offset, page->Offset(o->address()));
5694 5696
5695 // Now we have an object right at the end of the page. 5697 // Now we have an object right at the end of the page.
5696 5698
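The padding arithmetic above is what pins the object to the page boundary. A minimal sketch of the intended layout, using only quantities that already appear in the test (no new names are introduced):

    // padding : [Page::kObjectStartOffset, desired_offset), size == padding_size
    // object  : [desired_offset, desired_offset + map1->instance_size())
    //         == [Page::kPageSize - map1->instance_size(), Page::kPageSize)
    // so the JSObject allocated next ends exactly at the page boundary, which
    // the CHECK_EQ on page->Offset(o->address()) above verifies.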
(...skipping 126 matching lines...)
5823 } 5825 }
5824 5826
5825 5827
5826 void CheckMapRetainingFor(int n) { 5828 void CheckMapRetainingFor(int n) {
5827 FLAG_retain_maps_for_n_gc = n; 5829 FLAG_retain_maps_for_n_gc = n;
5828 Isolate* isolate = CcTest::i_isolate(); 5830 Isolate* isolate = CcTest::i_isolate();
5829 Heap* heap = isolate->heap(); 5831 Heap* heap = isolate->heap();
5830 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap); 5832 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
5831 CHECK(!weak_cell->cleared()); 5833 CHECK(!weak_cell->cleared());
5832 for (int i = 0; i < n; i++) { 5834 for (int i = 0; i < n; i++) {
5833 SimulateIncrementalMarking(heap); 5835 heap::SimulateIncrementalMarking(heap);
5834 heap->CollectGarbage(OLD_SPACE); 5836 heap->CollectGarbage(OLD_SPACE);
5835 } 5837 }
5836 CHECK(!weak_cell->cleared()); 5838 CHECK(!weak_cell->cleared());
5837 SimulateIncrementalMarking(heap); 5839 heap::SimulateIncrementalMarking(heap);
5838 heap->CollectGarbage(OLD_SPACE); 5840 heap->CollectGarbage(OLD_SPACE);
5839 CHECK(weak_cell->cleared()); 5841 CHECK(weak_cell->cleared());
5840 } 5842 }
5841 5843
5842 5844
5843 TEST(MapRetaining) { 5845 TEST(MapRetaining) {
5844 CcTest::InitializeVM(); 5846 CcTest::InitializeVM();
5845 v8::HandleScope scope(CcTest::isolate()); 5847 v8::HandleScope scope(CcTest::isolate());
5846 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc); 5848 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5847 CheckMapRetainingFor(0); 5849 CheckMapRetainingFor(0);
5848 CheckMapRetainingFor(1); 5850 CheckMapRetainingFor(1);
5849 CheckMapRetainingFor(7); 5851 CheckMapRetainingFor(7);
5850 } 5852 }
5851 5853
5852 5854
5853 TEST(RegressArrayListGC) { 5855 TEST(RegressArrayListGC) {
5854 FLAG_retain_maps_for_n_gc = 1; 5856 FLAG_retain_maps_for_n_gc = 1;
5855 FLAG_incremental_marking = 0; 5857 FLAG_incremental_marking = 0;
5856 FLAG_gc_global = true; 5858 FLAG_gc_global = true;
5857 CcTest::InitializeVM(); 5859 CcTest::InitializeVM();
5858 v8::HandleScope scope(CcTest::isolate()); 5860 v8::HandleScope scope(CcTest::isolate());
5859 Isolate* isolate = CcTest::i_isolate(); 5861 Isolate* isolate = CcTest::i_isolate();
5860 Heap* heap = isolate->heap(); 5862 Heap* heap = isolate->heap();
5861 AddRetainedMap(isolate, heap); 5863 AddRetainedMap(isolate, heap);
5862 Handle<Map> map = Map::Create(isolate, 1); 5864 Handle<Map> map = Map::Create(isolate, 1);
5863 heap->CollectGarbage(OLD_SPACE); 5865 heap->CollectGarbage(OLD_SPACE);
5864 // Force GC in old space on next addition of retained map. 5866 // Force GC in old space on next addition of retained map.
5865 Map::WeakCellForMap(map); 5867 Map::WeakCellForMap(map);
5866 SimulateFullSpace(CcTest::heap()->new_space()); 5868 heap::SimulateFullSpace(CcTest::heap()->new_space());
5867 for (int i = 0; i < 10; i++) { 5869 for (int i = 0; i < 10; i++) {
5868 heap->AddRetainedMap(map); 5870 heap->AddRetainedMap(map);
5869 } 5871 }
5870 heap->CollectGarbage(OLD_SPACE); 5872 heap->CollectGarbage(OLD_SPACE);
5871 } 5873 }
5872 5874
5873 5875
5874 #ifdef DEBUG 5876 #ifdef DEBUG
5875 TEST(PathTracer) { 5877 TEST(PathTracer) {
5876 CcTest::InitializeVM(); 5878 CcTest::InitializeVM();
(...skipping 262 matching lines...)
6139 } 6141 }
6140 } 6142 }
6141 6143
6142 6144
6143 TEST(NewSpaceAllocationThroughput) { 6145 TEST(NewSpaceAllocationThroughput) {
6144 CcTest::InitializeVM(); 6146 CcTest::InitializeVM();
6145 v8::HandleScope scope(CcTest::isolate()); 6147 v8::HandleScope scope(CcTest::isolate());
6146 Isolate* isolate = CcTest::i_isolate(); 6148 Isolate* isolate = CcTest::i_isolate();
6147 Heap* heap = isolate->heap(); 6149 Heap* heap = isolate->heap();
6148 GCTracer* tracer = heap->tracer(); 6150 GCTracer* tracer = heap->tracer();
6151 tracer->ResetForTesting();
6149 int time1 = 100; 6152 int time1 = 100;
6150 size_t counter1 = 1000; 6153 size_t counter1 = 1000;
6151 tracer->SampleAllocation(time1, counter1, 0); 6154 tracer->SampleAllocation(time1, counter1, 0);
6152 int time2 = 200; 6155 int time2 = 200;
6153 size_t counter2 = 2000; 6156 size_t counter2 = 2000;
6154 tracer->SampleAllocation(time2, counter2, 0); 6157 tracer->SampleAllocation(time2, counter2, 0);
6155 size_t throughput = 6158 size_t throughput =
6156 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(); 6159 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6157 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput); 6160 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6158 int time3 = 1000; 6161 int time3 = 1000;
6159 size_t counter3 = 30000; 6162 size_t counter3 = 30000;
6160 tracer->SampleAllocation(time3, counter3, 0); 6163 tracer->SampleAllocation(time3, counter3, 0);
6161 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(); 6164 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6162 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput); 6165 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6163 } 6166 }
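For reference, the expected values in the two CHECK_EQs of this test follow directly from the sampled numbers, using the same integer division as the CHECK expressions themselves; nothing beyond the test's own constants is assumed:

    // (counter2 - counter1) / (time2 - time1) = (2000 - 1000)  / (200 - 100)  = 10 bytes/ms
    // (counter3 - counter1) / (time3 - time1) = (30000 - 1000) / (1000 - 100) = 32 bytes/ms (29000 / 900, truncated)

The old-generation and combined throughput tests below follow the same (counter delta) / (time delta) pattern; in TEST(AllocationThroughput) both counters advance together, which is where the factor of 2 in its first CHECK_EQ comes from.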
6164 6167
6165 6168
6166 TEST(NewSpaceAllocationThroughput2) { 6169 TEST(NewSpaceAllocationThroughput2) {
6167 CcTest::InitializeVM(); 6170 CcTest::InitializeVM();
6168 v8::HandleScope scope(CcTest::isolate()); 6171 v8::HandleScope scope(CcTest::isolate());
6169 Isolate* isolate = CcTest::i_isolate(); 6172 Isolate* isolate = CcTest::i_isolate();
6170 Heap* heap = isolate->heap(); 6173 Heap* heap = isolate->heap();
6171 GCTracer* tracer = heap->tracer(); 6174 GCTracer* tracer = heap->tracer();
6175 tracer->ResetForTesting();
6172 int time1 = 100; 6176 int time1 = 100;
6173 size_t counter1 = 1000; 6177 size_t counter1 = 1000;
6174 tracer->SampleAllocation(time1, counter1, 0); 6178 tracer->SampleAllocation(time1, counter1, 0);
6175 int time2 = 200; 6179 int time2 = 200;
6176 size_t counter2 = 2000; 6180 size_t counter2 = 2000;
6177 tracer->SampleAllocation(time2, counter2, 0); 6181 tracer->SampleAllocation(time2, counter2, 0);
6178 size_t throughput = 6182 size_t throughput =
6179 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100); 6183 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6180 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput); 6184 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6181 int time3 = 1000; 6185 int time3 = 1000;
(...skipping 115 matching lines...)
6297 "g2();" 6301 "g2();"
6298 "check(g1, g2);"); 6302 "check(g1, g2);");
6299 } 6303 }
6300 6304
6301 TEST(OldGenerationAllocationThroughput) { 6305 TEST(OldGenerationAllocationThroughput) {
6302 CcTest::InitializeVM(); 6306 CcTest::InitializeVM();
6303 v8::HandleScope scope(CcTest::isolate()); 6307 v8::HandleScope scope(CcTest::isolate());
6304 Isolate* isolate = CcTest::i_isolate(); 6308 Isolate* isolate = CcTest::i_isolate();
6305 Heap* heap = isolate->heap(); 6309 Heap* heap = isolate->heap();
6306 GCTracer* tracer = heap->tracer(); 6310 GCTracer* tracer = heap->tracer();
6311 tracer->ResetForTesting();
6307 int time1 = 100; 6312 int time1 = 100;
6308 size_t counter1 = 1000; 6313 size_t counter1 = 1000;
6309 tracer->SampleAllocation(time1, 0, counter1); 6314 tracer->SampleAllocation(time1, 0, counter1);
6310 int time2 = 200; 6315 int time2 = 200;
6311 size_t counter2 = 2000; 6316 size_t counter2 = 2000;
6312 tracer->SampleAllocation(time2, 0, counter2); 6317 tracer->SampleAllocation(time2, 0, counter2);
6313 size_t throughput = static_cast<size_t>( 6318 size_t throughput = static_cast<size_t>(
6314 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100)); 6319 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100));
6315 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput); 6320 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6316 int time3 = 1000; 6321 int time3 = 1000;
6317 size_t counter3 = 30000; 6322 size_t counter3 = 30000;
6318 tracer->SampleAllocation(time3, 0, counter3); 6323 tracer->SampleAllocation(time3, 0, counter3);
6319 throughput = static_cast<size_t>( 6324 throughput = static_cast<size_t>(
6320 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100)); 6325 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100));
6321 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput); 6326 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6322 } 6327 }
6323 6328
6324 6329
6325 TEST(AllocationThroughput) { 6330 TEST(AllocationThroughput) {
6326 CcTest::InitializeVM(); 6331 CcTest::InitializeVM();
6327 v8::HandleScope scope(CcTest::isolate()); 6332 v8::HandleScope scope(CcTest::isolate());
6328 Isolate* isolate = CcTest::i_isolate(); 6333 Isolate* isolate = CcTest::i_isolate();
6329 Heap* heap = isolate->heap(); 6334 Heap* heap = isolate->heap();
6330 GCTracer* tracer = heap->tracer(); 6335 GCTracer* tracer = heap->tracer();
6336 tracer->ResetForTesting();
6331 int time1 = 100; 6337 int time1 = 100;
6332 size_t counter1 = 1000; 6338 size_t counter1 = 1000;
6333 tracer->SampleAllocation(time1, counter1, counter1); 6339 tracer->SampleAllocation(time1, counter1, counter1);
6334 int time2 = 200; 6340 int time2 = 200;
6335 size_t counter2 = 2000; 6341 size_t counter2 = 2000;
6336 tracer->SampleAllocation(time2, counter2, counter2); 6342 tracer->SampleAllocation(time2, counter2, counter2);
6337 size_t throughput = static_cast<size_t>( 6343 size_t throughput = static_cast<size_t>(
6338 tracer->AllocationThroughputInBytesPerMillisecond(100)); 6344 tracer->AllocationThroughputInBytesPerMillisecond(100));
6339 CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput); 6345 CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput);
6340 int time3 = 1000; 6346 int time3 = 1000;
(...skipping 97 matching lines...)
6438 v8::HandleScope scope(isolate); 6444 v8::HandleScope scope(isolate);
6439 Heap* heap = CcTest::heap(); 6445 Heap* heap = CcTest::heap();
6440 LocalContext context; 6446 LocalContext context;
6441 6447
6442 v8::Persistent<Value> parent; 6448 v8::Persistent<Value> parent;
6443 v8::Persistent<Value> child; 6449 v8::Persistent<Value> child;
6444 6450
6445 parent.Reset(isolate, v8::Object::New(isolate)); 6451 parent.Reset(isolate, v8::Object::New(isolate));
6446 child.Reset(isolate, v8::Object::New(isolate)); 6452 child.Reset(isolate, v8::Object::New(isolate));
6447 6453
6448 SimulateFullSpace(heap->old_space()); 6454 heap::SimulateFullSpace(heap->old_space());
6449 heap->CollectGarbage(OLD_SPACE); 6455 heap->CollectGarbage(OLD_SPACE);
6450 { 6456 {
6451 UniqueId id = MakeUniqueId(parent); 6457 UniqueId id = MakeUniqueId(parent);
6452 isolate->SetObjectGroupId(parent, id); 6458 isolate->SetObjectGroupId(parent, id);
6453 isolate->SetReferenceFromGroup(id, child); 6459 isolate->SetReferenceFromGroup(id, child);
6454 } 6460 }
6455 // The CollectGarbage call above starts sweeper threads. 6461 // The CollectGarbage call above starts sweeper threads.
6456 // The crash will happen if the following two functions 6462 // The crash will happen if the following two functions
6457 // are called before sweeping finishes. 6463 // are called before sweeping finishes.
6458 heap->StartIncrementalMarking(); 6464 heap->StartIncrementalMarking();
(...skipping 38 matching lines...)
6497 const int N = (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / 6503 const int N = (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) /
6498 kPointerSize; 6504 kPointerSize;
6499 Handle<FixedArray> array = factory->NewFixedArray(N, TENURED); 6505 Handle<FixedArray> array = factory->NewFixedArray(N, TENURED);
6500 CHECK(heap->old_space()->Contains(*array)); 6506 CHECK(heap->old_space()->Contains(*array));
6501 Handle<Object> number = factory->NewHeapNumber(1.0); 6507 Handle<Object> number = factory->NewHeapNumber(1.0);
6502 CHECK(heap->InNewSpace(*number)); 6508 CHECK(heap->InNewSpace(*number));
6503 for (int i = 0; i < N; i++) { 6509 for (int i = 0; i < N; i++) {
6504 array->set(i, *number); 6510 array->set(i, *number);
6505 } 6511 }
6506 heap->CollectGarbage(OLD_SPACE); 6512 heap->CollectGarbage(OLD_SPACE);
6507 SimulateFullSpace(heap->old_space()); 6513 heap::SimulateFullSpace(heap->old_space());
6508 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, N - 1); 6514 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, N - 1);
6509 heap->mark_compact_collector()->EnsureSweepingCompleted(); 6515 heap->mark_compact_collector()->EnsureSweepingCompleted();
6510 ByteArray* byte_array; 6516 ByteArray* byte_array;
6511 const int M = 256; 6517 const int M = 256;
6512 // Don't allow old space expansion. The test works without this flag too, 6518 // Don't allow old space expansion. The test works without this flag too,
6513 // but becomes very slow. 6519 // but becomes very slow.
6514 heap->set_force_oom(true); 6520 heap->set_force_oom(true);
6515 while (heap->AllocateByteArray(M, TENURED).To(&byte_array)) { 6521 while (heap->AllocateByteArray(M, TENURED).To(&byte_array)) {
6516 for (int j = 0; j < M; j++) { 6522 for (int j = 0; j < M; j++) {
6517 byte_array->set(j, 0x31); 6523 byte_array->set(j, 0x31);
(...skipping 62 matching lines...)
6580 ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); 6586 ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
6581 // Make all arrays point to the evacuation candidate so that 6587 // Make all arrays point to the evacuation candidate so that
6582 // slots are recorded for them. 6588 // slots are recorded for them.
6583 for (size_t j = 0; j < arrays.size(); j++) { 6589 for (size_t j = 0; j < arrays.size(); j++) {
6584 array = arrays[j]; 6590 array = arrays[j];
6585 for (int i = 0; i < N; i++) { 6591 for (int i = 0; i < N; i++) {
6586 array->set(i, *ec_obj); 6592 array->set(i, *ec_obj);
6587 } 6593 }
6588 } 6594 }
6589 } 6595 }
6590 SimulateIncrementalMarking(heap); 6596 heap::SimulateIncrementalMarking(heap);
6591 for (size_t j = 0; j < arrays.size(); j++) { 6597 for (size_t j = 0; j < arrays.size(); j++) {
6592 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(arrays[j], N - 1); 6598 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(arrays[j], N - 1);
6593 } 6599 }
6594 } 6600 }
6595 // Force allocation from the free list. 6601 // Force allocation from the free list.
6596 heap->set_force_oom(true); 6602 heap->set_force_oom(true);
6597 heap->CollectGarbage(OLD_SPACE); 6603 heap->CollectGarbage(OLD_SPACE);
6598 } 6604 }
6599 6605
6600 UNINITIALIZED_TEST(PagePromotion) { 6606 UNINITIALIZED_TEST(PagePromotion) {
6601 FLAG_page_promotion = true; 6607 FLAG_page_promotion = true;
6602 FLAG_page_promotion_threshold = 0; // % 6608 FLAG_page_promotion_threshold = 0; // %
6603 i::FLAG_min_semi_space_size = 8 * (Page::kPageSize / MB); 6609 i::FLAG_min_semi_space_size = 8 * (Page::kPageSize / MB);
6604 // We cannot optimize for size as we require a new space with more than one 6610 // We cannot optimize for size as we require a new space with more than one
6605 // page. 6611 // page.
6606 i::FLAG_optimize_for_size = false; 6612 i::FLAG_optimize_for_size = false;
6607 // Set max_semi_space_size because it could've been initialized by an 6613 // Set max_semi_space_size because it could've been initialized by an
6608 // implication of optimize_for_size. 6614 // implication of optimize_for_size.
6609 i::FLAG_max_semi_space_size = i::FLAG_min_semi_space_size; 6615 i::FLAG_max_semi_space_size = i::FLAG_min_semi_space_size;
6610 v8::Isolate::CreateParams create_params; 6616 v8::Isolate::CreateParams create_params;
6611 create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); 6617 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6612 v8::Isolate* isolate = v8::Isolate::New(create_params); 6618 v8::Isolate* isolate = v8::Isolate::New(create_params);
6613 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); 6619 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
6614 { 6620 {
6615 v8::Isolate::Scope isolate_scope(isolate); 6621 v8::Isolate::Scope isolate_scope(isolate);
6616 v8::HandleScope handle_scope(isolate); 6622 v8::HandleScope handle_scope(isolate);
6617 v8::Context::New(isolate)->Enter(); 6623 v8::Context::New(isolate)->Enter();
6618 Heap* heap = i_isolate->heap(); 6624 Heap* heap = i_isolate->heap();
6625
6626 // Clean up any leftover objects from cctest initialization.
6627 heap->CollectAllGarbage();
6628 heap->CollectAllGarbage();
6629
6619 std::vector<Handle<FixedArray>> handles; 6630 std::vector<Handle<FixedArray>> handles;
6620 SimulateFullSpace(heap->new_space(), &handles); 6631 heap::SimulateFullSpace(heap->new_space(), &handles);
6621 heap->CollectGarbage(NEW_SPACE); 6632 heap->CollectGarbage(NEW_SPACE);
6622 CHECK_GT(handles.size(), 0u); 6633 CHECK_GT(handles.size(), 0u);
6623 // The first object in handles should be on the first page. 6634 // The first object in handles should be on the first page.
6624 Handle<FixedArray> first_object = handles.front(); 6635 Handle<FixedArray> first_object = handles.front();
6625 Page* first_page = Page::FromAddress(first_object->address()); 6636 Page* first_page = Page::FromAddress(first_object->address());
6626 // The age mark should not be on the first page. 6637 // The age mark should not be on the first page.
6627 CHECK(!first_page->ContainsLimit(heap->new_space()->age_mark())); 6638 CHECK(!first_page->ContainsLimit(heap->new_space()->age_mark()));
6628 // To perform a sanity check on live bytes we need to mark the heap. 6639 // To perform a sanity check on live bytes we need to mark the heap.
6629 SimulateIncrementalMarking(heap, true); 6640 heap::SimulateIncrementalMarking(heap, true);
6630 // Sanity check that the page meets the requirements for promotion. 6641 // Sanity check that the page meets the requirements for promotion.
6631 const int threshold_bytes = 6642 const int threshold_bytes =
6632 FLAG_page_promotion_threshold * Page::kAllocatableMemory / 100; 6643 FLAG_page_promotion_threshold * Page::kAllocatableMemory / 100;
6633 CHECK_GE(first_page->LiveBytes(), threshold_bytes); 6644 CHECK_GE(first_page->LiveBytes(), threshold_bytes);
6634 6645
6635 // Actual checks: The page is in new space first, but is moved to old space 6646 // Actual checks: The page is in new space first, but is moved to old space
6636 // during a full GC. 6647 // during a full GC.
6637 CHECK(heap->new_space()->ContainsSlow(first_page->address())); 6648 CHECK(heap->new_space()->ContainsSlow(first_page->address()));
6638 CHECK(!heap->old_space()->ContainsSlow(first_page->address())); 6649 CHECK(!heap->old_space()->ContainsSlow(first_page->address()));
6639 heap->CollectGarbage(OLD_SPACE); 6650 heap->CollectGarbage(OLD_SPACE);
(...skipping 110 matching lines...)
6750 6761
6751 intptr_t size_before = heap->SizeOfObjects(); 6762 intptr_t size_before = heap->SizeOfObjects();
6752 Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000); 6763 Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000);
6753 array->Shrink(1); 6764 array->Shrink(1);
6754 intptr_t size_after = heap->SizeOfObjects(); 6765 intptr_t size_after = heap->SizeOfObjects();
6755 CHECK_EQ(size_after, size_before + array->Size()); 6766 CHECK_EQ(size_after, size_before + array->Size());
6756 } 6767 }
6757 6768
6758 } // namespace internal 6769 } // namespace internal
6759 } // namespace v8 6770 } // namespace v8