Chromium Code Reviews

Unified Diff: test/cctest/test-heap.cc

Issue 11498012: Make unit tests resilient against GC Stress. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 8 years ago
  // Copyright 2012 the V8 project authors. All rights reserved.

  #include <stdlib.h>

  #include "v8.h"

  #include "compilation-cache.h"
  #include "execution.h"
  #include "factory.h"
  #include "macro-assembler.h"
  #include "global-handles.h"
  #include "stub-cache.h"
  #include "cctest.h"

  using namespace v8::internal;

  static v8::Persistent<v8::Context> env;

  static void InitializeVM() {
    if (env.IsEmpty()) env = v8::Context::New();
    v8::HandleScope scope;
    env->Enter();
  }


  // Go through all incremental marking steps in one swoop.
  static void SimulateIncrementalMarking() {
    IncrementalMarking* marking = HEAP->incremental_marking();
-   CHECK(marking->IsStopped());
-   marking->Start();
+   CHECK(marking->IsMarking() || marking->IsStopped());
+   if (marking->IsStopped()) {
+     marking->Start();
+   }
    CHECK(marking->IsMarking());
    while (!marking->IsComplete()) {
      marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
    }
    CHECK(marking->IsComplete());
  }
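The hardening of SimulateIncrementalMarking() above is the heart of this patch: under GC-stress runs, incremental marking may already be in progress when a test enters the helper, so the old CHECK(marking->IsStopped()) failed spuriously. A minimal usage sketch, assuming only the helpers visible in this file (the test name is illustrative, not part of the patch):

TEST(GCStressToleranceSketch) {
  InitializeVM();
  v8::HandleScope scope;
  // Works whether or not a stress run has already started marking:
  // the helper now calls Start() only if marking is stopped.
  SimulateIncrementalMarking();
  // kNoGCFlags finishes the completed marking cycle instead of aborting it.
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
}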


  static void CheckMap(Map* map, int type, int instance_size) {
    CHECK(map->IsHeapObject());
(...skipping 365 matching lines...)
    {
      HandleScope scope;

      Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk"));
      Handle<Object> u = FACTORY->NewNumber(1.12344);

      h1 = global_handles->Create(*i);
      h2 = global_handles->Create(*u);
    }

+   // Make sure the objects are promoted.
    HEAP->CollectGarbage(OLD_POINTER_SPACE);
    HEAP->CollectGarbage(NEW_SPACE);
-   // Make sure the object is promoted.
+   CHECK(!HEAP->InNewSpace(*h1) && !HEAP->InNewSpace(*h2));

    global_handles->MakeWeak(h2.location(),
                             reinterpret_cast<void*>(1234),
                             &TestWeakGlobalHandleCallback);
    CHECK(!GlobalHandles::IsNearDeath(h1.location()));
    CHECK(!GlobalHandles::IsNearDeath(h2.location()));

-   HEAP->CollectGarbage(OLD_POINTER_SPACE);
+   // Incremental marking potentially marked handles before they turned weak.
+   HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

    CHECK((*h1)->IsString());

    CHECK(WeakPointerCleared);
    CHECK(!GlobalHandles::IsNearDeath(h1.location()));

    global_handles->Destroy(h1.location());
  }

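Why CollectAllGarbage(Heap::kAbortIncrementalMarkingMask) instead of the old plain old-space collection: a stress-started incremental marking cycle may have visited h2 while the handle was still strong, and merely finalizing that cycle would keep the object alive, so the weak callback would never fire. For context, the callback and flag referenced above are defined in the elided part of this file; their likely shape under the 2012-era v8::Persistent API (an assumption reconstructed for illustration, not quoted from this patch) is:

static bool WeakPointerCleared = false;

// Assumed shape: record that the weak handle was cleared, then dispose it.
static void TestWeakGlobalHandleCallback(v8::Persistent<v8::Value> handle,
                                         void* id) {
  if (1234 == reinterpret_cast<intptr_t>(id)) WeakPointerCleared = true;
  handle.Dispose();
}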
(...skipping 591 matching lines...)
    CHECK(function->shared()->is_compiled());

    // The code will survive at least two GCs.
    HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
    HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
    CHECK(function->shared()->is_compiled());

    // Simulate several GCs that use incremental marking.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
-     HEAP->incremental_marking()->Abort();
      SimulateIncrementalMarking();
      HEAP->CollectAllGarbage(Heap::kNoGCFlags);
    }
    CHECK(!function->shared()->is_compiled() || function->IsOptimized());
    CHECK(!function->is_compiled() || function->IsOptimized());

    // This compile will compile the function again.
    { v8::HandleScope scope;
      CompileRun("foo();");
    }

    // Simulate several GCs that use incremental marking but make sure
    // the loop breaks once the function is enqueued as a candidate.
    for (int i = 0; i < kAgingThreshold; i++) {
-     HEAP->incremental_marking()->Abort();
      SimulateIncrementalMarking();
      if (!function->next_function_link()->IsUndefined()) break;
      HEAP->CollectAllGarbage(Heap::kNoGCFlags);
    }

    // Force optimization while incremental marking is active and while
    // the function is enqueued as a candidate.
    { v8::HandleScope scope;
      CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
    }
(...skipping 154 matching lines...)
      count++;
      object = JSFunction::cast(object)->next_function_link();
    }
    return count;
  }


  TEST(TestInternalWeakLists) {
    v8::V8::Initialize();

+   // Some flags turn Scavenge collections into Mark-sweep collections
+   // and hence are incompatible with this test case.
+   if (FLAG_gc_global || FLAG_stress_compaction) return;
+
    static const int kNumTestContexts = 10;

    v8::HandleScope scope;
    v8::Persistent<v8::Context> ctx[kNumTestContexts];

    CHECK_EQ(0, CountNativeContexts());

    // Create a number of global contexts which get linked together.
    for (int i = 0; i < kNumTestContexts; i++) {
      ctx[i] = v8::Context::New();
(...skipping 701 matching lines...)
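The guard added to TestInternalWeakLists above is needed because --gc-global and --stress-compaction turn scavenges into full mark-sweep collections, which can prune the weak context list at points the test's counts do not anticipate. For reference, CountNativeContexts() lives in the elided code; an approximate reconstruction of its era-appropriate shape (an assumption for illustration, not part of this patch):

// Assumed shape: walk the heap's weak list of native contexts and count it.
static int CountNativeContexts() {
  int count = 0;
  Object* object = HEAP->native_contexts_list();
  while (!object->IsUndefined()) {
    count++;
    object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
  }
  return count;
}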
    CHECK_EQ(0, f->shared()->opt_count());
    CHECK_EQ(0, f->shared()->code()->profiler_ticks());
  }


  // Test that HAllocateObject will always return an object in new-space.
  TEST(OptimizedAllocationAlwaysInNewSpace) {
    i::FLAG_allow_natives_syntax = true;
    InitializeVM();
    if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
+   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
    v8::HandleScope scope;

    SimulateFullSpace(HEAP->new_space());
    AlwaysAllocateScope always_allocate;
    v8::Local<v8::Value> res = CompileRun(
        "function c(x) {"
        "  this.x = x;"
        "  for (var i = 0; i < 32; i++) {"
        "    this['x' + i] = x;"
        "  }"
(...skipping 155 matching lines...)
    for (int i = 0; i < number_of_test_pages; i++) {
      AlwaysAllocateScope always_allocate;
      SimulateFullSpace(old_pointer_space);
      FACTORY->NewFixedArray(1, TENURED);
    }
    CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());

    // Triggering one GC will cause a lot of garbage to be discovered but
    // evenly spread across all allocated pages.
    HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
-   CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+   CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());

    // Triggering subsequent GCs should cause at least half of the pages
    // to be released to the OS after at most two cycles.
    HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
    CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
    HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
    CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);

    // Triggering a last-resort GC should cause all pages to be released to the
    // OS so that other processes can seize the memory. If we get a failure here
(...skipping 426 matching lines...)
    // Simulate incremental marking so that the functions are enqueued as
    // code flushing candidates. Then optimize one function. Finally
    // finish the GC to complete code flushing.
    SimulateIncrementalMarking();
    CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
    HEAP->CollectAllGarbage(Heap::kNoGCFlags);

    // Unoptimized code is missing and the deoptimizer will go ballistic.
    CompileRun("g('bozo');");
  }
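The final hunk exercises the scenario the whole patch guards: a function whose unoptimized code has been flushed gets optimized, and a later deoptimization must not crash. A condensed sketch of the end-to-end shape, assuming this file's helpers (the function body and test name are illustrative):

TEST(CodeFlushingScenarioSketch) {
  i::FLAG_allow_natives_syntax = true;
  InitializeVM();
  v8::HandleScope scope;
  // Compile and run a function, then enqueue it as a flushing candidate.
  CompileRun("function g(x) { return typeof x === 'string' ? 0 : x + 1; }"
             "g(1);");
  SimulateIncrementalMarking();
  // Optimize while the unoptimized code is queued for flushing.
  CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  // The deoptimizer must cope even though unoptimized code was flushed.
  CompileRun("g('bozo');");
}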
