Chromium Code Reviews

Diff: test/cctest/test-serialize.cc

Issue 6639024: Get rid of distinction between below- and above-watermark in page allocation.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 9 years, 9 months ago
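The hunks below change every Heap::CollectAllGarbage(true) call site to Heap::CollectAllGarbage(Heap::kForceCompactionMask), i.e. the boolean force-compaction argument apparently becomes a bitmask of GC flags, and TEST(LinearAllocation) caps the paged-space reservations at Page::kMaxHeapObjectSize. As context, here is a minimal standalone sketch of that call-site migration; only the name kForceCompactionMask comes from the patch, everything else in the snippet is an illustrative assumption and not V8 code.

// Toy sketch (not V8 code) of the call-site migration this patch performs:
// a bool force_compaction parameter becomes a bitmask of GC flags.
// Only the name kForceCompactionMask appears in the patch; the other names
// and values here are assumptions for illustration.
#include <cstdio>

struct Heap {
  enum {
    kNoGCFlags = 0,           // assumed
    kForceCompactionMask = 1  // name taken from the patch; value assumed
  };

  // Old-style entry point: a single boolean toggles compaction.
  static void CollectAllGarbageOld(bool force_compaction) {
    std::printf("full GC, compaction=%d\n", force_compaction ? 1 : 0);
  }

  // New-style entry point: callers pass a combination of flag bits.
  static void CollectAllGarbage(int flags) {
    bool force_compaction = (flags & kForceCompactionMask) != 0;
    std::printf("full GC, compaction=%d\n", force_compaction ? 1 : 0);
  }
};

int main() {
  Heap::CollectAllGarbageOld(true);                     // call sites before the patch
  Heap::CollectAllGarbage(Heap::kForceCompactionMask);  // call sites after the patch
  return 0;
}
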
 // Copyright 2007-2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 345 matching lines...)
 
   v8::Persistent<v8::Context> env = v8::Context::New();
   ASSERT(!env.IsEmpty());
   env->Enter();
   // Make sure all builtin scripts are cached.
   { HandleScope scope;
     for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
       Bootstrapper::NativesSourceLookup(i);
     }
   }
-  Heap::CollectAllGarbage(true);
-  Heap::CollectAllGarbage(true);
+  Heap::CollectAllGarbage(Heap::kForceCompactionMask);
+  Heap::CollectAllGarbage(Heap::kForceCompactionMask);
 
   Object* raw_foo;
   {
     v8::HandleScope handle_scope;
     v8::Local<v8::String> foo = v8::String::New("foo");
     ASSERT(!foo.IsEmpty());
     raw_foo = *(v8::Utils::OpenHandle(*foo));
   }
 
   int file_name_length = StrLength(FLAG_testing_serialization_file) + 10;
(...skipping 101 matching lines...)
   ASSERT(!env.IsEmpty());
   env->Enter();
   // Make sure all builtin scripts are cached.
   { HandleScope scope;
     for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
       Bootstrapper::NativesSourceLookup(i);
     }
   }
   // If we don't do this then we end up with a stray root pointing at the
   // context even after we have disposed of env.
-  Heap::CollectAllGarbage(true);
+  Heap::CollectAllGarbage(Heap::kForceCompactionMask);
 
   int file_name_length = StrLength(FLAG_testing_serialization_file) + 10;
   Vector<char> startup_name = Vector<char>::New(file_name_length + 1);
   OS::SNPrintF(startup_name, "%s.startup", FLAG_testing_serialization_file);
 
   env->Exit();
 
   Object* raw_context = *(v8::Utils::OpenHandle(*env));
 
   env.Dispose();
(...skipping 50 matching lines...)
       CHECK(root2->IsContext());
       CHECK(*root_handle != root2);
     }
   }
 }
 
 
 TEST(LinearAllocation) {
   v8::V8::Initialize();
   int new_space_max = 512 * KB;
+  int paged_space_max = Page::kMaxHeapObjectSize;
 
   for (int size = 1000; size < 5 * MB; size += size >> 1) {
+    size &= ~8;  // Round.
     int new_space_size = (size < new_space_max) ? size : new_space_max;
+    int paged_space_size = (size < paged_space_max) ? size : paged_space_max;
     Heap::ReserveSpace(
         new_space_size,
-        size,  // Old pointer space.
-        size,  // Old data space.
-        size,  // Code space.
-        size,  // Map space.
-        size,  // Cell space.
+        paged_space_size,  // Old pointer space.
+        paged_space_size,  // Old data space.
+        paged_space_size,  // Code space.
+        paged_space_size,  // Map space.
+        paged_space_size,  // Cell space.
         size);  // Large object space.
     LinearAllocationScope linear_allocation_scope;
     const int kSmallFixedArrayLength = 4;
     const int kSmallFixedArraySize =
         FixedArray::kHeaderSize + kSmallFixedArrayLength * kPointerSize;
     const int kSmallStringLength = 16;
     const int kSmallStringSize =
         (SeqAsciiString::kHeaderSize + kSmallStringLength +
          kObjectAlignmentMask) & ~kObjectAlignmentMask;
     const int kMapSize = Map::kSize;
 
     Object* new_last = NULL;
     for (int i = 0;
          i + kSmallFixedArraySize <= new_space_size;
          i += kSmallFixedArraySize) {
       Object* obj =
           Heap::AllocateFixedArray(kSmallFixedArrayLength)->ToObjectChecked();
       if (new_last != NULL) {
         CHECK(reinterpret_cast<char*>(obj) ==
               reinterpret_cast<char*>(new_last) + kSmallFixedArraySize);
       }
       new_last = obj;
     }
 
     Object* pointer_last = NULL;
     for (int i = 0;
-         i + kSmallFixedArraySize <= size;
+         i + kSmallFixedArraySize <= paged_space_size;
          i += kSmallFixedArraySize) {
       Object* obj = Heap::AllocateFixedArray(kSmallFixedArrayLength,
                                              TENURED)->ToObjectChecked();
       int old_page_fullness = i % Page::kPageSize;
       int page_fullness = (i + kSmallFixedArraySize) % Page::kPageSize;
       if (page_fullness < old_page_fullness ||
           page_fullness > Page::kObjectAreaSize) {
         i = RoundUp(i, Page::kPageSize);
         pointer_last = NULL;
       }
       if (pointer_last != NULL) {
         CHECK(reinterpret_cast<char*>(obj) ==
               reinterpret_cast<char*>(pointer_last) + kSmallFixedArraySize);
       }
       pointer_last = obj;
     }
 
     Object* data_last = NULL;
-    for (int i = 0; i + kSmallStringSize <= size; i += kSmallStringSize) {
+    for (int i = 0;
+         i + kSmallStringSize <= paged_space_size;
+         i += kSmallStringSize) {
       Object* obj = Heap::AllocateRawAsciiString(kSmallStringLength,
                                                  TENURED)->ToObjectChecked();
       int old_page_fullness = i % Page::kPageSize;
       int page_fullness = (i + kSmallStringSize) % Page::kPageSize;
       if (page_fullness < old_page_fullness ||
           page_fullness > Page::kObjectAreaSize) {
         i = RoundUp(i, Page::kPageSize);
         data_last = NULL;
       }
       if (data_last != NULL) {
         CHECK(reinterpret_cast<char*>(obj) ==
               reinterpret_cast<char*>(data_last) + kSmallStringSize);
       }
       data_last = obj;
     }
 
     Object* map_last = NULL;
-    for (int i = 0; i + kMapSize <= size; i += kMapSize) {
+    for (int i = 0; i + kMapSize <= paged_space_size; i += kMapSize) {
       Object* obj = Heap::AllocateMap(JS_OBJECT_TYPE,
                                       42 * kPointerSize)->ToObjectChecked();
       int old_page_fullness = i % Page::kPageSize;
       int page_fullness = (i + kMapSize) % Page::kPageSize;
       if (page_fullness < old_page_fullness ||
           page_fullness > Page::kObjectAreaSize) {
         i = RoundUp(i, Page::kPageSize);
         map_last = NULL;
       }
       if (map_last != NULL) {
(...skipping 24 matching lines...)
 TEST(TestThatAlwaysFails) {
   bool ArtificialFailure = false;
   CHECK(ArtificialFailure);
 }
 
 
 DEPENDENT_TEST(DependentTestThatAlwaysFails, TestThatAlwaysSucceeds) {
   bool ArtificialFailure2 = false;
   CHECK(ArtificialFailure2);
 }
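
For readers of the LinearAllocation hunk above: each TENURED loop stops expecting contiguous addresses whenever an allocation would spill past the current page, which is what the old_page_fullness / page_fullness arithmetic detects. The standalone sketch below isolates that check; kPageSize, kObjectAreaSize and kObjectSize are assumed values for illustration only, not V8's real constants.

// Standalone sketch of the page-boundary check used in TEST(LinearAllocation).
// The constants are illustrative assumptions, not V8's real values.
#include <cassert>
#include <cstdio>

const int kPageSize = 8 * 1024;               // assumed page size
const int kObjectAreaSize = kPageSize - 256;  // assumed usable area per page
const int kObjectSize = 48;                   // assumed fixed allocation size

// True if an allocation of kObjectSize at linear offset i would not fit in
// the current page, so the allocator has to start a fresh page and the test
// must stop expecting obj == last + kObjectSize.
bool CrossesPageBoundary(int i) {
  int old_page_fullness = i % kPageSize;
  int page_fullness = (i + kObjectSize) % kPageSize;
  // Either the offset wrapped around into the next page, or it ran past the
  // usable object area at the end of the current page.
  return page_fullness < old_page_fullness || page_fullness > kObjectAreaSize;
}

int main() {
  // Well inside a page: addresses stay contiguous.
  assert(!CrossesPageBoundary(0));
  // Largest multiple of kObjectSize not beyond the object area: an object
  // placed here would run past kObjectAreaSize, mirroring the RoundUp and
  // reset of the "last" pointer in the test.
  int near_end = (kObjectAreaSize / kObjectSize) * kObjectSize;
  assert(CrossesPageBoundary(near_end));
  std::printf("page-boundary sketch OK\n");
  return 0;
}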