| OLD | NEW |
| 1 // Copyright 2007-2010 the V8 project authors. All rights reserved. | 1 // Copyright 2007-2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 346 matching lines...) |
| 357 | 357 |
| 358 v8::Persistent<v8::Context> env = v8::Context::New(); | 358 v8::Persistent<v8::Context> env = v8::Context::New(); |
| 359 ASSERT(!env.IsEmpty()); | 359 ASSERT(!env.IsEmpty()); |
| 360 env->Enter(); | 360 env->Enter(); |
| 361 // Make sure all builtin scripts are cached. | 361 // Make sure all builtin scripts are cached. |
| 362 { HandleScope scope; | 362 { HandleScope scope; |
| 363 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) { | 363 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) { |
| 364 Bootstrapper::NativesSourceLookup(i); | 364 Bootstrapper::NativesSourceLookup(i); |
| 365 } | 365 } |
| 366 } | 366 } |
| 367 Heap::CollectAllGarbage(true); | 367 Heap::CollectAllGarbage(Heap::kForceCompactionMask); |
| 368 Heap::CollectAllGarbage(true); | 368 Heap::CollectAllGarbage(Heap::kForceCompactionMask); |
| 369 | 369 |
| 370 Object* raw_foo; | 370 Object* raw_foo; |
| 371 { | 371 { |
| 372 v8::HandleScope handle_scope; | 372 v8::HandleScope handle_scope; |
| 373 v8::Local<v8::String> foo = v8::String::New("foo"); | 373 v8::Local<v8::String> foo = v8::String::New("foo"); |
| 374 ASSERT(!foo.IsEmpty()); | 374 ASSERT(!foo.IsEmpty()); |
| 375 raw_foo = *(v8::Utils::OpenHandle(*foo)); | 375 raw_foo = *(v8::Utils::OpenHandle(*foo)); |
| 376 } | 376 } |
| 377 | 377 |
| 378 int file_name_length = StrLength(FLAG_testing_serialization_file) + 10; | 378 int file_name_length = StrLength(FLAG_testing_serialization_file) + 10; |
| (...skipping 101 matching lines...) |
| 480 ASSERT(!env.IsEmpty()); | 480 ASSERT(!env.IsEmpty()); |
| 481 env->Enter(); | 481 env->Enter(); |
| 482 // Make sure all builtin scripts are cached. | 482 // Make sure all builtin scripts are cached. |
| 483 { HandleScope scope; | 483 { HandleScope scope; |
| 484 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) { | 484 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) { |
| 485 Bootstrapper::NativesSourceLookup(i); | 485 Bootstrapper::NativesSourceLookup(i); |
| 486 } | 486 } |
| 487 } | 487 } |
| 488 // If we don't do this then we end up with a stray root pointing at the | 488 // If we don't do this then we end up with a stray root pointing at the |
| 489 // context even after we have disposed of env. | 489 // context even after we have disposed of env. |
| 490 Heap::CollectAllGarbage(true); | 490 Heap::CollectAllGarbage(Heap::kForceCompactionMask); |
| 491 | 491 |
| 492 int file_name_length = StrLength(FLAG_testing_serialization_file) + 10; | 492 int file_name_length = StrLength(FLAG_testing_serialization_file) + 10; |
| 493 Vector<char> startup_name = Vector<char>::New(file_name_length + 1); | 493 Vector<char> startup_name = Vector<char>::New(file_name_length + 1); |
| 494 OS::SNPrintF(startup_name, "%s.startup", FLAG_testing_serialization_file); | 494 OS::SNPrintF(startup_name, "%s.startup", FLAG_testing_serialization_file); |
| 495 | 495 |
| 496 env->Exit(); | 496 env->Exit(); |
| 497 | 497 |
| 498 Object* raw_context = *(v8::Utils::OpenHandle(*env)); | 498 Object* raw_context = *(v8::Utils::OpenHandle(*env)); |
| 499 | 499 |
| 500 env.Dispose(); | 500 env.Dispose(); |
| (...skipping 50 matching lines...) |
| 551 CHECK(root2->IsContext()); | 551 CHECK(root2->IsContext()); |
| 552 CHECK(*root_handle != root2); | 552 CHECK(*root_handle != root2); |
| 553 } | 553 } |
| 554 } | 554 } |
| 555 } | 555 } |
| 556 | 556 |
| 557 | 557 |
| 558 TEST(LinearAllocation) { | 558 TEST(LinearAllocation) { |
| 559 v8::V8::Initialize(); | 559 v8::V8::Initialize(); |
| 560 int new_space_max = 512 * KB; | 560 int new_space_max = 512 * KB; |
| 561 int paged_space_max = Page::kMaxHeapObjectSize; |
| 561 | 562 |
| 562 for (int size = 1000; size < 5 * MB; size += size >> 1) { | 563 for (int size = 1000; size < 5 * MB; size += size >> 1) { |
| 564 size &= ~8; // Round. |
| 563 int new_space_size = (size < new_space_max) ? size : new_space_max; | 565 int new_space_size = (size < new_space_max) ? size : new_space_max; |
| 566 int paged_space_size = (size < paged_space_max) ? size : paged_space_max; |
| 564 Heap::ReserveSpace( | 567 Heap::ReserveSpace( |
| 565 new_space_size, | 568 new_space_size, |
| 566 size, // Old pointer space. | 569 paged_space_size, // Old pointer space. |
| 567 size, // Old data space. | 570 paged_space_size, // Old data space. |
| 568 size, // Code space. | 571 paged_space_size, // Code space. |
| 569 size, // Map space. | 572 paged_space_size, // Map space. |
| 570 size, // Cell space. | 573 paged_space_size, // Cell space. |
| 571 size); // Large object space. | 574 size); // Large object space. |
| 572 LinearAllocationScope linear_allocation_scope; | 575 LinearAllocationScope linear_allocation_scope; |
| 573 const int kSmallFixedArrayLength = 4; | 576 const int kSmallFixedArrayLength = 4; |
| 574 const int kSmallFixedArraySize = | 577 const int kSmallFixedArraySize = |
| 575 FixedArray::kHeaderSize + kSmallFixedArrayLength * kPointerSize; | 578 FixedArray::kHeaderSize + kSmallFixedArrayLength * kPointerSize; |
| 576 const int kSmallStringLength = 16; | 579 const int kSmallStringLength = 16; |
| 577 const int kSmallStringSize = | 580 const int kSmallStringSize = |
| 578 (SeqAsciiString::kHeaderSize + kSmallStringLength + | 581 (SeqAsciiString::kHeaderSize + kSmallStringLength + |
| 579 kObjectAlignmentMask) & ~kObjectAlignmentMask; | 582 kObjectAlignmentMask) & ~kObjectAlignmentMask; |
| 580 const int kMapSize = Map::kSize; | 583 const int kMapSize = Map::kSize; |
| 581 | 584 |
| 582 Object* new_last = NULL; | 585 Object* new_last = NULL; |
| 583 for (int i = 0; | 586 for (int i = 0; |
| 584 i + kSmallFixedArraySize <= new_space_size; | 587 i + kSmallFixedArraySize <= new_space_size; |
| 585 i += kSmallFixedArraySize) { | 588 i += kSmallFixedArraySize) { |
| 586 Object* obj = | 589 Object* obj = |
| 587 Heap::AllocateFixedArray(kSmallFixedArrayLength)->ToObjectChecked(); | 590 Heap::AllocateFixedArray(kSmallFixedArrayLength)->ToObjectChecked(); |
| 588 if (new_last != NULL) { | 591 if (new_last != NULL) { |
| 589 CHECK(reinterpret_cast<char*>(obj) == | 592 CHECK(reinterpret_cast<char*>(obj) == |
| 590 reinterpret_cast<char*>(new_last) + kSmallFixedArraySize); | 593 reinterpret_cast<char*>(new_last) + kSmallFixedArraySize); |
| 591 } | 594 } |
| 592 new_last = obj; | 595 new_last = obj; |
| 593 } | 596 } |
| 594 | 597 |
| 595 Object* pointer_last = NULL; | 598 Object* pointer_last = NULL; |
| 596 for (int i = 0; | 599 for (int i = 0; |
| 597 i + kSmallFixedArraySize <= size; | 600 i + kSmallFixedArraySize <= paged_space_size; |
| 598 i += kSmallFixedArraySize) { | 601 i += kSmallFixedArraySize) { |
| 599 Object* obj = Heap::AllocateFixedArray(kSmallFixedArrayLength, | 602 Object* obj = Heap::AllocateFixedArray(kSmallFixedArrayLength, |
| 600 TENURED)->ToObjectChecked(); | 603 TENURED)->ToObjectChecked(); |
| 601 int old_page_fullness = i % Page::kPageSize; | 604 int old_page_fullness = i % Page::kPageSize; |
| 602 int page_fullness = (i + kSmallFixedArraySize) % Page::kPageSize; | 605 int page_fullness = (i + kSmallFixedArraySize) % Page::kPageSize; |
| 603 if (page_fullness < old_page_fullness || | 606 if (page_fullness < old_page_fullness || |
| 604 page_fullness > Page::kObjectAreaSize) { | 607 page_fullness > Page::kObjectAreaSize) { |
| 605 i = RoundUp(i, Page::kPageSize); | 608 i = RoundUp(i, Page::kPageSize); |
| 606 pointer_last = NULL; | 609 pointer_last = NULL; |
| 607 } | 610 } |
| 608 if (pointer_last != NULL) { | 611 if (pointer_last != NULL) { |
| 609 CHECK(reinterpret_cast<char*>(obj) == | 612 CHECK(reinterpret_cast<char*>(obj) == |
| 610 reinterpret_cast<char*>(pointer_last) + kSmallFixedArraySize); | 613 reinterpret_cast<char*>(pointer_last) + kSmallFixedArraySize); |
| 611 } | 614 } |
| 612 pointer_last = obj; | 615 pointer_last = obj; |
| 613 } | 616 } |
| 614 | 617 |
| 615 Object* data_last = NULL; | 618 Object* data_last = NULL; |
| 616 for (int i = 0; i + kSmallStringSize <= size; i += kSmallStringSize) { | 619 for (int i = 0; |
| 620 i + kSmallStringSize <= paged_space_size; |
| 621 i += kSmallStringSize) { |
| 617 Object* obj = Heap::AllocateRawAsciiString(kSmallStringLength, | 622 Object* obj = Heap::AllocateRawAsciiString(kSmallStringLength, |
| 618 TENURED)->ToObjectChecked(); | 623 TENURED)->ToObjectChecked(); |
| 619 int old_page_fullness = i % Page::kPageSize; | 624 int old_page_fullness = i % Page::kPageSize; |
| 620 int page_fullness = (i + kSmallStringSize) % Page::kPageSize; | 625 int page_fullness = (i + kSmallStringSize) % Page::kPageSize; |
| 621 if (page_fullness < old_page_fullness || | 626 if (page_fullness < old_page_fullness || |
| 622 page_fullness > Page::kObjectAreaSize) { | 627 page_fullness > Page::kObjectAreaSize) { |
| 623 i = RoundUp(i, Page::kPageSize); | 628 i = RoundUp(i, Page::kPageSize); |
| 624 data_last = NULL; | 629 data_last = NULL; |
| 625 } | 630 } |
| 626 if (data_last != NULL) { | 631 if (data_last != NULL) { |
| 627 CHECK(reinterpret_cast<char*>(obj) == | 632 CHECK(reinterpret_cast<char*>(obj) == |
| 628 reinterpret_cast<char*>(data_last) + kSmallStringSize); | 633 reinterpret_cast<char*>(data_last) + kSmallStringSize); |
| 629 } | 634 } |
| 630 data_last = obj; | 635 data_last = obj; |
| 631 } | 636 } |
| 632 | 637 |
| 633 Object* map_last = NULL; | 638 Object* map_last = NULL; |
| 634 for (int i = 0; i + kMapSize <= size; i += kMapSize) { | 639 for (int i = 0; i + kMapSize <= paged_space_size; i += kMapSize) { |
| 635 Object* obj = Heap::AllocateMap(JS_OBJECT_TYPE, | 640 Object* obj = Heap::AllocateMap(JS_OBJECT_TYPE, |
| 636 42 * kPointerSize)->ToObjectChecked(); | 641 42 * kPointerSize)->ToObjectChecked(); |
| 637 int old_page_fullness = i % Page::kPageSize; | 642 int old_page_fullness = i % Page::kPageSize; |
| 638 int page_fullness = (i + kMapSize) % Page::kPageSize; | 643 int page_fullness = (i + kMapSize) % Page::kPageSize; |
| 639 if (page_fullness < old_page_fullness || | 644 if (page_fullness < old_page_fullness || |
| 640 page_fullness > Page::kObjectAreaSize) { | 645 page_fullness > Page::kObjectAreaSize) { |
| 641 i = RoundUp(i, Page::kPageSize); | 646 i = RoundUp(i, Page::kPageSize); |
| 642 map_last = NULL; | 647 map_last = NULL; |
| 643 } | 648 } |
| 644 if (map_last != NULL) { | 649 if (map_last != NULL) { |
| (...skipping 24 matching lines...) |
| 669 TEST(TestThatAlwaysFails) { | 674 TEST(TestThatAlwaysFails) { |
| 670 bool ArtificialFailure = false; | 675 bool ArtificialFailure = false; |
| 671 CHECK(ArtificialFailure); | 676 CHECK(ArtificialFailure); |
| 672 } | 677 } |
| 673 | 678 |
| 674 | 679 |
| 675 DEPENDENT_TEST(DependentTestThatAlwaysFails, TestThatAlwaysSucceeds) { | 680 DEPENDENT_TEST(DependentTestThatAlwaysFails, TestThatAlwaysSucceeds) { |
| 676 bool ArtificialFailure2 = false; | 681 bool ArtificialFailure2 = false; |
| 677 CHECK(ArtificialFailure2); | 682 CHECK(ArtificialFailure2); |
| 678 } | 683 } |
| OLD | NEW |
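
For reference, a minimal standalone sketch of the per-space capping introduced above in TEST(LinearAllocation): each reservation request is clamped to the new-space maximum and, for the paged spaces, to Page::kMaxHeapObjectSize before being passed to Heap::ReserveSpace(). The helper ComputeReservationSizes and the kPagedSpaceCap value below are illustrative assumptions, not part of the V8 sources; only the clamping pattern mirrors the patch.

// Illustrative sketch of the reservation-size clamping in the patched
// TEST(LinearAllocation). kPagedSpaceCap is a placeholder standing in for
// Page::kMaxHeapObjectSize; its value here is assumed, not V8's real constant.
#include <algorithm>
#include <cstdio>

static const int kNewSpaceCap = 512 * 1024;  // new_space_max in the test.
static const int kPagedSpaceCap = 1 << 20;   // placeholder for Page::kMaxHeapObjectSize.

// Clamp a requested size independently for new space and for the paged
// spaces, mirroring how the patch computes new_space_size and
// paged_space_size before reserving space.
static void ComputeReservationSizes(int requested,
                                    int* new_space_size,
                                    int* paged_space_size) {
  *new_space_size = std::min(requested, kNewSpaceCap);
  *paged_space_size = std::min(requested, kPagedSpaceCap);
}

int main() {
  // Same growth schedule as the test's outer loop: start at 1000 bytes and
  // grow by 50% per iteration up to 5 MB.
  for (int size = 1000; size < 5 * (1 << 20); size += size >> 1) {
    int new_space_size = 0;
    int paged_space_size = 0;
    ComputeReservationSizes(size, &new_space_size, &paged_space_size);
    std::printf("requested=%d new_space=%d paged_space=%d\n",
                size, new_space_size, paged_space_size);
  }
  return 0;
}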