| OLD | NEW |
| 1 // Copyright 2007-2010 the V8 project authors. All rights reserved. | 1 // Copyright 2007-2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 558 matching lines...) |
| 569 const int kSmallStringLength = 16; | 569 const int kSmallStringLength = 16; |
| 570 const int kSmallStringSize = | 570 const int kSmallStringSize = |
| 571 (SeqAsciiString::kHeaderSize + kSmallStringLength + | 571 (SeqAsciiString::kHeaderSize + kSmallStringLength + |
| 572 kObjectAlignmentMask) & ~kObjectAlignmentMask; | 572 kObjectAlignmentMask) & ~kObjectAlignmentMask; |
| 573 const int kMapSize = Map::kSize; | 573 const int kMapSize = Map::kSize; |
| 574 | 574 |
| 575 Object* new_last = NULL; | 575 Object* new_last = NULL; |
| 576 for (int i = 0; | 576 for (int i = 0; |
| 577 i + kSmallFixedArraySize <= new_space_size; | 577 i + kSmallFixedArraySize <= new_space_size; |
| 578 i += kSmallFixedArraySize) { | 578 i += kSmallFixedArraySize) { |
| 579 Object* obj = Heap::AllocateFixedArray(kSmallFixedArrayLength); | 579 Object* obj = |
| | 580 Heap::AllocateFixedArray(kSmallFixedArrayLength)->ToObjectChecked(); |
| 580 if (new_last != NULL) { | 581 if (new_last != NULL) { |
| 581 CHECK(reinterpret_cast<char*>(obj) == | 582 CHECK(reinterpret_cast<char*>(obj) == |
| 582 reinterpret_cast<char*>(new_last) + kSmallFixedArraySize); | 583 reinterpret_cast<char*>(new_last) + kSmallFixedArraySize); |
| 583 } | 584 } |
| 584 new_last = obj; | 585 new_last = obj; |
| 585 } | 586 } |
| 586 | 587 |
| 587 Object* pointer_last = NULL; | 588 Object* pointer_last = NULL; |
| 588 for (int i = 0; | 589 for (int i = 0; |
| 589 i + kSmallFixedArraySize <= size; | 590 i + kSmallFixedArraySize <= size; |
| 590 i += kSmallFixedArraySize) { | 591 i += kSmallFixedArraySize) { |
| 591 Object* obj = Heap::AllocateFixedArray(kSmallFixedArrayLength, TENURED); | 592 Object* obj = Heap::AllocateFixedArray(kSmallFixedArrayLength, |
| | 593 TENURED)->ToObjectChecked(); |
| 592 int old_page_fullness = i % Page::kPageSize; | 594 int old_page_fullness = i % Page::kPageSize; |
| 593 int page_fullness = (i + kSmallFixedArraySize) % Page::kPageSize; | 595 int page_fullness = (i + kSmallFixedArraySize) % Page::kPageSize; |
| 594 if (page_fullness < old_page_fullness || | 596 if (page_fullness < old_page_fullness || |
| 595 page_fullness > Page::kObjectAreaSize) { | 597 page_fullness > Page::kObjectAreaSize) { |
| 596 i = RoundUp(i, Page::kPageSize); | 598 i = RoundUp(i, Page::kPageSize); |
| 597 pointer_last = NULL; | 599 pointer_last = NULL; |
| 598 } | 600 } |
| 599 if (pointer_last != NULL) { | 601 if (pointer_last != NULL) { |
| 600 CHECK(reinterpret_cast<char*>(obj) == | 602 CHECK(reinterpret_cast<char*>(obj) == |
| 601 reinterpret_cast<char*>(pointer_last) + kSmallFixedArraySize); | 603 reinterpret_cast<char*>(pointer_last) + kSmallFixedArraySize); |
| 602 } | 604 } |
| 603 pointer_last = obj; | 605 pointer_last = obj; |
| 604 } | 606 } |
| 605 | 607 |
| 606 Object* data_last = NULL; | 608 Object* data_last = NULL; |
| 607 for (int i = 0; i + kSmallStringSize <= size; i += kSmallStringSize) { | 609 for (int i = 0; i + kSmallStringSize <= size; i += kSmallStringSize) { |
| 608 Object* obj = Heap::AllocateRawAsciiString(kSmallStringLength, TENURED); | 610 Object* obj = Heap::AllocateRawAsciiString(kSmallStringLength, |
| | 611 TENURED)->ToObjectChecked(); |
| 609 int old_page_fullness = i % Page::kPageSize; | 612 int old_page_fullness = i % Page::kPageSize; |
| 610 int page_fullness = (i + kSmallStringSize) % Page::kPageSize; | 613 int page_fullness = (i + kSmallStringSize) % Page::kPageSize; |
| 611 if (page_fullness < old_page_fullness || | 614 if (page_fullness < old_page_fullness || |
| 612 page_fullness > Page::kObjectAreaSize) { | 615 page_fullness > Page::kObjectAreaSize) { |
| 613 i = RoundUp(i, Page::kPageSize); | 616 i = RoundUp(i, Page::kPageSize); |
| 614 data_last = NULL; | 617 data_last = NULL; |
| 615 } | 618 } |
| 616 if (data_last != NULL) { | 619 if (data_last != NULL) { |
| 617 CHECK(reinterpret_cast<char*>(obj) == | 620 CHECK(reinterpret_cast<char*>(obj) == |
| 618 reinterpret_cast<char*>(data_last) + kSmallStringSize); | 621 reinterpret_cast<char*>(data_last) + kSmallStringSize); |
| 619 } | 622 } |
| 620 data_last = obj; | 623 data_last = obj; |
| 621 } | 624 } |
| 622 | 625 |
| 623 Object* map_last = NULL; | 626 Object* map_last = NULL; |
| 624 for (int i = 0; i + kMapSize <= size; i += kMapSize) { | 627 for (int i = 0; i + kMapSize <= size; i += kMapSize) { |
| 625 Object* obj = Heap::AllocateMap(JS_OBJECT_TYPE, 42 * kPointerSize); | 628 Object* obj = Heap::AllocateMap(JS_OBJECT_TYPE, |
| | 629 42 * kPointerSize)->ToObjectChecked(); |
| 626 int old_page_fullness = i % Page::kPageSize; | 630 int old_page_fullness = i % Page::kPageSize; |
| 627 int page_fullness = (i + kMapSize) % Page::kPageSize; | 631 int page_fullness = (i + kMapSize) % Page::kPageSize; |
| 628 if (page_fullness < old_page_fullness || | 632 if (page_fullness < old_page_fullness || |
| 629 page_fullness > Page::kObjectAreaSize) { | 633 page_fullness > Page::kObjectAreaSize) { |
| 630 i = RoundUp(i, Page::kPageSize); | 634 i = RoundUp(i, Page::kPageSize); |
| 631 map_last = NULL; | 635 map_last = NULL; |
| 632 } | 636 } |
| 633 if (map_last != NULL) { | 637 if (map_last != NULL) { |
| 634 CHECK(reinterpret_cast<char*>(obj) == | 638 CHECK(reinterpret_cast<char*>(obj) == |
| 635 reinterpret_cast<char*>(map_last) + kMapSize); | 639 reinterpret_cast<char*>(map_last) + kMapSize); |
| 636 } | 640 } |
| 637 map_last = obj; | 641 map_last = obj; |
| 638 } | 642 } |
| 639 | 643 |
| 640 if (size > Page::kObjectAreaSize) { | 644 if (size > Page::kObjectAreaSize) { |
| 641 // Support for reserving space in large object space is not there yet, | 645 // Support for reserving space in large object space is not there yet, |
| 642 // but using an always-allocate scope is fine for now. | 646 // but using an always-allocate scope is fine for now. |
| 643 AlwaysAllocateScope always; | 647 AlwaysAllocateScope always; |
| 644 int large_object_array_length = | 648 int large_object_array_length = |
| 645 (size - FixedArray::kHeaderSize) / kPointerSize; | 649 (size - FixedArray::kHeaderSize) / kPointerSize; |
| 646 Object* obj = Heap::AllocateFixedArray(large_object_array_length, | 650 Object* obj = Heap::AllocateFixedArray(large_object_array_length, |
| 647 TENURED); | 651 TENURED)->ToObjectChecked(); |
| 648 CHECK(!obj->IsFailure()); | 652 CHECK(!obj->IsFailure()); |
| 649 } | 653 } |
| 650 } | 654 } |
| 651 } | 655 } |
| 652 | 656 |
| 653 | 657 |
| 654 TEST(TestThatAlwaysSucceeds) { | 658 TEST(TestThatAlwaysSucceeds) { |
| 655 } | 659 } |
| 656 | 660 |
| 657 | 661 |
| 658 TEST(TestThatAlwaysFails) { | 662 TEST(TestThatAlwaysFails) { |
| 659 bool ArtificialFailure = false; | 663 bool ArtificialFailure = false; |
| 660 CHECK(ArtificialFailure); | 664 CHECK(ArtificialFailure); |
| 661 } | 665 } |
| 662 | 666 |
| 663 | 667 |
| 664 DEPENDENT_TEST(DependentTestThatAlwaysFails, TestThatAlwaysSucceeds) { | 668 DEPENDENT_TEST(DependentTestThatAlwaysFails, TestThatAlwaysSucceeds) { |
| 665 bool ArtificialFailure2 = false; | 669 bool ArtificialFailure2 = false; |
| 666 CHECK(ArtificialFailure2); | 670 CHECK(ArtificialFailure2); |
| 667 } | 671 } |
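
The change applied throughout this chunk is mechanical: each Heap::Allocate* call in the test now returns a "maybe" result, and every call site unwraps it immediately with ->ToObjectChecked(), which asserts that the allocation did not fail, instead of receiving a raw Object* and checking IsFailure() afterwards. A minimal standalone C++ sketch of that unwrap-or-die pattern follows; MaybeObject, Object, Failure, and AllocateFixedArray here mirror the names visible in the diff but are simplified stand-ins, not the real V8 declarations.

#include <cstdio>
#include <cstdlib>

class Object;

// Simplified stand-in for the allocation result type: either a real Object
// or a Failure sentinel.
class MaybeObject {
 public:
  virtual ~MaybeObject() {}
  virtual bool IsFailure() const { return false; }
  // Unwraps the allocation result, aborting the process if it is a failure.
  Object* ToObjectChecked();
};

class Object : public MaybeObject {};

class Failure : public MaybeObject {
 public:
  virtual bool IsFailure() const { return true; }
};

Object* MaybeObject::ToObjectChecked() {
  if (IsFailure()) {
    std::fprintf(stderr, "allocation failed\n");
    std::abort();
  }
  return static_cast<Object*>(this);
}

// Stand-in allocator: reports failure for a nonsensical length instead of
// allocating from a managed heap. (The Object leaks; this is only a sketch.)
MaybeObject* AllocateFixedArray(int length) {
  static Failure failure;
  if (length < 0) return &failure;
  return new Object();
}

int main() {
  // Old style (left column):  Object* obj = AllocateFixedArray(16);
  //                           CHECK(!obj->IsFailure());
  // New style (right column): unwrap immediately, crashing on failure.
  Object* obj = AllocateFixedArray(16)->ToObjectChecked();
  (void)obj;
  return 0;
}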