OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 416 matching lines...)
427 | 427 |
428 CHECK(!lo->IsEmpty()); | 428 CHECK(!lo->IsEmpty()); |
429 | 429 |
430 CHECK(lo->AllocateRaw(lo_size, NOT_EXECUTABLE).IsRetry()); | 430 CHECK(lo->AllocateRaw(lo_size, NOT_EXECUTABLE).IsRetry()); |
431 } | 431 } |
432 | 432 |
433 | 433 |
434 TEST(SizeOfFirstPageIsLargeEnough) { | 434 TEST(SizeOfFirstPageIsLargeEnough) { |
435 if (i::FLAG_always_opt) return; | 435 if (i::FLAG_always_opt) return; |
436 // Bootstrapping without a snapshot causes more allocations. | 436 // Bootstrapping without a snapshot causes more allocations. |
437 if (!i::Snapshot::HaveASnapshotToStartFrom()) return; | 437 if (!i::Snapshot::HaveASnapshotToStartFrom(CcTest::i_isolate())) return; |
438 CcTest::InitializeVM(); | 438 CcTest::InitializeVM(); |
439 Isolate* isolate = CcTest::i_isolate(); | 439 Isolate* isolate = CcTest::i_isolate(); |
440 | 440 |
441 // Freshly initialized VM gets by with one page per space. | 441 // Freshly initialized VM gets by with one page per space. |
442 for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) { | 442 for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) { |
443 // Debug code can be very large, so skip CODE_SPACE if we are generating it. | 443 // Debug code can be very large, so skip CODE_SPACE if we are generating it. |
444 if (i == CODE_SPACE && i::FLAG_debug_code) continue; | 444 if (i == CODE_SPACE && i::FLAG_debug_code) continue; |
445 CHECK_EQ(1, isolate->heap()->paged_space(i)->CountTotalPages()); | 445 CHECK_EQ(1, isolate->heap()->paged_space(i)->CountTotalPages()); |
446 } | 446 } |
447 | 447 |
(...skipping 42 matching lines...)
490 | 490 |
491 // Turn the allocation into a proper object so isolate teardown won't | 491 // Turn the allocation into a proper object so isolate teardown won't |
492 // crash. | 492 // crash. |
493 HeapObject* free_space = NULL; | 493 HeapObject* free_space = NULL; |
494 CHECK(allocation.To(&free_space)); | 494 CHECK(allocation.To(&free_space)); |
495 new_space->heap()->CreateFillerObjectAt(free_space->address(), 80); | 495 new_space->heap()->CreateFillerObjectAt(free_space->address(), 80); |
496 } | 496 } |
497 } | 497 } |
498 isolate->Dispose(); | 498 isolate->Dispose(); |
499 } | 499 } |