OLD | NEW |
1 // Copyright 2008 the V8 project authors. All rights reserved. | 1 // Copyright 2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 512 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
523 v8::Local<v8::Value> result = CompileRun(code); | 523 v8::Local<v8::Value> result = CompileRun(code); |
524 CHECK(result->IsUndefined()); | 524 CHECK(result->IsUndefined()); |
525 } | 525 } |
526 | 526 |
527 | 527 |
// Disables inline-allocation step sampling for |space| by lowering the
// inline allocation limit with a step size of 0, so test padding below can
// fill the linear allocation area without observers firing.
// NOTE(review): semantics inferred from the function name and call sites in
// this file — confirm against NewSpace::LowerInlineAllocationLimit.
static inline void DisableInlineAllocationSteps(v8::internal::NewSpace* space) {
  space->LowerInlineAllocationLimit(0);
}
531 | 531 |
532 | 532 |
// Returns the FixedArray element count whose total heap size
// (header + length * kPointerSize) occupies exactly |size| bytes.
// May return <= 0 when |size| is smaller than the FixedArray header.
static int LenFromSize(int size) {
  return (size - i::FixedArray::kHeaderSize) / i::kPointerSize;
}
| 536 |
| 537 |
| 538 static inline void CreatePadding(i::Heap* heap, int padding_size, |
| 539 i::PretenureFlag tenure) { |
| 540 const int max_number_of_objects = 20; |
| 541 v8::internal::Handle<v8::internal::FixedArray> |
| 542 big_objects[max_number_of_objects]; |
| 543 i::Isolate* isolate = heap->isolate(); |
| 544 int allocate_memory; |
| 545 int length; |
| 546 int free_memory = padding_size; |
| 547 if (tenure == i::TENURED) { |
| 548 int current_free_memory = |
| 549 static_cast<int>(*heap->old_space()->allocation_limit_address() - |
| 550 *heap->old_space()->allocation_top_address()); |
| 551 CHECK(padding_size <= current_free_memory || current_free_memory == 0); |
| 552 } else { |
| 553 DisableInlineAllocationSteps(heap->new_space()); |
| 554 int current_free_memory = |
| 555 static_cast<int>(*heap->new_space()->allocation_limit_address() - |
| 556 *heap->new_space()->allocation_top_address()); |
| 557 CHECK(padding_size <= current_free_memory || current_free_memory == 0); |
| 558 } |
| 559 for (int i = 0; i < max_number_of_objects && free_memory > 0; i++) { |
| 560 if (free_memory > i::Page::kMaxRegularHeapObjectSize) { |
| 561 allocate_memory = i::Page::kMaxRegularHeapObjectSize; |
| 562 length = LenFromSize(allocate_memory); |
| 563 } else { |
| 564 allocate_memory = free_memory; |
| 565 length = LenFromSize(allocate_memory); |
| 566 if (length <= 0) { |
| 567 // Not enough room to create another fixed array. Let's create a filler. |
| 568 heap->CreateFillerObjectAt(*heap->old_space()->allocation_top_address(), |
| 569 free_memory); |
| 570 break; |
| 571 } |
| 572 } |
| 573 big_objects[i] = isolate->factory()->NewFixedArray(length, tenure); |
| 574 CHECK((tenure == i::NOT_TENURED && heap->InNewSpace(*big_objects[i])) || |
| 575 (tenure == i::TENURED && heap->InOldSpace(*big_objects[i]))); |
| 576 free_memory -= allocate_memory; |
| 577 } |
| 578 } |
| 579 |
| 580 |
533 // Helper function that simulates a full new-space in the heap. | 581 // Helper function that simulates a full new-space in the heap. |
534 static inline bool FillUpOnePage(v8::internal::NewSpace* space) { | 582 static inline bool FillUpOnePage(v8::internal::NewSpace* space) { |
535 DisableInlineAllocationSteps(space); | 583 DisableInlineAllocationSteps(space); |
536 v8::internal::AllocationResult allocation = space->AllocateRawUnaligned( | 584 int space_remaining = static_cast<int>(*space->allocation_limit_address() - |
537 v8::internal::Page::kMaxRegularHeapObjectSize); | 585 *space->allocation_top_address()); |
538 if (allocation.IsRetry()) return false; | 586 if (space_remaining == 0) return false; |
539 v8::internal::HeapObject* free_space = NULL; | 587 CreatePadding(space->heap(), space_remaining, i::NOT_TENURED); |
540 CHECK(allocation.To(&free_space)); | |
541 space->heap()->CreateFillerObjectAt( | |
542 free_space->address(), v8::internal::Page::kMaxRegularHeapObjectSize); | |
543 return true; | 588 return true; |
544 } | 589 } |
545 | 590 |
546 | 591 |
547 // Helper function that simulates a fill new-space in the heap. | 592 // Helper function that simulates a fill new-space in the heap. |
548 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space, | 593 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space, |
549 int extra_bytes) { | 594 int extra_bytes) { |
550 DisableInlineAllocationSteps(space); | 595 DisableInlineAllocationSteps(space); |
551 int space_remaining = static_cast<int>(*space->allocation_limit_address() - | 596 int space_remaining = static_cast<int>(*space->allocation_limit_address() - |
552 *space->allocation_top_address()); | 597 *space->allocation_top_address()); |
553 CHECK(space_remaining >= extra_bytes); | 598 CHECK(space_remaining >= extra_bytes); |
554 int new_linear_size = space_remaining - extra_bytes; | 599 int new_linear_size = space_remaining - extra_bytes; |
555 if (new_linear_size == 0) return; | 600 if (new_linear_size == 0) return; |
556 v8::internal::AllocationResult allocation = | 601 CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED); |
557 space->AllocateRawUnaligned(new_linear_size); | |
558 v8::internal::HeapObject* free_space = NULL; | |
559 CHECK(allocation.To(&free_space)); | |
560 space->heap()->CreateFillerObjectAt(free_space->address(), new_linear_size); | |
561 } | 602 } |
562 | 603 |
563 | 604 |
// Fills the current new-space page completely, leaving zero bytes of
// linear allocation space free.
static inline void FillCurrentPage(v8::internal::NewSpace* space) {
  AllocateAllButNBytes(space, 0);
}
567 | 608 |
568 | 609 |
569 static inline void SimulateFullSpace(v8::internal::NewSpace* space) { | 610 static inline void SimulateFullSpace(v8::internal::NewSpace* space) { |
570 FillCurrentPage(space); | 611 FillCurrentPage(space); |
(...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
646 HandleAndZoneScope() {} | 687 HandleAndZoneScope() {} |
647 | 688 |
648 // Prefixing the below with main_ reduces a lot of naming clashes. | 689 // Prefixing the below with main_ reduces a lot of naming clashes. |
649 i::Zone* main_zone() { return &main_zone_; } | 690 i::Zone* main_zone() { return &main_zone_; } |
650 | 691 |
651 private: | 692 private: |
652 i::Zone main_zone_; | 693 i::Zone main_zone_; |
653 }; | 694 }; |
654 | 695 |
655 #endif // ifndef CCTEST_H_ | 696 #endif // ifndef CCTEST_H_ |
OLD | NEW |