OLD | NEW |
1 // Copyright 2008 the V8 project authors. All rights reserved. | 1 // Copyright 2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 512 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
523 v8::Local<v8::Value> result = CompileRun(code); | 523 v8::Local<v8::Value> result = CompileRun(code); |
524 CHECK(result->IsUndefined()); | 524 CHECK(result->IsUndefined()); |
525 } | 525 } |
526 | 526 |
527 | 527 |
528 static inline void DisableInlineAllocationSteps(v8::internal::NewSpace* space) { | 528 static inline void DisableInlineAllocationSteps(v8::internal::NewSpace* space) { |
529 space->LowerInlineAllocationLimit(0); | 529 space->LowerInlineAllocationLimit(0); |
530 } | 530 } |
531 | 531 |
532 | 532 |
533 static int LenFromSize(int size) { | |
534 return (size - i::FixedArray::kHeaderSize) / i::kPointerSize; | |
535 } | |
536 | |
537 | |
538 static inline void CreatePadding(i::Heap* heap, int padding_size, | |
539 i::PretenureFlag tenure) { | |
540 const int max_number_of_objects = 20; | |
541 v8::internal::Handle<v8::internal::FixedArray> | |
542 big_objects[max_number_of_objects]; | |
543 i::Isolate* isolate = heap->isolate(); | |
544 int allocate_memory; | |
545 int length; | |
546 int free_memory = padding_size; | |
547 if (tenure == i::TENURED) { | |
548 int current_free_memory = | |
549 static_cast<int>(*heap->old_space()->allocation_limit_address() - | |
550 *heap->old_space()->allocation_top_address()); | |
551 CHECK(padding_size <= current_free_memory || current_free_memory == 0); | |
552 } else { | |
553 DisableInlineAllocationSteps(heap->new_space()); | |
554 int current_free_memory = | |
555 static_cast<int>(*heap->new_space()->allocation_limit_address() - | |
556 *heap->new_space()->allocation_top_address()); | |
557 CHECK(padding_size <= current_free_memory || current_free_memory == 0); | |
558 } | |
559 for (int i = 0; i < max_number_of_objects && free_memory > 0; i++) { | |
560 if (free_memory > i::Page::kMaxRegularHeapObjectSize) { | |
561 allocate_memory = i::Page::kMaxRegularHeapObjectSize; | |
562 length = LenFromSize(allocate_memory); | |
563 } else { | |
564 allocate_memory = free_memory; | |
565 length = LenFromSize(allocate_memory); | |
566 if (length <= 0) { | |
567 // Not enough room to create another fixed array. Let's create a filler. | |
568 heap->CreateFillerObjectAt(*heap->old_space()->allocation_top_address(), | |
569 free_memory); | |
570 break; | |
571 } | |
572 } | |
573 big_objects[i] = isolate->factory()->NewFixedArray(length, tenure); | |
574 CHECK((tenure == i::NOT_TENURED && heap->InNewSpace(*big_objects[i])) || | |
575 (tenure == i::TENURED && heap->InOldSpace(*big_objects[i]))); | |
576 free_memory -= allocate_memory; | |
577 } | |
578 } | |
579 | |
580 | |
581 // Helper function that simulates a full new-space in the heap. | 533 // Helper function that simulates a full new-space in the heap. |
582 static inline bool FillUpOnePage(v8::internal::NewSpace* space) { | 534 static inline bool FillUpOnePage(v8::internal::NewSpace* space) { |
583 DisableInlineAllocationSteps(space); | 535 DisableInlineAllocationSteps(space); |
584 int space_remaining = static_cast<int>(*space->allocation_limit_address() - | 536 v8::internal::AllocationResult allocation = space->AllocateRawUnaligned( |
585 *space->allocation_top_address()); | 537 v8::internal::Page::kMaxRegularHeapObjectSize); |
586 if (space_remaining == 0) return false; | 538 if (allocation.IsRetry()) return false; |
587 CreatePadding(space->heap(), space_remaining, i::NOT_TENURED); | 539 v8::internal::HeapObject* free_space = NULL; |
| 540 CHECK(allocation.To(&free_space)); |
| 541 space->heap()->CreateFillerObjectAt( |
| 542 free_space->address(), v8::internal::Page::kMaxRegularHeapObjectSize); |
588 return true; | 543 return true; |
589 } | 544 } |
590 | 545 |
591 | 546 |
592 // Helper function that simulates a full new-space in the heap. | 547 // Helper function that simulates a full new-space in the heap. |
593 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space, | 548 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space, |
594 int extra_bytes) { | 549 int extra_bytes) { |
595 DisableInlineAllocationSteps(space); | 550 DisableInlineAllocationSteps(space); |
596 int space_remaining = static_cast<int>(*space->allocation_limit_address() - | 551 int space_remaining = static_cast<int>(*space->allocation_limit_address() - |
597 *space->allocation_top_address()); | 552 *space->allocation_top_address()); |
598 CHECK(space_remaining >= extra_bytes); | 553 CHECK(space_remaining >= extra_bytes); |
599 int new_linear_size = space_remaining - extra_bytes; | 554 int new_linear_size = space_remaining - extra_bytes; |
600 if (new_linear_size == 0) return; | 555 if (new_linear_size == 0) return; |
601 CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED); | 556 v8::internal::AllocationResult allocation = |
| 557 space->AllocateRawUnaligned(new_linear_size); |
| 558 v8::internal::HeapObject* free_space = NULL; |
| 559 CHECK(allocation.To(&free_space)); |
| 560 space->heap()->CreateFillerObjectAt(free_space->address(), new_linear_size); |
602 } | 561 } |
603 | 562 |
604 | 563 |
605 static inline void FillCurrentPage(v8::internal::NewSpace* space) { | 564 static inline void FillCurrentPage(v8::internal::NewSpace* space) { |
606 AllocateAllButNBytes(space, 0); | 565 AllocateAllButNBytes(space, 0); |
607 } | 566 } |
608 | 567 |
609 | 568 |
610 static inline void SimulateFullSpace(v8::internal::NewSpace* space) { | 569 static inline void SimulateFullSpace(v8::internal::NewSpace* space) { |
611 FillCurrentPage(space); | 570 FillCurrentPage(space); |
(...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
687 HandleAndZoneScope() {} | 646 HandleAndZoneScope() {} |
688 | 647 |
689 // Prefixing the below with main_ reduces a lot of naming clashes. | 648 // Prefixing the below with main_ reduces a lot of naming clashes. |
690 i::Zone* main_zone() { return &main_zone_; } | 649 i::Zone* main_zone() { return &main_zone_; } |
691 | 650 |
692 private: | 651 private: |
693 i::Zone main_zone_; | 652 i::Zone main_zone_; |
694 }; | 653 }; |
695 | 654 |
696 #endif // ifndef CCTEST_H_ | 655 #endif // ifndef CCTEST_H_ |
OLD | NEW |