OLD | NEW |
1 // Copyright 2008 the V8 project authors. All rights reserved. | 1 // Copyright 2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 507 matching lines...)
518 CHECK(result->SameValue(expected)); | 518 CHECK(result->SameValue(expected)); |
519 } | 519 } |
520 | 520 |
521 | 521 |
522 static inline void ExpectUndefined(const char* code) { | 522 static inline void ExpectUndefined(const char* code) { |
523 v8::Local<v8::Value> result = CompileRun(code); | 523 v8::Local<v8::Value> result = CompileRun(code); |
524 CHECK(result->IsUndefined()); | 524 CHECK(result->IsUndefined()); |
525 } | 525 } |
526 | 526 |
527 | 527 |
528 static inline void DisableInlineAllocationSteps(v8::internal::NewSpace* space) { | |
529 space->LowerInlineAllocationLimit(0); | |
530 } | |
531 | |
532 | |
533 static int LenFromSize(int size) { | 528 static int LenFromSize(int size) { |
534 return (size - i::FixedArray::kHeaderSize) / i::kPointerSize; | 529 return (size - i::FixedArray::kHeaderSize) / i::kPointerSize; |
535 } | 530 } |
536 | 531 |
537 | 532 |
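LenFromSize converts a byte budget into the number of elements a FixedArray of that total size can hold, i.e. it inverts the usual header-plus-payload size formula. A minimal standalone sketch of that arithmetic (the constants are assumptions for a 64-bit build, not V8's definitions):

  // Sketch of the size/length arithmetic; kPointerSize and the FixedArray
  // header size are assumed values for a 64-bit build, not V8's globals.
  #include <cassert>

  const int kPointerSize = 8;            // assumed pointer width
  const int kFixedArrayHeaderSize = 16;  // assumed map word + length word

  int LenFromSize(int size) {
    return (size - kFixedArrayHeaderSize) / kPointerSize;
  }

  int SizeFromLen(int length) {  // hypothetical inverse helper
    return kFixedArrayHeaderSize + length * kPointerSize;
  }

  int main() {
    assert(LenFromSize(SizeFromLen(100)) == 100);  // round-trips exactly
    return 0;
  }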
538 static inline void CreatePadding(i::Heap* heap, int padding_size, | 533 static inline void CreatePadding(i::Heap* heap, int padding_size, |
539 i::PretenureFlag tenure) { | 534 i::PretenureFlag tenure) { |
540 const int max_number_of_objects = 20; | 535 const int max_number_of_objects = 20; |
541 v8::internal::Handle<v8::internal::FixedArray> | 536 v8::internal::Handle<v8::internal::FixedArray> |
542 big_objects[max_number_of_objects]; | 537 big_objects[max_number_of_objects]; |
543 i::Isolate* isolate = heap->isolate(); | 538 i::Isolate* isolate = heap->isolate(); |
544 int allocate_memory; | 539 int allocate_memory; |
545 int length; | 540 int length; |
546 int free_memory = padding_size; | 541 int free_memory = padding_size; |
547 if (tenure == i::TENURED) { | 542 if (tenure == i::TENURED) { |
548 int current_free_memory = | 543 int current_free_memory = |
549 static_cast<int>(*heap->old_space()->allocation_limit_address() - | 544 static_cast<int>(*heap->old_space()->allocation_limit_address() - |
550 *heap->old_space()->allocation_top_address()); | 545 *heap->old_space()->allocation_top_address()); |
551 CHECK(padding_size <= current_free_memory || current_free_memory == 0); | 546 CHECK(padding_size <= current_free_memory || current_free_memory == 0); |
552 } else { | 547 } else { |
553 DisableInlineAllocationSteps(heap->new_space()); | 548 heap->new_space()->DisableInlineAllocationSteps(); |
554 int current_free_memory = | 549 int current_free_memory = |
555 static_cast<int>(*heap->new_space()->allocation_limit_address() - | 550 static_cast<int>(*heap->new_space()->allocation_limit_address() - |
556 *heap->new_space()->allocation_top_address()); | 551 *heap->new_space()->allocation_top_address()); |
557 CHECK(padding_size <= current_free_memory || current_free_memory == 0); | 552 CHECK(padding_size <= current_free_memory || current_free_memory == 0); |
558 } | 553 } |
559 for (int i = 0; i < max_number_of_objects && free_memory > 0; i++) { | 554 for (int i = 0; i < max_number_of_objects && free_memory > 0; i++) { |
560 if (free_memory > i::Page::kMaxRegularHeapObjectSize) { | 555 if (free_memory > i::Page::kMaxRegularHeapObjectSize) { |
561 allocate_memory = i::Page::kMaxRegularHeapObjectSize; | 556 allocate_memory = i::Page::kMaxRegularHeapObjectSize; |
562 length = LenFromSize(allocate_memory); | 557 length = LenFromSize(allocate_memory); |
563 } else { | 558 } else { |
564 allocate_memory = free_memory; | 559 allocate_memory = free_memory; |
565 length = LenFromSize(allocate_memory); | 560 length = LenFromSize(allocate_memory); |
566 if (length <= 0) { | 561 if (length <= 0) { |
567 // Not enough room to create another fixed array. Let's create a filler. | 562 // Not enough room to create another fixed array. Let's create a filler. |
568 heap->CreateFillerObjectAt(*heap->old_space()->allocation_top_address(), | 563 heap->CreateFillerObjectAt(*heap->old_space()->allocation_top_address(), |
569 free_memory); | 564 free_memory); |
570 break; | 565 break; |
571 } | 566 } |
572 } | 567 } |
573 big_objects[i] = isolate->factory()->NewFixedArray(length, tenure); | 568 big_objects[i] = isolate->factory()->NewFixedArray(length, tenure); |
574 CHECK((tenure == i::NOT_TENURED && heap->InNewSpace(*big_objects[i])) || | 569 CHECK((tenure == i::NOT_TENURED && heap->InNewSpace(*big_objects[i])) || |
575 (tenure == i::TENURED && heap->InOldSpace(*big_objects[i]))); | 570 (tenure == i::TENURED && heap->InOldSpace(*big_objects[i]))); |
576 free_memory -= allocate_memory; | 571 free_memory -= allocate_memory; |
577 } | 572 } |
578 } | 573 } |
579 | 574 |
580 | 575 |
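CreatePadding spends padding_size bytes by allocating at most 20 fixed arrays, each capped at i::Page::kMaxRegularHeapObjectSize, and switches to a filler object once the leftover is too small for even an empty array. A hedged sketch of just that chunking loop, with the heap calls replaced by prints (the cap and minimum array size below are placeholders, not the values V8 derives from its page layout):

  // Chunking plan only; no heap involved. Both constants are assumed.
  #include <cstdio>

  const int kMaxRegularHeapObjectSize = 512 * 1024;  // assumed cap
  const int kMinArraySize = 24;  // assumed smallest FixedArray footprint

  void PlanPadding(int padding_size) {
    const int kMaxObjects = 20;
    int free_memory = padding_size;
    for (int i = 0; i < kMaxObjects && free_memory > 0; i++) {
      int chunk = free_memory > kMaxRegularHeapObjectSize
                      ? kMaxRegularHeapObjectSize
                      : free_memory;
      if (chunk < kMinArraySize) {
        std::printf("filler of %d bytes\n", free_memory);
        break;
      }
      std::printf("FixedArray of %d bytes\n", chunk);
      free_memory -= chunk;
    }
  }

  int main() {
    PlanPadding(3 * kMaxRegularHeapObjectSize + 8);  // three arrays + filler
    return 0;
  }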
581 // Helper function that fills up one page of the new space. | 576 // Helper function that fills up one page of the new space. |
582 static inline bool FillUpOnePage(v8::internal::NewSpace* space) { | 577 static inline bool FillUpOnePage(v8::internal::NewSpace* space) { |
583 DisableInlineAllocationSteps(space); | 578 space->DisableInlineAllocationSteps(); |
584 int space_remaining = static_cast<int>(*space->allocation_limit_address() - | 579 int space_remaining = static_cast<int>(*space->allocation_limit_address() - |
585 *space->allocation_top_address()); | 580 *space->allocation_top_address()); |
586 if (space_remaining == 0) return false; | 581 if (space_remaining == 0) return false; |
587 CreatePadding(space->heap(), space_remaining, i::NOT_TENURED); | 582 CreatePadding(space->heap(), space_remaining, i::NOT_TENURED); |
588 return true; | 583 return true; |
589 } | 584 } |
590 | 585 |
591 | 586 |
592 // Helper function that allocates all but extra_bytes of the new space. | 587 // Helper function that allocates all but extra_bytes of the new space. |
593 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space, | 588 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space, |
594 int extra_bytes) { | 589 int extra_bytes) { |
595 DisableInlineAllocationSteps(space); | 590 space->DisableInlineAllocationSteps(); |
596 int space_remaining = static_cast<int>(*space->allocation_limit_address() - | 591 int space_remaining = static_cast<int>(*space->allocation_limit_address() - |
597 *space->allocation_top_address()); | 592 *space->allocation_top_address()); |
598 CHECK(space_remaining >= extra_bytes); | 593 CHECK(space_remaining >= extra_bytes); |
599 int new_linear_size = space_remaining - extra_bytes; | 594 int new_linear_size = space_remaining - extra_bytes; |
600 if (new_linear_size == 0) return; | 595 if (new_linear_size == 0) return; |
601 CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED); | 596 CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED); |
602 } | 597 } |
603 | 598 |
604 | 599 |
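A hedged sketch of how a cctest might drive AllocateAllButNBytes (a hypothetical test, not part of this CL; it assumes the CcTest fixtures declared earlier in this header and i::FixedArray::SizeFor):

  // Hypothetical test: leave exactly enough linear space for one 4-element
  // FixedArray, then check that the next allocation still lands in new space.
  TEST(LeaveRoomForOneArray) {
    CcTest::InitializeVM();
    v8::HandleScope scope(CcTest::isolate());
    i::Heap* heap = CcTest::heap();
    AllocateAllButNBytes(heap->new_space(), i::FixedArray::SizeFor(4));
    i::Handle<i::FixedArray> array =
        CcTest::i_isolate()->factory()->NewFixedArray(4, i::NOT_TENURED);
    CHECK(heap->InNewSpace(*array));
  }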
605 static inline void FillCurrentPage(v8::internal::NewSpace* space) { | 600 static inline void FillCurrentPage(v8::internal::NewSpace* space) { |
(...skipping 81 matching lines...)
687 HandleAndZoneScope() {} | 682 HandleAndZoneScope() {} |
688 | 683 |
689 // Prefixing the members below with main_ avoids a lot of naming clashes. | 684 // Prefixing the members below with main_ avoids a lot of naming clashes. |
690 i::Zone* main_zone() { return &main_zone_; } | 685 i::Zone* main_zone() { return &main_zone_; } |
691 | 686 |
692 private: | 687 private: |
693 i::Zone main_zone_; | 688 i::Zone main_zone_; |
694 }; | 689 }; |
695 | 690 |
696 #endif // ifndef CCTEST_H_ | 691 #endif // ifndef CCTEST_H_ |