OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 735 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
746 | 746 |
747 static HeapObject* AllocateUnaligned(NewSpace* space, int size) { | 747 static HeapObject* AllocateUnaligned(NewSpace* space, int size) { |
748 AllocationResult allocation = space->AllocateRawUnaligned(size); | 748 AllocationResult allocation = space->AllocateRawUnaligned(size); |
749 CHECK(!allocation.IsRetry()); | 749 CHECK(!allocation.IsRetry()); |
750 HeapObject* filler = NULL; | 750 HeapObject* filler = NULL; |
751 CHECK(allocation.To(&filler)); | 751 CHECK(allocation.To(&filler)); |
752 space->heap()->CreateFillerObjectAt(filler->address(), size); | 752 space->heap()->CreateFillerObjectAt(filler->address(), size); |
753 return filler; | 753 return filler; |
754 } | 754 } |
755 | 755 |
756 class Observer : public InlineAllocationObserver { | 756 static HeapObject* AllocateUnaligned(PagedSpace* space, int size) { |
| 757 AllocationResult allocation = space->AllocateRaw(size, kDoubleUnaligned); |
| 758 CHECK(!allocation.IsRetry()); |
| 759 HeapObject* filler = NULL; |
| 760 CHECK(allocation.To(&filler)); |
| 761 space->heap()->CreateFillerObjectAt(filler->address(), size); |
| 762 return filler; |
| 763 } |
| 764 |
| 765 static HeapObject* AllocateUnaligned(LargeObjectSpace* space, int size) { |
| 766 AllocationResult allocation = space->AllocateRaw(size, EXECUTABLE); |
| 767 CHECK(!allocation.IsRetry()); |
| 768 HeapObject* filler = NULL; |
| 769 CHECK(allocation.To(&filler)); |
| 770 return filler; |
| 771 } |
| 772 |
| 773 class Observer : public AllocationObserver { |
757 public: | 774 public: |
758 explicit Observer(intptr_t step_size) | 775 explicit Observer(intptr_t step_size) |
759 : InlineAllocationObserver(step_size), count_(0) {} | 776 : AllocationObserver(step_size), count_(0) {} |
760 | 777 |
761 void Step(int bytes_allocated, Address, size_t) override { count_++; } | 778 void Step(int bytes_allocated, Address, size_t) override { count_++; } |
762 | 779 |
763 int count() const { return count_; } | 780 int count() const { return count_; } |
764 | 781 |
765 private: | 782 private: |
766 int count_; | 783 int count_; |
767 }; | 784 }; |
768 | 785 |
| 786 template <typename T> |
| 787 void testAllocationObserver(Isolate* i_isolate, T* space) { |
| 788 Observer observer1(128); |
| 789 space->AddAllocationObserver(&observer1); |
769 | 790 |
770 UNINITIALIZED_TEST(InlineAllocationObserver) { | 791 // The observer should not get notified if we have only allocated less than |
| 792 // 128 bytes. |
| 793 AllocateUnaligned(space, 64); |
| 794 CHECK_EQ(observer1.count(), 0); |
| 795 |
| 796 // The observer should get called when we have allocated exactly 128 bytes. |
| 797 AllocateUnaligned(space, 64); |
| 798 CHECK_EQ(observer1.count(), 1); |
| 799 |
| 800 // Another >128 bytes should get another notification. |
| 801 AllocateUnaligned(space, 136); |
| 802 CHECK_EQ(observer1.count(), 2); |
| 803 |
| 804 // Allocating a large object should get only one notification. |
| 805 AllocateUnaligned(space, 1024); |
| 806 CHECK_EQ(observer1.count(), 3); |
| 807 |
| 808 // Allocating another 2048 bytes in small objects should get 16 |
| 809 // notifications. |
| 810 for (int i = 0; i < 64; ++i) { |
| 811 AllocateUnaligned(space, 32); |
| 812 } |
| 813 CHECK_EQ(observer1.count(), 19); |
| 814 |
| 815 // Multiple observers should work. |
| 816 Observer observer2(96); |
| 817 space->AddAllocationObserver(&observer2); |
| 818 |
| 819 AllocateUnaligned(space, 2048); |
| 820 CHECK_EQ(observer1.count(), 20); |
| 821 CHECK_EQ(observer2.count(), 1); |
| 822 |
| 823 AllocateUnaligned(space, 104); |
| 824 CHECK_EQ(observer1.count(), 20); |
| 825 CHECK_EQ(observer2.count(), 2); |
| 826 |
| 827 // Callback should stop getting called after an observer is removed. |
| 828 space->RemoveAllocationObserver(&observer1); |
| 829 |
| 830 AllocateUnaligned(space, 384); |
| 831 CHECK_EQ(observer1.count(), 20); // no more notifications. |
| 832 CHECK_EQ(observer2.count(), 3); // this one is still active. |
| 833 |
 | 834 // Ensure that PauseAllocationObserversScope works correctly. |
| 835 AllocateUnaligned(space, 48); |
| 836 CHECK_EQ(observer2.count(), 3); |
| 837 { |
| 838 PauseAllocationObserversScope pause_observers(i_isolate->heap()); |
| 839 CHECK_EQ(observer2.count(), 3); |
| 840 AllocateUnaligned(space, 384); |
| 841 CHECK_EQ(observer2.count(), 3); |
| 842 } |
| 843 CHECK_EQ(observer2.count(), 3); |
| 844 // Coupled with the 48 bytes allocated before the pause, another 48 bytes |
| 845 // allocated here should trigger a notification. |
| 846 AllocateUnaligned(space, 48); |
| 847 CHECK_EQ(observer2.count(), 4); |
| 848 |
| 849 space->RemoveAllocationObserver(&observer2); |
| 850 AllocateUnaligned(space, 384); |
| 851 CHECK_EQ(observer1.count(), 20); |
| 852 CHECK_EQ(observer2.count(), 4); |
| 853 } |
| 854 |
| 855 UNINITIALIZED_TEST(AllocationObserver) { |
771 v8::Isolate::CreateParams create_params; | 856 v8::Isolate::CreateParams create_params; |
772 create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); | 857 create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); |
773 v8::Isolate* isolate = v8::Isolate::New(create_params); | 858 v8::Isolate* isolate = v8::Isolate::New(create_params); |
774 { | 859 { |
775 v8::Isolate::Scope isolate_scope(isolate); | 860 v8::Isolate::Scope isolate_scope(isolate); |
776 v8::HandleScope handle_scope(isolate); | 861 v8::HandleScope handle_scope(isolate); |
777 v8::Context::New(isolate)->Enter(); | 862 v8::Context::New(isolate)->Enter(); |
778 | 863 |
779 Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate); | 864 Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate); |
780 | 865 |
781 NewSpace* new_space = i_isolate->heap()->new_space(); | 866 testAllocationObserver<NewSpace>(i_isolate, i_isolate->heap()->new_space()); |
782 | 867 // Old space is used but the code path is shared for all |
783 Observer observer1(128); | 868 // classes inheriting from PagedSpace. |
784 new_space->AddInlineAllocationObserver(&observer1); | 869 testAllocationObserver<PagedSpace>(i_isolate, |
785 | 870 i_isolate->heap()->old_space()); |
786 // The observer should not get notified if we have only allocated less than | 871 testAllocationObserver<LargeObjectSpace>(i_isolate, |
787 // 128 bytes. | 872 i_isolate->heap()->lo_space()); |
788 AllocateUnaligned(new_space, 64); | |
789 CHECK_EQ(observer1.count(), 0); | |
790 | |
791 // The observer should get called when we have allocated exactly 128 bytes. | |
792 AllocateUnaligned(new_space, 64); | |
793 CHECK_EQ(observer1.count(), 1); | |
794 | |
795 // Another >128 bytes should get another notification. | |
796 AllocateUnaligned(new_space, 136); | |
797 CHECK_EQ(observer1.count(), 2); | |
798 | |
799 // Allocating a large object should get only one notification. | |
800 AllocateUnaligned(new_space, 1024); | |
801 CHECK_EQ(observer1.count(), 3); | |
802 | |
803 // Allocating another 2048 bytes in small objects should get 16 | |
804 // notifications. | |
805 for (int i = 0; i < 64; ++i) { | |
806 AllocateUnaligned(new_space, 32); | |
807 } | |
808 CHECK_EQ(observer1.count(), 19); | |
809 | |
810 // Multiple observers should work. | |
811 Observer observer2(96); | |
812 new_space->AddInlineAllocationObserver(&observer2); | |
813 | |
814 AllocateUnaligned(new_space, 2048); | |
815 CHECK_EQ(observer1.count(), 20); | |
816 CHECK_EQ(observer2.count(), 1); | |
817 | |
818 AllocateUnaligned(new_space, 104); | |
819 CHECK_EQ(observer1.count(), 20); | |
820 CHECK_EQ(observer2.count(), 2); | |
821 | |
822 // Callback should stop getting called after an observer is removed. | |
823 new_space->RemoveInlineAllocationObserver(&observer1); | |
824 | |
825 AllocateUnaligned(new_space, 384); | |
826 CHECK_EQ(observer1.count(), 20); // no more notifications. | |
827 CHECK_EQ(observer2.count(), 3); // this one is still active. | |
828 | |
829 // Ensure that PauseInlineAllocationObserversScope work correctly. | |
830 AllocateUnaligned(new_space, 48); | |
831 CHECK_EQ(observer2.count(), 3); | |
832 { | |
833 PauseInlineAllocationObserversScope pause_observers(new_space); | |
834 CHECK_EQ(observer2.count(), 3); | |
835 AllocateUnaligned(new_space, 384); | |
836 CHECK_EQ(observer2.count(), 3); | |
837 } | |
838 CHECK_EQ(observer2.count(), 3); | |
839 // Coupled with the 48 bytes allocated before the pause, another 48 bytes | |
840 // allocated here should trigger a notification. | |
841 AllocateUnaligned(new_space, 48); | |
842 CHECK_EQ(observer2.count(), 4); | |
843 | |
844 new_space->RemoveInlineAllocationObserver(&observer2); | |
845 AllocateUnaligned(new_space, 384); | |
846 CHECK_EQ(observer1.count(), 20); | |
847 CHECK_EQ(observer2.count(), 4); | |
848 } | 873 } |
849 isolate->Dispose(); | 874 isolate->Dispose(); |
850 } | 875 } |
851 | 876 |
852 | 877 |
853 UNINITIALIZED_TEST(InlineAllocationObserverCadence) { | 878 UNINITIALIZED_TEST(InlineAllocationObserverCadence) { |
854 v8::Isolate::CreateParams create_params; | 879 v8::Isolate::CreateParams create_params; |
855 create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); | 880 create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); |
856 v8::Isolate* isolate = v8::Isolate::New(create_params); | 881 v8::Isolate* isolate = v8::Isolate::New(create_params); |
857 { | 882 { |
858 v8::Isolate::Scope isolate_scope(isolate); | 883 v8::Isolate::Scope isolate_scope(isolate); |
859 v8::HandleScope handle_scope(isolate); | 884 v8::HandleScope handle_scope(isolate); |
860 v8::Context::New(isolate)->Enter(); | 885 v8::Context::New(isolate)->Enter(); |
861 | 886 |
862 Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate); | 887 Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate); |
863 | 888 |
864 NewSpace* new_space = i_isolate->heap()->new_space(); | 889 NewSpace* new_space = i_isolate->heap()->new_space(); |
865 | 890 |
866 Observer observer1(512); | 891 Observer observer1(512); |
867 new_space->AddInlineAllocationObserver(&observer1); | 892 new_space->AddAllocationObserver(&observer1); |
868 Observer observer2(576); | 893 Observer observer2(576); |
869 new_space->AddInlineAllocationObserver(&observer2); | 894 new_space->AddAllocationObserver(&observer2); |
870 | 895 |
871 for (int i = 0; i < 512; ++i) { | 896 for (int i = 0; i < 512; ++i) { |
872 AllocateUnaligned(new_space, 32); | 897 AllocateUnaligned(new_space, 32); |
873 } | 898 } |
874 | 899 |
875 new_space->RemoveInlineAllocationObserver(&observer1); | 900 new_space->RemoveAllocationObserver(&observer1); |
876 new_space->RemoveInlineAllocationObserver(&observer2); | 901 new_space->RemoveAllocationObserver(&observer2); |
877 | 902 |
878 CHECK_EQ(observer1.count(), 32); | 903 CHECK_EQ(observer1.count(), 32); |
879 CHECK_EQ(observer2.count(), 28); | 904 CHECK_EQ(observer2.count(), 28); |
880 } | 905 } |
881 isolate->Dispose(); | 906 isolate->Dispose(); |
882 } | 907 } |
883 | 908 |
884 } // namespace internal | 909 } // namespace internal |
885 } // namespace v8 | 910 } // namespace v8 |
OLD | NEW |