OLD | NEW |
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 | 2 |
3 #include <stdlib.h> | 3 #include <stdlib.h> |
4 | 4 |
5 #include "v8.h" | 5 #include "v8.h" |
6 | 6 |
7 #include "execution.h" | 7 #include "execution.h" |
8 #include "factory.h" | 8 #include "factory.h" |
9 #include "macro-assembler.h" | 9 #include "macro-assembler.h" |
10 #include "global-handles.h" | 10 #include "global-handles.h" |
11 #include "cctest.h" | 11 #include "cctest.h" |
(...skipping 653 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
665 | 665 |
666 // Allocate the object. | 666 // Allocate the object. |
667 Handle<JSObject> object = FACTORY->NewJSObject(function); | 667 Handle<JSObject> object = FACTORY->NewJSObject(function); |
668 Handle<JSArray> array = Handle<JSArray>::cast(object); | 668 Handle<JSArray> array = Handle<JSArray>::cast(object); |
669 // We just initialized the VM, no heap allocation failure yet. | 669 // We just initialized the VM, no heap allocation failure yet. |
670 Object* ok = array->Initialize(0)->ToObjectChecked(); | 670 Object* ok = array->Initialize(0)->ToObjectChecked(); |
671 | 671 |
672 // Set array length to 0. | 672 // Set array length to 0. |
673 ok = array->SetElementsLength(Smi::FromInt(0))->ToObjectChecked(); | 673 ok = array->SetElementsLength(Smi::FromInt(0))->ToObjectChecked(); |
674 CHECK_EQ(Smi::FromInt(0), array->length()); | 674 CHECK_EQ(Smi::FromInt(0), array->length()); |
675 CHECK(array->HasFastElements()); // Must be in fast mode. | 675 // Must be in fast mode. |
| 676 CHECK(array->HasFastTypeElements()); |
676 | 677 |
677 // array[length] = name. | 678 // array[length] = name. |
678 ok = array->SetElement(0, *name, kNonStrictMode, true)->ToObjectChecked(); | 679 ok = array->SetElement(0, *name, kNonStrictMode, true)->ToObjectChecked(); |
679 CHECK_EQ(Smi::FromInt(1), array->length()); | 680 CHECK_EQ(Smi::FromInt(1), array->length()); |
680 CHECK_EQ(array->GetElement(0), *name); | 681 CHECK_EQ(array->GetElement(0), *name); |
681 | 682 |
682 // Set array length with larger than smi value. | 683 // Set array length with larger than smi value. |
683 Handle<Object> length = | 684 Handle<Object> length = |
684 FACTORY->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1); | 685 FACTORY->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1); |
685 ok = array->SetElementsLength(*length)->ToObjectChecked(); | 686 ok = array->SetElementsLength(*length)->ToObjectChecked(); |
(...skipping 145 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
831 delete[] str; | 832 delete[] str; |
832 | 833 |
833 // Add a Map object to look for. | 834 // Add a Map object to look for. |
834 objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map()); | 835 objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map()); |
835 | 836 |
836 CHECK_EQ(objs_count, next_objs_index); | 837 CHECK_EQ(objs_count, next_objs_index); |
837 CHECK_EQ(objs_count, ObjectsFoundInHeap(objs, objs_count)); | 838 CHECK_EQ(objs_count, ObjectsFoundInHeap(objs, objs_count)); |
838 } | 839 } |
839 | 840 |
840 | 841 |
841 TEST(LargeObjectSpaceContains) { | |
842 InitializeVM(); | |
843 | |
844 HEAP->CollectGarbage(NEW_SPACE); | |
845 | |
846 Address current_top = HEAP->new_space()->top(); | |
847 Page* page = Page::FromAddress(current_top); | |
848 Address current_page = page->address(); | |
849 Address next_page = current_page + Page::kPageSize; | |
850 int bytes_to_page = static_cast<int>(next_page - current_top); | |
851 if (bytes_to_page <= FixedArray::kHeaderSize) { | |
852 // Alas, need to cross another page to be able to | |
853 // put desired value. | |
854 next_page += Page::kPageSize; | |
855 bytes_to_page = static_cast<int>(next_page - current_top); | |
856 } | |
857 CHECK(bytes_to_page > FixedArray::kHeaderSize); | |
858 | |
859 intptr_t* flags_ptr = &Page::FromAddress(next_page)->flags_; | |
860 Address flags_addr = reinterpret_cast<Address>(flags_ptr); | |
861 | |
862 int bytes_to_allocate = | |
863 static_cast<int>(flags_addr - current_top) + kPointerSize; | |
864 | |
865 int n_elements = (bytes_to_allocate - FixedArray::kHeaderSize) / | |
866 kPointerSize; | |
867 CHECK_EQ(bytes_to_allocate, FixedArray::SizeFor(n_elements)); | |
868 FixedArray* array = FixedArray::cast( | |
869 HEAP->AllocateFixedArray(n_elements)->ToObjectChecked()); | |
870 | |
871 int index = n_elements - 1; | |
872 CHECK_EQ(flags_ptr, | |
873 HeapObject::RawField(array, FixedArray::OffsetOfElementAt(index))); | |
874 array->set(index, Smi::FromInt(0)); | |
875 // This could have turned next page into LargeObjectPage: |
876 // CHECK(Page::FromAddress(next_page)->IsLargeObjectPage()); | |
877 | |
878 HeapObject* addr = HeapObject::FromAddress(next_page + 2 * kPointerSize); | |
879 CHECK(HEAP->new_space()->Contains(addr)); | |
880 CHECK(!HEAP->lo_space()->Contains(addr)); | |
881 } | |
882 | |
883 | |
884 TEST(EmptyHandleEscapeFrom) { | 842 TEST(EmptyHandleEscapeFrom) { |
885 InitializeVM(); | 843 InitializeVM(); |
886 | 844 |
887 v8::HandleScope scope; | 845 v8::HandleScope scope; |
888 Handle<JSObject> runaway; | 846 Handle<JSObject> runaway; |
889 | 847 |
890 { | 848 { |
891 v8::HandleScope nested; | 849 v8::HandleScope nested; |
892 Handle<JSObject> empty; | 850 Handle<JSObject> empty; |
893 runaway = empty.EscapeFrom(&nested); | 851 runaway = empty.EscapeFrom(&nested); |
894 } | 852 } |
895 | 853 |
896 CHECK(runaway.is_null()); | 854 CHECK(runaway.is_null()); |
897 } | 855 } |
898 | 856 |
899 | 857 |
900 static int LenFromSize(int size) { | 858 static int LenFromSize(int size) { |
901 return (size - FixedArray::kHeaderSize) / kPointerSize; | 859 return (size - FixedArray::kHeaderSize) / kPointerSize; |
902 } | 860 } |
903 | 861 |
904 | 862 |
905 TEST(Regression39128) { | 863 TEST(Regression39128) { |
906 // Test case for crbug.com/39128. | 864 // Test case for crbug.com/39128. |
907 InitializeVM(); | 865 InitializeVM(); |
908 | 866 |
909 // Increase the chance of 'bump-the-pointer' allocation in old space. | 867 // Increase the chance of 'bump-the-pointer' allocation in old space. |
910 bool force_compaction = true; | 868 HEAP->CollectAllGarbage(Heap::kNoGCFlags); |
911 HEAP->CollectAllGarbage(force_compaction); | |
912 | 869 |
913 v8::HandleScope scope; | 870 v8::HandleScope scope; |
914 | 871 |
915 // The plan: create JSObject which references objects in new space. | 872 // The plan: create JSObject which references objects in new space. |
916 // Then clone this object (forcing it to go into old space) and check | 873 // Then clone this object (forcing it to go into old space) and check |
917 // that region dirty marks are updated correctly. | 874 // that region dirty marks are updated correctly. |
918 | 875 |
919 // Step 1: prepare a map for the object. We add 1 inobject property to it. | 876 // Step 1: prepare a map for the object. We add 1 inobject property to it. |
920 Handle<JSFunction> object_ctor( | 877 Handle<JSFunction> object_ctor( |
921 Isolate::Current()->global_context()->object_function()); | 878 Isolate::Current()->global_context()->object_function()); |
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
968 // in old pointer space. | 925 // in old pointer space. |
969 Address old_pointer_space_top = HEAP->old_pointer_space()->top(); | 926 Address old_pointer_space_top = HEAP->old_pointer_space()->top(); |
970 AlwaysAllocateScope aa_scope; | 927 AlwaysAllocateScope aa_scope; |
971 Object* clone_obj = HEAP->CopyJSObject(jsobject)->ToObjectChecked(); | 928 Object* clone_obj = HEAP->CopyJSObject(jsobject)->ToObjectChecked(); |
972 JSObject* clone = JSObject::cast(clone_obj); | 929 JSObject* clone = JSObject::cast(clone_obj); |
973 if (clone->address() != old_pointer_space_top) { | 930 if (clone->address() != old_pointer_space_top) { |
974 // Alas, got allocated from free list, we cannot do checks. | 931 // Alas, got allocated from free list, we cannot do checks. |
975 return; | 932 return; |
976 } | 933 } |
977 CHECK(HEAP->old_pointer_space()->Contains(clone->address())); | 934 CHECK(HEAP->old_pointer_space()->Contains(clone->address())); |
978 | |
979 // Step 5: verify validity of region dirty marks. | |
980 Address clone_addr = clone->address(); | |
981 Page* page = Page::FromAddress(clone_addr); | |
982 // Check that region covering inobject property 1 is marked dirty. | |
983 CHECK(page->IsRegionDirty(clone_addr + (object_size - kPointerSize))); | |
984 } | 935 } |
985 | 936 |
986 | 937 |
987 TEST(TestCodeFlushing) { | 938 TEST(TestCodeFlushing) { |
988 i::FLAG_allow_natives_syntax = true; | 939 i::FLAG_allow_natives_syntax = true; |
989 // If we do not flush code this test is invalid. | 940 // If we do not flush code this test is invalid. |
990 if (!FLAG_flush_code) return; | 941 if (!FLAG_flush_code) return; |
991 InitializeVM(); | 942 InitializeVM(); |
992 v8::HandleScope scope; | 943 v8::HandleScope scope; |
993 const char* source = "function foo() {" | 944 const char* source = "function foo() {" |
994 " var x = 42;" | 945 " var x = 42;" |
995 " var y = 42;" | 946 " var y = 42;" |
996 " var z = x + y;" | 947 " var z = x + y;" |
997 "};" | 948 "};" |
998 "foo()"; | 949 "foo()"; |
999 Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo"); | 950 Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo"); |
1000 | 951 |
1001 // This compile will add the code to the compilation cache. | 952 // This compile will add the code to the compilation cache. |
1002 { v8::HandleScope scope; | 953 { v8::HandleScope scope; |
1003 CompileRun(source); | 954 CompileRun(source); |
1004 } | 955 } |
1005 | 956 |
1006 // Check function is compiled. | 957 // Check function is compiled. |
1007 Object* func_value = Isolate::Current()->context()->global()-> | 958 Object* func_value = Isolate::Current()->context()->global()-> |
1008 GetProperty(*foo_name)->ToObjectChecked(); | 959 GetProperty(*foo_name)->ToObjectChecked(); |
1009 CHECK(func_value->IsJSFunction()); | 960 CHECK(func_value->IsJSFunction()); |
1010 Handle<JSFunction> function(JSFunction::cast(func_value)); | 961 Handle<JSFunction> function(JSFunction::cast(func_value)); |
1011 CHECK(function->shared()->is_compiled()); | 962 CHECK(function->shared()->is_compiled()); |
1012 | 963 |
1013 HEAP->CollectAllGarbage(true); | 964 // TODO(1609) Currently incremental marker does not support code flushing. |
1014 HEAP->CollectAllGarbage(true); | 965 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask); |
| 966 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask); |
1015 | 967 |
1016 CHECK(function->shared()->is_compiled()); | 968 CHECK(function->shared()->is_compiled()); |
1017 | 969 |
1018 HEAP->CollectAllGarbage(true); | 970 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask); |
1019 HEAP->CollectAllGarbage(true); | 971 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask); |
1020 HEAP->CollectAllGarbage(true); | 972 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask); |
1021 HEAP->CollectAllGarbage(true); | 973 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask); |
1022 HEAP->CollectAllGarbage(true); | 974 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask); |
1023 HEAP->CollectAllGarbage(true); | 975 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask); |
1024 | 976 |
1025 // foo should no longer be in the compilation cache | 977 // foo should no longer be in the compilation cache |
1026 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); | 978 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); |
1027 CHECK(!function->is_compiled() || function->IsOptimized()); | 979 CHECK(!function->is_compiled() || function->IsOptimized()); |
1028 // Call foo to get it recompiled. | 980 // Call foo to get it recompiled. |
1029 CompileRun("foo()"); | 981 CompileRun("foo()"); |
1030 CHECK(function->shared()->is_compiled()); | 982 CHECK(function->shared()->is_compiled()); |
1031 CHECK(function->is_compiled()); | 983 CHECK(function->is_compiled()); |
1032 } | 984 } |
1033 | 985 |
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1102 // Remove function f1, and | 1054 // Remove function f1, and |
1103 CompileRun("f1=null"); | 1055 CompileRun("f1=null"); |
1104 | 1056 |
1105 // Scavenge treats these references as strong. | 1057 // Scavenge treats these references as strong. |
1106 for (int j = 0; j < 10; j++) { | 1058 for (int j = 0; j < 10; j++) { |
1107 HEAP->PerformScavenge(); | 1059 HEAP->PerformScavenge(); |
1108 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i])); | 1060 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i])); |
1109 } | 1061 } |
1110 | 1062 |
1111 // Mark compact handles the weak references. | 1063 // Mark compact handles the weak references. |
1112 HEAP->CollectAllGarbage(true); | 1064 HEAP->CollectAllGarbage(Heap::kNoGCFlags); |
1113 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i])); | 1065 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i])); |
1114 | 1066 |
1115 // Get rid of f3 and f5 in the same way. | 1067 // Get rid of f3 and f5 in the same way. |
1116 CompileRun("f3=null"); | 1068 CompileRun("f3=null"); |
1117 for (int j = 0; j < 10; j++) { | 1069 for (int j = 0; j < 10; j++) { |
1118 HEAP->PerformScavenge(); | 1070 HEAP->PerformScavenge(); |
1119 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i])); | 1071 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i])); |
1120 } | 1072 } |
1121 HEAP->CollectAllGarbage(true); | 1073 HEAP->CollectAllGarbage(Heap::kNoGCFlags); |
1122 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i])); | 1074 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i])); |
1123 CompileRun("f5=null"); | 1075 CompileRun("f5=null"); |
1124 for (int j = 0; j < 10; j++) { | 1076 for (int j = 0; j < 10; j++) { |
1125 HEAP->PerformScavenge(); | 1077 HEAP->PerformScavenge(); |
1126 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i])); | 1078 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i])); |
1127 } | 1079 } |
1128 HEAP->CollectAllGarbage(true); | 1080 HEAP->CollectAllGarbage(Heap::kNoGCFlags); |
1129 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i])); | 1081 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i])); |
1130 | 1082 |
1131 ctx[i]->Exit(); | 1083 ctx[i]->Exit(); |
1132 } | 1084 } |
1133 | 1085 |
1134 // Force compilation cache cleanup. | 1086 // Force compilation cache cleanup. |
1135 HEAP->CollectAllGarbage(true); | 1087 HEAP->CollectAllGarbage(Heap::kNoGCFlags); |
1136 | 1088 |
1137 // Dispose the global contexts one by one. | 1089 // Dispose the global contexts one by one. |
1138 for (int i = 0; i < kNumTestContexts; i++) { | 1090 for (int i = 0; i < kNumTestContexts; i++) { |
1139 ctx[i].Dispose(); | 1091 ctx[i].Dispose(); |
1140 ctx[i].Clear(); | 1092 ctx[i].Clear(); |
1141 | 1093 |
1142 // Scavenge treats these references as strong. | 1094 // Scavenge treats these references as strong. |
1143 for (int j = 0; j < 10; j++) { | 1095 for (int j = 0; j < 10; j++) { |
1144 HEAP->PerformScavenge(); | 1096 HEAP->PerformScavenge(); |
1145 CHECK_EQ(kNumTestContexts - i, CountGlobalContexts()); | 1097 CHECK_EQ(kNumTestContexts - i, CountGlobalContexts()); |
1146 } | 1098 } |
1147 | 1099 |
1148 // Mark compact handles the weak references. | 1100 // Mark compact handles the weak references. |
1149 HEAP->CollectAllGarbage(true); | 1101 HEAP->CollectAllGarbage(Heap::kNoGCFlags); |
1150 CHECK_EQ(kNumTestContexts - i - 1, CountGlobalContexts()); | 1102 CHECK_EQ(kNumTestContexts - i - 1, CountGlobalContexts()); |
1151 } | 1103 } |
1152 | 1104 |
1153 CHECK_EQ(0, CountGlobalContexts()); | 1105 CHECK_EQ(0, CountGlobalContexts()); |
1154 } | 1106 } |
1155 | 1107 |
1156 | 1108 |
1157 // Count the number of global contexts in the weak list of global contexts | 1109 // Count the number of global contexts in the weak list of global contexts |
1158 // causing a GC after the specified number of elements. | 1110 // causing a GC after the specified number of elements. |
1159 static int CountGlobalContextsWithGC(int n) { | 1111 static int CountGlobalContextsWithGC(int n) { |
1160 int count = 0; | 1112 int count = 0; |
1161 Handle<Object> object(HEAP->global_contexts_list()); | 1113 Handle<Object> object(HEAP->global_contexts_list()); |
1162 while (!object->IsUndefined()) { | 1114 while (!object->IsUndefined()) { |
1163 count++; | 1115 count++; |
1164 if (count == n) HEAP->CollectAllGarbage(true); | 1116 if (count == n) HEAP->CollectAllGarbage(Heap::kNoGCFlags); |
1165 object = | 1117 object = |
1166 Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK)); | 1118 Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK)); |
1167 } | 1119 } |
1168 return count; | 1120 return count; |
1169 } | 1121 } |
1170 | 1122 |
1171 | 1123 |
1172 // Count the number of user functions in the weak list of optimized | 1124 // Count the number of user functions in the weak list of optimized |
1173 // functions attached to a global context causing a GC after the | 1125 // functions attached to a global context causing a GC after the |
1174 // specified number of elements. | 1126 // specified number of elements. |
1175 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context, | 1127 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context, |
1176 int n) { | 1128 int n) { |
1177 int count = 0; | 1129 int count = 0; |
1178 Handle<Context> icontext = v8::Utils::OpenHandle(*context); | 1130 Handle<Context> icontext = v8::Utils::OpenHandle(*context); |
1179 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST)); | 1131 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST)); |
1180 while (object->IsJSFunction() && | 1132 while (object->IsJSFunction() && |
1181 !Handle<JSFunction>::cast(object)->IsBuiltin()) { | 1133 !Handle<JSFunction>::cast(object)->IsBuiltin()) { |
1182 count++; | 1134 count++; |
1183 if (count == n) HEAP->CollectAllGarbage(true); | 1135 if (count == n) HEAP->CollectAllGarbage(Heap::kNoGCFlags); |
1184 object = Handle<Object>( | 1136 object = Handle<Object>( |
1185 Object::cast(JSFunction::cast(*object)->next_function_link())); | 1137 Object::cast(JSFunction::cast(*object)->next_function_link())); |
1186 } | 1138 } |
1187 return count; | 1139 return count; |
1188 } | 1140 } |
1189 | 1141 |
1190 | 1142 |
1191 TEST(TestInternalWeakListsTraverseWithGC) { | 1143 TEST(TestInternalWeakListsTraverseWithGC) { |
1192 v8::V8::Initialize(); | 1144 v8::V8::Initialize(); |
1193 | 1145 |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1233 CompileRun("f5()"); | 1185 CompileRun("f5()"); |
1234 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0])); | 1186 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0])); |
1235 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4)); | 1187 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4)); |
1236 | 1188 |
1237 ctx[0]->Exit(); | 1189 ctx[0]->Exit(); |
1238 } | 1190 } |
1239 | 1191 |
1240 | 1192 |
1241 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) { | 1193 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) { |
1242 InitializeVM(); | 1194 InitializeVM(); |
| 1195 HEAP->EnsureHeapIsIterable(); |
1243 intptr_t size_of_objects_1 = HEAP->SizeOfObjects(); | 1196 intptr_t size_of_objects_1 = HEAP->SizeOfObjects(); |
1244 HeapIterator iterator(HeapIterator::kFilterFreeListNodes); | 1197 HeapIterator iterator; |
1245 intptr_t size_of_objects_2 = 0; | 1198 intptr_t size_of_objects_2 = 0; |
1246 for (HeapObject* obj = iterator.next(); | 1199 for (HeapObject* obj = iterator.next(); |
1247 obj != NULL; | 1200 obj != NULL; |
1248 obj = iterator.next()) { | 1201 obj = iterator.next()) { |
1249 size_of_objects_2 += obj->Size(); | 1202 size_of_objects_2 += obj->Size(); |
1250 } | 1203 } |
1251 // Delta must be within 1% of the larger result. | 1204 // Delta must be within 5% of the larger result. |
| 1205 // TODO(gc): Tighten this up by distinguishing between byte |
| 1206 // arrays that are real and those that merely mark free space |
| 1207 // on the heap. |
1252 if (size_of_objects_1 > size_of_objects_2) { | 1208 if (size_of_objects_1 > size_of_objects_2) { |
1253 intptr_t delta = size_of_objects_1 - size_of_objects_2; | 1209 intptr_t delta = size_of_objects_1 - size_of_objects_2; |
1254 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, " | 1210 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, " |
1255 "Iterator: %" V8_PTR_PREFIX "d, " | 1211 "Iterator: %" V8_PTR_PREFIX "d, " |
1256 "delta: %" V8_PTR_PREFIX "d\n", | 1212 "delta: %" V8_PTR_PREFIX "d\n", |
1257 size_of_objects_1, size_of_objects_2, delta); | 1213 size_of_objects_1, size_of_objects_2, delta); |
1258 CHECK_GT(size_of_objects_1 / 100, delta); | 1214 CHECK_GT(size_of_objects_1 / 20, delta); |
1259 } else { | 1215 } else { |
1260 intptr_t delta = size_of_objects_2 - size_of_objects_1; | 1216 intptr_t delta = size_of_objects_2 - size_of_objects_1; |
1261 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, " | 1217 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, " |
1262 "Iterator: %" V8_PTR_PREFIX "d, " | 1218 "Iterator: %" V8_PTR_PREFIX "d, " |
1263 "delta: %" V8_PTR_PREFIX "d\n", | 1219 "delta: %" V8_PTR_PREFIX "d\n", |
1264 size_of_objects_1, size_of_objects_2, delta); | 1220 size_of_objects_1, size_of_objects_2, delta); |
1265 CHECK_GT(size_of_objects_2 / 100, delta); | 1221 CHECK_GT(size_of_objects_2 / 20, delta); |
1266 } | 1222 } |
1267 } | 1223 } |
1268 | 1224 |
1269 | 1225 |
1270 class HeapIteratorTestHelper { | 1226 TEST(GrowAndShrinkNewSpace) { |
1271 public: | 1227 InitializeVM(); |
1272 HeapIteratorTestHelper(Object* a, Object* b) | 1228 NewSpace* new_space = HEAP->new_space(); |
1273 : a_(a), b_(b), a_found_(false), b_found_(false) {} | 1229 |
1274 bool a_found() { return a_found_; } | 1230 // Explicitly growing should double the space capacity. |
1275 bool b_found() { return b_found_; } | 1231 intptr_t old_capacity, new_capacity; |
1276 void IterateHeap(HeapIterator::HeapObjectsFiltering mode) { | 1232 old_capacity = new_space->Capacity(); |
1277 HeapIterator iterator(mode); | 1233 new_space->Grow(); |
1278 for (HeapObject* obj = iterator.next(); | 1234 new_capacity = new_space->Capacity(); |
1279 obj != NULL; | 1235 CHECK(2 * old_capacity == new_capacity); |
1280 obj = iterator.next()) { | 1236 |
1281 if (obj == a_) | 1237 // Fill up new space to the point that it is completely full. Make sure |
1282 a_found_ = true; | 1238 // that the scavenger does not undo the filling. |
1283 else if (obj == b_) | 1239 old_capacity = new_space->Capacity(); |
1284 b_found_ = true; | 1240 { |
| 1241 v8::HandleScope scope; |
| 1242 AlwaysAllocateScope always_allocate; |
| 1243 intptr_t available = new_space->EffectiveCapacity() - new_space->Size(); |
| 1244 intptr_t number_of_fillers = (available / FixedArray::SizeFor(1000)) - 10; |
| 1245 for (intptr_t i = 0; i < number_of_fillers; i++) { |
| 1246 CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(1000, NOT_TENURED))); |
1285 } | 1247 } |
1286 } | 1248 } |
1287 private: | 1249 new_capacity = new_space->Capacity(); |
1288 Object* a_; | 1250 CHECK(old_capacity == new_capacity); |
1289 Object* b_; | |
1290 bool a_found_; | |
1291 bool b_found_; | |
1292 }; | |
1293 | 1251 |
1294 TEST(HeapIteratorFilterUnreachable) { | 1252 // Explicitly shrinking should not affect space capacity. |
1295 InitializeVM(); | 1253 old_capacity = new_space->Capacity(); |
1296 v8::HandleScope scope; | 1254 new_space->Shrink(); |
1297 CompileRun("a = {}; b = {};"); | 1255 new_capacity = new_space->Capacity(); |
1298 v8::Handle<Object> a(ISOLATE->context()->global()->GetProperty( | 1256 CHECK(old_capacity == new_capacity); |
1299 *FACTORY->LookupAsciiSymbol("a"))->ToObjectChecked()); | 1257 |
1300 v8::Handle<Object> b(ISOLATE->context()->global()->GetProperty( | 1258 // Let the scavenger empty the new space. |
1301 *FACTORY->LookupAsciiSymbol("b"))->ToObjectChecked()); | 1259 HEAP->CollectGarbage(NEW_SPACE); |
1302 CHECK_NE(*a, *b); | 1260 CHECK_LE(new_space->Size(), old_capacity); |
1303 { | 1261 |
1304 HeapIteratorTestHelper helper(*a, *b); | 1262 // Explicitly shrinking should halve the space capacity. |
1305 helper.IterateHeap(HeapIterator::kFilterUnreachable); | 1263 old_capacity = new_space->Capacity(); |
1306 CHECK(helper.a_found()); | 1264 new_space->Shrink(); |
1307 CHECK(helper.b_found()); | 1265 new_capacity = new_space->Capacity(); |
1308 } | 1266 CHECK(old_capacity == 2 * new_capacity); |
1309 CHECK(ISOLATE->context()->global()->DeleteProperty( | 1267 |
1310 *FACTORY->LookupAsciiSymbol("a"), JSObject::FORCE_DELETION)); | 1268 // Consecutive shrinking should not affect space capacity. |
1311 // We ensure that GC will not happen, so our raw pointer stays valid. | 1269 old_capacity = new_space->Capacity(); |
1312 AssertNoAllocation no_alloc; | 1270 new_space->Shrink(); |
1313 Object* a_saved = *a; | 1271 new_space->Shrink(); |
1314 a.Clear(); | 1272 new_space->Shrink(); |
1315 // Verify that "a" object still resides in the heap... | 1273 new_capacity = new_space->Capacity(); |
1316 { | 1274 CHECK(old_capacity == new_capacity); |
1317 HeapIteratorTestHelper helper(a_saved, *b); | |
1318 helper.IterateHeap(HeapIterator::kNoFiltering); | |
1319 CHECK(helper.a_found()); | |
1320 CHECK(helper.b_found()); | |
1321 } | |
1322 // ...but is now unreachable. | |
1323 { | |
1324 HeapIteratorTestHelper helper(a_saved, *b); | |
1325 helper.IterateHeap(HeapIterator::kFilterUnreachable); | |
1326 CHECK(!helper.a_found()); | |
1327 CHECK(helper.b_found()); | |
1328 } | |
1329 } | 1275 } |
OLD | NEW |