| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1950 matching lines...) |
| 1961 "var half_size_reg_exp;" | 1961 "var half_size_reg_exp;" |
| 1962 "while (reg_exp_source.length < 20 * 1024) {" | 1962 "while (reg_exp_source.length < 20 * 1024) {" |
| 1963 " half_size_reg_exp = reg_exp_source;" | 1963 " half_size_reg_exp = reg_exp_source;" |
| 1964 " reg_exp_source = reg_exp_source + reg_exp_source;" | 1964 " reg_exp_source = reg_exp_source + reg_exp_source;" |
| 1965 "}" | 1965 "}" |
| 1966 // Flatten string. | 1966 // Flatten string. |
| 1967 "reg_exp_source.match(/f/);"); | 1967 "reg_exp_source.match(/f/);"); |
| 1968 | 1968 |
| 1969 // Get initial heap size after several full GCs, which will stabilize | 1969 // Get initial heap size after several full GCs, which will stabilize |
| 1970 // the heap size and return with sweeping finished completely. | 1970 // the heap size and return with sweeping finished completely. |
| 1971 CcTest::heap()->CollectAllGarbage(); | 1971 CcTest::heap()->CollectAllAvailableGarbage("initial cleanup"); |
| 1972 CcTest::heap()->CollectAllGarbage(); | |
| 1973 CcTest::heap()->CollectAllGarbage(); | |
| 1974 CcTest::heap()->CollectAllGarbage(); | |
| 1975 CcTest::heap()->CollectAllGarbage(); | |
| 1976 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector(); | 1972 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector(); |
| 1977 if (collector->sweeping_in_progress()) { | 1973 if (collector->sweeping_in_progress()) { |
| 1978 collector->EnsureSweepingCompleted(); | 1974 collector->EnsureSweepingCompleted(); |
| 1979 } | 1975 } |
| 1980 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects()); | 1976 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects()); |
| 1981 | 1977 |
| 1982 CompileRun("'foo'.match(reg_exp_source);"); | 1978 CompileRun("'foo'.match(reg_exp_source);"); |
| 1983 CcTest::heap()->CollectAllGarbage(); | 1979 CcTest::heap()->CollectAllGarbage(); |
| 1984 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects()); | 1980 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects()); |
| 1985 | 1981 |
| (...skipping 12 matching lines...) |
| 1998 | 1994 |
| 1999 // Small regexp is half the size, but compiles to more than twice the code | 1995 // Small regexp is half the size, but compiles to more than twice the code |
| 2000 // due to the optimization steps. | 1996 // due to the optimization steps. |
| 2001 CHECK_GE(size_with_optimized_regexp, | 1997 CHECK_GE(size_with_optimized_regexp, |
| 2002 size_with_regexp + size_of_regexp_code * 2); | 1998 size_with_regexp + size_of_regexp_code * 2); |
| 2003 } | 1999 } |
| 2004 | 2000 |
| 2005 | 2001 |
| 2006 HEAP_TEST(TestSizeOfObjects) { | 2002 HEAP_TEST(TestSizeOfObjects) { |
| 2007 v8::V8::Initialize(); | 2003 v8::V8::Initialize(); |
| 2004 Heap* heap = CcTest::heap(); |
| 2005 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 2008 | 2006 |
| 2009 // Get initial heap size after several full GCs, which will stabilize | 2007 // Get initial heap size after several full GCs, which will stabilize |
| 2010 // the heap size and return with sweeping finished completely. | 2008 // the heap size and return with sweeping finished completely. |
| 2011 CcTest::heap()->CollectAllGarbage(); | 2009 heap->CollectAllAvailableGarbage("initial cleanup"); |
| 2012 CcTest::heap()->CollectAllGarbage(); | |
| 2013 CcTest::heap()->CollectAllGarbage(); | |
| 2014 CcTest::heap()->CollectAllGarbage(); | |
| 2015 CcTest::heap()->CollectAllGarbage(); | |
| 2016 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector(); | |
| 2017 if (collector->sweeping_in_progress()) { | 2010 if (collector->sweeping_in_progress()) { |
| 2018 collector->EnsureSweepingCompleted(); | 2011 collector->EnsureSweepingCompleted(); |
| 2019 } | 2012 } |
| 2020 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects()); | 2013 int initial_size = static_cast<int>(heap->SizeOfObjects()); |
| 2021 | 2014 |
| 2022 { | 2015 { |
| 2023 // Allocate objects on several different old-space pages so that | 2016 // Allocate objects on several different old-space pages so that |
| 2024 // concurrent sweeper threads will be busy sweeping the old space on | 2017 // concurrent sweeper threads will be busy sweeping the old space on |
| 2025 // subsequent GC runs. | 2018 // subsequent GC runs. |
| 2026 AlwaysAllocateScope always_allocate(CcTest::i_isolate()); | 2019 AlwaysAllocateScope always_allocate(CcTest::i_isolate()); |
| 2027 int filler_size = static_cast<int>(FixedArray::SizeFor(8192)); | 2020 int filler_size = static_cast<int>(FixedArray::SizeFor(8192)); |
| 2028 for (int i = 1; i <= 100; i++) { | 2021 for (int i = 1; i <= 100; i++) { |
| 2029 CcTest::heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked(); | 2022 heap->AllocateFixedArray(8192, TENURED).ToObjectChecked(); |
| 2030 CHECK_EQ(initial_size + i * filler_size, | 2023 CHECK_EQ(initial_size + i * filler_size, |
| 2031 static_cast<int>(CcTest::heap()->SizeOfObjects())); | 2024 static_cast<int>(heap->SizeOfObjects())); |
| 2032 } | 2025 } |
| 2033 } | 2026 } |
| 2034 | 2027 |
| 2035 // The heap size should go back to initial size after a full GC, even | 2028 // The heap size should go back to initial size after a full GC, even |
| 2036 // though sweeping didn't finish yet. | 2029 // though sweeping didn't finish yet. |
| 2037 CcTest::heap()->CollectAllGarbage(); | 2030 heap->CollectAllGarbage(); |
| 2038 | |
| 2039 // Normally sweeping would not be complete here, but no guarantees. | 2031 // Normally sweeping would not be complete here, but no guarantees. |
| 2040 | 2032 CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects())); |
| 2041 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects())); | |
| 2042 | |
| 2043 // Waiting for sweeper threads should not change heap size. | 2033 // Waiting for sweeper threads should not change heap size. |
| 2044 if (collector->sweeping_in_progress()) { | 2034 if (collector->sweeping_in_progress()) { |
| 2045 collector->EnsureSweepingCompleted(); | 2035 collector->EnsureSweepingCompleted(); |
| 2046 } | 2036 } |
| 2047 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects())); | 2037 CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects())); |
| 2048 } | 2038 } |
| 2049 | 2039 |
| 2050 | 2040 |
| 2051 TEST(TestAlignmentCalculations) { | 2041 TEST(TestAlignmentCalculations) { |
| 2052 // Maximum fill amounts are consistent. | 2042 // Maximum fill amounts are consistent. |
| 2053 int maximum_double_misalignment = kDoubleSize - kPointerSize; | 2043 int maximum_double_misalignment = kDoubleSize - kPointerSize; |
| 2054 int maximum_simd128_misalignment = kSimd128Size - kPointerSize; | 2044 int maximum_simd128_misalignment = kSimd128Size - kPointerSize; |
| 2055 int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned); | 2045 int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned); |
| 2056 CHECK_EQ(0, max_word_fill); | 2046 CHECK_EQ(0, max_word_fill); |
| 2057 int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned); | 2047 int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned); |
| (...skipping 5027 matching lines...) |
| 7085 chunk, chunk->area_end() - kPointerSize, chunk->area_end()); | 7075 chunk, chunk->area_end() - kPointerSize, chunk->area_end()); |
| 7086 slots[chunk->area_end() - kPointerSize] = false; | 7076 slots[chunk->area_end() - kPointerSize] = false; |
| 7087 RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) { | 7077 RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) { |
| 7088 CHECK(slots[addr]); | 7078 CHECK(slots[addr]); |
| 7089 return KEEP_SLOT; | 7079 return KEEP_SLOT; |
| 7090 }); | 7080 }); |
| 7091 } | 7081 } |
| 7092 | 7082 |
| 7093 } // namespace internal | 7083 } // namespace internal |
| 7094 } // namespace v8 | 7084 } // namespace v8 |