OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/once.h" | 9 #include "src/base/once.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 1968 matching lines...) |
1979 SLOW_ASSERT(object_size <= Page::kMaxRegularHeapObjectSize); | 1979 SLOW_ASSERT(object_size <= Page::kMaxRegularHeapObjectSize); |
1980 SLOW_ASSERT(object->Size() == object_size); | 1980 SLOW_ASSERT(object->Size() == object_size); |
1981 | 1981 |
1982 int allocation_size = object_size; | 1982 int allocation_size = object_size; |
1983 if (alignment != kObjectAlignment) { | 1983 if (alignment != kObjectAlignment) { |
1984 ASSERT(alignment == kDoubleAlignment); | 1984 ASSERT(alignment == kDoubleAlignment); |
1985 allocation_size += kPointerSize; | 1985 allocation_size += kPointerSize; |
1986 } | 1986 } |
1987 | 1987 |
1988 Heap* heap = map->GetHeap(); | 1988 Heap* heap = map->GetHeap(); |
1989 if (heap->ShouldBePromoted(object->address(), object_size)) { | 1989 AllocationResult allocation; |
1990 AllocationResult allocation; | |
1991 | 1990 |
1992 if (object_contents == DATA_OBJECT) { | 1991 if (!heap->ShouldBePromoted(object->address(), object_size)) { |
1993 ASSERT(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE)); | 1992 ASSERT(heap->AllowedToBeMigrated(object, NEW_SPACE)); |
1994 allocation = heap->old_data_space()->AllocateRaw(allocation_size); | 1993 allocation = heap->new_space()->AllocateRaw(allocation_size); |
1995 } else { | |
1996 ASSERT(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE)); | |
1997 allocation = heap->old_pointer_space()->AllocateRaw(allocation_size); | |
1998 } | |
1999 | 1994 |
| 1995 // Allocation in the other semi-space may fail due to fragmentation. |
| 1996 // In that case we allocate in the old generation. |
2000 HeapObject* target = NULL; // Initialization to please compiler. | 1997 HeapObject* target = NULL; // Initialization to please compiler. |
2001 if (allocation.To(&target)) { | 1998 if (allocation.To(&target)) { |
2002 if (alignment != kObjectAlignment) { | 1999 if (alignment != kObjectAlignment) { |
2003 target = EnsureDoubleAligned(heap, target, allocation_size); | 2000 target = EnsureDoubleAligned(heap, target, allocation_size); |
2004 } | 2001 } |
2005 | 2002 |
2006 // Order is important: slot might be inside of the target if target | 2003 // Order is important: slot might be inside of the target if target |
2007 // was allocated over a dead object and slot comes from the store | 2004 // was allocated over a dead object and slot comes from the store |
2008 // buffer. | 2005 // buffer. |
2009 *slot = target; | 2006 *slot = target; |
2010 MigrateObject(heap, object, target, object_size); | 2007 MigrateObject(heap, object, target, object_size); |
2011 | 2008 |
2012 if (object_contents == POINTER_OBJECT) { | 2009 heap->promotion_queue()->SetNewLimit(heap->new_space()->top()); |
2013 if (map->instance_type() == JS_FUNCTION_TYPE) { | 2010 heap->IncrementSemiSpaceCopiedObjectSize(object_size); |
2014 heap->promotion_queue()->insert( | |
2015 target, JSFunction::kNonWeakFieldsEndOffset); | |
2016 } else { | |
2017 heap->promotion_queue()->insert(target, object_size); | |
2018 } | |
2019 } | |
2020 | |
2021 heap->IncrementPromotedObjectsSize(object_size); | |
2022 return; | 2011 return; |
2023 } | 2012 } |
2024 } | 2013 } |
2025 ASSERT(heap->AllowedToBeMigrated(object, NEW_SPACE)); | |
2026 AllocationResult allocation = | |
2027 heap->new_space()->AllocateRaw(allocation_size); | |
2028 heap->promotion_queue()->SetNewLimit(heap->new_space()->top()); | |
2029 | 2014 |
2030 // Allocation in the other semi-space may fail due to fragmentation. | 2015 if (object_contents == DATA_OBJECT) { |
2031 // In that case we allocate in the old generation. | 2016 ASSERT(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE)); |
2032 if (allocation.IsRetry()) { | 2017 allocation = heap->old_data_space()->AllocateRaw(allocation_size); |
2033 if (object_contents == DATA_OBJECT) { | 2018 } else { |
2034 ASSERT(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE)); | 2019 ASSERT(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE)); |
2035 allocation = heap->old_data_space()->AllocateRaw(allocation_size); | 2020 allocation = heap->old_pointer_space()->AllocateRaw(allocation_size); |
2036 } else { | |
2037 ASSERT(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE)); | |
2038 allocation = heap->old_pointer_space()->AllocateRaw(allocation_size); | |
2039 } | |
2040 } | 2021 } |
2041 | 2022 |
2042 HeapObject* target = HeapObject::cast(allocation.ToObjectChecked()); | 2023 HeapObject* target = NULL; // Initialization to please compiler. |
| 2024 if (allocation.To(&target)) { |
| 2025 if (alignment != kObjectAlignment) { |
| 2026 target = EnsureDoubleAligned(heap, target, allocation_size); |
| 2027 } |
2043 | 2028 |
2044 if (alignment != kObjectAlignment) { | 2029 // Order is important: slot might be inside of the target if target |
2045 target = EnsureDoubleAligned(heap, target, allocation_size); | 2030 // was allocated over a dead object and slot comes from the store |
| 2031 // buffer. |
| 2032 *slot = target; |
| 2033 MigrateObject(heap, object, target, object_size); |
| 2034 |
| 2035 if (object_contents == POINTER_OBJECT) { |
| 2036 if (map->instance_type() == JS_FUNCTION_TYPE) { |
| 2037 heap->promotion_queue()->insert(target, |
| 2038 JSFunction::kNonWeakFieldsEndOffset); |
| 2039 } else { |
| 2040 heap->promotion_queue()->insert(target, object_size); |
| 2041 } |
| 2042 } |
| 2043 |
| 2044 heap->IncrementPromotedObjectsSize(object_size); |
| 2045 return; |
2046 } | 2046 } |
2047 | 2047 |
2048 // Order is important: slot might be inside of the target if target | 2048 // The scavenger should always have enough space available in the old |
2049 // was allocated over a dead object and slot comes from the store | 2049 // generation for promotion. Otherwise a full gc would have been triggered. |
2050 // buffer. | 2050 UNREACHABLE(); |
2051 *slot = target; | |
2052 MigrateObject(heap, object, target, object_size); | |
2053 heap->IncrementSemiSpaceCopiedObjectSize(object_size); | |
2054 return; | |
2055 } | 2051 } |
2056 | 2052 |
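// Reviewer note: a minimal, self-contained sketch (not V8 code) of the
// allocation order the rewritten EvacuateObject uses: try the to-space
// first and fall back to the old generation only when the semi-space copy
// fails. All names here (BumpSpace, Evacuate, should_be_promoted) are
// illustrative assumptions, not V8 APIs.
//
// #include <cstddef>
// #include <cstdio>
// #include <cstring>
// #include <vector>
//
// // Trivial bump allocator standing in for a space; returns NULL when full.
// struct BumpSpace {
//   std::vector<char> buffer;
//   size_t top = 0;
//   explicit BumpSpace(size_t size) : buffer(size) {}
//   void* AllocateRaw(size_t size) {
//     if (top + size > buffer.size()) return NULL;  // allocation failure
//     void* result = &buffer[top];
//     top += size;
//     return result;
//   }
// };
//
// // Copies |object| of |size| bytes into |to_space| if possible, otherwise
// // promotes it into |old_space|, mirroring the restructured control flow.
// void* Evacuate(BumpSpace& to_space, BumpSpace& old_space,
//                const void* object, size_t size, bool should_be_promoted) {
//   if (!should_be_promoted) {
//     if (void* target = to_space.AllocateRaw(size)) {
//       std::memcpy(target, object, size);  // stand-in for MigrateObject
//       return target;                      // stayed in the young generation
//     }
//     // Semi-space allocation failed (e.g. fragmentation): fall through and
//     // promote instead, as the new code path does.
//   }
//   if (void* target = old_space.AllocateRaw(size)) {
//     std::memcpy(target, object, size);    // promoted to the old generation
//     return target;
//   }
//   return NULL;  // the real scavenger treats this as UNREACHABLE()
// }
//
// int main() {
//   BumpSpace to_space(64), old_space(1024);
//   char payload[48] = "young object";
//   void* a = Evacuate(to_space, old_space, payload, sizeof(payload), false);
//   void* b = Evacuate(to_space, old_space, payload, sizeof(payload), false);
//   std::printf("copied to to-space: %p, promoted on fallback: %p\n", a, b);
//   return 0;
// }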
2057 | 2053 |
2058 static inline void EvacuateJSFunction(Map* map, | 2054 static inline void EvacuateJSFunction(Map* map, |
2059 HeapObject** slot, | 2055 HeapObject** slot, |
2060 HeapObject* object) { | 2056 HeapObject* object) { |
2061 ObjectEvacuationStrategy<POINTER_OBJECT>:: | 2057 ObjectEvacuationStrategy<POINTER_OBJECT>:: |
2062 template VisitSpecialized<JSFunction::kSize>(map, slot, object); | 2058 template VisitSpecialized<JSFunction::kSize>(map, slot, object); |
2063 | 2059 |
2064 HeapObject* target = *slot; | 2060 HeapObject* target = *slot; |
(...skipping 4305 matching lines...) |
6370 static_cast<int>(object_sizes_last_time_[index])); | 6366 static_cast<int>(object_sizes_last_time_[index])); |
6371 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 6367 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
6372 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6368 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
6373 | 6369 |
6374 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 6370 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
6375 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 6371 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
6376 ClearObjectStats(); | 6372 ClearObjectStats(); |
6377 } | 6373 } |
6378 | 6374 |
6379 } } // namespace v8::internal | 6375 } } // namespace v8::internal |