OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
(...skipping 1968 matching lines...)
1979 STATIC_ASSERT((ConstantPoolArray::kExtendedFirstOffset & | 1979 STATIC_ASSERT((ConstantPoolArray::kExtendedFirstOffset & |
1980 kDoubleAlignmentMask) == 0); // NOLINT | 1980 kDoubleAlignmentMask) == 0); // NOLINT |
1981 STATIC_ASSERT((FixedTypedArrayBase::kDataOffset & kDoubleAlignmentMask) == | 1981 STATIC_ASSERT((FixedTypedArrayBase::kDataOffset & kDoubleAlignmentMask) == |
1982 0); // NOLINT | 1982 0); // NOLINT |
1983 #ifdef V8_HOST_ARCH_32_BIT | 1983 #ifdef V8_HOST_ARCH_32_BIT |
1984 STATIC_ASSERT((HeapNumber::kValueOffset & kDoubleAlignmentMask) != | 1984 STATIC_ASSERT((HeapNumber::kValueOffset & kDoubleAlignmentMask) != |
1985 0); // NOLINT | 1985 0); // NOLINT |
1986 #endif | 1986 #endif |
1987 | 1987 |
1988 | 1988 |
1989 HeapObject* Heap::EnsureAligned(HeapObject* object, int size, | 1989 int Heap::GetMaximumFillToAlign(AllocationAlignment alignment) { |
1990 AllocationAlignment alignment) { | 1990 switch (alignment) { |
1991 if (alignment == kDoubleAligned && | 1991 case kWordAligned: |
1992 (OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) { | 1992 return 0; |
1993 CreateFillerObjectAt(object->address(), kPointerSize); | 1993 case kDoubleAligned: |
1994 return HeapObject::FromAddress(object->address() + kPointerSize); | 1994 case kDoubleUnaligned: |
1995 } else if (alignment == kDoubleUnaligned && | 1995 return kDoubleSize - kPointerSize; |
1996 (OffsetFrom(object->address()) & kDoubleAlignmentMask) == 0) { | 1996 default: |
1997 CreateFillerObjectAt(object->address(), kPointerSize); | 1997 UNREACHABLE(); |
1998 return HeapObject::FromAddress(object->address() + kPointerSize); | |
1999 } else { | |
2000 CreateFillerObjectAt(object->address() + size - kPointerSize, kPointerSize); | |
2001 return object; | |
2002 } | 1998 } |
| 1999 return 0; |
2003 } | 2000 } |
2004 | 2001 |
2005 | 2002 |
2006 HeapObject* Heap::PrecedeWithFiller(HeapObject* object) { | 2003 int Heap::GetFillToAlign(Address address, AllocationAlignment alignment) { |
2007 CreateFillerObjectAt(object->address(), kPointerSize); | 2004 intptr_t offset = OffsetFrom(address); |
2008 return HeapObject::FromAddress(object->address() + kPointerSize); | 2005 if (alignment == kDoubleAligned && (offset & kDoubleAlignmentMask) != 0) |
| 2006 return kPointerSize; |
| 2007 if (alignment == kDoubleUnaligned && (offset & kDoubleAlignmentMask) == 0) |
| 2008 return kDoubleSize - kPointerSize; // No fill if double is always aligned. |
| 2009 return 0; |
| 2010 } |
| 2011 |
| 2012 |
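An illustrative sketch, not part of the patch under review: how the GetFillToAlign contract above plays out on a 32-bit host, where kPointerSize is 4, kDoubleSize is 8 and kDoubleAlignmentMask is 7 (per the STATIC_ASSERTs earlier in this file). The standalone constants and main() harness are assumptions for illustration only, not V8 code.

    // Standalone illustration of the fill-to-align rules, assuming a 32-bit
    // layout: kPointerSize == 4, kDoubleSize == 8, kDoubleAlignmentMask == 7.
    #include <cstdint>
    #include <cstdio>

    namespace {

    const int kPointerSize = 4;
    const int kDoubleSize = 8;
    const std::intptr_t kDoubleAlignmentMask = kDoubleSize - 1;

    enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };

    // Mirrors Heap::GetFillToAlign: how many filler bytes must precede an
    // allocation at |address| for it to satisfy |alignment|.
    int GetFillToAlign(std::intptr_t address, AllocationAlignment alignment) {
      if (alignment == kDoubleAligned && (address & kDoubleAlignmentMask) != 0)
        return kPointerSize;
      if (alignment == kDoubleUnaligned && (address & kDoubleAlignmentMask) == 0)
        return kDoubleSize - kPointerSize;
      return 0;
    }

    }  // namespace

    int main() {
      // A double-aligned request at an 8-byte boundary needs no fill; at an
      // odd word (offset 4 mod 8) it needs one pointer-sized filler in front.
      std::printf("%d\n", GetFillToAlign(0x1000, kDoubleAligned));    // 0
      std::printf("%d\n", GetFillToAlign(0x1004, kDoubleAligned));    // 4
      // kDoubleUnaligned is the inverse case: the object itself lands at an
      // unaligned address so that its double field (e.g. HeapNumber's value,
      // which sits at an unaligned offset on 32-bit) ends up 8-byte aligned.
      std::printf("%d\n", GetFillToAlign(0x1000, kDoubleUnaligned));  // 4
      std::printf("%d\n", GetFillToAlign(0x1004, kDoubleUnaligned));  // 0
      return 0;
    }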
| 2013 HeapObject* Heap::PrecedeWithFiller(HeapObject* object, int filler_size) { |
| 2014 CreateFillerObjectAt(object->address(), filler_size); |
| 2015 return HeapObject::FromAddress(object->address() + filler_size); |
| 2016 } |
| 2017 |
| 2018 |
| 2019 HeapObject* Heap::AlignWithFiller(HeapObject* object, int object_size, |
| 2020 int allocation_size, |
| 2021 AllocationAlignment alignment) { |
| 2022 int filler_size = allocation_size - object_size; |
| 2023 DCHECK(filler_size > 0); |
| 2024 int pre_filler = GetFillToAlign(object->address(), alignment); |
| 2025 if (pre_filler) { |
| 2026 object = PrecedeWithFiller(object, pre_filler); |
| 2027 filler_size -= pre_filler; |
| 2028 } |
| 2029 if (filler_size) |
| 2030 CreateFillerObjectAt(object->address() + object_size, filler_size); |
| 2031 return object; |
2009 } | 2032 } |
2010 | 2033 |
2011 | 2034 |
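A rough sketch of how an allocation path could combine the worst-case reservation (GetMaximumFillToAlign) with the AlignWithFiller fix-up above: reserve object size plus the maximum fill, shift the object forward by the actual fill, and leave the remainder as a trailing filler. The bump-pointer allocator and names below are hypothetical, not V8 APIs.

    // Hypothetical bump-pointer sketch of double-aligned allocation, assuming
    // a 32-bit host; CreateFillerObjectAt calls are indicated as comments.
    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    namespace {

    const int kPointerSize = 4;
    const int kDoubleSize = 8;
    const std::intptr_t kDoubleAlignmentMask = kDoubleSize - 1;

    // Worst-case fill a double-aligned request can need (GetMaximumFillToAlign).
    const int kMaxDoubleFill = kDoubleSize - kPointerSize;

    // Reserve object_size plus the worst case, then pad in front by the actual
    // fill and behind with whatever is left, mirroring AlignWithFiller.
    std::intptr_t AllocateDoubleAligned(std::intptr_t* top, int object_size) {
      int allocation_size = object_size + kMaxDoubleFill;
      std::intptr_t object = *top;
      *top += allocation_size;
      int filler_size = allocation_size - object_size;
      int pre_filler =
          (object & kDoubleAlignmentMask) != 0 ? kPointerSize : 0;  // GetFillToAlign
      if (pre_filler) {
        // CreateFillerObjectAt(object, pre_filler) would go here (PrecedeWithFiller).
        object += pre_filler;
        filler_size -= pre_filler;
      }
      if (filler_size) {
        // CreateFillerObjectAt(object + object_size, filler_size): trailing pad.
      }
      return object;
    }

    }  // namespace

    int main() {
      std::intptr_t top = 0x1004;  // deliberately misaligned start
      std::intptr_t obj = AllocateDoubleAligned(&top, 16);
      assert((obj & kDoubleAlignmentMask) == 0);
      std::printf("object at %#lx, next top at %#lx\n",
                  static_cast<unsigned long>(obj),
                  static_cast<unsigned long>(top));
      return 0;
    }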
2012 HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) { | 2035 HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) { |
2013 return EnsureAligned(object, size, kDoubleAligned); | 2036 return AlignWithFiller(object, size - kPointerSize, size, kDoubleAligned); |
2014 } | 2037 } |
2015 | 2038 |
2016 | 2039 |
2017 enum LoggingAndProfiling { | 2040 enum LoggingAndProfiling { |
2018 LOGGING_AND_PROFILING_ENABLED, | 2041 LOGGING_AND_PROFILING_ENABLED, |
2019 LOGGING_AND_PROFILING_DISABLED | 2042 LOGGING_AND_PROFILING_DISABLED |
2020 }; | 2043 }; |
2021 | 2044 |
2022 | 2045 |
2023 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; | 2046 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; |
(...skipping 4590 matching lines...)
6614 *object_type = "CODE_TYPE"; \ | 6637 *object_type = "CODE_TYPE"; \ |
6615 *object_sub_type = "CODE_AGE/" #name; \ | 6638 *object_sub_type = "CODE_AGE/" #name; \ |
6616 return true; | 6639 return true; |
6617 CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) | 6640 CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) |
6618 #undef COMPARE_AND_RETURN_NAME | 6641 #undef COMPARE_AND_RETURN_NAME |
6619 } | 6642 } |
6620 return false; | 6643 return false; |
6621 } | 6644 } |
6622 } | 6645 } |
6623 } // namespace v8::internal | 6646 } // namespace v8::internal |