| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
| (...skipping 1915 matching lines...) |
| 1926 | 1926 |
| 1927 | 1927 |
| 1928 STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == | 1928 STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == |
| 1929 0); // NOLINT | 1929 0); // NOLINT |
| 1930 STATIC_ASSERT((ConstantPoolArray::kFirstEntryOffset & kDoubleAlignmentMask) == | 1930 STATIC_ASSERT((ConstantPoolArray::kFirstEntryOffset & kDoubleAlignmentMask) == |
| 1931 0); // NOLINT | 1931 0); // NOLINT |
| 1932 STATIC_ASSERT((ConstantPoolArray::kExtendedFirstOffset & | 1932 STATIC_ASSERT((ConstantPoolArray::kExtendedFirstOffset & |
| 1933 kDoubleAlignmentMask) == 0); // NOLINT | 1933 kDoubleAlignmentMask) == 0); // NOLINT |
| 1934 | 1934 |
| 1935 | 1935 |
| 1936 INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap, HeapObject* object, | 1936 HeapObject* Heap::EnsureDoubleAligned(HeapObject* object, int size) { |
| 1937 int size)); | |
| 1938 | |
| 1939 static HeapObject* EnsureDoubleAligned(Heap* heap, HeapObject* object, | |
| 1940 int size) { | |
| 1941 if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) { | 1937 if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) { |
| 1942 heap->CreateFillerObjectAt(object->address(), kPointerSize); | 1938 CreateFillerObjectAt(object->address(), kPointerSize); |
| 1943 return HeapObject::FromAddress(object->address() + kPointerSize); | 1939 return HeapObject::FromAddress(object->address() + kPointerSize); |
| 1944 } else { | 1940 } else { |
| 1945 heap->CreateFillerObjectAt(object->address() + size - kPointerSize, | 1941 CreateFillerObjectAt(object->address() + size - kPointerSize, kPointerSize); |
| 1946 kPointerSize); | |
| 1947 return object; | 1942 return object; |
| 1948 } | 1943 } |
| 1949 } | 1944 } |
| 1950 | 1945 |
| 1951 | 1946 |
| 1952 HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) { | 1947 HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) { |
| 1953 return EnsureDoubleAligned(this, object, size); | 1948 return EnsureDoubleAligned(object, size); |
| 1954 } | 1949 } |
| 1955 | 1950 |
| 1956 | 1951 |
| 1957 enum LoggingAndProfiling { | 1952 enum LoggingAndProfiling { |
| 1958 LOGGING_AND_PROFILING_ENABLED, | 1953 LOGGING_AND_PROFILING_ENABLED, |
| 1959 LOGGING_AND_PROFILING_DISABLED | 1954 LOGGING_AND_PROFILING_DISABLED |
| 1960 }; | 1955 }; |
| 1961 | 1956 |
| 1962 | 1957 |
| 1963 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; | 1958 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; |
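Note on the first hunk above: EnsureDoubleAligned stops being a file-static helper that takes an explicit Heap* and becomes a Heap member, but the underlying trick is unchanged. Below is a minimal standalone sketch of that trick, assuming a 32-bit layout (kPointerSize == 4, 8-byte alignment for doubles) and using illustrative names rather than V8's real headers:

```cpp
// Standalone sketch (not the V8 implementation) of the filler-based double
// alignment kept by Heap::EnsureDoubleAligned: the object is allocated with
// one extra pointer-size word of slack, and that slack becomes a one-word
// filler either in front of the object (raw address misaligned) or behind it.
#include <cstddef>
#include <cstdint>
#include <cstdio>

namespace {

const uintptr_t kPointerSize = 4;
const uintptr_t kDoubleAlignmentMask = 7;  // 8-byte alignment for doubles.

// Returns the double-aligned object start for an allocation whose size
// already includes one word of slack; *filler receives the address of the
// unused word that a real heap would turn into a filler object.
uintptr_t EnsureDoubleAlignedSketch(uintptr_t address, size_t size,
                                    uintptr_t* filler) {
  if ((address & kDoubleAlignmentMask) != 0) {
    *filler = address;  // Filler in front, object shifted up by one word.
    return address + kPointerSize;
  }
  *filler = address + size - kPointerSize;  // Filler absorbs the trailing slack.
  return address;
}

}  // namespace

int main() {
  uintptr_t filler = 0;
  uintptr_t obj = EnsureDoubleAlignedSketch(0x1004, 24, &filler);
  std::printf("object 0x%lx, filler 0x%lx\n", (unsigned long)obj,
              (unsigned long)filler);  // object 0x1008, filler 0x1004
  obj = EnsureDoubleAlignedSketch(0x1008, 24, &filler);
  std::printf("object 0x%lx, filler 0x%lx\n", (unsigned long)obj,
              (unsigned long)filler);  // object 0x1008, filler 0x101c
  return 0;
}
```

The real member additionally turns the unused word into a proper filler object via CreateFillerObjectAt so the heap stays iterable; the sketch only reports where that word would go.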
| (...skipping 129 matching lines...) |
| 2093 MemoryChunk::IncrementLiveBytesFromGC(target->address(), size); | 2088 MemoryChunk::IncrementLiveBytesFromGC(target->address(), size); |
| 2094 } | 2089 } |
| 2095 } | 2090 } |
| 2096 } | 2091 } |
| 2097 | 2092 |
| 2098 template <int alignment> | 2093 template <int alignment> |
| 2099 static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot, | 2094 static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot, |
| 2100 HeapObject* object, int object_size) { | 2095 HeapObject* object, int object_size) { |
| 2101 Heap* heap = map->GetHeap(); | 2096 Heap* heap = map->GetHeap(); |
| 2102 | 2097 |
| 2103 int allocation_size = object_size; | 2098 DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE)); |
| 2104 if (alignment != kObjectAlignment) { | 2099 AllocationResult allocation; |
| 2105 DCHECK(alignment == kDoubleAlignment); | 2100 if (alignment == kDoubleAlignment) { |
| 2106 allocation_size += kPointerSize; | 2101 allocation = heap->new_space()->AllocateRawDoubleAligned(object_size); |
| 2102 } else { |
| 2103 allocation = heap->new_space()->AllocateRaw(object_size); |
| 2107 } | 2104 } |
| 2108 | 2105 |
| 2109 DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE)); | |
| 2110 AllocationResult allocation = | |
| 2111 heap->new_space()->AllocateRaw(allocation_size); | |
| 2112 | |
| 2113 HeapObject* target = NULL; // Initialization to please compiler. | 2106 HeapObject* target = NULL; // Initialization to please compiler. |
| 2114 if (allocation.To(&target)) { | 2107 if (allocation.To(&target)) { |
| 2115 // Order is important here: Set the promotion limit before storing a | 2108 // Order is important here: Set the promotion limit before storing a |
| 2116 // filler for double alignment or migrating the object. Otherwise we | 2109 // filler for double alignment or migrating the object. Otherwise we |
| 2117 // may end up overwriting promotion queue entries when we migrate the | 2110 // may end up overwriting promotion queue entries when we migrate the |
| 2118 // object. | 2111 // object. |
| 2119 heap->promotion_queue()->SetNewLimit(heap->new_space()->top()); | 2112 heap->promotion_queue()->SetNewLimit(heap->new_space()->top()); |
| 2120 | 2113 |
| 2121 if (alignment != kObjectAlignment) { | |
| 2122 target = EnsureDoubleAligned(heap, target, allocation_size); | |
| 2123 } | |
| 2124 MigrateObject(heap, object, target, object_size); | 2114 MigrateObject(heap, object, target, object_size); |
| 2125 | 2115 |
| 2126 // Update slot to new target. | 2116 // Update slot to new target. |
| 2127 *slot = target; | 2117 *slot = target; |
| 2128 | 2118 |
| 2129 heap->IncrementSemiSpaceCopiedObjectSize(object_size); | 2119 heap->IncrementSemiSpaceCopiedObjectSize(object_size); |
| 2130 return true; | 2120 return true; |
| 2131 } | 2121 } |
| 2132 return false; | 2122 return false; |
| 2133 } | 2123 } |
| 2134 | 2124 |
| 2135 | 2125 |
| 2136 template <ObjectContents object_contents, int alignment> | 2126 template <ObjectContents object_contents, int alignment> |
| 2137 static inline bool PromoteObject(Map* map, HeapObject** slot, | 2127 static inline bool PromoteObject(Map* map, HeapObject** slot, |
| 2138 HeapObject* object, int object_size) { | 2128 HeapObject* object, int object_size) { |
| 2139 Heap* heap = map->GetHeap(); | 2129 Heap* heap = map->GetHeap(); |
| 2140 | 2130 |
| 2141 int allocation_size = object_size; | 2131 AllocationResult allocation; |
| 2142 if (alignment != kObjectAlignment) { | 2132 if (alignment == kDoubleAlignment) { |
| 2143 DCHECK(alignment == kDoubleAlignment); | 2133 allocation = heap->old_space()->AllocateRawDoubleAligned(object_size); |
| 2144 allocation_size += kPointerSize; | 2134 } else { |
| 2135 allocation = heap->old_space()->AllocateRaw(object_size); |
| 2145 } | 2136 } |
| 2146 | 2137 |
| 2147 AllocationResult allocation; | |
| 2148 allocation = heap->old_space()->AllocateRaw(allocation_size); | |
| 2149 | |
| 2150 HeapObject* target = NULL; // Initialization to please compiler. | 2138 HeapObject* target = NULL; // Initialization to please compiler. |
| 2151 if (allocation.To(&target)) { | 2139 if (allocation.To(&target)) { |
| 2152 if (alignment != kObjectAlignment) { | |
| 2153 target = EnsureDoubleAligned(heap, target, allocation_size); | |
| 2154 } | |
| 2155 MigrateObject(heap, object, target, object_size); | 2140 MigrateObject(heap, object, target, object_size); |
| 2156 | 2141 |
| 2157 // Update slot to new target. | 2142 // Update slot to new target. |
| 2158 *slot = target; | 2143 *slot = target; |
| 2159 | 2144 |
| 2160 if (object_contents == POINTER_OBJECT) { | 2145 if (object_contents == POINTER_OBJECT) { |
| 2161 if (map->instance_type() == JS_FUNCTION_TYPE) { | 2146 if (map->instance_type() == JS_FUNCTION_TYPE) { |
| 2162 heap->promotion_queue()->insert(target, | 2147 heap->promotion_queue()->insert(target, |
| 2163 JSFunction::kNonWeakFieldsEndOffset); | 2148 JSFunction::kNonWeakFieldsEndOffset); |
| 2164 } else { | 2149 } else { |
| (...skipping 1498 matching lines...) |
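Note on the SemiSpaceCopyObject and PromoteObject hunks above: the over-allocation by kPointerSize and the follow-up EnsureDoubleAligned call disappear from the copy path because the spaces now expose AllocateRawDoubleAligned. Here is a toy model of that division of labour, with a made-up ToySpace standing in for NewSpace/OldSpace (a sketch of the idea, not V8's allocator):

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

namespace {

const uintptr_t kPointerSize = 4;          // 32-bit layout assumed.
const uintptr_t kDoubleAlignmentMask = 7;  // 8-byte double alignment.

// Stand-in for NewSpace/OldSpace: a plain bump allocator.
class ToySpace {
 public:
  explicit ToySpace(uintptr_t top) : top_(top) {}

  // Old-style entry point: pointer-aligned bump allocation only; the caller
  // had to pad the request and realign the result itself.
  uintptr_t AllocateRaw(size_t size) {
    uintptr_t result = top_;
    top_ += size;
    return result;
  }

  // New-style entry point: the space guarantees double alignment, skipping
  // one word (a filler object in the real heap) when the top is misaligned.
  uintptr_t AllocateRawDoubleAligned(size_t size) {
    size_t slack = (top_ & kDoubleAlignmentMask) != 0 ? kPointerSize : 0;
    return AllocateRaw(size + slack) + slack;
  }

 private:
  uintptr_t top_;
};

}  // namespace

int main() {
  ToySpace space(0x1004);  // Start at an address that is 4 mod 8.
  uintptr_t a = space.AllocateRawDoubleAligned(16);
  uintptr_t b = space.AllocateRawDoubleAligned(16);
  // Both come back 8-byte aligned, so a scavenger-style copy path can migrate
  // object_size bytes with no post-allocation fix-up.
  std::printf("a = 0x%lx, b = 0x%lx\n", (unsigned long)a, (unsigned long)b);
  return 0;
}
```

Usage-wise, the caller's logic collapses to choosing between AllocateRaw and AllocateRawDoubleAligned, which is exactly what the new hunks do.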
| 3663 size += kPointerSize; | 3648 size += kPointerSize; |
| 3664 } | 3649 } |
| 3665 #endif | 3650 #endif |
| 3666 AllocationSpace space = SelectSpace(size, pretenure); | 3651 AllocationSpace space = SelectSpace(size, pretenure); |
| 3667 | 3652 |
| 3668 HeapObject* object; | 3653 HeapObject* object; |
| 3669 AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE); | 3654 AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE); |
| 3670 if (!allocation.To(&object)) return allocation; | 3655 if (!allocation.To(&object)) return allocation; |
| 3671 | 3656 |
| 3672 if (array_type == kExternalFloat64Array) { | 3657 if (array_type == kExternalFloat64Array) { |
| 3673 object = EnsureDoubleAligned(this, object, size); | 3658 object = EnsureDoubleAligned(object, size); |
| 3674 } | 3659 } |
| 3675 | 3660 |
| 3676 object->set_map(MapForFixedTypedArray(array_type)); | 3661 object->set_map(MapForFixedTypedArray(array_type)); |
| 3677 FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object); | 3662 FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object); |
| 3678 elements->set_length(length); | 3663 elements->set_length(length); |
| 3679 memset(elements->DataPtr(), 0, elements->DataSize()); | 3664 memset(elements->DataPtr(), 0, elements->DataSize()); |
| 3680 return elements; | 3665 return elements; |
| 3681 } | 3666 } |
| 3682 | 3667 |
| 3683 | 3668 |
| (...skipping 710 matching lines...) |
| 4394 size += kPointerSize; | 4379 size += kPointerSize; |
| 4395 #endif | 4380 #endif |
| 4396 AllocationSpace space = SelectSpace(size, pretenure); | 4381 AllocationSpace space = SelectSpace(size, pretenure); |
| 4397 | 4382 |
| 4398 HeapObject* object; | 4383 HeapObject* object; |
| 4399 { | 4384 { |
| 4400 AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE); | 4385 AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE); |
| 4401 if (!allocation.To(&object)) return allocation; | 4386 if (!allocation.To(&object)) return allocation; |
| 4402 } | 4387 } |
| 4403 | 4388 |
| 4404 return EnsureDoubleAligned(this, object, size); | 4389 return EnsureDoubleAligned(object, size); |
| 4405 } | 4390 } |
| 4406 | 4391 |
| 4407 | 4392 |
| 4408 AllocationResult Heap::AllocateConstantPoolArray( | 4393 AllocationResult Heap::AllocateConstantPoolArray( |
| 4409 const ConstantPoolArray::NumberOfEntries& small) { | 4394 const ConstantPoolArray::NumberOfEntries& small) { |
| 4410 CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType)); | 4395 CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType)); |
| 4411 int size = ConstantPoolArray::SizeFor(small); | 4396 int size = ConstantPoolArray::SizeFor(small); |
| 4412 #ifndef V8_HOST_ARCH_64_BIT | 4397 #ifndef V8_HOST_ARCH_64_BIT |
| 4413 size += kPointerSize; | 4398 size += kPointerSize; |
| 4414 #endif | 4399 #endif |
| 4415 AllocationSpace space = SelectSpace(size, TENURED); | 4400 AllocationSpace space = SelectSpace(size, TENURED); |
| 4416 | 4401 |
| 4417 HeapObject* object; | 4402 HeapObject* object; |
| 4418 { | 4403 { |
| 4419 AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE); | 4404 AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE); |
| 4420 if (!allocation.To(&object)) return allocation; | 4405 if (!allocation.To(&object)) return allocation; |
| 4421 } | 4406 } |
| 4422 object = EnsureDoubleAligned(this, object, size); | 4407 object = EnsureDoubleAligned(object, size); |
| 4423 object->set_map_no_write_barrier(constant_pool_array_map()); | 4408 object->set_map_no_write_barrier(constant_pool_array_map()); |
| 4424 | 4409 |
| 4425 ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object); | 4410 ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object); |
| 4426 constant_pool->Init(small); | 4411 constant_pool->Init(small); |
| 4427 constant_pool->ClearPtrEntries(isolate()); | 4412 constant_pool->ClearPtrEntries(isolate()); |
| 4428 return constant_pool; | 4413 return constant_pool; |
| 4429 } | 4414 } |
| 4430 | 4415 |
| 4431 | 4416 |
| 4432 AllocationResult Heap::AllocateExtendedConstantPoolArray( | 4417 AllocationResult Heap::AllocateExtendedConstantPoolArray( |
| 4433 const ConstantPoolArray::NumberOfEntries& small, | 4418 const ConstantPoolArray::NumberOfEntries& small, |
| 4434 const ConstantPoolArray::NumberOfEntries& extended) { | 4419 const ConstantPoolArray::NumberOfEntries& extended) { |
| 4435 CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType)); | 4420 CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType)); |
| 4436 CHECK(extended.are_in_range(0, kMaxInt)); | 4421 CHECK(extended.are_in_range(0, kMaxInt)); |
| 4437 int size = ConstantPoolArray::SizeForExtended(small, extended); | 4422 int size = ConstantPoolArray::SizeForExtended(small, extended); |
| 4438 #ifndef V8_HOST_ARCH_64_BIT | 4423 #ifndef V8_HOST_ARCH_64_BIT |
| 4439 size += kPointerSize; | 4424 size += kPointerSize; |
| 4440 #endif | 4425 #endif |
| 4441 AllocationSpace space = SelectSpace(size, TENURED); | 4426 AllocationSpace space = SelectSpace(size, TENURED); |
| 4442 | 4427 |
| 4443 HeapObject* object; | 4428 HeapObject* object; |
| 4444 { | 4429 { |
| 4445 AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE); | 4430 AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE); |
| 4446 if (!allocation.To(&object)) return allocation; | 4431 if (!allocation.To(&object)) return allocation; |
| 4447 } | 4432 } |
| 4448 object = EnsureDoubleAligned(this, object, size); | 4433 object = EnsureDoubleAligned(object, size); |
| 4449 object->set_map_no_write_barrier(constant_pool_array_map()); | 4434 object->set_map_no_write_barrier(constant_pool_array_map()); |
| 4450 | 4435 |
| 4451 ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object); | 4436 ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object); |
| 4452 constant_pool->InitExtended(small, extended); | 4437 constant_pool->InitExtended(small, extended); |
| 4453 constant_pool->ClearPtrEntries(isolate()); | 4438 constant_pool->ClearPtrEntries(isolate()); |
| 4454 return constant_pool; | 4439 return constant_pool; |
| 4455 } | 4440 } |
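The allocation paths in these hunks reserve the extra kPointerSize word only when V8_HOST_ARCH_64_BIT is not defined. The reason is simple arithmetic: allocations are pointer-aligned, and an 8-byte pointer size already implies 8-byte double alignment, while a 4-byte pointer size does not. A tiny illustrative check (assumed constants, not V8 headers):

```cpp
#include <cstdio>

int main() {
  const unsigned kDoubleAlignment = 8;
  const unsigned pointer_sizes[] = {4, 8};  // 32-bit host vs 64-bit host.
  for (unsigned ps : pointer_sizes) {
    // A pointer-aligned address is guaranteed double-aligned only if the
    // pointer size is a multiple of kDoubleAlignment.
    bool needs_slack = (ps % kDoubleAlignment) != 0;
    std::printf("%u-byte pointers: extra alignment word needed? %s\n", ps,
                needs_slack ? "yes" : "no");
  }
  return 0;
}
```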
| 4456 | 4441 |
| 4457 | 4442 |
| 4458 AllocationResult Heap::AllocateEmptyConstantPoolArray() { | 4443 AllocationResult Heap::AllocateEmptyConstantPoolArray() { |
| (...skipping 1975 matching lines...) |
| 6434 static_cast<int>(object_sizes_last_time_[index])); | 6419 static_cast<int>(object_sizes_last_time_[index])); |
| 6435 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 6420 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 6436 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6421 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 6437 | 6422 |
| 6438 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 6423 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 6439 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 6424 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 6440 ClearObjectStats(); | 6425 ClearObjectStats(); |
| 6441 } | 6426 } |
| 6442 } | 6427 } |
| 6443 } // namespace v8::internal | 6428 } // namespace v8::internal |