Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 35103002: Align double array backing store during compaction and mark-sweep promotion. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebase (created 7 years, 1 month ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 2001 matching lines...)
     }
 
     // Take another spin if there are now unswept objects in new space
     // (there are currently no more unswept promoted objects).
   } while (new_space_front != new_space_.top());
 
   return new_space_front;
 }
 
 
-STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0);
-STATIC_ASSERT((ConstantPoolArray::kHeaderSize & kDoubleAlignmentMask) == 0);
-
-
-INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,
-                                              HeapObject* object,
-                                              int size));
-
-static HeapObject* EnsureDoubleAligned(Heap* heap,
-                                       HeapObject* object,
-                                       int size) {
-  if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) {
-    heap->CreateFillerObjectAt(object->address(), kPointerSize);
-    return HeapObject::FromAddress(object->address() + kPointerSize);
-  } else {
-    heap->CreateFillerObjectAt(object->address() + size - kPointerSize,
-                               kPointerSize);
-    return object;
-  }
-}
-
-
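Note: the helper removed above evidently becomes a Heap method (the call
sites below change from EnsureDoubleAligned(heap, target, size) to
heap->EnsureDoubleAligned(target, size), and src/heap.h and src/heap-inl.h
are part of this issue). The trick it implements: the caller over-allocates
by one word, and the spare word becomes a filler object either before the
payload (when the raw address is misaligned for doubles) or after it (when
it is already aligned), so the heap stays linearly iterable. A minimal
self-checking sketch of the same arithmetic, with hypothetical constants
for a 32-bit host rather than V8's real implementation:

#include <cassert>
#include <cstdint>
#include <cstdio>

// Hypothetical stand-ins for the V8 constants on a 32-bit host.
static const uintptr_t kPointerSize = 4;
static const uintptr_t kDoubleAlignmentMask = 8 - 1;

// Mirrors the removed helper's address arithmetic. `padded_size` already
// includes the extra word the allocation sites add (size += kPointerSize).
static uintptr_t AlignForDoubles(uintptr_t address, uintptr_t padded_size) {
  if ((address & kDoubleAlignmentMask) != 0) {
    // Misaligned start: one-word filler at `address`, object shifts up.
    return address + kPointerSize;
  }
  // Aligned start: the one-word filler goes at the tail of the padded block.
  uintptr_t filler_at = address + padded_size - kPointerSize;
  (void)filler_at;  // a real heap would write a filler map word here
  return address;
}

int main() {
  uintptr_t already_aligned = AlignForDoubles(0x1000, 32);  // 8-aligned start
  uintptr_t shifted = AlignForDoubles(0x1004, 32);          // 4-misaligned start
  assert(already_aligned == 0x1000);
  assert(shifted == 0x1008 && (shifted & kDoubleAlignmentMask) == 0);
  printf("%#zx %#zx\n", (size_t)already_aligned, (size_t)shifted);
  return 0;
}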
 enum LoggingAndProfiling {
   LOGGING_AND_PROFILING_ENABLED,
   LOGGING_AND_PROFILING_DISABLED
 };
 
 
 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };
 
 
 template<MarksHandling marks_handling,
(...skipping 158 matching lines...)
       } else {
         ASSERT(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
         maybe_result = heap->old_pointer_space()->AllocateRaw(allocation_size);
       }
 
       Object* result = NULL;  // Initialization to please compiler.
       if (maybe_result->ToObject(&result)) {
         HeapObject* target = HeapObject::cast(result);
 
         if (alignment != kObjectAlignment) {
-          target = EnsureDoubleAligned(heap, target, allocation_size);
+          target = heap->EnsureDoubleAligned(target, allocation_size);
         }
 
         // Order is important: slot might be inside of the target if target
         // was allocated over a dead object and slot comes from the store
         // buffer.
         *slot = target;
         MigrateObject(heap, object, target, object_size);
 
         if (object_contents == POINTER_OBJECT) {
           if (map->instance_type() == JS_FUNCTION_TYPE) {
             heap->promotion_queue()->insert(
                 target, JSFunction::kNonWeakFieldsEndOffset);
           } else {
             heap->promotion_queue()->insert(target, object_size);
           }
         }
 
         heap->tracer()->increment_promoted_objects_size(object_size);
         return;
       }
     }
     ASSERT(heap->AllowedToBeMigrated(object, NEW_SPACE));
     MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
     heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
     Object* result = allocation->ToObjectUnchecked();
     HeapObject* target = HeapObject::cast(result);
 
     if (alignment != kObjectAlignment) {
-      target = EnsureDoubleAligned(heap, target, allocation_size);
+      target = heap->EnsureDoubleAligned(target, allocation_size);
     }
 
     // Order is important: slot might be inside of the target if target
     // was allocated over a dead object and slot comes from the store
     // buffer.
     *slot = target;
     MigrateObject(heap, object, target, object_size);
     return;
   }
 
(...skipping 3175 matching lines...)
 #ifndef V8_HOST_ARCH_64_BIT
   size += kPointerSize;
 #endif
   AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
 
   HeapObject* object;
   { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
   }
 
-  return EnsureDoubleAligned(this, object, size);
+  return EnsureDoubleAligned(object, size);
 }
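Note: both double-aligned allocation paths in this file pad the request by
one word on 32-bit hosts (the #ifndef V8_HOST_ARCH_64_BIT blocks) before
the final EnsureDoubleAligned call. A small self-checking sketch of why one
word of padding always suffices, using hypothetical layout constants rather
than the real FixedDoubleArray/ConstantPoolArray figures:

#include <cassert>
#include <cstdint>

static const uint32_t kPointerSize = 4;                // 32-bit host word
static const uint32_t kHeaderSize = 2 * kPointerSize;  // hypothetical header

// Unpadded object size for n doubles. The header being a multiple of 8 is
// what the removed STATIC_ASSERTs guaranteed: an 8-aligned object start
// then also 8-aligns the double payload.
static uint32_t SizeFor(uint32_t n) { return kHeaderSize + n * 8; }

int main() {
  const uint32_t n = 3;
  const uint32_t padded = SizeFor(n) + kPointerSize;  // size += kPointerSize
  // AllocateRaw on a 32-bit host hands back a 4-aligned raw start s. The
  // aligned object then lives at s (trailing filler) or at s + 4 (leading
  // filler); either way it fits inside [s, s + padded).
  for (uint32_t s = 0x1000; s <= 0x1004; s += kPointerSize) {
    uint32_t start = (s % 8 != 0) ? s + kPointerSize : s;
    assert(start % 8 == 0);
    assert(start + SizeFor(n) <= s + padded);
  }
  return 0;
}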
 
 
 MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries,
                                              int number_of_ptr_entries,
                                              int number_of_int32_entries) {
   ASSERT(number_of_int64_entries > 0 || number_of_ptr_entries > 0 ||
          number_of_int32_entries > 0);
   int size = ConstantPoolArray::SizeFor(number_of_int64_entries,
                                         number_of_ptr_entries,
                                         number_of_int32_entries);
 #ifndef V8_HOST_ARCH_64_BIT
   size += kPointerSize;
 #endif
   AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
 
   HeapObject* object;
   { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_POINTER_SPACE);
     if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
   }
-  object = EnsureDoubleAligned(this, object, size);
+  object = EnsureDoubleAligned(object, size);
   HeapObject::cast(object)->set_map_no_write_barrier(constant_pool_array_map());
 
   ConstantPoolArray* constant_pool =
       reinterpret_cast<ConstantPoolArray*>(object);
   constant_pool->SetEntryCounts(number_of_int64_entries,
                                 number_of_ptr_entries,
                                 number_of_int32_entries);
   MemsetPointer(
       HeapObject::RawField(
           constant_pool,
(...skipping 2434 matching lines...)
   if (FLAG_concurrent_recompilation) {
     heap_->relocation_mutex_->Lock();
 #ifdef DEBUG
     heap_->relocation_mutex_locked_by_optimizer_thread_ =
         heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread();
 #endif  // DEBUG
   }
 }
 
 } }  // namespace v8::internal
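Note: the tail of the diff is the body of the heap's relocation-lock
constructor: when --concurrent-recompilation is on it takes the relocation
mutex, and in debug builds it records whether the optimizing-compiler
thread is the one holding it. A generic RAII sketch of that pattern with
hypothetical types (std::mutex instead of V8's Mutex, a plain struct
instead of Heap), not the real implementation:

#include <mutex>

// Hypothetical stand-in for the heap; not V8's real type.
struct HeapLike {
  bool concurrent_recompilation = true;  // FLAG_concurrent_recompilation
  std::mutex relocation_mutex;
};

// Lock on construction, unlock on destruction, so every early return from
// a relocating GC phase still releases the mutex.
class RelocationLockSketch {
 public:
  explicit RelocationLockSketch(HeapLike* heap) : heap_(heap) {
    if (heap_->concurrent_recompilation) heap_->relocation_mutex.lock();
  }
  ~RelocationLockSketch() {
    if (heap_->concurrent_recompilation) heap_->relocation_mutex.unlock();
  }

 private:
  HeapLike* heap_;
};

int main() {
  HeapLike heap;
  { RelocationLockSketch lock(&heap); /* relocation work happens here */ }
  return 0;
}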
