Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/base/once.h" | 9 #include "src/base/once.h" |
| 10 #include "src/base/utils/random-number-generator.h" | 10 #include "src/base/utils/random-number-generator.h" |
| (...skipping 3242 matching lines...) | |
| 3253 Marking::IsBlack(Marking::MarkBitFrom(address))) { | 3253 Marking::IsBlack(Marking::MarkBitFrom(address))) { |
| 3254 if (mode == FROM_GC) { | 3254 if (mode == FROM_GC) { |
| 3255 MemoryChunk::IncrementLiveBytesFromGC(address, by); | 3255 MemoryChunk::IncrementLiveBytesFromGC(address, by); |
| 3256 } else { | 3256 } else { |
| 3257 MemoryChunk::IncrementLiveBytesFromMutator(address, by); | 3257 MemoryChunk::IncrementLiveBytesFromMutator(address, by); |
| 3258 } | 3258 } |
| 3259 } | 3259 } |
| 3260 } | 3260 } |
| 3261 | 3261 |
| 3262 | 3262 |
| 3263 FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object, | |
| 3264 int elements_to_trim) { | |
| 3265 const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize; | |
| 3266 const int bytes_to_trim = elements_to_trim * element_size; | |
| 3267 Map* map = object->map(); | |
| 3268 | |
| 3269 // For now this trick is only applied to objects in new and paged space. | |
| 3270 // In large object space the object's start must coincide with chunk | |
| 3271 // and thus the trick is just not applicable. | |
| 3272 DCHECK(!lo_space()->Contains(object)); | |
| 3273 DCHECK(object->map() != fixed_cow_array_map()); | |
| 3274 | |
| 3275 STATIC_ASSERT(FixedArrayBase::kMapOffset == 0); | |
| 3276 STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize); | |
| 3277 STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize); | |
| 3278 | |
| 3279 const int len = object->length(); | |
| 3280 DCHECK(elements_to_trim <= len); | |
| 3281 | |
| 3282 // Calculate location of new array start. | |
| 3283 Address new_start = object->address() + bytes_to_trim; | |
| 3284 | |
| 3285 if (bytes_to_trim > FreeSpace::kHeaderSize && | |
| 3286 object->IsFixedArray() && | |
| 3287 !new_space()->Contains(object)) { | |
| 3288 // If we are doing a big trim in old space then we zap the space that was | |
| 3289 // formerly part of the array so that the GC (aided by the card-based | |
| 3290 // remembered set) won't find pointers to new-space there. | |
| 3291 Object** zap = reinterpret_cast<Object**>(object->address()); | |

Hannes Payer (out of office), 2014/08/05 10:45:07:
Please add a method that takes care of the zapping.

Michael Starzinger, 2014/08/05 11:01:06:
Done. I called it ZapFixedArrayForTrimming.

| 3292 zap++; // Header of filler must be at least one word so skip that. | |
| 3293 for (int i = 1; i < elements_to_trim; i++) { | |
| 3294 *zap++ = Smi::FromInt(0); | |
| 3295 } | |
| 3296 } | |
| 3297 | |
| 3298 // Technically in new space this write might be omitted (except for | |
| 3299 // debug mode which iterates through the heap), but to play safer | |
| 3300 // we still do it. | |
| 3301 CreateFillerObjectAt(object->address(), bytes_to_trim); | |
| 3302 | |
| 3303 // Initialize header of the trimmed array. Since left trimming is only | |
| 3304 // performed on pages which are not concurrently swept creating a filler | |
| 3305 // object does not require synchronization. | |
| 3306 DCHECK(CanMoveObjectStart(object)); | |
| 3307 Object** former_start = HeapObject::RawField(object, 0); | |
| 3308 int new_start_index = elements_to_trim * (element_size / kPointerSize); | |
| 3309 former_start[new_start_index] = map; | |
| 3310 former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim); | |
| 3311 FixedArrayBase* new_object = | |
| 3312 FixedArrayBase::cast(HeapObject::FromAddress(new_start)); | |
| 3313 | |
| 3314 // Maintain consistency of live bytes during incremental marking | |
| 3315 marking()->TransferMark(object->address(), new_start); | |
| 3316 AdjustLiveBytes(new_start, -bytes_to_trim, Heap::FROM_MUTATOR); | |
| 3317 | |
| 3318 // Notify the heap profiler of change in object layout. | |
| 3319 OnMoveEvent(new_object, object, new_object->Size()); | |
| 3320 return new_object; | |
| 3321 } | |
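
The left trim above never copies the array payload: it relies on FixedArrayBase's fixed two-word header (map, then length, as the STATIC_ASSERTs guarantee), writes a fresh header at the new start address, and leaves the vacated prefix behind as a filler object. Below is a minimal standalone sketch of the same header-rewrite arithmetic, modelling the heap object as a plain word array; the values and layout are illustrative only, not V8 code.

```cpp
// Standalone illustration (not V8 code) of the header rewrite performed by
// LeftTrimFixedArray. A heap object is modelled as raw words laid out as
// [map][length][e0][e1]...; trimming k elements writes a fresh two-word header
// at word k, and the vacated prefix is left behind (V8 turns it into a filler).
#include <cstdint>
#include <cstdio>

int main() {
  // "FixedArray" of length 5: header (map, length) followed by elements 10..14.
  std::intptr_t words[7] = {/*map*/ 0xAB, /*length*/ 5, 10, 11, 12, 13, 14};
  const int elements_to_trim = 2;

  std::intptr_t* former_start = words;
  std::intptr_t map = former_start[0];
  std::intptr_t len = former_start[1];

  // Same index arithmetic as in the patch, for pointer-sized elements.
  int new_start_index = elements_to_trim;
  former_start[new_start_index] = map;                         // new map slot
  former_start[new_start_index + 1] = len - elements_to_trim;  // new length

  std::intptr_t* new_object = words + new_start_index;
  std::printf("new length = %ld, first element = %ld\n",
              static_cast<long>(new_object[1]),
              static_cast<long>(new_object[2]));
  return 0;
}
```

Running it prints `new length = 3, first element = 12`, which matches what LeftTrimFixedArray produces for a five-element FixedArray trimmed by two: the old header becomes the filler, the slots of the two trimmed elements become the new header, and the remaining elements stay in place.
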
| 3322 | |
| 3323 | |
| 3324 template<Heap::InvocationMode mode> | |
| 3325 void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) { | |
| 3326 const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize; | |
| 3327 const int bytes_to_trim = elements_to_trim * element_size; | |
| 3328 | |
| 3329 // For now this trick is only applied to objects in new and paged space. | |
| 3330 DCHECK(!lo_space()->Contains(object)); | |
| 3331 DCHECK(object->map() != fixed_cow_array_map()); | |
| 3332 | |
| 3333 const int len = object->length(); | |
| 3334 DCHECK(elements_to_trim < len); | |
| 3335 | |
| 3336 // Calculate location of new array end. | |
| 3337 Address new_end = object->address() + object->Size() - bytes_to_trim; | |
| 3338 | |
| 3339 if (bytes_to_trim > FreeSpace::kHeaderSize && | |
| 3340 object->IsFixedArray() && | |
| 3341 (mode != Heap::FROM_GC || Heap::ShouldZapGarbage())) { | |
| 3342 // If we are doing a big trim in old space then we zap the space that was | |
| 3343 // formerly part of the array so that the GC (aided by the card-based | |
| 3344 // remembered set) won't find pointers to new-space there. | |
| 3345 Object** zap = reinterpret_cast<Object**>(new_end); | |
| 3346 zap++; // Header of filler must be at least one word so skip that. | |
| 3347 for (int i = 1; i < elements_to_trim; i++) { | |
| 3348 *zap++ = Smi::FromInt(0); | |
| 3349 } | |
| 3350 } | |
| 3351 | |
| 3352 // Technically in new space this write might be omitted (except for | |
| 3353 // debug mode which iterates through the heap), but to play safer | |
| 3354 // we still do it. | |
| 3355 CreateFillerObjectAt(new_end, bytes_to_trim); | |
| 3356 | |
| 3357 // Initialize header of the trimmed array. We are storing the new length | |
| 3358 // using release store after creating a filler for the left-over space to | |
| 3359 // avoid races with the sweeper thread. | |
| 3360 object->synchronized_set_length(len - elements_to_trim); | |
| 3361 | |
| 3362 // Maintain consistency of live bytes during incremental marking | |
| 3363 AdjustLiveBytes(object->address(), -bytes_to_trim, mode); | |
| 3364 | |
| 3365 // Notify the heap profiler of change in object layout. The array may not be | |
| 3366 // moved during GC, and size has to be adjusted nevertheless. | |
| 3367 HeapProfiler* profiler = isolate()->heap_profiler(); | |
| 3368 if (profiler->is_tracking_allocations()) { | |
| 3369 profiler->UpdateObjectSizeEvent(object->address(), object->Size()); | |
| 3370 } | |
| 3371 } | |
| 3372 | |
| 3373 | |
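
The first review thread above asks for the duplicated zap loop to be pulled into a helper, and the reply names it ZapFixedArrayForTrimming. The refactored patchset is not part of this diff, so the sketch below only shows what such a helper could look like, with an assumed signature, based on the two loops in LeftTrimFixedArray and RightTrimFixedArray:

```cpp
// Hypothetical sketch only: the helper's real signature is not shown in this
// patchset. It would live in heap.cc next to the trim functions and factor out
// their common zap loop: overwrite the vacated words with Smi zeros so the GC,
// aided by the card-based remembered set, finds no stale new-space pointers in
// the trimmed region.
static void ZapFixedArrayForTrimming(Address start, int count) {
  Object** zap = reinterpret_cast<Object**>(start);
  zap++;  // Header of the filler must be at least one word, so skip it.
  for (int i = 1; i < count; i++) {
    *zap++ = Smi::FromInt(0);
  }
}
```

Both call sites would then reduce to a single call guarded by the existing conditions in the patch (a big trim, a FixedArray, and either old space or ShouldZapGarbage).
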
| 3374 // Force instantiation of template instances class. | |

Hannes Payer (out of office), 2014/08/05 10:45:07:
Please move the force instantiation before the act

Michael Starzinger, 2014/08/05 11:01:06:
Done.

| 3375 template | |
| 3376 void Heap::RightTrimFixedArray<Heap::FROM_GC>(FixedArrayBase*, int); | |
| 3377 template | |
| 3378 void Heap::RightTrimFixedArray<Heap::FROM_MUTATOR>(FixedArrayBase*, int); | |
| 3379 | |
| 3380 | |
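
For readers unfamiliar with the forced instantiations above: RightTrimFixedArray&lt;mode&gt; is defined here in heap.cc rather than in a header, so every mode invoked from other translation units must be explicitly instantiated in this file, otherwise the linker cannot resolve those calls. A minimal illustration of the same pattern, with invented names that are not part of V8:

```cpp
// trim.h -- other translation units only see the declaration.
enum InvocationMode { FROM_GC, FROM_MUTATOR };
template <InvocationMode mode>
void RightTrim(int* array, int elements_to_trim);

// trim.cc -- definition plus explicit instantiations for the modes callers use.
template <InvocationMode mode>
void RightTrim(int* array, int elements_to_trim) {
  // ... trimming logic elided for the illustration ...
  (void)array;
  (void)elements_to_trim;
}
template void RightTrim<FROM_GC>(int*, int);
template void RightTrim<FROM_MUTATOR>(int*, int);
```
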
| 3263 AllocationResult Heap::AllocateExternalArray(int length, | 3381 AllocationResult Heap::AllocateExternalArray(int length, |
| 3264 ExternalArrayType array_type, | 3382 ExternalArrayType array_type, |
| 3265 void* external_pointer, | 3383 void* external_pointer, |
| 3266 PretenureFlag pretenure) { | 3384 PretenureFlag pretenure) { |
| 3267 int size = ExternalArray::kAlignedSize; | 3385 int size = ExternalArray::kAlignedSize; |
| 3268 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); | 3386 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 3269 HeapObject* result; | 3387 HeapObject* result; |
| 3270 { | 3388 { |
| 3271 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); | 3389 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 3272 if (!allocation.To(&result)) return allocation; | 3390 if (!allocation.To(&result)) return allocation; |
| (...skipping 2778 matching lines...) | |
| 6051 static_cast<int>(object_sizes_last_time_[index])); | 6169 static_cast<int>(object_sizes_last_time_[index])); |
| 6052 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 6170 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 6053 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6171 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 6054 | 6172 |
| 6055 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 6173 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 6056 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 6174 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 6057 ClearObjectStats(); | 6175 ClearObjectStats(); |
| 6058 } | 6176 } |
| 6059 } | 6177 } |
| 6060 } // namespace v8::internal | 6178 } // namespace v8::internal |