| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_SPACES_INL_H_ | 5 #ifndef V8_HEAP_SPACES_INL_H_ |
| 6 #define V8_HEAP_SPACES_INL_H_ | 6 #define V8_HEAP_SPACES_INL_H_ |
| 7 | 7 |
| 8 #include "src/heap/spaces.h" | 8 #include "src/heap/spaces.h" |
| 9 #include "src/heap-profiler.h" | 9 #include "src/heap-profiler.h" |
| 10 #include "src/isolate.h" | 10 #include "src/isolate.h" |
| (...skipping 232 matching lines...) |
| 243 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) { | 243 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) { |
| 244 Address current_top = allocation_info_.top(); | 244 Address current_top = allocation_info_.top(); |
| 245 Address new_top = current_top + size_in_bytes; | 245 Address new_top = current_top + size_in_bytes; |
| 246 if (new_top > allocation_info_.limit()) return NULL; | 246 if (new_top > allocation_info_.limit()) return NULL; |
| 247 | 247 |
| 248 allocation_info_.set_top(new_top); | 248 allocation_info_.set_top(new_top); |
| 249 return HeapObject::FromAddress(current_top); | 249 return HeapObject::FromAddress(current_top); |
| 250 } | 250 } |
| 251 | 251 |
| 252 | 252 |
| 253 HeapObject* PagedSpace::AllocateLinearlyDoubleAlign(int size_in_bytes) { |
| 254 Address current_top = allocation_info_.top(); |
| 255 int alignment_size = 0; |
| 256 |
| 257 if ((OffsetFrom(current_top) & kDoubleAlignmentMask) != 0) { |
| 258 alignment_size = kPointerSize; |
| 259 size_in_bytes += alignment_size; |
| 260 } |
| 261 Address new_top = current_top + size_in_bytes; |
| 262 if (new_top > allocation_info_.limit()) return NULL; |
| 263 |
| 264 allocation_info_.set_top(new_top); |
| 265 if (alignment_size > 0) |
| 266 return heap()->EnsureDoubleAligned(HeapObject::FromAddress(current_top), |
| 267 size_in_bytes); |
| 268 return HeapObject::FromAddress(current_top); |
| 269 } |
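[Editor's note on the alignment arithmetic above: every allocation top is already kPointerSize-aligned, so on a 32-bit target a misaligned top is off by exactly one word, and reserving one extra kPointerSize is always enough for heap()->EnsureDoubleAligned() (defined elsewhere in the heap, not shown in this hunk) to produce an 8-byte-aligned payload by turning the spare word into a filler. A minimal standalone sketch of that arithmetic, assuming kPointerSize == 4 and kDoubleAlignment == 8; the names below are illustrative, not V8 API.]

    #include <cassert>
    #include <cstdint>

    const uintptr_t kPointerSize = 4;      // 32-bit target assumed; on x64 the
    const uintptr_t kDoubleAlignment = 8;  // top is already double aligned.
    const uintptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;

    // Given a word-aligned block that is one word larger than the payload,
    // return the 8-byte-aligned payload start; the unused word at the other
    // end would become a one-word filler object in the real heap.
    uintptr_t AlignedPayloadStart(uintptr_t block_start) {
      assert((block_start & (kPointerSize - 1)) == 0);
      return (block_start & kDoubleAlignmentMask) == 0
                 ? block_start                  // spare word trails the payload
                 : block_start + kPointerSize;  // spare word precedes it
    }

    int main() {
      assert(AlignedPayloadStart(0x1000) == 0x1000);
      assert(AlignedPayloadStart(0x1004) == 0x1008);
      return 0;
    }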
| 270 |
| 271 |
| 253 // Raw allocation. | 272 // Raw allocation. |
| 254 AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) { | 273 AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) { |
| 255 HeapObject* object = AllocateLinearly(size_in_bytes); | 274 HeapObject* object = AllocateLinearly(size_in_bytes); |
| 256 | 275 |
| 257 if (object == NULL) { | 276 if (object == NULL) { |
| 258 object = free_list_.Allocate(size_in_bytes); | 277 object = free_list_.Allocate(size_in_bytes); |
| 259 if (object == NULL) { | 278 if (object == NULL) { |
| 260 object = SlowAllocateRaw(size_in_bytes); | 279 object = SlowAllocateRaw(size_in_bytes); |
| 261 } | 280 } |
| 262 } | 281 } |
| 263 | 282 |
| 264 if (object != NULL) { | 283 if (object != NULL) { |
| 265 if (identity() == CODE_SPACE) { | 284 if (identity() == CODE_SPACE) { |
| 266 SkipList::Update(object->address(), size_in_bytes); | 285 SkipList::Update(object->address(), size_in_bytes); |
| 267 } | 286 } |
| 268 MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes); | 287 MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes); |
| 269 return object; | 288 return object; |
| 270 } | 289 } |
| 271 | 290 |
| 272 return AllocationResult::Retry(identity()); | 291 return AllocationResult::Retry(identity()); |
| 273 } | 292 } |
| 274 | 293 |
| 275 | 294 |
| 295 // Raw allocation with double alignment. |
| 296 AllocationResult PagedSpace::AllocateRawDoubleAligned(int size_in_bytes) { |
| 297 DCHECK(identity() == OLD_SPACE); |
| 298 HeapObject* object = AllocateLinearlyDoubleAlign(size_in_bytes); |
| 299 int aligned_size_in_bytes = size_in_bytes + kPointerSize; |
| 300 |
| 301 if (object == NULL) { |
| 302 object = free_list_.Allocate(aligned_size_in_bytes); |
| 303 if (object == NULL) object = SlowAllocateRaw(aligned_size_in_bytes); |
| 304 if (object != NULL) { |
| 305 object = heap()->EnsureDoubleAligned(object, aligned_size_in_bytes); |
| 306 } |
| 307 } |
| 308 |
| 309 if (object != NULL) { |
| 310 MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes); |
| 311 return object; |
| 312 } |
| 313 |
| 314 return AllocationResult::Retry(identity()); |
| 315 } |
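[Editor's note: unlike the bump-pointer fast path, the free-list and slow paths cannot know where the returned block will start, so this function always requests size_in_bytes + kPointerSize and relies on EnsureDoubleAligned to place the spare word as a filler at whichever end needs it. A toy, self-contained model of that two-tier behaviour follows; plain integers stand in for addresses, and none of the names below are V8 API.]

    #include <cassert>
    #include <cstdint>

    const int kPointerSize = 4;  // 32-bit target assumed
    const uintptr_t kDoubleAlignmentMask = 7;

    struct LinearArea { uintptr_t top, limit; };

    // Fast path (mirrors AllocateLinearlyDoubleAlign): reserve the extra word
    // only when the current top is misaligned, and bump the top past it.
    uintptr_t BumpAllocateDoubleAligned(LinearArea* area, int size) {
      bool needs_filler = (area->top & kDoubleAlignmentMask) != 0;
      uintptr_t new_top = area->top + size + (needs_filler ? kPointerSize : 0);
      if (new_top > area->limit) return 0;  // caller falls back to the free list
      uintptr_t payload = needs_filler ? area->top + kPointerSize : area->top;
      area->top = new_top;
      return payload;
    }

    // Fallback sizing: the block's start is unknown in advance, so one extra
    // word is requested unconditionally (the worst case).
    int WorstCaseAlignedSize(int size) { return size + kPointerSize; }

    int main() {
      LinearArea area = {0x1004, 0x1010};
      assert(BumpAllocateDoubleAligned(&area, 8) == 0x1008);  // filler at front
      assert(BumpAllocateDoubleAligned(&area, 8) == 0);       // limit reached
      assert(WorstCaseAlignedSize(24) == 28);
      return 0;
    }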
| 316 |
| 317 |
| 276 // ----------------------------------------------------------------------------- | 318 // ----------------------------------------------------------------------------- |
| 277 // NewSpace | 319 // NewSpace |
| 278 | 320 |
| 279 | 321 |
| 322 AllocationResult NewSpace::AllocateRawDoubleAligned(int size_in_bytes) { |
| 323 Address old_top = allocation_info_.top(); |
| 324 int alignment_size = 0; |
| 325 int aligned_size_in_bytes = 0; |
| 326 |
| 327 // If double alignment is required and the top pointer is not aligned, we |
| 328 // allocate additional memory to take care of the alignment. |
| 329 if ((OffsetFrom(old_top) & kDoubleAlignmentMask) != 0) { |
| 330 alignment_size += kPointerSize; |
| 331 } |
| 332 aligned_size_in_bytes = size_in_bytes + alignment_size; |
| 333 |
| 334 if (allocation_info_.limit() - old_top < aligned_size_in_bytes) { |
| 335 return SlowAllocateRaw(size_in_bytes, true); |
| 336 } |
| 337 |
| 338 HeapObject* obj = HeapObject::FromAddress(old_top); |
| 339 allocation_info_.set_top(allocation_info_.top() + aligned_size_in_bytes); |
| 340 DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); |
| 341 |
| 342 if (alignment_size > 0) { |
| 343 obj = heap()->EnsureDoubleAligned(obj, aligned_size_in_bytes); |
| 344 } |
| 345 |
| 346 // The slow path above ultimately re-enters a raw allocation path, so this suffices. |
| 347 MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes); |
| 348 |
| 349 return obj; |
| 350 } |
| 351 |
| 352 |
| 280 AllocationResult NewSpace::AllocateRaw(int size_in_bytes) { | 353 AllocationResult NewSpace::AllocateRaw(int size_in_bytes) { |
| 281 Address old_top = allocation_info_.top(); | 354 Address old_top = allocation_info_.top(); |
| 282 | 355 |
| 283 if (allocation_info_.limit() - old_top < size_in_bytes) { | 356 if (allocation_info_.limit() - old_top < size_in_bytes) { |
| 284 return SlowAllocateRaw(size_in_bytes); | 357 return SlowAllocateRaw(size_in_bytes, false); |
| 285 } | 358 } |
| 286 | 359 |
| 287 HeapObject* obj = HeapObject::FromAddress(old_top); | 360 HeapObject* obj = HeapObject::FromAddress(old_top); |
| 288 allocation_info_.set_top(allocation_info_.top() + size_in_bytes); | 361 allocation_info_.set_top(allocation_info_.top() + size_in_bytes); |
| 289 DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); | 362 DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); |
| 290 | 363 |
| 291 // The slow path above ultimately goes through AllocateRaw, so this suffices. | 364 // The slow path above ultimately goes through AllocateRaw, so this suffices. |
| 292 MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes); | 365 MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes); |
| 293 | 366 |
| 294 return obj; | 367 return obj; |
| 295 } | 368 } |
| 296 | 369 |
| 297 | 370 |
| 298 LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk) { | 371 LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk) { |
| 299 heap->incremental_marking()->SetOldSpacePageFlags(chunk); | 372 heap->incremental_marking()->SetOldSpacePageFlags(chunk); |
| 300 return static_cast<LargePage*>(chunk); | 373 return static_cast<LargePage*>(chunk); |
| 301 } | 374 } |
| 302 | 375 |
| 303 | 376 |
| 304 intptr_t LargeObjectSpace::Available() { | 377 intptr_t LargeObjectSpace::Available() { |
| 305 return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available()); | 378 return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available()); |
| 306 } | 379 } |
| 307 | 380 |
| 308 } | 381 } |
| 309 } // namespace v8::internal | 382 } // namespace v8::internal |
| 310 | 383 |
| 311 #endif // V8_HEAP_SPACES_INL_H_ | 384 #endif // V8_HEAP_SPACES_INL_H_ |