OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 306 matching lines...)
317 if (isolate_->code_range()->contains(static_cast<Address>(base))) { | 317 if (isolate_->code_range()->contains(static_cast<Address>(base))) { |
318 ASSERT(executable == EXECUTABLE); | 318 ASSERT(executable == EXECUTABLE); |
319 isolate_->code_range()->FreeRawMemory(base, size); | 319 isolate_->code_range()->FreeRawMemory(base, size); |
320 } else { | 320 } else { |
321 ASSERT(executable == NOT_EXECUTABLE || !isolate_->code_range()->exists()); | 321 ASSERT(executable == NOT_EXECUTABLE || !isolate_->code_range()->exists()); |
322 VirtualMemory::ReleaseRegion(base, size); | 322 VirtualMemory::ReleaseRegion(base, size); |
323 } | 323 } |
324 } | 324 } |
325 | 325 |
326 | 326 |
327 Address MemoryAllocator::ReserveAlignedMemory(const size_t requested, | 327 Address MemoryAllocator::ReserveAlignedMemory(const size_t size, |
328 size_t alignment, | 328 size_t alignment) { |
329 size_t* allocated_size) { | |
330 ASSERT(IsAligned(alignment, OS::AllocateAlignment())); | 329 ASSERT(IsAligned(alignment, OS::AllocateAlignment())); |
331 if (size_ + requested > capacity_) return NULL; | 330 Address result = static_cast<Address>( |
332 | 331 VirtualMemory::ReserveAlignedRegion(size, alignment)); |
333 size_t allocated = RoundUp(requested + alignment, | 332 size_ += size; |
334 static_cast<intptr_t>(OS::AllocateAlignment())); | 333 return result; |
335 | |
336 Address base = reinterpret_cast<Address>( | |
337 VirtualMemory::ReserveRegion(allocated)); | |
338 | |
339 Address end = base + allocated; | |
340 | |
341 if (base == 0) return NULL; | |
342 | |
343 Address aligned_base = RoundUp(base, alignment); | |
344 | |
345 ASSERT(aligned_base + requested <= base + allocated); | |
346 | |
347 // The difference between re-aligned base address and base address is | |
348 // multiple of OS::AllocateAlignment(). | |
349 if (aligned_base != base) { | |
350 ASSERT(aligned_base > base); | |
351 // TODO(gc) check result of operation? | |
352 VirtualMemory::ReleaseRegion(reinterpret_cast<void*>(base), | |
353 aligned_base - base); | |
354 allocated -= (aligned_base - base); | |
355 base = aligned_base; | |
356 } | |
357 | |
358 ASSERT(base + allocated == end); | |
359 | |
360 Address requested_end = base + requested; | |
361 Address aligned_requested_end = | |
362 RoundUp(requested_end, OS::AllocateAlignment()); | |
363 | |
364 if (aligned_requested_end < end) { | |
365 // TODO(gc) check result of operation? | |
366 VirtualMemory::ReleaseRegion(reinterpret_cast<void*>(aligned_requested_end), | |
367 end - aligned_requested_end); | |
368 allocated = aligned_requested_end - base; | |
369 } | |
370 | |
371 size_ += allocated; | |
372 *allocated_size = allocated; | |
373 return base; | |
374 } | 334 } |
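
    Note: the deleted lines on the left implement the classic over-reserve-and-trim
    idiom that the new VirtualMemory::ReserveAlignedRegion call presumably
    encapsulates. A minimal sketch of that idiom, assuming a POSIX platform
    (mmap/munmap stand in for V8's platform layer; error handling elided):

        #include <stdint.h>
        #include <sys/mman.h>

        void* ReserveAlignedRegion(size_t size, size_t alignment) {
          // Over-reserve so an aligned sub-range of `size` bytes must exist.
          size_t padded = size + alignment;
          uint8_t* base = static_cast<uint8_t*>(
              mmap(NULL, padded, PROT_NONE,
                   MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0));
          if (base == MAP_FAILED) return NULL;

          uint8_t* aligned = reinterpret_cast<uint8_t*>(
              (reinterpret_cast<uintptr_t>(base) + alignment - 1) &
              ~(static_cast<uintptr_t>(alignment) - 1));

          // Release the unused slack before and after the aligned range,
          // exactly as the old ReserveAlignedMemory did via ReleaseRegion.
          if (aligned > base) munmap(base, aligned - base);
          uint8_t* end = base + padded;
          if (aligned + size < end) munmap(aligned + size, end - (aligned + size));
          return aligned;
        }

    Moving this logic behind one call also means the caller now gets exactly
    `size` bytes back, which is why the allocated_size out-parameter disappears.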
375 | 335 |
376 | 336 |
377 Address MemoryAllocator::AllocateAlignedMemory(const size_t requested, | 337 Address MemoryAllocator::AllocateAlignedMemory(const size_t size, |
378 size_t alignment, | 338 size_t alignment, |
379 Executability executable, | 339 Executability executable) { |
380 size_t* allocated_size) { | |
381 Address base = | 340 Address base = |
382 ReserveAlignedMemory(requested, Page::kPageSize, allocated_size); | 341 ReserveAlignedMemory(size, Page::kPageSize); |
383 | 342 |
384 if (base == NULL) return NULL; | 343 if (base == NULL) return NULL; |
385 | 344 |
386 if (!VirtualMemory::CommitRegion(base, | 345 if (!VirtualMemory::CommitRegion(base, |
387 *allocated_size, | 346 size, |
388 executable == EXECUTABLE)) { | 347 executable == EXECUTABLE)) { |
389 VirtualMemory::ReleaseRegion(base, *allocated_size); | 348 VirtualMemory::ReleaseRegion(base, size); |
390 size_ -= *allocated_size; | 349 size_ -= size; |
391 return NULL; | 350 return NULL; |
392 } | 351 } |
393 | 352 |
394 return base; | 353 return base; |
395 } | 354 } |
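
    Note: AllocateAlignedMemory splits allocation into two steps: reserve
    address space, then commit it with the requested protection, threading the
    EXECUTABLE flag through to the commit. A sketch of what a commit amounts to
    on a POSIX system, assuming reserved pages were mapped PROT_NONE (a
    hypothetical stand-in, not V8's actual VirtualMemory::CommitRegion):

        #include <sys/mman.h>

        // Turn reserved (PROT_NONE) pages into usable memory. The executable
        // flag maps to PROT_EXEC, which is why EXECUTABLE must reach this layer.
        bool CommitRegion(void* base, size_t size, bool executable) {
          int prot = PROT_READ | PROT_WRITE | (executable ? PROT_EXEC : 0);
          return mprotect(base, size, prot) == 0;
        }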
396 | 355 |
397 | 356 |
398 void Page::InitializeAsAnchor(PagedSpace* owner) { | 357 void Page::InitializeAsAnchor(PagedSpace* owner) { |
399 set_owner(owner); | 358 set_owner(owner); |
400 set_prev_page(this); | 359 set_prev_page(this); |
(...skipping 99 matching lines...)
500 // Allocate executable memory either from code range or from the | 459 // Allocate executable memory either from code range or from the |
501 // OS. | 460 // OS. |
502 if (isolate_->code_range()->exists()) { | 461 if (isolate_->code_range()->exists()) { |
503 base = isolate_->code_range()->AllocateRawMemory(chunk_size, &chunk_size); | 462 base = isolate_->code_range()->AllocateRawMemory(chunk_size, &chunk_size); |
504 ASSERT(IsAligned(reinterpret_cast<intptr_t>(base), | 463 ASSERT(IsAligned(reinterpret_cast<intptr_t>(base), |
505 MemoryChunk::kAlignment)); | 464 MemoryChunk::kAlignment)); |
506 size_ += chunk_size; | 465 size_ += chunk_size; |
507 } else { | 466 } else { |
508 base = AllocateAlignedMemory(chunk_size, | 467 base = AllocateAlignedMemory(chunk_size, |
509 MemoryChunk::kAlignment, | 468 MemoryChunk::kAlignment, |
510 executable, | 469 executable); |
511 &chunk_size); | |
512 } | 470 } |
513 | 471 |
514 if (base == NULL) return NULL; | 472 if (base == NULL) return NULL; |
515 | 473 |
516 // Update executable memory size. | 474 // Update executable memory size. |
517 size_executable_ += chunk_size; | 475 size_executable_ += chunk_size; |
518 } else { | 476 } else { |
519 base = AllocateAlignedMemory(chunk_size, | 477 base = AllocateAlignedMemory(chunk_size, |
520 MemoryChunk::kAlignment, | 478 MemoryChunk::kAlignment, |
521 executable, | 479 executable); |
522 &chunk_size); | |
523 | 480 |
524 if (base == NULL) return NULL; | 481 if (base == NULL) return NULL; |
525 } | 482 } |
526 | 483 |
527 #ifdef DEBUG | 484 #ifdef DEBUG |
528 ZapBlock(base, chunk_size); | 485 ZapBlock(base, chunk_size); |
529 #endif | 486 #endif |
530 isolate_->counters()->memory_allocated()-> | 487 isolate_->counters()->memory_allocated()-> |
531 Increment(static_cast<int>(chunk_size)); | 488 Increment(static_cast<int>(chunk_size)); |
532 | 489 |
(...skipping 299 matching lines...)
832 | 789 |
833 | 790 |
834 bool NewSpace::Setup(int reserved_semispace_capacity, | 791 bool NewSpace::Setup(int reserved_semispace_capacity, |
835 int maximum_semispace_capacity) { | 792 int maximum_semispace_capacity) { |
836 // Setup new space based on the preallocated memory block defined by | 793 // Setup new space based on the preallocated memory block defined by |
837 // start and size. The provided space is divided into two semi-spaces. | 794 // start and size. The provided space is divided into two semi-spaces. |
838 // To support fast containment testing in the new space, the size of | 795 // To support fast containment testing in the new space, the size of |
839 // this chunk must be a power of two and it must be aligned to its size. | 796 // this chunk must be a power of two and it must be aligned to its size. |
840 int initial_semispace_capacity = heap()->InitialSemiSpaceSize(); | 797 int initial_semispace_capacity = heap()->InitialSemiSpaceSize(); |
841 | 798 |
842 size_t size = 0; | 799 size_t size = 2 * reserved_semispace_capacity; |
843 Address base = | 800 Address base = |
844 heap()->isolate()->memory_allocator()->ReserveAlignedMemory( | 801 heap()->isolate()->memory_allocator()->ReserveAlignedMemory( |
845 2 * reserved_semispace_capacity, | 802 size, |
846 2 * reserved_semispace_capacity, | 803 size); |
847 &size); | |
848 | 804 |
849 if (base == NULL) return false; | 805 if (base == NULL) return false; |
850 | 806 |
851 chunk_base_ = base; | 807 chunk_base_ = base; |
852 chunk_size_ = static_cast<uintptr_t>(size); | 808 chunk_size_ = static_cast<uintptr_t>(size); |
853 LOG(heap()->isolate(), NewEvent("InitialChunk", chunk_base_, chunk_size_)); | 809 LOG(heap()->isolate(), NewEvent("InitialChunk", chunk_base_, chunk_size_)); |
854 | 810 |
855 ASSERT(initial_semispace_capacity <= maximum_semispace_capacity); | 811 ASSERT(initial_semispace_capacity <= maximum_semispace_capacity); |
856 ASSERT(IsPowerOf2(maximum_semispace_capacity)); | 812 ASSERT(IsPowerOf2(maximum_semispace_capacity)); |
857 | 813 |
(...skipping 1647 matching lines...)
2505 for (HeapObject* obj = obj_it.Next(); obj != NULL; obj = obj_it.Next()) { | 2461 for (HeapObject* obj = obj_it.Next(); obj != NULL; obj = obj_it.Next()) { |
2506 if (obj->IsCode()) { | 2462 if (obj->IsCode()) { |
2507 Code* code = Code::cast(obj); | 2463 Code* code = Code::cast(obj); |
2508 isolate->code_kind_statistics()[code->kind()] += code->Size(); | 2464 isolate->code_kind_statistics()[code->kind()] += code->Size(); |
2509 } | 2465 } |
2510 } | 2466 } |
2511 } | 2467 } |
2512 #endif // DEBUG | 2468 #endif // DEBUG |
2513 | 2469 |
2514 } } // namespace v8::internal | 2470 } } // namespace v8::internal |