OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/base/platform/platform.h" | 8 #include "src/base/platform/platform.h" |
9 #include "src/full-codegen.h" | 9 #include "src/full-codegen.h" |
10 #include "src/heap/mark-compact.h" | 10 #include "src/heap/mark-compact.h" |
(...skipping 1437 matching lines...)
1448 | 1448 |
1449 return true; | 1449 return true; |
1450 } | 1450 } |
1451 | 1451 |
1452 | 1452 |
1453 AllocationResult NewSpace::SlowAllocateRaw(int size_in_bytes, | 1453 AllocationResult NewSpace::SlowAllocateRaw(int size_in_bytes, |
1454 AllocationAlignment alignment) { | 1454 AllocationAlignment alignment) { |
1455 Address old_top = allocation_info_.top(); | 1455 Address old_top = allocation_info_.top(); |
1456 Address high = to_space_.page_high(); | 1456 Address high = to_space_.page_high(); |
1457 if (allocation_info_.limit() < high) { | 1457 if (allocation_info_.limit() < high) { |
| 1458 int alignment_size = Heap::GetFillToAlign(old_top, alignment); |
| 1459 int aligned_size_in_bytes = size_in_bytes + alignment_size; |
| 1460 |
1458 // Either the limit has been lowered because linear allocation was disabled | 1461 // Either the limit has been lowered because linear allocation was disabled |
1459 // or because incremental marking wants to get a chance to do a step. Set | 1462 // or because incremental marking wants to get a chance to do a step. Set |
1460 // the new limit accordingly. | 1463 // the new limit accordingly. |
1461 int aligned_size = size_in_bytes; | 1464 Address new_top = old_top + aligned_size_in_bytes; |
1462 aligned_size += (alignment != kWordAligned) ? kPointerSize : 0; | |
1463 Address new_top = old_top + aligned_size; | |
1464 int bytes_allocated = static_cast<int>(new_top - top_on_previous_step_); | 1465 int bytes_allocated = static_cast<int>(new_top - top_on_previous_step_); |
1465 heap()->incremental_marking()->Step(bytes_allocated, | 1466 heap()->incremental_marking()->Step(bytes_allocated, |
1466 IncrementalMarking::GC_VIA_STACK_GUARD); | 1467 IncrementalMarking::GC_VIA_STACK_GUARD); |
1467 UpdateInlineAllocationLimit(aligned_size); | 1468 UpdateInlineAllocationLimit(aligned_size_in_bytes); |
1468 top_on_previous_step_ = new_top; | 1469 top_on_previous_step_ = new_top; |
1469 if (alignment == kDoubleAligned) | 1470 if (alignment == kWordAligned) return AllocateRawUnaligned(size_in_bytes); |
1470 return AllocateRawAligned(size_in_bytes, kDoubleAligned); | 1471 return AllocateRawAligned(size_in_bytes, alignment); |
1471 else if (alignment == kDoubleUnaligned) | |
1472 return AllocateRawAligned(size_in_bytes, kDoubleUnaligned); | |
1473 return AllocateRawUnaligned(size_in_bytes); | |
1474 } else if (AddFreshPage()) { | 1472 } else if (AddFreshPage()) { |
1475 // Switched to new page. Try allocating again. | 1473 // Switched to new page. Try allocating again. |
1476 int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_); | 1474 int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_); |
1477 heap()->incremental_marking()->Step(bytes_allocated, | 1475 heap()->incremental_marking()->Step(bytes_allocated, |
1478 IncrementalMarking::GC_VIA_STACK_GUARD); | 1476 IncrementalMarking::GC_VIA_STACK_GUARD); |
1479 top_on_previous_step_ = to_space_.page_low(); | 1477 top_on_previous_step_ = to_space_.page_low(); |
1480 if (alignment == kDoubleAligned) | 1478 if (alignment == kWordAligned) return AllocateRawUnaligned(size_in_bytes); |
1481 return AllocateRawAligned(size_in_bytes, kDoubleAligned); | 1479 return AllocateRawAligned(size_in_bytes, alignment); |
1482 else if (alignment == kDoubleUnaligned) | |
1483 return AllocateRawAligned(size_in_bytes, kDoubleUnaligned); | |
1484 return AllocateRawUnaligned(size_in_bytes); | |
1485 } else { | 1480 } else { |
1486 return AllocationResult::Retry(); | 1481 return AllocationResult::Retry(); |
1487 } | 1482 } |
1488 } | 1483 } |
1489 | 1484 |
1490 | 1485 |
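Context on the change above: SlowAllocateRaw no longer pads every non-word-aligned request by a fixed kPointerSize; it asks Heap::GetFillToAlign(old_top, alignment) how many filler bytes are actually needed at the current top and folds them into aligned_size_in_bytes, so the incremental-marking step, new_top, and UpdateInlineAllocationLimit all see the real allocation size. Both branches also collapse the kDoubleAligned / kDoubleUnaligned special cases into a single AllocateRawAligned(size_in_bytes, alignment) call. Below is a minimal sketch of the fill-to-align computation, assuming a 32-bit layout (kPointerSize == 4, 8-byte double alignment); the function name and constants are illustrative, not V8's exact definitions.

    #include <cstdint>

    enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };

    const int kPointerSize = 4;            // assumed 32-bit target
    const int kDoubleAlignment = 8;
    const int kDoubleAlignmentMask = kDoubleAlignment - 1;

    // Sketch: number of filler bytes needed in front of an object placed at
    // `address` so that it satisfies `alignment`.
    int GetFillToAlignSketch(uintptr_t address, AllocationAlignment alignment) {
      if (alignment == kDoubleAligned && (address & kDoubleAlignmentMask) != 0)
        return kPointerSize;                     // move the start onto an 8-byte boundary
      if (alignment == kDoubleUnaligned && (address & kDoubleAlignmentMask) == 0)
        return kDoubleAlignment - kPointerSize;  // move the start off the 8-byte boundary
      return 0;                                  // kWordAligned, or already at the right offset
    }

    // In the new code, aligned_size_in_bytes is then size_in_bytes plus this
    // fill amount, which is what UpdateInlineAllocationLimit receives.
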
1491 #ifdef VERIFY_HEAP | 1486 #ifdef VERIFY_HEAP |
1492 // We do not use the SemiSpaceIterator because verification doesn't assume | 1487 // We do not use the SemiSpaceIterator because verification doesn't assume |
1493 // that it works (it depends on the invariants we are checking). | 1488 // that it works (it depends on the invariants we are checking). |
1494 void NewSpace::Verify() { | 1489 void NewSpace::Verify() { |
(...skipping 1630 matching lines...)
3125 object->ShortPrint(); | 3120 object->ShortPrint(); |
3126 PrintF("\n"); | 3121 PrintF("\n"); |
3127 } | 3122 } |
3128 printf(" --------------------------------------\n"); | 3123 printf(" --------------------------------------\n"); |
3129 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); | 3124 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); |
3130 } | 3125 } |
3131 | 3126 |
3132 #endif // DEBUG | 3127 #endif // DEBUG |
3133 } | 3128 } |
3134 } // namespace v8::internal | 3129 } // namespace v8::internal |