| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
| (...skipping 477 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 488 Code* code = Code::cast(object); | 488 Code* code = Code::cast(object); |
| 489 Code::Kind current_kind = code->kind(); | 489 Code::Kind current_kind = code->kind(); |
| 490 if (current_kind == Code::FUNCTION || | 490 if (current_kind == Code::FUNCTION || |
| 491 current_kind == Code::OPTIMIZED_FUNCTION) { | 491 current_kind == Code::OPTIMIZED_FUNCTION) { |
| 492 code->ClearInlineCaches(kind); | 492 code->ClearInlineCaches(kind); |
| 493 } | 493 } |
| 494 } | 494 } |
| 495 } | 495 } |
| 496 | 496 |
| 497 | 497 |
| 498 void Heap::RepairFreeListsAfterBoot() { | 498 void Heap::RepairFreeListsAfterDeserialization() { |
| 499 PagedSpaces spaces(this); | 499 PagedSpaces spaces(this); |
| 500 for (PagedSpace* space = spaces.next(); space != NULL; | 500 for (PagedSpace* space = spaces.next(); space != NULL; |
| 501 space = spaces.next()) { | 501 space = spaces.next()) { |
| 502 space->RepairFreeListsAfterBoot(); | 502 space->RepairFreeListsAfterDeserialization(); |
| 503 } | 503 } |
| 504 } | 504 } |
| 505 | 505 |
| 506 | 506 |
| 507 void Heap::ProcessPretenuringFeedback() { | 507 void Heap::ProcessPretenuringFeedback() { |
| 508 if (FLAG_allocation_site_pretenuring) { | 508 if (FLAG_allocation_site_pretenuring) { |
| 509 int tenure_decisions = 0; | 509 int tenure_decisions = 0; |
| 510 int dont_tenure_decisions = 0; | 510 int dont_tenure_decisions = 0; |
| 511 int allocation_mementos_found = 0; | 511 int allocation_mementos_found = 0; |
| 512 int allocation_sites = 0; | 512 int allocation_sites = 0; |
| (...skipping 432 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 945 for (auto& chunk : *reservation) { | 945 for (auto& chunk : *reservation) { |
| 946 AllocationResult allocation; | 946 AllocationResult allocation; |
| 947 int size = chunk.size; | 947 int size = chunk.size; |
| 948 DCHECK_LE(size, MemoryAllocator::PageAreaSize( | 948 DCHECK_LE(size, MemoryAllocator::PageAreaSize( |
| 949 static_cast<AllocationSpace>(space))); | 949 static_cast<AllocationSpace>(space))); |
| 950 if (space == NEW_SPACE) { | 950 if (space == NEW_SPACE) { |
| 951 allocation = new_space()->AllocateRaw(size); | 951 allocation = new_space()->AllocateRaw(size); |
| 952 } else { | 952 } else { |
| 953 allocation = paged_space(space)->AllocateRaw(size); | 953 allocation = paged_space(space)->AllocateRaw(size); |
| 954 } | 954 } |
| 955 FreeListNode* node; | 955 HeapObject* free_space; |
| 956 if (allocation.To(&node)) { | 956 if (allocation.To(&free_space)) { |
| 957 // Mark with a free list node, in case we have a GC before | 957 // Mark with a filler object, in case we have a GC before |
| 958 // deserializing. | 958 // deserializing. |
| 959 node->set_size(this, size); | 959 Address free_space_address = free_space->address(); |
| 960 CreateFillerObjectAt(free_space_address, size); |
| 960 DCHECK(space < Serializer::kNumberOfPreallocatedSpaces); | 961 DCHECK(space < Serializer::kNumberOfPreallocatedSpaces); |
| 961 chunk.start = node->address(); | 962 chunk.start = free_space_address; |
| 962 chunk.end = node->address() + size; | 963 chunk.end = free_space_address + size; |
| 963 } else { | 964 } else { |
| 964 perform_gc = true; | 965 perform_gc = true; |
| 965 break; | 966 break; |
| 966 } | 967 } |
| 967 } | 968 } |
| 968 } | 969 } |
| 969 if (perform_gc) { | 970 if (perform_gc) { |
| 970 if (space == NEW_SPACE) { | 971 if (space == NEW_SPACE) { |
| 971 CollectGarbage(NEW_SPACE, "failed to reserve space in the new space"); | 972 CollectGarbage(NEW_SPACE, "failed to reserve space in the new space"); |
| 972 } else { | 973 } else { |
| (...skipping 2412 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3385 | 3386 |
| 3386 result->set_map_no_write_barrier(byte_array_map()); | 3387 result->set_map_no_write_barrier(byte_array_map()); |
| 3387 ByteArray::cast(result)->set_length(length); | 3388 ByteArray::cast(result)->set_length(length); |
| 3388 return result; | 3389 return result; |
| 3389 } | 3390 } |
| 3390 | 3391 |
| 3391 | 3392 |
| 3392 void Heap::CreateFillerObjectAt(Address addr, int size) { | 3393 void Heap::CreateFillerObjectAt(Address addr, int size) { |
| 3393 if (size == 0) return; | 3394 if (size == 0) return; |
| 3394 HeapObject* filler = HeapObject::FromAddress(addr); | 3395 HeapObject* filler = HeapObject::FromAddress(addr); |
| 3396 // At this point, we may be deserializing the heap from a snapshot, and |
| 3397 // none of the maps may have been created yet; they are still NULL. |
| 3395 if (size == kPointerSize) { | 3398 if (size == kPointerSize) { |
| 3396 filler->set_map_no_write_barrier(one_pointer_filler_map()); | 3399 filler->set_map_no_write_barrier(raw_unchecked_one_pointer_filler_map()); |
| 3400 DCHECK(filler->map() == NULL || filler->map() == one_pointer_filler_map()); |
| 3397 } else if (size == 2 * kPointerSize) { | 3401 } else if (size == 2 * kPointerSize) { |
| 3398 filler->set_map_no_write_barrier(two_pointer_filler_map()); | 3402 filler->set_map_no_write_barrier(raw_unchecked_two_pointer_filler_map()); |
| 3403 DCHECK(filler->map() == NULL || filler->map() == two_pointer_filler_map()); |
| 3399 } else { | 3404 } else { |
| 3400 filler->set_map_no_write_barrier(free_space_map()); | 3405 filler->set_map_no_write_barrier(raw_unchecked_free_space_map()); |
| 3401 FreeSpace::cast(filler)->set_size(size); | 3406 DCHECK(filler->map() == NULL || filler->map() == free_space_map()); |
| 3407 FreeSpace::cast(filler)->nobarrier_set_size(size); |
| 3402 } | 3408 } |
| 3403 } | 3409 } |
| 3404 | 3410 |
| 3405 | 3411 |
| 3406 bool Heap::CanMoveObjectStart(HeapObject* object) { | 3412 bool Heap::CanMoveObjectStart(HeapObject* object) { |
| 3407 Address address = object->address(); | 3413 Address address = object->address(); |
| 3408 bool is_in_old_pointer_space = InOldPointerSpace(address); | 3414 bool is_in_old_pointer_space = InOldPointerSpace(address); |
| 3409 bool is_in_old_data_space = InOldDataSpace(address); | 3415 bool is_in_old_data_space = InOldDataSpace(address); |
| 3410 | 3416 |
| 3411 if (lo_space()->Contains(object)) return false; | 3417 if (lo_space()->Contains(object)) return false; |
| (...skipping 3027 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 6439 static_cast<int>(object_sizes_last_time_[index])); | 6445 static_cast<int>(object_sizes_last_time_[index])); |
| 6440 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 6446 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 6441 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6447 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 6442 | 6448 |
| 6443 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 6449 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 6444 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 6450 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 6445 ClearObjectStats(); | 6451 ClearObjectStats(); |
| 6446 } | 6452 } |
| 6447 } | 6453 } |
| 6448 } // namespace v8::internal | 6454 } // namespace v8::internal |
| OLD | NEW |