| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
| (...skipping 1923 matching lines...) |
| 1934 | 1934 |
| 1935 offset += 2; | 1935 offset += 2; |
| 1936 current_cell >>= 2; | 1936 current_cell >>= 2; |
| 1937 | 1937 |
| 1938 // TODO(hpayer): Refactor EvacuateObject and call this function instead. | 1938 // TODO(hpayer): Refactor EvacuateObject and call this function instead. |
| 1939 if (heap()->ShouldBePromoted(object->address(), size) && | 1939 if (heap()->ShouldBePromoted(object->address(), size) && |
| 1940 TryPromoteObject(object, size)) { | 1940 TryPromoteObject(object, size)) { |
| 1941 continue; | 1941 continue; |
| 1942 } | 1942 } |
| 1943 | 1943 |
| 1944 AllocationResult allocation; | 1944 AllocationAlignment alignment = object->NeedsToEnsureDoubleAlignment() |
| 1945 #ifdef V8_HOST_ARCH_32_BIT | 1945 ? kDoubleAligned |
| 1946 if (object->NeedsToEnsureDoubleAlignment()) { | 1946 : kWordAligned; |
| 1947 allocation = new_space->AllocateRawAligned(size, kDoubleAligned); | 1947 AllocationResult allocation = new_space->AllocateRaw(size, alignment); |
| 1948 } else { | |
| 1949 allocation = new_space->AllocateRaw(size); | |
| 1950 } | |
| 1951 #else | |
| 1952 allocation = new_space->AllocateRaw(size); | |
| 1953 #endif | |
| 1954 if (allocation.IsRetry()) { | 1948 if (allocation.IsRetry()) { |
| 1955 if (!new_space->AddFreshPage()) { | 1949 if (!new_space->AddFreshPage()) { |
| 1956 // Shouldn't happen. We are sweeping linearly, and to-space | 1950 // Shouldn't happen. We are sweeping linearly, and to-space |
| 1957 // has the same number of pages as from-space, so there is | 1951 // has the same number of pages as from-space, so there is |
| 1958 // always room. | 1952 // always room. |
| 1959 UNREACHABLE(); | 1953 UNREACHABLE(); |
| 1960 } | 1954 } |
| 1961 #ifdef V8_HOST_ARCH_32_BIT | 1955 allocation = new_space->AllocateRaw(size, alignment); |
| 1962 if (object->NeedsToEnsureDoubleAlignment()) { | |
| 1963 allocation = new_space->AllocateRawAligned(size, kDoubleAligned); | |
| 1964 } else { | |
| 1965 allocation = new_space->AllocateRaw(size); | |
| 1966 } | |
| 1967 #else | |
| 1968 allocation = new_space->AllocateRaw(size); | |
| 1969 #endif | |
| 1970 DCHECK(!allocation.IsRetry()); | 1956 DCHECK(!allocation.IsRetry()); |
| 1971 } | 1957 } |
| 1972 Object* target = allocation.ToObjectChecked(); | 1958 Object* target = allocation.ToObjectChecked(); |
| 1973 | 1959 |
| 1974 MigrateObject(HeapObject::cast(target), object, size, NEW_SPACE); | 1960 MigrateObject(HeapObject::cast(target), object, size, NEW_SPACE); |
| 1975 heap()->IncrementSemiSpaceCopiedObjectSize(size); | 1961 heap()->IncrementSemiSpaceCopiedObjectSize(size); |
| 1976 } | 1962 } |
| 1977 *cells = 0; | 1963 *cells = 0; |
| 1978 } | 1964 } |
| 1979 return survivors_size; | 1965 return survivors_size; |
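
Note on the hunk above: on the NEW side the per-call-site #ifdef V8_HOST_ARCH_32_BIT branches are gone; the caller computes an AllocationAlignment once from NeedsToEnsureDoubleAlignment() and hands it to a single AllocateRaw(size, alignment) overload. Below is a minimal, self-contained sketch of that dispatch pattern, assuming a toy malloc-backed space; SketchSpace and EvacuateSketch are hypothetical illustrations of the call-site shape, not V8's real NewSpace or AllocateRaw.

    #include <cstdint>
    #include <cstdlib>

    // Names mirrored from the diff; everything else here is a hypothetical mock.
    enum AllocationAlignment { kWordAligned, kDoubleAligned };

    struct AllocationResult {
      void* address;
      bool IsRetry() const { return address == nullptr; }  // null models a failed attempt
    };

    // Toy space: one AllocateRaw entry point that dispatches on the alignment
    // argument, instead of an #ifdef at every caller.
    class SketchSpace {
     public:
      AllocationResult AllocateRaw(int size, AllocationAlignment alignment) {
        if (alignment == kDoubleAligned) {
          // Over-allocate and round the pointer up to an 8-byte boundary.
          // (Nothing is freed in this toy; a real space fills the slack word.)
          const uintptr_t mask = 7;
          void* raw = std::malloc(static_cast<size_t>(size) + 8);
          uintptr_t aligned = (reinterpret_cast<uintptr_t>(raw) + mask) & ~mask;
          return {reinterpret_cast<void*>(aligned)};
        }
        return {std::malloc(static_cast<size_t>(size))};
      }
    };

    // Caller shape matching the NEW side of the diff: alignment computed once.
    AllocationResult EvacuateSketch(SketchSpace* space, int size,
                                    bool needs_double_alignment) {
      AllocationAlignment alignment =
          needs_double_alignment ? kDoubleAligned : kWordAligned;
      return space->AllocateRaw(size, alignment);
    }
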
| (...skipping 1132 matching lines...) |
| 3112 } | 3098 } |
| 3113 | 3099 |
| 3114 | 3100 |
| 3115 bool MarkCompactCollector::TryPromoteObject(HeapObject* object, | 3101 bool MarkCompactCollector::TryPromoteObject(HeapObject* object, |
| 3116 int object_size) { | 3102 int object_size) { |
| 3117 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); | 3103 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); |
| 3118 | 3104 |
| 3119 OldSpace* old_space = heap()->old_space(); | 3105 OldSpace* old_space = heap()->old_space(); |
| 3120 | 3106 |
| 3121 HeapObject* target; | 3107 HeapObject* target; |
| 3122 AllocationResult allocation; | 3108 AllocationAlignment alignment = |
| 3123 #ifdef V8_HOST_ARCH_32_BIT | 3109 object->NeedsToEnsureDoubleAlignment() ? kDoubleAligned : kWordAligned; |
| 3124 if (object->NeedsToEnsureDoubleAlignment()) { | 3110 AllocationResult allocation = old_space->AllocateRaw(object_size, alignment); |
| 3125 allocation = old_space->AllocateRawAligned(object_size, kDoubleAligned); | |
| 3126 } else { | |
| 3127 allocation = old_space->AllocateRaw(object_size); | |
| 3128 } | |
| 3129 #else | |
| 3130 allocation = old_space->AllocateRaw(object_size); | |
| 3131 #endif | |
| 3132 if (allocation.To(&target)) { | 3111 if (allocation.To(&target)) { |
| 3133 MigrateObject(target, object, object_size, old_space->identity()); | 3112 MigrateObject(target, object, object_size, old_space->identity()); |
| 3134 heap()->IncrementPromotedObjectsSize(object_size); | 3113 heap()->IncrementPromotedObjectsSize(object_size); |
| 3135 return true; | 3114 return true; |
| 3136 } | 3115 } |
| 3137 | 3116 |
| 3138 return false; | 3117 return false; |
| 3139 } | 3118 } |
| 3140 | 3119 |
| 3141 | 3120 |
| (...skipping 203 matching lines...) |
| 3345 if (*cell == 0) continue; | 3324 if (*cell == 0) continue; |
| 3346 | 3325 |
| 3347 int live_objects = MarkWordToObjectStarts(*cell, offsets); | 3326 int live_objects = MarkWordToObjectStarts(*cell, offsets); |
| 3348 for (int i = 0; i < live_objects; i++) { | 3327 for (int i = 0; i < live_objects; i++) { |
| 3349 Address object_addr = cell_base + offsets[i] * kPointerSize; | 3328 Address object_addr = cell_base + offsets[i] * kPointerSize; |
| 3350 HeapObject* object = HeapObject::FromAddress(object_addr); | 3329 HeapObject* object = HeapObject::FromAddress(object_addr); |
| 3351 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3330 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); |
| 3352 | 3331 |
| 3353 int size = object->Size(); | 3332 int size = object->Size(); |
| 3354 | 3333 |
| 3334 AllocationAlignment alignment = object->NeedsToEnsureDoubleAlignment() |
| 3335 ? kDoubleAligned |
| 3336 : kWordAligned; |
| 3355 HeapObject* target_object; | 3337 HeapObject* target_object; |
| 3356 AllocationResult allocation = space->AllocateRaw(size); | 3338 AllocationResult allocation = space->AllocateRaw(size, alignment); |
| 3357 if (!allocation.To(&target_object)) { | 3339 if (!allocation.To(&target_object)) { |
| 3358 // If allocation failed, use emergency memory and re-try allocation. | 3340 // If allocation failed, use emergency memory and re-try allocation. |
| 3359 CHECK(space->HasEmergencyMemory()); | 3341 CHECK(space->HasEmergencyMemory()); |
| 3360 space->UseEmergencyMemory(); | 3342 space->UseEmergencyMemory(); |
| 3361 allocation = space->AllocateRaw(size); | 3343 allocation = space->AllocateRaw(size, alignment); |
| 3362 } | 3344 } |
| 3363 if (!allocation.To(&target_object)) { | 3345 if (!allocation.To(&target_object)) { |
| 3364 // OS refused to give us memory. | 3346 // OS refused to give us memory. |
| 3365 V8::FatalProcessOutOfMemory("Evacuation"); | 3347 V8::FatalProcessOutOfMemory("Evacuation"); |
| 3366 return; | 3348 return; |
| 3367 } | 3349 } |
| 3368 | 3350 |
| 3369 MigrateObject(target_object, object, size, space->identity()); | 3351 MigrateObject(target_object, object, size, space->identity()); |
| 3370 DCHECK(object->map_word().IsForwardingAddress()); | 3352 DCHECK(object->map_word().IsForwardingAddress()); |
| 3371 } | 3353 } |
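
The TryPromoteObject and old-space evacuation hunks above follow the same shape: compute the alignment once, then reuse it for every attempt, including the emergency-memory retry. A compact sketch of that call-site shape follows, assuming a hypothetical mock space; HasEmergencyMemory/UseEmergencyMemory here only echo the names visible in the diff and the mock allocates via malloc, so the retry branch is shown structurally rather than exercised.

    #include <cstdio>
    #include <cstdlib>

    enum AllocationAlignment { kWordAligned, kDoubleAligned };

    // Hypothetical mock; the real PagedSpace behaviour is far more involved.
    struct MockOldSpace {
      bool emergency_used = false;
      void* AllocateRaw(int size, AllocationAlignment /*alignment*/) {
        return std::malloc(static_cast<size_t>(size));  // succeeds unless the OS is out of memory
      }
      bool HasEmergencyMemory() const { return !emergency_used; }
      void UseEmergencyMemory() { emergency_used = true; }
    };

    // Call-site shape after the refactor: one alignment value, reused on retry.
    void* EvacuateWithFallback(MockOldSpace* space, int size,
                               bool needs_double_alignment) {
      AllocationAlignment alignment =
          needs_double_alignment ? kDoubleAligned : kWordAligned;
      void* target = space->AllocateRaw(size, alignment);
      if (target == nullptr && space->HasEmergencyMemory()) {
        space->UseEmergencyMemory();
        target = space->AllocateRaw(size, alignment);  // same alignment on retry
      }
      if (target == nullptr) {
        std::fprintf(stderr, "Evacuation: out of memory\n");
        std::abort();
      }
      return target;
    }
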
| (...skipping 1410 matching lines...) |
| 4782 SlotsBuffer* buffer = *buffer_address; | 4764 SlotsBuffer* buffer = *buffer_address; |
| 4783 while (buffer != NULL) { | 4765 while (buffer != NULL) { |
| 4784 SlotsBuffer* next_buffer = buffer->next(); | 4766 SlotsBuffer* next_buffer = buffer->next(); |
| 4785 DeallocateBuffer(buffer); | 4767 DeallocateBuffer(buffer); |
| 4786 buffer = next_buffer; | 4768 buffer = next_buffer; |
| 4787 } | 4769 } |
| 4788 *buffer_address = NULL; | 4770 *buffer_address = NULL; |
| 4789 } | 4771 } |
| 4790 } | 4772 } |
| 4791 } // namespace v8::internal | 4773 } // namespace v8::internal |