| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2264 matching lines...) |
| 2275 } | 2275 } |
| 2276 } | 2276 } |
| 2277 } | 2277 } |
| 2278 | 2278 |
| 2279 | 2279 |
| 2280 template<ObjectContents object_contents, int alignment> | 2280 template<ObjectContents object_contents, int alignment> |
| 2281 static inline void EvacuateObject(Map* map, | 2281 static inline void EvacuateObject(Map* map, |
| 2282 HeapObject** slot, | 2282 HeapObject** slot, |
| 2283 HeapObject* object, | 2283 HeapObject* object, |
| 2284 int object_size) { | 2284 int object_size) { |
| 2285 SLOW_ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize); | 2285 SLOW_ASSERT(object_size <= Page::kMaxRegularHeapObjectSize); |
| 2286 SLOW_ASSERT(object->Size() == object_size); | 2286 SLOW_ASSERT(object->Size() == object_size); |
| 2287 | 2287 |
| 2288 int allocation_size = object_size; | 2288 int allocation_size = object_size; |
| 2289 if (alignment != kObjectAlignment) { | 2289 if (alignment != kObjectAlignment) { |
| 2290 ASSERT(alignment == kDoubleAlignment); | 2290 ASSERT(alignment == kDoubleAlignment); |
| 2291 allocation_size += kPointerSize; | 2291 allocation_size += kPointerSize; |
| 2292 } | 2292 } |
| 2293 | 2293 |
| 2294 Heap* heap = map->GetHeap(); | 2294 Heap* heap = map->GetHeap(); |
| 2295 if (heap->ShouldBePromoted(object->address(), object_size)) { | 2295 if (heap->ShouldBePromoted(object->address(), object_size)) { |
| (...skipping 633 matching lines...) |
| 2929 } | 2929 } |
| 2930 ASSERT(!InNewSpace(empty_fixed_array())); | 2930 ASSERT(!InNewSpace(empty_fixed_array())); |
| 2931 return true; | 2931 return true; |
| 2932 } | 2932 } |
| 2933 | 2933 |
| 2934 | 2934 |
| 2935 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { | 2935 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { |
| 2936 // Statically ensure that it is safe to allocate heap numbers in paged | 2936 // Statically ensure that it is safe to allocate heap numbers in paged |
| 2937 // spaces. | 2937 // spaces. |
| 2938 int size = HeapNumber::kSize; | 2938 int size = HeapNumber::kSize; |
| 2939 STATIC_ASSERT(HeapNumber::kSize <= Page::kNonCodeObjectAreaSize); | 2939 STATIC_ASSERT(HeapNumber::kSize <= Page::kRegularObjectAreaSize); |
| 2940 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); | 2940 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 2941 | 2941 |
| 2942 Object* result; | 2942 Object* result; |
| 2943 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); | 2943 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 2944 if (!maybe_result->ToObject(&result)) return maybe_result; | 2944 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2945 } | 2945 } |
| 2946 | 2946 |
| 2947 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); | 2947 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); |
| 2948 HeapNumber::cast(result)->set_value(value); | 2948 HeapNumber::cast(result)->set_value(value); |
| 2949 return result; | 2949 return result; |
| 2950 } | 2950 } |
| 2951 | 2951 |
| 2952 | 2952 |
| 2953 MaybeObject* Heap::AllocateCell(Object* value) { | 2953 MaybeObject* Heap::AllocateCell(Object* value) { |
| 2954 int size = Cell::kSize; | 2954 int size = Cell::kSize; |
| 2955 STATIC_ASSERT(Cell::kSize <= Page::kNonCodeObjectAreaSize); | 2955 STATIC_ASSERT(Cell::kSize <= Page::kRegularObjectAreaSize); |
| 2956 | 2956 |
| 2957 Object* result; | 2957 Object* result; |
| 2958 { MaybeObject* maybe_result = AllocateRaw(size, CELL_SPACE, CELL_SPACE); | 2958 { MaybeObject* maybe_result = AllocateRaw(size, CELL_SPACE, CELL_SPACE); |
| 2959 if (!maybe_result->ToObject(&result)) return maybe_result; | 2959 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2960 } | 2960 } |
| 2961 HeapObject::cast(result)->set_map_no_write_barrier(cell_map()); | 2961 HeapObject::cast(result)->set_map_no_write_barrier(cell_map()); |
| 2962 Cell::cast(result)->set_value(value); | 2962 Cell::cast(result)->set_value(value); |
| 2963 return result; | 2963 return result; |
| 2964 } | 2964 } |
| 2965 | 2965 |
| 2966 | 2966 |
| 2967 MaybeObject* Heap::AllocatePropertyCell() { | 2967 MaybeObject* Heap::AllocatePropertyCell() { |
| 2968 int size = PropertyCell::kSize; | 2968 int size = PropertyCell::kSize; |
| 2969 STATIC_ASSERT(PropertyCell::kSize <= Page::kNonCodeObjectAreaSize); | 2969 STATIC_ASSERT(PropertyCell::kSize <= Page::kRegularObjectAreaSize); |
| 2970 | 2970 |
| 2971 Object* result; | 2971 Object* result; |
| 2972 MaybeObject* maybe_result = | 2972 MaybeObject* maybe_result = |
| 2973 AllocateRaw(size, PROPERTY_CELL_SPACE, PROPERTY_CELL_SPACE); | 2973 AllocateRaw(size, PROPERTY_CELL_SPACE, PROPERTY_CELL_SPACE); |
| 2974 if (!maybe_result->ToObject(&result)) return maybe_result; | 2974 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2975 | 2975 |
| 2976 HeapObject::cast(result)->set_map_no_write_barrier( | 2976 HeapObject::cast(result)->set_map_no_write_barrier( |
| 2977 global_property_cell_map()); | 2977 global_property_cell_map()); |
| 2978 PropertyCell* cell = PropertyCell::cast(result); | 2978 PropertyCell* cell = PropertyCell::cast(result); |
| 2979 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), | 2979 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), |
| (...skipping 737 matching lines...) |
| 3717 return Smi::FromInt(int_value); | 3717 return Smi::FromInt(int_value); |
| 3718 } | 3718 } |
| 3719 | 3719 |
| 3720 // Materialize the value in the heap. | 3720 // Materialize the value in the heap. |
| 3721 return AllocateHeapNumber(value, pretenure); | 3721 return AllocateHeapNumber(value, pretenure); |
| 3722 } | 3722 } |
| 3723 | 3723 |
| 3724 | 3724 |
| 3725 MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) { | 3725 MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) { |
| 3726 // Statically ensure that it is safe to allocate foreigns in paged spaces. | 3726 // Statically ensure that it is safe to allocate foreigns in paged spaces. |
| 3727 STATIC_ASSERT(Foreign::kSize <= Page::kMaxNonCodeHeapObjectSize); | 3727 STATIC_ASSERT(Foreign::kSize <= Page::kMaxRegularHeapObjectSize); |
| 3728 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 3728 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
| 3729 Foreign* result; | 3729 Foreign* result; |
| 3730 MaybeObject* maybe_result = Allocate(foreign_map(), space); | 3730 MaybeObject* maybe_result = Allocate(foreign_map(), space); |
| 3731 if (!maybe_result->To(&result)) return maybe_result; | 3731 if (!maybe_result->To(&result)) return maybe_result; |
| 3732 result->set_foreign_address(address); | 3732 result->set_foreign_address(address); |
| 3733 return result; | 3733 return result; |
| 3734 } | 3734 } |
| 3735 | 3735 |
| 3736 | 3736 |
| 3737 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) { | 3737 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) { |
| (...skipping 1603 matching lines...) |
| 5341 } | 5341 } |
| 5342 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( | 5342 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( |
| 5343 hash_table_map()); | 5343 hash_table_map()); |
| 5344 ASSERT(result->IsHashTable()); | 5344 ASSERT(result->IsHashTable()); |
| 5345 return result; | 5345 return result; |
| 5346 } | 5346 } |
| 5347 | 5347 |
| 5348 | 5348 |
| 5349 MaybeObject* Heap::AllocateSymbol() { | 5349 MaybeObject* Heap::AllocateSymbol() { |
| 5350 // Statically ensure that it is safe to allocate symbols in paged spaces. | 5350 // Statically ensure that it is safe to allocate symbols in paged spaces. |
| 5351 STATIC_ASSERT(Symbol::kSize <= Page::kNonCodeObjectAreaSize); | 5351 STATIC_ASSERT(Symbol::kSize <= Page::kRegularObjectAreaSize); |
| 5352 | 5352 |
| 5353 Object* result; | 5353 Object* result; |
| 5354 MaybeObject* maybe = | 5354 MaybeObject* maybe = |
| 5355 AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE); | 5355 AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE); |
| 5356 if (!maybe->ToObject(&result)) return maybe; | 5356 if (!maybe->ToObject(&result)) return maybe; |
| 5357 | 5357 |
| 5358 HeapObject::cast(result)->set_map_no_write_barrier(symbol_map()); | 5358 HeapObject::cast(result)->set_map_no_write_barrier(symbol_map()); |
| 5359 | 5359 |
| 5360 // Generate a random hash value. | 5360 // Generate a random hash value. |
| 5361 int hash; | 5361 int hash; |
| (...skipping 943 matching lines...) |
| 6305 ASSERT(external_allocation_limit_ <= 256 * MB); | 6305 ASSERT(external_allocation_limit_ <= 256 * MB); |
| 6306 | 6306 |
| 6307 // The old generation is paged and needs at least one page for each space. | 6307 // The old generation is paged and needs at least one page for each space. |
| 6308 int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1; | 6308 int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1; |
| 6309 max_old_generation_size_ = Max(static_cast<intptr_t>(paged_space_count * | 6309 max_old_generation_size_ = Max(static_cast<intptr_t>(paged_space_count * |
| 6310 Page::kPageSize), | 6310 Page::kPageSize), |
| 6311 RoundUp(max_old_generation_size_, | 6311 RoundUp(max_old_generation_size_, |
| 6312 Page::kPageSize)); | 6312 Page::kPageSize)); |
| 6313 | 6313 |
| 6314 // We rely on being able to allocate new arrays in paged spaces. | 6314 // We rely on being able to allocate new arrays in paged spaces. |
| 6315 ASSERT(MaxRegularSpaceAllocationSize() >= | 6315 ASSERT(Page::kMaxRegularHeapObjectSize >= |
| 6316 (JSArray::kSize + | 6316 (JSArray::kSize + |
| 6317 FixedArray::SizeFor(JSObject::kInitialMaxFastElementArray) + | 6317 FixedArray::SizeFor(JSObject::kInitialMaxFastElementArray) + |
| 6318 AllocationMemento::kSize)); | 6318 AllocationMemento::kSize)); |
| 6319 | 6319 |
| 6320 configured_ = true; | 6320 configured_ = true; |
| 6321 return true; | 6321 return true; |
| 6322 } | 6322 } |
| 6323 | 6323 |
| 6324 | 6324 |
| 6325 bool Heap::ConfigureHeapDefault() { | 6325 bool Heap::ConfigureHeapDefault() { |
| (...skipping 1414 matching lines...) |
| 7740 static_cast<int>(object_sizes_last_time_[index])); | 7740 static_cast<int>(object_sizes_last_time_[index])); |
| 7741 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 7741 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 7742 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7742 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 7743 | 7743 |
| 7744 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7744 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 7745 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7745 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 7746 ClearObjectStats(); | 7746 ClearObjectStats(); |
| 7747 } | 7747 } |
| 7748 | 7748 |
| 7749 } } // namespace v8::internal | 7749 } } // namespace v8::internal |