| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_SPACES_INL_H_ | 5 #ifndef V8_SPACES_INL_H_ |
| 6 #define V8_SPACES_INL_H_ | 6 #define V8_SPACES_INL_H_ |
| 7 | 7 |
| 8 #include "heap-profiler.h" | 8 #include "heap-profiler.h" |
| 9 #include "isolate.h" | 9 #include "isolate.h" |
| 10 #include "spaces.h" | 10 #include "spaces.h" |
| (...skipping 233 matching lines...) |
| 244 Address current_top = allocation_info_.top(); | 244 Address current_top = allocation_info_.top(); |
| 245 Address new_top = current_top + size_in_bytes; | 245 Address new_top = current_top + size_in_bytes; |
| 246 if (new_top > allocation_info_.limit()) return NULL; | 246 if (new_top > allocation_info_.limit()) return NULL; |
| 247 | 247 |
| 248 allocation_info_.set_top(new_top); | 248 allocation_info_.set_top(new_top); |
| 249 return HeapObject::FromAddress(current_top); | 249 return HeapObject::FromAddress(current_top); |
| 250 } | 250 } |
| 251 | 251 |
| 252 | 252 |
| 253 // Raw allocation. | 253 // Raw allocation. |
| 254 MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) { | 254 AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) { |
| 255 HeapObject* object = AllocateLinearly(size_in_bytes); | 255 HeapObject* object = AllocateLinearly(size_in_bytes); |
| 256 if (object != NULL) { | 256 if (object != NULL) { |
| 257 if (identity() == CODE_SPACE) { | 257 if (identity() == CODE_SPACE) { |
| 258 SkipList::Update(object->address(), size_in_bytes); | 258 SkipList::Update(object->address(), size_in_bytes); |
| 259 } | 259 } |
| 260 return object; | 260 return object; |
| 261 } | 261 } |
| 262 | 262 |
| 263 ASSERT(!heap()->linear_allocation() || | 263 ASSERT(!heap()->linear_allocation() || |
| 264 (anchor_.next_chunk() == &anchor_ && | 264 (anchor_.next_chunk() == &anchor_ && |
| 265 anchor_.prev_chunk() == &anchor_)); | 265 anchor_.prev_chunk() == &anchor_)); |
| 266 | 266 |
| 267 object = free_list_.Allocate(size_in_bytes); | 267 object = free_list_.Allocate(size_in_bytes); |
| 268 if (object != NULL) { | 268 if (object != NULL) { |
| 269 if (identity() == CODE_SPACE) { | 269 if (identity() == CODE_SPACE) { |
| 270 SkipList::Update(object->address(), size_in_bytes); | 270 SkipList::Update(object->address(), size_in_bytes); |
| 271 } | 271 } |
| 272 return object; | 272 return object; |
| 273 } | 273 } |
| 274 | 274 |
| 275 object = SlowAllocateRaw(size_in_bytes); | 275 object = SlowAllocateRaw(size_in_bytes); |
| 276 if (object != NULL) { | 276 if (object != NULL) { |
| 277 if (identity() == CODE_SPACE) { | 277 if (identity() == CODE_SPACE) { |
| 278 SkipList::Update(object->address(), size_in_bytes); | 278 SkipList::Update(object->address(), size_in_bytes); |
| 279 } | 279 } |
| 280 return object; | 280 return object; |
| 281 } | 281 } |
| 282 | 282 |
| 283 return Failure::RetryAfterGC(identity()); | 283 return AllocationResult::Retry(identity()); |
| 284 } | 284 } |
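The signature change above swaps the old MaybeObject*/Failure::RetryAfterGC convention for AllocationResult: a failed allocation is now an explicit value that records which space should be collected, rather than a tagged failure pointer. A minimal sketch of the pattern, with simplified names (illustrative stand-in only; the real v8::internal::AllocationResult is declared in heap.h and differs in detail):

// Illustrative stand-in, not the actual v8::internal declaration.
class HeapObject;  // opaque for this sketch

enum AllocationSpace { NEW_SPACE, CODE_SPACE /* ...trimmed... */ };

class AllocationResult {
 public:
  // Success: wrap the freshly allocated object.
  explicit AllocationResult(HeapObject* object)
      : object_(object), is_retry_(false), retry_space_(NEW_SPACE) {}

  // Failure: no object, only the space whose GC should free memory.
  static AllocationResult Retry(AllocationSpace space) {
    AllocationResult result(NULL);
    result.is_retry_ = true;
    result.retry_space_ = space;
    return result;
  }

  bool IsRetry() const { return is_retry_; }
  AllocationSpace RetrySpace() const { return retry_space_; }

  // Extract the object on success; returns false for a retry result.
  bool To(HeapObject** out) const {
    if (is_retry_) return false;
    *out = object_;
    return true;
  }

 private:
  HeapObject* object_;
  bool is_retry_;
  AllocationSpace retry_space_;
};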
| 285 | 285 |
| 286 | 286 |
| 287 // ----------------------------------------------------------------------------- | 287 // ----------------------------------------------------------------------------- |
| 288 // NewSpace | 288 // NewSpace |
| 289 | 289 |
| 290 | 290 |
| 291 MaybeObject* NewSpace::AllocateRaw(int size_in_bytes) { | 291 AllocationResult NewSpace::AllocateRaw(int size_in_bytes) { |
| 292 Address old_top = allocation_info_.top(); | 292 Address old_top = allocation_info_.top(); |
| 293 #ifdef DEBUG | 293 #ifdef DEBUG |
| 294 // If we are stressing compaction we waste some memory in new space | 294 // If we are stressing compaction we waste some memory in new space |
| 295 // in order to get more frequent GCs. | 295 // in order to get more frequent GCs. |
| 296 if (FLAG_stress_compaction && !heap()->linear_allocation()) { | 296 if (FLAG_stress_compaction && !heap()->linear_allocation()) { |
| 297 if (allocation_info_.limit() - old_top >= size_in_bytes * 4) { | 297 if (allocation_info_.limit() - old_top >= size_in_bytes * 4) { |
| 298 int filler_size = size_in_bytes * 4; | 298 int filler_size = size_in_bytes * 4; |
| 299 for (int i = 0; i < filler_size; i += kPointerSize) { | 299 for (int i = 0; i < filler_size; i += kPointerSize) { |
| 300 *(reinterpret_cast<Object**>(old_top + i)) = | 300 *(reinterpret_cast<Object**>(old_top + i)) = |
| 301 heap()->one_pointer_filler_map(); | 301 heap()->one_pointer_filler_map(); |
| (...skipping 31 matching lines...) |
| 333 Map* map = object->map(); | 333 Map* map = object->map(); |
| 334 Heap* heap = object->GetHeap(); | 334 Heap* heap = object->GetHeap(); |
| 335 return map == heap->raw_unchecked_free_space_map() | 335 return map == heap->raw_unchecked_free_space_map() |
| 336 || map == heap->raw_unchecked_one_pointer_filler_map() | 336 || map == heap->raw_unchecked_one_pointer_filler_map() |
| 337 || map == heap->raw_unchecked_two_pointer_filler_map(); | 337 || map == heap->raw_unchecked_two_pointer_filler_map(); |
| 338 } | 338 } |
| 339 | 339 |
| 340 } } // namespace v8::internal | 340 } } // namespace v8::internal |
| 341 | 341 |
| 342 #endif // V8_SPACES_INL_H_ | 342 #endif // V8_SPACES_INL_H_ |
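For context on how call sites adapt, here is a hypothetical caller (ExampleCaller, kSize, and CollectGarbage are illustrative stand-ins, not code from this patch) consuming the new return type instead of testing for a Failure:

// Old convention: failure propagated as a tagged MaybeObject* pointer.
//   MaybeObject* maybe = space->AllocateRaw(kSize);
//   Object* obj;
//   if (!maybe->ToObject(&obj)) return maybe;  // a Failure::RetryAfterGC
// New convention: failure and its retry space are explicit values.
void ExampleCaller(PagedSpace* space) {
  const int kSize = 128;  // illustrative allocation size
  HeapObject* object = NULL;
  AllocationResult result = space->AllocateRaw(kSize);
  if (!result.To(&object)) {
    // Allocation failed: collect the space named by the result, retry once.
    CollectGarbage(result.RetrySpace());  // stand-in for the heap's GC entry point
    result = space->AllocateRaw(kSize);
    if (!result.To(&object)) return;  // still out of memory; give up here
  }
  // ... use object ...
}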