OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_SPACES_INL_H_ | 5 #ifndef V8_SPACES_INL_H_ |
6 #define V8_SPACES_INL_H_ | 6 #define V8_SPACES_INL_H_ |
7 | 7 |
8 #include "heap-profiler.h" | 8 #include "heap-profiler.h" |
9 #include "isolate.h" | 9 #include "isolate.h" |
10 #include "spaces.h" | 10 #include "spaces.h" |
(...skipping 272 matching lines...)
283 return AllocationResult::Retry(identity()); | 283 return AllocationResult::Retry(identity()); |
284 } | 284 } |
285 | 285 |
286 | 286 |
287 // ----------------------------------------------------------------------------- | 287 // ----------------------------------------------------------------------------- |
288 // NewSpace | 288 // NewSpace |
289 | 289 |
290 | 290 |
291 AllocationResult NewSpace::AllocateRaw(int size_in_bytes) { | 291 AllocationResult NewSpace::AllocateRaw(int size_in_bytes) { |
292 Address old_top = allocation_info_.top(); | 292 Address old_top = allocation_info_.top(); |
293 #ifdef DEBUG | |
294 // If we are stressing compaction we waste some memory in new space | |
295 // in order to get more frequent GCs. | |
296 if (FLAG_stress_compaction && !heap()->linear_allocation()) { | |
297 if (allocation_info_.limit() - old_top >= size_in_bytes * 4) { | |
298 int filler_size = size_in_bytes * 4; | |
299 for (int i = 0; i < filler_size; i += kPointerSize) { | |
300 *(reinterpret_cast<Object**>(old_top + i)) = | |
301 heap()->one_pointer_filler_map(); | |
302 } | |
303 old_top += filler_size; | |
304 allocation_info_.set_top(allocation_info_.top() + filler_size); | |
305 } | |
306 } | |
307 #endif | |
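[Note] The block removed above is the DEBUG-only stress-compaction path: when --stress-compaction is set, AllocateRaw pads new space with one-pointer fillers so the semispace fills up sooner and GCs fire more often. The padding is written as fillers rather than raw garbage because the heap must stay iterable: every padded word carries the one-pointer-filler map, so a heap walker can parse and skip it. A minimal standalone sketch of that padding idea follows; Word, kFillerTag, and FillWithOneWordFillers are illustrative names, not V8 API.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    using Word = uintptr_t;
    const Word kFillerTag = 0xF111E4;  // hypothetical stand-in for the one-pointer filler map

    // Stamp every word in [start, start + size_in_words) with the filler tag,
    // so each word reads back as a self-describing, skippable filler object.
    void FillWithOneWordFillers(std::vector<Word>& heap, size_t start,
                                size_t size_in_words) {
      for (size_t i = 0; i < size_in_words; ++i) {
        heap[start + i] = kFillerTag;
      }
    }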
308 | 293 |
309 if (allocation_info_.limit() - old_top < size_in_bytes) { | 294 if (allocation_info_.limit() - old_top < size_in_bytes) { |
310 return SlowAllocateRaw(size_in_bytes); | 295 return SlowAllocateRaw(size_in_bytes); |
311 } | 296 } |
312 | 297 |
313 HeapObject* obj = HeapObject::FromAddress(old_top); | 298 HeapObject* obj = HeapObject::FromAddress(old_top); |
314 allocation_info_.set_top(allocation_info_.top() + size_in_bytes); | 299 allocation_info_.set_top(allocation_info_.top() + size_in_bytes); |
315 ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); | 300 ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); |
316 | 301 |
317 return obj; | 302 return obj; |
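[Note] What survives the change is the plain bump-pointer fast path: if the gap between top and limit is smaller than the request, fall back to SlowAllocateRaw (which can refill the space or trigger a GC); otherwise carve the object out at top and advance top by size_in_bytes. A minimal sketch of that fast path under assumed names (LinearAllocationArea and AllocateRawFast are hypothetical, not the V8 implementation):

    #include <cstddef>
    #include <cstdint>

    struct LinearAllocationArea {
      uintptr_t top;    // address of the next free byte
      uintptr_t limit;  // end of the current allocation chunk
    };

    // Returns the allocated address, or 0 to signal "take the slow path"
    // (the counterpart of SlowAllocateRaw above).
    uintptr_t AllocateRawFast(LinearAllocationArea& area, size_t size_in_bytes) {
      if (area.limit - area.top < size_in_bytes) {
        return 0;  // not enough headroom left in this chunk
      }
      uintptr_t result = area.top;
      area.top += size_in_bytes;  // bump past the newly reserved object
      return result;
    }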
(...skipping 15 matching lines...)
333 Map* map = object->map(); | 318 Map* map = object->map(); |
334 Heap* heap = object->GetHeap(); | 319 Heap* heap = object->GetHeap(); |
335 return map == heap->raw_unchecked_free_space_map() | 320 return map == heap->raw_unchecked_free_space_map() |
336 || map == heap->raw_unchecked_one_pointer_filler_map() | 321 || map == heap->raw_unchecked_one_pointer_filler_map() |
337 || map == heap->raw_unchecked_two_pointer_filler_map(); | 322 || map == heap->raw_unchecked_two_pointer_filler_map(); |
338 } | 323 } |
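[Note] The predicate ending here (its signature falls in the skipped lines above) classifies an object as reusable free space purely by its map pointer: free-space objects and the two filler kinds are the only objects carrying those three maps, so three pointer comparisons suffice and no per-object "free" bit is needed. A tiny sketch of the same tag-comparison idea, with hypothetical names:

    enum class CellKind { kFreeSpace, kOneWordFiller, kTwoWordFiller, kLiveObject };

    // Mirrors the map comparison above: a cell is dead/reusable iff its
    // type tag is one of the three filler kinds.
    bool IsFreeOrFiller(CellKind kind) {
      return kind == CellKind::kFreeSpace ||
             kind == CellKind::kOneWordFiller ||
             kind == CellKind::kTwoWordFiller;
    }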
339 | 324 |
340 } } // namespace v8::internal | 325 } } // namespace v8::internal |
341 | 326 |
342 #endif // V8_SPACES_INL_H_ | 327 #endif // V8_SPACES_INL_H_ |