OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_SPACES_INL_H_ | 5 #ifndef V8_SPACES_INL_H_ |
6 #define V8_SPACES_INL_H_ | 6 #define V8_SPACES_INL_H_ |
7 | 7 |
8 #include "src/heap-profiler.h" | 8 #include "src/heap-profiler.h" |
9 #include "src/isolate.h" | 9 #include "src/isolate.h" |
10 #include "src/spaces.h" | 10 #include "src/spaces.h" |
(...skipping 235 matching lines...) | |
246 if (new_top > allocation_info_.limit()) return NULL; | 246 if (new_top > allocation_info_.limit()) return NULL; |
247 | 247 |
248 allocation_info_.set_top(new_top); | 248 allocation_info_.set_top(new_top); |
249 return HeapObject::FromAddress(current_top); | 249 return HeapObject::FromAddress(current_top); |
250 } | 250 } |
251 | 251 |
252 | 252 |
253 // Raw allocation. | 253 // Raw allocation. |
254 AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) { | 254 AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) { |
255 HeapObject* object = AllocateLinearly(size_in_bytes); | 255 HeapObject* object = AllocateLinearly(size_in_bytes); |
| 256 |
Igor Sheludko 2014/06/13 15:46:03: This change is no longer needed, however I think t
| 257 ASSERT((object == NULL) || |
| 258 !heap()->linear_allocation() || |
| 259 (anchor_.next_chunk() == &anchor_ && |
| 260 anchor_.prev_chunk() == &anchor_)); |
| 261 |
| 262 if (object == NULL) { |
| 263 object = free_list_.Allocate(size_in_bytes); |
| 264 if (object == NULL) { |
| 265 object = SlowAllocateRaw(size_in_bytes); |
| 266 } |
| 267 } |
| 268 |
256 if (object != NULL) { | 269 if (object != NULL) { |
257 if (identity() == CODE_SPACE) { | 270 if (identity() == CODE_SPACE) { |
258 SkipList::Update(object->address(), size_in_bytes); | 271 SkipList::Update(object->address(), size_in_bytes); |
259 } | |
260 return object; | |
261 } | |
262 | |
263 ASSERT(!heap()->linear_allocation() || | |
264 (anchor_.next_chunk() == &anchor_ && | |
265 anchor_.prev_chunk() == &anchor_)); | |
266 | |
267 object = free_list_.Allocate(size_in_bytes); | |
268 if (object != NULL) { | |
269 if (identity() == CODE_SPACE) { | |
270 SkipList::Update(object->address(), size_in_bytes); | |
271 } | |
272 return object; | |
273 } | |
274 | |
275 object = SlowAllocateRaw(size_in_bytes); | |
276 if (object != NULL) { | |
277 if (identity() == CODE_SPACE) { | |
278 SkipList::Update(object->address(), size_in_bytes); | |
279 } | 272 } |
280 return object; | 273 return object; |
281 } | 274 } |
282 | 275 |
283 return AllocationResult::Retry(identity()); | 276 return AllocationResult::Retry(identity()); |
284 } | 277 } |
285 | 278 |
286 | 279 |
287 // ----------------------------------------------------------------------------- | 280 // ----------------------------------------------------------------------------- |
288 // NewSpace | 281 // NewSpace |
(...skipping 29 matching lines...) | |
318 Map* map = object->map(); | 311 Map* map = object->map(); |
319 Heap* heap = object->GetHeap(); | 312 Heap* heap = object->GetHeap(); |
320 return map == heap->raw_unchecked_free_space_map() | 313 return map == heap->raw_unchecked_free_space_map() |
321 || map == heap->raw_unchecked_one_pointer_filler_map() | 314 || map == heap->raw_unchecked_one_pointer_filler_map() |
322 || map == heap->raw_unchecked_two_pointer_filler_map(); | 315 || map == heap->raw_unchecked_two_pointer_filler_map(); |
323 } | 316 } |
324 | 317 |
325 } } // namespace v8::internal | 318 } } // namespace v8::internal |
326 | 319 |
327 #endif // V8_SPACES_INL_H_ | 320 #endif // V8_SPACES_INL_H_ |
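Taken together, the new side of the AllocateRaw hunk turns PagedSpace::AllocateRaw() into a single fallback chain: try the linear (bump-pointer) area, then the free list, then the slow path, and only afterwards perform the CODE_SPACE skip-list update and return, instead of repeating that success bookkeeping in every branch as the old code did. The sketch below is a minimal, self-contained illustration of that control-flow shape only; Arena, AllocateFromFreeList, AllocateSlow, and UpdateBookkeeping are hypothetical stand-ins, not V8 types or APIs.

```cpp
#include <cstddef>
#include <cstdint>

// Hypothetical stand-in for a paged space; not a V8 type.
struct Arena {
  uint8_t* top = nullptr;
  uint8_t* limit = nullptr;

  // Fast path: bump-pointer allocation from the current linear area,
  // analogous to AllocateLinearly() in the patch.
  void* AllocateLinearly(size_t size) {
    if (top == nullptr || top + size > limit) return nullptr;
    void* result = top;
    top += size;
    return result;
  }

  // Stubbed fallbacks standing in for free_list_.Allocate() and
  // SlowAllocateRaw(); real implementations would search a free list
  // or try to grow the space.
  void* AllocateFromFreeList(size_t) { return nullptr; }
  void* AllocateSlow(size_t) { return nullptr; }

  // Stand-in for the per-success bookkeeping (SkipList::Update() for
  // CODE_SPACE allocations in the patch).
  void UpdateBookkeeping(void*, size_t) {}

  // The refactored control flow: chain the fallbacks first, then run the
  // success-path bookkeeping exactly once.
  void* AllocateRaw(size_t size) {
    void* object = AllocateLinearly(size);
    if (object == nullptr) {
      object = AllocateFromFreeList(size);
      if (object == nullptr) {
        object = AllocateSlow(size);
      }
    }
    if (object != nullptr) {
      UpdateBookkeeping(object, size);  // done once, not per fallback branch
      return object;
    }
    return nullptr;  // caller would retry, like AllocationResult::Retry()
  }
};
```

The point of collapsing the three duplicated success branches is that exactly one place decides success or failure, so bookkeeping such as the skip-list update cannot be forgotten when a new fallback is added; how the relocated ASSERT fits into that is what the inline review comment above addresses.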