Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(718)

Side by Side Diff: src/spaces-inl.h

Issue 325553002: --verify-predictable mode added for ensuring that GC behaves deterministically. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« src/spaces.cc ('K') | src/spaces.cc ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #ifndef V8_SPACES_INL_H_ 5 #ifndef V8_SPACES_INL_H_
6 #define V8_SPACES_INL_H_ 6 #define V8_SPACES_INL_H_
7 7
8 #include "src/heap-profiler.h" 8 #include "src/heap-profiler.h"
9 #include "src/isolate.h" 9 #include "src/isolate.h"
10 #include "src/spaces.h" 10 #include "src/spaces.h"
(...skipping 235 matching lines...) Expand 10 before | Expand all | Expand 10 after
246 if (new_top > allocation_info_.limit()) return NULL; 246 if (new_top > allocation_info_.limit()) return NULL;
247 247
248 allocation_info_.set_top(new_top); 248 allocation_info_.set_top(new_top);
249 return HeapObject::FromAddress(current_top); 249 return HeapObject::FromAddress(current_top);
250 } 250 }
251 251
252 252
253 // Raw allocation. 253 // Raw allocation.
254 AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) { 254 AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) {
255 HeapObject* object = AllocateLinearly(size_in_bytes); 255 HeapObject* object = AllocateLinearly(size_in_bytes);
256
257 ASSERT((object == NULL) ||
258 !heap()->linear_allocation() ||
259 (anchor_.next_chunk() == &anchor_ &&
260 anchor_.prev_chunk() == &anchor_));
261
262 if (object == NULL) {
263 object = free_list_.Allocate(size_in_bytes);
264 if (object == NULL) {
265 object = SlowAllocateRaw(size_in_bytes);
266 }
267 }
268
256 if (object != NULL) { 269 if (object != NULL) {
257 if (identity() == CODE_SPACE) { 270 if (identity() == CODE_SPACE) {
258 SkipList::Update(object->address(), size_in_bytes); 271 SkipList::Update(object->address(), size_in_bytes);
259 } 272 }
260 return object; 273 if (FLAG_allocations_digest) {
261 } 274 heap()->OnAllocationEvent(object, size_in_bytes);
Hannes Payer (out of office) 2014/06/09 15:23:51 See my comment about Heap::AllocateRaw.
Igor Sheludko 2014/06/10 08:22:54 Replied.
262
263 ASSERT(!heap()->linear_allocation() ||
264 (anchor_.next_chunk() == &anchor_ &&
265 anchor_.prev_chunk() == &anchor_));
266
267 object = free_list_.Allocate(size_in_bytes);
268 if (object != NULL) {
269 if (identity() == CODE_SPACE) {
270 SkipList::Update(object->address(), size_in_bytes);
271 } 275 }
272 return object; 276 return object;
273 } 277 }
274 278
275 object = SlowAllocateRaw(size_in_bytes);
276 if (object != NULL) {
277 if (identity() == CODE_SPACE) {
278 SkipList::Update(object->address(), size_in_bytes);
279 }
280 return object;
281 }
282
283 return AllocationResult::Retry(identity()); 279 return AllocationResult::Retry(identity());
284 } 280 }
285 281
286 282
287 // ----------------------------------------------------------------------------- 283 // -----------------------------------------------------------------------------
288 // NewSpace 284 // NewSpace
289 285
290 286
291 AllocationResult NewSpace::AllocateRaw(int size_in_bytes) { 287 AllocationResult NewSpace::AllocateRaw(int size_in_bytes) {
292 Address old_top = allocation_info_.top(); 288 Address old_top = allocation_info_.top();
293 289
294 if (allocation_info_.limit() - old_top < size_in_bytes) { 290 if (allocation_info_.limit() - old_top < size_in_bytes) {
295 return SlowAllocateRaw(size_in_bytes); 291 return SlowAllocateRaw(size_in_bytes);
296 } 292 }
297 293
298 HeapObject* obj = HeapObject::FromAddress(old_top); 294 HeapObject* obj = HeapObject::FromAddress(old_top);
299 allocation_info_.set_top(allocation_info_.top() + size_in_bytes); 295 allocation_info_.set_top(allocation_info_.top() + size_in_bytes);
300 ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); 296 ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
301 297
298 if (FLAG_allocations_digest) {
299 heap()->OnAllocationEvent(obj, size_in_bytes);
Hannes Payer (out of office) 2014/06/09 15:23:51 See my comment about Heap::AllocateRaw.
Igor Sheludko 2014/06/10 08:22:54 Replied.
300 }
302 return obj; 301 return obj;
303 } 302 }
304 303
305 304
306 LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk) { 305 LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk) {
307 heap->incremental_marking()->SetOldSpacePageFlags(chunk); 306 heap->incremental_marking()->SetOldSpacePageFlags(chunk);
308 return static_cast<LargePage*>(chunk); 307 return static_cast<LargePage*>(chunk);
309 } 308 }
310 309
311 310
312 intptr_t LargeObjectSpace::Available() { 311 intptr_t LargeObjectSpace::Available() {
313 return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available()); 312 return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available());
314 } 313 }
315 314
316 315
317 bool FreeListNode::IsFreeListNode(HeapObject* object) { 316 bool FreeListNode::IsFreeListNode(HeapObject* object) {
318 Map* map = object->map(); 317 Map* map = object->map();
319 Heap* heap = object->GetHeap(); 318 Heap* heap = object->GetHeap();
320 return map == heap->raw_unchecked_free_space_map() 319 return map == heap->raw_unchecked_free_space_map()
321 || map == heap->raw_unchecked_one_pointer_filler_map() 320 || map == heap->raw_unchecked_one_pointer_filler_map()
322 || map == heap->raw_unchecked_two_pointer_filler_map(); 321 || map == heap->raw_unchecked_two_pointer_filler_map();
323 } 322 }
324 323
325 } } // namespace v8::internal 324 } } // namespace v8::internal
326 325
327 #endif // V8_SPACES_INL_H_ 326 #endif // V8_SPACES_INL_H_
OLDNEW
« src/spaces.cc ('K') | src/spaces.cc ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698