OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 6285 matching lines...)
6296 Handle<Code> code = stub.GetCode(); | 6296 Handle<Code> code = stub.GetCode(); |
6297 CHECK(code->Size() <= i::kMaxRegularHeapObjectSize); | 6297 CHECK(code->Size() <= i::kMaxRegularHeapObjectSize); |
6298 CHECK(!heap->code_space()->FirstPage()->Contains(code->address())); | 6298 CHECK(!heap->code_space()->FirstPage()->Contains(code->address())); |
6299 | 6299 |
6300 // Ensure it's not in large object space. | 6300 // Ensure it's not in large object space. |
6301 MemoryChunk* chunk = MemoryChunk::FromAddress(code->address()); | 6301 MemoryChunk* chunk = MemoryChunk::FromAddress(code->address()); |
6302 CHECK(chunk->owner()->identity() != LO_SPACE); | 6302 CHECK(chunk->owner()->identity() != LO_SPACE); |
6303 CHECK(chunk->NeverEvacuate()); | 6303 CHECK(chunk->NeverEvacuate()); |
6304 } | 6304 } |
6305 | 6305 |
| 6306 HEAP_TEST(RegressMissingWriteBarrierInAllocate) { |
| 6307 if (!FLAG_incremental_marking) return; |
| 6308 FLAG_black_allocation = true; |
| 6309 CcTest::InitializeVM(); |
| 6310 v8::HandleScope scope(CcTest::isolate()); |
| 6311 Heap* heap = CcTest::heap(); |
| 6312 Isolate* isolate = heap->isolate(); |
| 6313 CcTest::CollectAllGarbage(); |
| 6314 heap::SimulateIncrementalMarking(heap, false); |
| 6315 Map* map; |
| 6316 { |
| 6317 AlwaysAllocateScope always_allocate(isolate); |
| 6318 map = Map::cast(heap->AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize) |
| 6319 .ToObjectChecked()); |
| 6320 } |
| 6321 heap->incremental_marking()->StartBlackAllocationForTesting(); |
| 6322 Handle<HeapObject> object; |
| 6323 { |
| 6324 AlwaysAllocateScope always_allocate(isolate); |
| 6325 object = Handle<HeapObject>( |
| 6326 heap->Allocate(map, OLD_SPACE).ToObjectChecked(), isolate); |
| 6327 } |
| 6328 // The object is black. If Heap::Allocate sets the map without a write barrier, |
| 6329 // then the map is white and will be freed prematurely. |
| 6330 heap::SimulateIncrementalMarking(heap, true); |
| 6331 CcTest::CollectAllGarbage(); |
| 6332 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 6333 if (collector->sweeping_in_progress()) { |
| 6334 collector->EnsureSweepingCompleted(); |
| 6335 } |
| 6336 CHECK(object->map()->IsMap()); |
| 6337 } |
| 6338 |
6306 } // namespace internal | 6339 } // namespace internal |
6307 } // namespace v8 | 6340 } // namespace v8 |
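
Reviewer note: the comment at new lines 6328-6329 describes the tri-color invariant that RegressMissingWriteBarrierInAllocate guards. The following is a minimal, self-contained sketch of that invariant only; the names (MarkColor, GcObject, write_barrier_mark, set_map_with_barrier) are hypothetical and are not V8's internal API. It merely illustrates why a black host object that receives a pointer to a white map must grey the map so a later sweep does not free it.

// Hypothetical illustration of the black-allocation write-barrier invariant.
// None of these types or functions belong to V8; they exist only to show the
// concept the regression test above exercises.
#include <cassert>
#include <vector>

enum class MarkColor { kWhite, kGrey, kBlack };

struct GcObject {
  MarkColor color = MarkColor::kWhite;
  GcObject* map = nullptr;  // stands in for the object's map pointer
};

// Marking worklist a hypothetical incremental marker would drain later.
static std::vector<GcObject*> g_marking_worklist;

// Write barrier: when a black object stores a pointer to a white object,
// the referent must be greyed, otherwise the sweeper treats it as garbage.
void write_barrier_mark(GcObject* host, GcObject* value) {
  if (host->color == MarkColor::kBlack && value->color == MarkColor::kWhite) {
    value->color = MarkColor::kGrey;
    g_marking_worklist.push_back(value);
  }
}

// Storing the map with the barrier keeps it alive; storing it without the
// barrier (the bug the test regresses) leaves the map white and it would be
// freed prematurely by the next sweep.
void set_map_with_barrier(GcObject* object, GcObject* map) {
  object->map = map;
  write_barrier_mark(object, map);
}

int main() {
  GcObject map;                      // freshly allocated, still white
  GcObject object;
  object.color = MarkColor::kBlack;  // black allocation, as in the test
  set_map_with_barrier(&object, &map);
  assert(map.color != MarkColor::kWhite);  // map survives the next sweep
  return 0;
}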