Chromium Code Reviews

Diff: src/incremental-marking.cc

Issue 8070002: Pass correct anchor_slot for EMBEDDED_OBJECT pointers from code objects. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 2 months ago
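The heart of the patch is a new VisitEmbeddedPointer hook in the incremental-marking visitor: when an EMBEDDED_OBJECT pointer is visited inside a Code object, the slot is now recorded with the host Code object as the anchor, so the mark-compact collector can attribute the recorded slot to the right owning object. A condensed view of the added hook, copied from the patch with explanatory comments added here for context:

  // Called for EMBEDDED_OBJECT pointers embedded in a Code object.
  // 'host' is the Code object that contains the pointer; passing it as the
  // anchor slot ties the recorded slot to the correct owning object.
  void VisitEmbeddedPointer(Code* host, Object** p) {
    Object* obj = *p;
    if (obj->NonFailureIsHeapObject()) {
      heap_->mark_compact_collector()->RecordSlot(
          reinterpret_cast<Object**>(host), p, obj);
      MarkObject(obj);  // Grey the target so it gets traced incrementally.
    }
  }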
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 97 matching lines...)
 
 
 class IncrementalMarkingMarkingVisitor : public ObjectVisitor {
  public:
   IncrementalMarkingMarkingVisitor(Heap* heap,
                                    IncrementalMarking* incremental_marking)
       : heap_(heap),
         incremental_marking_(incremental_marking) {
   }
 
+  void VisitEmbeddedPointer(Code* host, Object** p) {
+    Object* obj = *p;
+    if (obj->NonFailureIsHeapObject()) {
+      heap_->mark_compact_collector()->RecordSlot(
+          reinterpret_cast<Object**>(host),
+          p,
+          obj);
+      MarkObject(obj);
+    }
+  }
+
   void VisitCodeTarget(RelocInfo* rinfo) {
     ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
     Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
     heap_->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target));
     MarkObject(target);
   }
 
   void VisitDebugTarget(RelocInfo* rinfo) {
     ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
             rinfo->IsPatchedReturnSequence()) ||
(...skipping 83 matching lines...)
       }
     }
   }
 
   Heap* heap_;
   IncrementalMarking* incremental_marking_;
 };
 
 
 void IncrementalMarking::SetOldSpacePageFlags(MemoryChunk* chunk,
-                                              bool is_marking) {
+                                              bool is_marking,
+                                              bool is_compacting) {
   if (is_marking) {
     chunk->SetFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
     chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
+
+    // It's difficult to filter out slots recorded for large objects.
+    if (chunk->owner()->identity() == LO_SPACE &&
+        chunk->size() > static_cast<size_t>(Page::kPageSize) &&
+        is_compacting) {
+      chunk->SetFlag(MemoryChunk::RESCAN_ON_EVACUATION);
+    }
   } else if (chunk->owner()->identity() == CELL_SPACE ||
              chunk->scan_on_scavenge()) {
     chunk->ClearFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
     chunk->ClearFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
   } else {
     chunk->ClearFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
     chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
   }
 }
 
 
 void IncrementalMarking::SetNewSpacePageFlags(NewSpacePage* chunk,
                                               bool is_marking) {
   chunk->SetFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
   if (is_marking) {
     chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
   } else {
     chunk->ClearFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
   }
   chunk->SetFlag(MemoryChunk::SCAN_ON_SCAVENGE);
 }
 
 
 void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
     PagedSpace* space) {
   PageIterator it(space);
   while (it.has_next()) {
     Page* p = it.next();
-    SetOldSpacePageFlags(p, false);
+    SetOldSpacePageFlags(p, false, false);
   }
 }
 
 
 void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
     NewSpace* space) {
   NewSpacePageIterator it(space);
   while (it.has_next()) {
     NewSpacePage* p = it.next();
     SetNewSpacePageFlags(p, false);
   }
 }
 
 
 void IncrementalMarking::DeactivateIncrementalWriteBarrier() {
   DeactivateIncrementalWriteBarrierForSpace(heap_->old_pointer_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->old_data_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->cell_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->map_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->code_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());
 
   LargePage* lop = heap_->lo_space()->first_page();
   while (lop->is_valid()) {
-    SetOldSpacePageFlags(lop, false);
+    SetOldSpacePageFlags(lop, false, false);
     lop = lop->next_page();
   }
 }
 
 
 void IncrementalMarking::ActivateIncrementalWriteBarrier(PagedSpace* space) {
   PageIterator it(space);
   while (it.has_next()) {
     Page* p = it.next();
-    SetOldSpacePageFlags(p, true);
+    SetOldSpacePageFlags(p, true, is_compacting_);
   }
 }
 
 
 void IncrementalMarking::ActivateIncrementalWriteBarrier(NewSpace* space) {
   NewSpacePageIterator it(space->ToSpaceStart(), space->ToSpaceEnd());
   while (it.has_next()) {
     NewSpacePage* p = it.next();
     SetNewSpacePageFlags(p, true);
   }
 }
 
 
 void IncrementalMarking::ActivateIncrementalWriteBarrier() {
   ActivateIncrementalWriteBarrier(heap_->old_pointer_space());
   ActivateIncrementalWriteBarrier(heap_->old_data_space());
   ActivateIncrementalWriteBarrier(heap_->cell_space());
   ActivateIncrementalWriteBarrier(heap_->map_space());
   ActivateIncrementalWriteBarrier(heap_->code_space());
   ActivateIncrementalWriteBarrier(heap_->new_space());
 
   LargePage* lop = heap_->lo_space()->first_page();
   while (lop->is_valid()) {
-    SetOldSpacePageFlags(lop, true);
+    SetOldSpacePageFlags(lop, true, is_compacting_);
     lop = lop->next_page();
   }
 }
 
 
 bool IncrementalMarking::WorthActivating() {
 #ifndef DEBUG
   static const intptr_t kActivationThreshold = 8 * MB;
 #else
   // TODO(gc) consider setting this to some low level so that some
(...skipping 123 matching lines...)
 
   heap_->CompletelyClearInstanceofCache();
   heap_->isolate()->compilation_cache()->MarkCompactPrologue();
 
   if (FLAG_cleanup_code_caches_at_gc) {
     // We will mark cache black with a separate pass
     // when we finish marking.
     MarkObjectGreyDoNotEnqueue(heap_->polymorphic_code_cache());
   }
 
-  if (is_compacting_) {
-    // It's difficult to filter out slots recorded for large objects.
-    LargeObjectIterator it(heap_->lo_space());
-    for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
-      if (obj->IsFixedArray() || obj->IsCode()) {
-        Page* p = Page::FromAddress(obj->address());
-        if (p->size() > static_cast<size_t>(Page::kPageSize)) {
-          p->SetFlag(Page::RESCAN_ON_EVACUATION);
-        }
-      }
-    }
-  }
-
   // Mark strong roots grey.
   IncrementalMarkingRootMarkingVisitor visitor(heap_, this);
   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
 
   // Ready to start incremental marking.
   if (FLAG_trace_incremental_marking) {
     PrintF("[IncrementalMarking] Running\n");
   }
 }
 
(...skipping 315 matching lines...)
   bytes_rescanned_ = 0;
   allocation_marking_factor_ = kInitialAllocationMarkingFactor;
 }
 
 
 int64_t IncrementalMarking::SpaceLeftInOldSpace() {
   return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSize();
 }
 
 } }  // namespace v8::internal
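
A related part of the change: the RESCAN_ON_EVACUATION handling for oversized large-object pages moves out of StartMarking (the removed block above) into SetOldSpacePageFlags, which now takes an is_compacting flag; activation passes is_compacting_ while deactivation passes false. Roughly, the new marking path looks like this (condensed from the hunk above, with comments added here; the elided branches are unchanged):

  void IncrementalMarking::SetOldSpacePageFlags(MemoryChunk* chunk,
                                                bool is_marking,
                                                bool is_compacting) {
    if (is_marking) {
      chunk->SetFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
      chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
      // Slots recorded for objects on multi-page LO_SPACE chunks are hard
      // to filter individually, so flag the whole chunk for rescanning
      // when it is evacuated.
      if (chunk->owner()->identity() == LO_SPACE &&
          chunk->size() > static_cast<size_t>(Page::kPageSize) &&
          is_compacting) {
        chunk->SetFlag(MemoryChunk::RESCAN_ON_EVACUATION);
      }
    }
    // ... remaining branches unchanged (see the diff above).
  }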