Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 27267: Experimental: periodic merge from the bleeding edge branch to the code... (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/toiger/
Patch Set: '' Created 11 years, 9 months ago
 // Copyright 2006-2008 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 79 matching lines...)
 
 // Double the new space after this many scavenge collections.
 int Heap::new_space_growth_limit_ = 8;
 int Heap::scavenge_count_ = 0;
 Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
 
 int Heap::mc_count_ = 0;
 int Heap::gc_count_ = 0;
 
 int Heap::always_allocate_scope_depth_ = 0;
+bool Heap::context_disposed_pending_ = false;
 
 #ifdef DEBUG
 bool Heap::allocation_allowed_ = true;
 
 int Heap::allocation_timeout_ = 0;
 bool Heap::disallow_allocation_failure_ = false;
 #endif  // DEBUG
 
 
 int Heap::Capacity() {
(...skipping 176 matching lines...)
 
 
 void Heap::CollectAllGarbage() {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
   CollectGarbage(0, OLD_POINTER_SPACE);
 }
 
 
+void Heap::CollectAllGarbageIfContextDisposed() {
+  if (context_disposed_pending_) {
+    StatsRateScope scope(&Counters::gc_context);
+    CollectAllGarbage();
+    context_disposed_pending_ = false;
+  }
+}
+
+
+void Heap::NotifyContextDisposed() {
+  context_disposed_pending_ = true;
+}
+
+
 bool Heap::CollectGarbage(int requested_size, AllocationSpace space) {
   // The VM is in the GC state until exiting this function.
   VMState state(GC);
 
 #ifdef DEBUG
   // Reset the allocation timeout to the GC interval, but make sure to
   // allow at least a few allocations after a collection. The reason
   // for this is that we have a lot of allocation sequences and we
   // assume that a garbage collection will allow the subsequent
   // allocation attempts to go through.
(...skipping 106 matching lines...)
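
Note on the hunk above: the new NotifyContextDisposed() / CollectAllGarbageIfContextDisposed() pair lets an embedder separate the cheap signal that a context was torn down from the expensive full collection that reclaims it. A minimal sketch of how an embedder might drive the pair; OnFrameNavigatedAway and OnIdle are hypothetical embedder hooks, not part of this patch:

    // Hypothetical embedder code, for illustration only.
    void OnFrameNavigatedAway() {
      // Cheap: just records that a full GC would be worthwhile soon.
      v8::internal::Heap::NotifyContextDisposed();
    }

    void OnIdle() {
      // Deferred to an idle moment: collects only if a disposal is
      // pending, and clears the pending flag afterwards.
      v8::internal::Heap::CollectAllGarbageIfContextDisposed();
    }

Note also that MarkCompact() (below) clears context_disposed_pending_ itself, so a full GC triggered for any other reason cancels the deferred collection.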
   FlatStringReader::PostGarbageCollectionProcessing();
 }
 
 
 void Heap::MarkCompact(GCTracer* tracer) {
   gc_state_ = MARK_COMPACT;
   mc_count_++;
   tracer->set_full_gc_count(mc_count_);
   LOG(ResourceEvent("markcompact", "begin"));
 
-  MarkCompactPrologue();
-
-  MarkCompactCollector::CollectGarbage(tracer);
-
-  MarkCompactEpilogue();
+  MarkCompactCollector::Prepare(tracer);
+
+  bool is_compacting = MarkCompactCollector::IsCompacting();
+
+  MarkCompactPrologue(is_compacting);
+
+  MarkCompactCollector::CollectGarbage();
+
+  MarkCompactEpilogue(is_compacting);
 
   LOG(ResourceEvent("markcompact", "end"));
 
   gc_state_ = NOT_IN_GC;
 
   Shrink();
 
   Counters::objs_since_last_full.Set(0);
+  context_disposed_pending_ = false;
 }
 
 
-void Heap::MarkCompactPrologue() {
+void Heap::MarkCompactPrologue(bool is_compacting) {
+  // At any old GC clear the keyed lookup cache to enable collection of unused
+  // maps.
   ClearKeyedLookupCache();
+
   CompilationCache::MarkCompactPrologue();
   RegExpImpl::OldSpaceCollectionPrologue();
-  Top::MarkCompactPrologue();
-  ThreadManager::MarkCompactPrologue();
+
+  Top::MarkCompactPrologue(is_compacting);
+  ThreadManager::MarkCompactPrologue(is_compacting);
 }
 
 
-void Heap::MarkCompactEpilogue() {
-  Top::MarkCompactEpilogue();
-  ThreadManager::MarkCompactEpilogue();
+void Heap::MarkCompactEpilogue(bool is_compacting) {
+  Top::MarkCompactEpilogue(is_compacting);
+  ThreadManager::MarkCompactEpilogue(is_compacting);
 }
 
 
 Object* Heap::FindCodeObject(Address a) {
   Object* obj = code_space_->FindObject(a);
   if (obj->IsFailure()) {
     obj = lo_space_->FindObject(a);
   }
   ASSERT(!obj->IsFailure());
   return obj;
(...skipping 1134 matching lines...)
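
Note on the MarkCompact() changes above: MarkCompactCollector::Prepare(tracer) now decides up front whether this cycle will compact, and that answer is captured once into is_compacting and handed to both the prologue and the epilogue. A self-contained sketch of the pattern, using hypothetical names rather than V8's real types:

    // Illustration only: capture a mode flag once so that paired
    // prologue/epilogue calls observe the same value even if the
    // collector's internal state changes in between.
    struct Collector {
      static bool compacting;
      static void Prepare() { compacting = true; }     // decision made up front
      static bool IsCompacting() { return compacting; }
      static void Collect() { compacting = false; }    // state may change here
    };
    bool Collector::compacting = false;

    void Prologue(bool /*is_compacting*/) { /* pre-GC bookkeeping */ }
    void Epilogue(bool /*is_compacting*/) { /* post-GC bookkeeping */ }

    void FullGC() {
      Collector::Prepare();
      const bool is_compacting = Collector::IsCompacting();  // captured once
      Prologue(is_compacting);
      Collector::Collect();
      Epilogue(is_compacting);  // guaranteed to match the prologue's view
    }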
   } else {
     filler->set_map(Heap::byte_array_map());
     ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size));
   }
 }
 
 
 Object* Heap::CreateCode(const CodeDesc& desc,
                          ScopeInfo<>* sinfo,
                          Code::Flags flags,
-                         Code** self_reference) {
+                         Handle<Object> self_reference) {
   // Compute size
   int body_size = RoundUp(desc.instr_size + desc.reloc_size, kObjectAlignment);
   int sinfo_size = 0;
   if (sinfo != NULL) sinfo_size = sinfo->Serialize(NULL);
   int obj_size = Code::SizeFor(body_size, sinfo_size);
   Object* result;
   if (obj_size > MaxHeapObjectSize()) {
     result = lo_space_->AllocateRawCode(obj_size);
   } else {
     result = code_space_->AllocateRaw(obj_size);
   }
 
   if (result->IsFailure()) return result;
 
   // Initialize the object
   HeapObject::cast(result)->set_map(code_map());
   Code* code = Code::cast(result);
   code->set_instruction_size(desc.instr_size);
   code->set_relocation_size(desc.reloc_size);
   code->set_sinfo_size(sinfo_size);
   code->set_flags(flags);
   code->set_ic_flag(Code::IC_TARGET_IS_ADDRESS);
-  // Allow self references to created code object.
-  if (self_reference != NULL) {
-    *self_reference = code;
+  // Allow self references to created code object by patching the handle to
+  // point to the newly allocated Code object.
+  if (!self_reference.is_null()) {
+    *(self_reference.location()) = code;
   }
   // Migrate generated code.
   // The generated code can contain Object** values (typically from handles)
   // that are dereferenced during the copy to point directly to the actual heap
   // objects. These pointers can include references to the code object itself,
   // through the self_reference parameter.
   code->CopyFrom(desc);
   if (sinfo != NULL) sinfo->Serialize(code);  // write scope info
   LOG(CodeAllocateEvent(code, desc.origin));
 
(...skipping 1660 matching lines...)
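
Note on the CreateCode() change above: the self-reference parameter changed from a raw Code** to a Handle<Object>. A raw slot is invisible to the garbage collector, whereas a handle's location() points into a handle scope that the GC scans and updates, so the patched reference stays valid across later allocation or collection (the existing comment about Object** values "typically from handles" points at the same mechanism). A mini-model of the indirection, with hypothetical names rather than V8's real Handle:

    // Illustration only: a handle is one indirection away from the object,
    // through a slot that the garbage collector knows about.  Writing
    // through location() therefore publishes the new Code object into
    // GC-visible storage rather than into an untracked local variable.
    #include <cstddef>

    template <typename T>
    class MiniHandle {
     public:
      MiniHandle() : location_(NULL) {}
      explicit MiniHandle(T** location) : location_(location) {}
      bool is_null() const { return location_ == NULL; }
      T** location() const { return location_; }
     private:
      T** location_;  // points into a handle scope the GC scans as roots
    };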
 #ifdef DEBUG
 bool Heap::GarbageCollectionGreedyCheck() {
   ASSERT(FLAG_gc_greedy);
   if (Bootstrapper::IsActive()) return true;
   if (disallow_allocation_failure()) return true;
   return CollectGarbage(0, NEW_SPACE);
 }
 #endif
 
 } }  // namespace v8::internal