| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 401 matching lines...) |
| 412 FlatStringReader::PostGarbageCollectionProcessing(); | 412 FlatStringReader::PostGarbageCollectionProcessing(); |
| 413 } | 413 } |
| 414 | 414 |
| 415 | 415 |
| 416 void Heap::MarkCompact(GCTracer* tracer) { | 416 void Heap::MarkCompact(GCTracer* tracer) { |
| 417 gc_state_ = MARK_COMPACT; | 417 gc_state_ = MARK_COMPACT; |
| 418 mc_count_++; | 418 mc_count_++; |
| 419 tracer->set_full_gc_count(mc_count_); | 419 tracer->set_full_gc_count(mc_count_); |
| 420 LOG(ResourceEvent("markcompact", "begin")); | 420 LOG(ResourceEvent("markcompact", "begin")); |
| 421 | 421 |
| 422 MarkCompactPrologue(); | 422 MarkCompactCollector::Prepare(tracer); |
| 423 | 423 |
| 424 MarkCompactCollector::CollectGarbage(tracer); | 424 bool is_compacting = MarkCompactCollector::IsCompacting(); |
| 425 | 425 |
| 426 MarkCompactEpilogue(); | 426 MarkCompactPrologue(is_compacting); |
| 427 |
| 428 MarkCompactCollector::CollectGarbage(); |
| 429 |
| 430 MarkCompactEpilogue(is_compacting); |
| 427 | 431 |
| 428 LOG(ResourceEvent("markcompact", "end")); | 432 LOG(ResourceEvent("markcompact", "end")); |
| 429 | 433 |
| 430 gc_state_ = NOT_IN_GC; | 434 gc_state_ = NOT_IN_GC; |
| 431 | 435 |
| 432 Shrink(); | 436 Shrink(); |
| 433 | 437 |
| 434 Counters::objs_since_last_full.Set(0); | 438 Counters::objs_since_last_full.Set(0); |
| 435 } | 439 } |
| 436 | 440 |
| 437 | 441 |
| 438 void Heap::MarkCompactPrologue() { | 442 void Heap::MarkCompactPrologue(bool is_compacting) { |
| 443 // At any old GC clear the keyed lookup cache to enable collection of unused |
| 444 // maps. |
| 439 ClearKeyedLookupCache(); | 445 ClearKeyedLookupCache(); |
| 446 |
| 440 CompilationCache::MarkCompactPrologue(); | 447 CompilationCache::MarkCompactPrologue(); |
| 441 RegExpImpl::OldSpaceCollectionPrologue(); | 448 RegExpImpl::OldSpaceCollectionPrologue(); |
| 442 Top::MarkCompactPrologue(); | 449 |
| 443 ThreadManager::MarkCompactPrologue(); | 450 Top::MarkCompactPrologue(is_compacting); |
| 451 ThreadManager::MarkCompactPrologue(is_compacting); |
| 444 } | 452 } |
| 445 | 453 |
| 446 | 454 |
| 447 void Heap::MarkCompactEpilogue() { | 455 void Heap::MarkCompactEpilogue(bool is_compacting) { |
| 448 Top::MarkCompactEpilogue(); | 456 Top::MarkCompactEpilogue(is_compacting); |
| 449 ThreadManager::MarkCompactEpilogue(); | 457 ThreadManager::MarkCompactEpilogue(is_compacting); |
| 450 } | 458 } |
| 451 | 459 |
| 452 | 460 |
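Reviewer note: the hunk above moves MarkCompactCollector::Prepare() out of the prologue, decides once whether this GC cycle will compact, and threads that flag through Heap::MarkCompactPrologue/Epilogue and down into the Top and ThreadManager hooks. Below is a minimal, self-contained sketch of the same flag-threading pattern; it is not V8 code, and the Collector/Prologue/Epilogue names are invented for illustration only.

// Sketch only: decide the compaction mode once, then pass it to the
// prologue and epilogue instead of letting each hook re-query state.
#include <cstdio>

namespace sketch {

struct Collector {
  static void Prepare()        { std::puts("collector: prepare"); }
  static bool IsCompacting()   { return true; }   // assumed decision point
  static void CollectGarbage() { std::puts("collector: collect"); }
};

static void Prologue(bool is_compacting) {
  std::printf("prologue (compacting=%d)\n", is_compacting);
}

static void Epilogue(bool is_compacting) {
  std::printf("epilogue (compacting=%d)\n", is_compacting);
}

void MarkCompact() {
  Collector::Prepare();                            // was done inside the prologue
  bool is_compacting = Collector::IsCompacting();  // single decision point
  Prologue(is_compacting);
  Collector::CollectGarbage();
  Epilogue(is_compacting);
}

}  // namespace sketch

int main() {
  sketch::MarkCompact();
  return 0;
}

Deciding the mode once in MarkCompact keeps the prologue and epilogue consistent with each other for the whole cycle.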
| 453 Object* Heap::FindCodeObject(Address a) { | 461 Object* Heap::FindCodeObject(Address a) { |
| 454 Object* obj = code_space_->FindObject(a); | 462 Object* obj = code_space_->FindObject(a); |
| 455 if (obj->IsFailure()) { | 463 if (obj->IsFailure()) { |
| 456 obj = lo_space_->FindObject(a); | 464 obj = lo_space_->FindObject(a); |
| 457 } | 465 } |
| 458 ASSERT(!obj->IsFailure()); | 466 ASSERT(!obj->IsFailure()); |
| 459 return obj; | 467 return obj; |
| (...skipping 1134 matching lines...) |
| 1594 } else { | 1602 } else { |
| 1595 filler->set_map(Heap::byte_array_map()); | 1603 filler->set_map(Heap::byte_array_map()); |
| 1596 ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size)); | 1604 ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size)); |
| 1597 } | 1605 } |
| 1598 } | 1606 } |
| 1599 | 1607 |
| 1600 | 1608 |
| 1601 Object* Heap::CreateCode(const CodeDesc& desc, | 1609 Object* Heap::CreateCode(const CodeDesc& desc, |
| 1602 ScopeInfo<>* sinfo, | 1610 ScopeInfo<>* sinfo, |
| 1603 Code::Flags flags, | 1611 Code::Flags flags, |
| 1604 Code** self_reference) { | 1612 Handle<Object> self_reference) { |
| 1605 // Compute size | 1613 // Compute size |
| 1606 int body_size = RoundUp(desc.instr_size + desc.reloc_size, kObjectAlignment); | 1614 int body_size = RoundUp(desc.instr_size + desc.reloc_size, kObjectAlignment); |
| 1607 int sinfo_size = 0; | 1615 int sinfo_size = 0; |
| 1608 if (sinfo != NULL) sinfo_size = sinfo->Serialize(NULL); | 1616 if (sinfo != NULL) sinfo_size = sinfo->Serialize(NULL); |
| 1609 int obj_size = Code::SizeFor(body_size, sinfo_size); | 1617 int obj_size = Code::SizeFor(body_size, sinfo_size); |
| 1610 Object* result; | 1618 Object* result; |
| 1611 if (obj_size > MaxHeapObjectSize()) { | 1619 if (obj_size > MaxHeapObjectSize()) { |
| 1612 result = lo_space_->AllocateRawCode(obj_size); | 1620 result = lo_space_->AllocateRawCode(obj_size); |
| 1613 } else { | 1621 } else { |
| 1614 result = code_space_->AllocateRaw(obj_size); | 1622 result = code_space_->AllocateRaw(obj_size); |
| 1615 } | 1623 } |
| 1616 | 1624 |
| 1617 if (result->IsFailure()) return result; | 1625 if (result->IsFailure()) return result; |
| 1618 | 1626 |
| 1619 // Initialize the object | 1627 // Initialize the object |
| 1620 HeapObject::cast(result)->set_map(code_map()); | 1628 HeapObject::cast(result)->set_map(code_map()); |
| 1621 Code* code = Code::cast(result); | 1629 Code* code = Code::cast(result); |
| 1622 code->set_instruction_size(desc.instr_size); | 1630 code->set_instruction_size(desc.instr_size); |
| 1623 code->set_relocation_size(desc.reloc_size); | 1631 code->set_relocation_size(desc.reloc_size); |
| 1624 code->set_sinfo_size(sinfo_size); | 1632 code->set_sinfo_size(sinfo_size); |
| 1625 code->set_flags(flags); | 1633 code->set_flags(flags); |
| 1626 code->set_ic_flag(Code::IC_TARGET_IS_ADDRESS); | 1634 code->set_ic_flag(Code::IC_TARGET_IS_ADDRESS); |
| 1627 // Allow self references to created code object. | 1635 // Allow self references to created code object by patching the handle to |
| 1628 if (self_reference != NULL) { | 1636 // point to the newly allocated Code object. |
| 1629 *self_reference = code; | 1637 if (!self_reference.is_null()) { |
| 1638 *(self_reference.location()) = code; |
| 1630 } | 1639 } |
| 1631 // Migrate generated code. | 1640 // Migrate generated code. |
| 1632 // The generated code can contain Object** values (typically from handles) | 1641 // The generated code can contain Object** values (typically from handles) |
| 1633 // that are dereferenced during the copy to point directly to the actual heap | 1642 // that are dereferenced during the copy to point directly to the actual heap |
| 1634 // objects. These pointers can include references to the code object itself, | 1643 // objects. These pointers can include references to the code object itself, |
| 1635 // through the self_reference parameter. | 1644 // through the self_reference parameter. |
| 1636 code->CopyFrom(desc); | 1645 code->CopyFrom(desc); |
| 1637 if (sinfo != NULL) sinfo->Serialize(code); // write scope info | 1646 if (sinfo != NULL) sinfo->Serialize(code); // write scope info |
| 1638 LOG(CodeAllocateEvent(code, desc.origin)); | 1647 LOG(CodeAllocateEvent(code, desc.origin)); |
| 1639 | 1648 |
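Reviewer note: in the hunk above, CreateCode now receives the self reference as a Handle&lt;Object&gt; and patches the handle's backing slot (*(self_reference.location()) = code) instead of writing through a raw Code**. The slot stays visible to the GC, so the reference remains valid even if the allocation triggers a collection before CopyFrom runs. The following is a minimal, self-contained sketch of the slot-patching idea; the toy Handle type is invented for illustration and is not V8's Handle implementation.

// Sketch only: a handle is a pointer to a stable, GC-visible slot; patching
// the slot makes the caller's handle refer to the newly created object.
#include <cassert>
#include <cstdio>

namespace sketch {

struct Object { const char* tag; };

template <typename T>
struct Handle {
  explicit Handle(T** slot) : slot_(slot) {}
  bool is_null() const { return slot_ == nullptr; }
  T** location() const { return slot_; }
  T* operator*() const { return *slot_; }
 private:
  T** slot_;
};

Object* CreateCode(Handle<Object> self_reference) {
  static Object code = {"code object"};
  // Mirrors *(self_reference.location()) = code; in the diff: write the new
  // object into the handle's slot rather than through a raw pointer.
  if (!self_reference.is_null()) {
    *self_reference.location() = &code;
  }
  return &code;
}

}  // namespace sketch

int main() {
  sketch::Object* slot = nullptr;                // the GC-visible slot
  sketch::Handle<sketch::Object> h(&slot);
  sketch::Object* code = sketch::CreateCode(h);
  assert(*h == code);                            // handle now sees the new object
  std::printf("handle refers to: %s\n", (*h)->tag);
  return 0;
}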
| (...skipping 1660 matching lines...) |
| 3300 #ifdef DEBUG | 3309 #ifdef DEBUG |
| 3301 bool Heap::GarbageCollectionGreedyCheck() { | 3310 bool Heap::GarbageCollectionGreedyCheck() { |
| 3302 ASSERT(FLAG_gc_greedy); | 3311 ASSERT(FLAG_gc_greedy); |
| 3303 if (Bootstrapper::IsActive()) return true; | 3312 if (Bootstrapper::IsActive()) return true; |
| 3304 if (disallow_allocation_failure()) return true; | 3313 if (disallow_allocation_failure()) return true; |
| 3305 return CollectGarbage(0, NEW_SPACE); | 3314 return CollectGarbage(0, NEW_SPACE); |
| 3306 } | 3315 } |
| 3307 #endif | 3316 #endif |
| 3308 | 3317 |
| 3309 } } // namespace v8::internal | 3318 } } // namespace v8::internal |