Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(100)

Unified Diff: src/mark-compact.cc

Issue 157503002: A64: Synchronize with r18444. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « src/log.cc ('k') | src/messages.cc » ('j') | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: src/mark-compact.cc
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 0e6b9804e09fe6776b0b61d97362d1d3ecf223ab..0594c0792af1a8c8651310bd6499c8fc75427c27 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -986,7 +986,8 @@ void MarkCompactCollector::Finish() {
// objects have been marked.
void CodeFlusher::ProcessJSFunctionCandidates() {
- Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+ Code* lazy_compile =
+ isolate_->builtins()->builtin(Builtins::kCompileUnoptimized);
Object* undefined = isolate_->heap()->undefined_value();
JSFunction* candidate = jsfunction_candidates_head_;
@@ -1031,7 +1032,8 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
- Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+ Code* lazy_compile =
+ isolate_->builtins()->builtin(Builtins::kCompileUnoptimized);
SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
SharedFunctionInfo* next_candidate;
@@ -1063,55 +1065,40 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
void CodeFlusher::ProcessOptimizedCodeMaps() {
- static const int kEntriesStart = SharedFunctionInfo::kEntriesStart;
- static const int kEntryLength = SharedFunctionInfo::kEntryLength;
- static const int kContextOffset = 0;
- static const int kCodeOffset = 1;
- static const int kLiteralsOffset = 2;
- STATIC_ASSERT(kEntryLength == 3);
+ STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4);
SharedFunctionInfo* holder = optimized_code_map_holder_head_;
SharedFunctionInfo* next_holder;
+
while (holder != NULL) {
next_holder = GetNextCodeMap(holder);
ClearNextCodeMap(holder);
FixedArray* code_map = FixedArray::cast(holder->optimized_code_map());
- int new_length = kEntriesStart;
+ int new_length = SharedFunctionInfo::kEntriesStart;
int old_length = code_map->length();
- for (int i = kEntriesStart; i < old_length; i += kEntryLength) {
- Code* code = Code::cast(code_map->get(i + kCodeOffset));
- MarkBit code_mark = Marking::MarkBitFrom(code);
- if (!code_mark.Get()) {
- continue;
+ for (int i = SharedFunctionInfo::kEntriesStart;
+ i < old_length;
+ i += SharedFunctionInfo::kEntryLength) {
+ Code* code =
+ Code::cast(code_map->get(i + SharedFunctionInfo::kCachedCodeOffset));
+ if (!Marking::MarkBitFrom(code).Get()) continue;
+
+ // Move every slot in the entry.
+ for (int j = 0; j < SharedFunctionInfo::kEntryLength; j++) {
+ int dst_index = new_length++;
+ Object** slot = code_map->RawFieldOfElementAt(dst_index);
+ Object* object = code_map->get(i + j);
+ code_map->set(dst_index, object);
+ if (j == SharedFunctionInfo::kOsrAstIdOffset) {
+ ASSERT(object->IsSmi());
+ } else {
+ ASSERT(Marking::IsBlack(
+ Marking::MarkBitFrom(HeapObject::cast(*slot))));
+ isolate_->heap()->mark_compact_collector()->
+ RecordSlot(slot, slot, *slot);
+ }
}
-
- // Update and record the context slot in the optimized code map.
- Object** context_slot = HeapObject::RawField(code_map,
- FixedArray::OffsetOfElementAt(new_length));
- code_map->set(new_length++, code_map->get(i + kContextOffset));
- ASSERT(Marking::IsBlack(
- Marking::MarkBitFrom(HeapObject::cast(*context_slot))));
- isolate_->heap()->mark_compact_collector()->
- RecordSlot(context_slot, context_slot, *context_slot);
-
- // Update and record the code slot in the optimized code map.
- Object** code_slot = HeapObject::RawField(code_map,
- FixedArray::OffsetOfElementAt(new_length));
- code_map->set(new_length++, code_map->get(i + kCodeOffset));
- ASSERT(Marking::IsBlack(
- Marking::MarkBitFrom(HeapObject::cast(*code_slot))));
- isolate_->heap()->mark_compact_collector()->
- RecordSlot(code_slot, code_slot, *code_slot);
-
- // Update and record the literals slot in the optimized code map.
- Object** literals_slot = HeapObject::RawField(code_map,
- FixedArray::OffsetOfElementAt(new_length));
- code_map->set(new_length++, code_map->get(i + kLiteralsOffset));
- ASSERT(Marking::IsBlack(
- Marking::MarkBitFrom(HeapObject::cast(*literals_slot))));
- isolate_->heap()->mark_compact_collector()->
- RecordSlot(literals_slot, literals_slot, *literals_slot);
}
// Trim the optimized code map if entries have been removed.
@@ -1839,6 +1826,7 @@ class RootMarkingVisitor : public ObjectVisitor {
// Helper class for pruning the string table.
+template<bool finalize_external_strings>
class StringTableCleaner : public ObjectVisitor {
public:
explicit StringTableCleaner(Heap* heap)
@@ -1850,22 +1838,20 @@ class StringTableCleaner : public ObjectVisitor {
Object* o = *p;
if (o->IsHeapObject() &&
!Marking::MarkBitFrom(HeapObject::cast(o)).Get()) {
- // Check if the internalized string being pruned is external. We need to
- // delete the associated external data as this string is going away.
-
- // Since no objects have yet been moved we can safely access the map of
- // the object.
- if (o->IsExternalString()) {
+ if (finalize_external_strings) {
+ ASSERT(o->IsExternalString());
heap_->FinalizeExternalString(String::cast(*p));
+ } else {
+ pointers_removed_++;
}
// Set the entry to the_hole_value (as deleted).
*p = heap_->the_hole_value();
- pointers_removed_++;
}
}
}
int PointersRemoved() {
+ ASSERT(!finalize_external_strings);
return pointers_removed_;
}
@@ -1875,6 +1861,10 @@ class StringTableCleaner : public ObjectVisitor {
};
+typedef StringTableCleaner<false> InternalizedStringTableCleaner;
+typedef StringTableCleaner<true> ExternalStringTableCleaner;
+
+
// Implementation of WeakObjectRetainer for mark compact GCs. All marked objects
// are retained.
class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
@@ -2398,10 +2388,12 @@ void MarkCompactCollector::AfterMarking() {
// string table. Cannot use string_table() here because the string
// table is marked.
StringTable* string_table = heap()->string_table();
- StringTableCleaner v(heap());
- string_table->IterateElements(&v);
- string_table->ElementsRemoved(v.PointersRemoved());
- heap()->external_string_table_.Iterate(&v);
+ InternalizedStringTableCleaner internalized_visitor(heap());
+ string_table->IterateElements(&internalized_visitor);
+ string_table->ElementsRemoved(internalized_visitor.PointersRemoved());
+
+ ExternalStringTableCleaner external_visitor(heap());
+ heap()->external_string_table_.Iterate(&external_visitor);
heap()->external_string_table_.CleanUp();
// Process the weak references.
@@ -2422,11 +2414,6 @@ void MarkCompactCollector::AfterMarking() {
}
}
- if (!FLAG_watch_ic_patching) {
- // Clean up dead objects from the runtime profiler.
- heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
- }
-
if (FLAG_track_gc_object_stats) {
heap()->CheckpointObjectStats();
}
@@ -2606,9 +2593,7 @@ void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
cached_map,
SKIP_WRITE_BARRIER);
}
- Object** slot =
- HeapObject::RawField(prototype_transitions,
- FixedArray::OffsetOfElementAt(proto_index));
+ Object** slot = prototype_transitions->RawFieldOfElementAt(proto_index);
RecordSlot(slot, slot, prototype);
new_number_of_transitions++;
}
@@ -2713,12 +2698,10 @@ void MarkCompactCollector::ProcessWeakCollections() {
for (int i = 0; i < table->Capacity(); i++) {
if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
Object** key_slot =
- HeapObject::RawField(table, FixedArray::OffsetOfElementAt(
- ObjectHashTable::EntryToIndex(i)));
+ table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i));
RecordSlot(anchor, key_slot, *key_slot);
Object** value_slot =
- HeapObject::RawField(table, FixedArray::OffsetOfElementAt(
- ObjectHashTable::EntryToValueIndex(i)));
+ table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i));
MarkCompactMarkingVisitor::MarkObjectByPointer(
this, anchor, value_slot);
}
@@ -3361,6 +3344,13 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
EvacuateNewSpace();
}
+  // We have to traverse our allocation sites scratchpad which contains raw
+  // pointers before we move objects. During new space evacuation we
+ // gathered pretenuring statistics. The found allocation sites may not be
+ // valid after compacting old space.
+ heap()->ProcessPretenuringFeedback();
+
+
{ GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_EVACUATE_PAGES);
EvacuatePages();
}
@@ -3514,12 +3504,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
heap_->UpdateReferencesInExternalStringTable(
&UpdateReferenceInExternalStringTableEntry);
- if (!FLAG_watch_ic_patching) {
- // Update JSFunction pointers from the runtime profiler.
- heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
- &updating_visitor);
- }
-
EvacuationWeakObjectRetainer evacuation_object_retainer;
heap()->ProcessWeakReferences(&evacuation_object_retainer);
« no previous file with comments | « src/log.cc ('k') | src/messages.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698