Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(57)

Side by Side Diff: src/heap/mark-compact.cc

Issue 1478943003: Use WeakCells in the optimized code map rather than traversing in pause. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Patch One. Created 5 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/mark-compact.h" 5 #include "src/heap/mark-compact.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/base/sys-info.h" 9 #include "src/base/sys-info.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
(...skipping 2129 matching lines...) Expand 10 before | Expand all | Expand 10 after
2140 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); 2140 heap()->isolate()->global_handles()->RemoveImplicitRefGroups();
2141 } 2141 }
2142 2142
2143 // Flush code from collected candidates. 2143 // Flush code from collected candidates.
2144 if (is_code_flushing_enabled()) { 2144 if (is_code_flushing_enabled()) {
2145 GCTracer::Scope gc_scope(heap()->tracer(), 2145 GCTracer::Scope gc_scope(heap()->tracer(),
2146 GCTracer::Scope::MC_MARK_CODE_FLUSH); 2146 GCTracer::Scope::MC_MARK_CODE_FLUSH);
2147 code_flusher_->ProcessCandidates(); 2147 code_flusher_->ProcessCandidates();
2148 } 2148 }
2149 2149
2150 // Process and clear all optimized code maps.
2151 if (!FLAG_flush_optimized_code_cache) {
2152 GCTracer::Scope gc_scope(heap()->tracer(),
2153 GCTracer::Scope::MC_MARK_OPTIMIZED_CODE_MAPS);
2154 ProcessAndClearOptimizedCodeMaps();
2155 }
2156
2157 if (FLAG_track_gc_object_stats) { 2150 if (FLAG_track_gc_object_stats) {
2158 if (FLAG_trace_gc_object_stats) { 2151 if (FLAG_trace_gc_object_stats) {
2159 heap()->object_stats_->TraceObjectStats(); 2152 heap()->object_stats_->TraceObjectStats();
2160 } 2153 }
2161 heap()->object_stats_->CheckpointObjectStats(); 2154 heap()->object_stats_->CheckpointObjectStats();
2162 } 2155 }
2163 } 2156 }
2164 2157
2165 2158
2166 void MarkCompactCollector::ProcessAndClearOptimizedCodeMaps() {
Michael Starzinger 2015/11/27 10:06:39 Woot, I love it!
mvstanton 2015/12/01 11:28:15 Acknowledged.
// Compacts the optimized code map of every SharedFunctionInfo on the heap.
// Runs inside the mark-compact pause (under the MC_MARK_OPTIMIZED_CODE_MAPS
// tracer scope, and only when !FLAG_flush_optimized_code_cache — see caller
// above). Entries whose context, code, or literals object did not survive
// marking are dropped; surviving entries are slid toward the front of the
// array, with their pointer slots re-recorded for the compactor, and the
// array is trimmed to the new length at the end.
2167 SharedFunctionInfo::Iterator iterator(isolate());
2168 while (SharedFunctionInfo* shared = iterator.Next()) {
// Nothing to do for functions whose map is already the empty sentinel.
2169 if (shared->OptimizedCodeMapIsCleared()) continue;
2170
2171 // Process context-dependent entries in the optimized code map.
2172 FixedArray* code_map = shared->optimized_code_map();
// new_length is the write cursor for surviving entries; entries before
// kEntriesStart are header fields and are never moved.
2173 int new_length = SharedFunctionInfo::kEntriesStart;
2174 int old_length = code_map->length();
2175 for (int i = SharedFunctionInfo::kEntriesStart; i < old_length;
2176 i += SharedFunctionInfo::kEntryLength) {
2177 // Each entry contains [ context, code, literals, ast-id ] as fields.
2178 STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4);
2179 Context* context =
2180 Context::cast(code_map->get(i + SharedFunctionInfo::kContextOffset));
2181 HeapObject* code = HeapObject::cast(
2182 code_map->get(i + SharedFunctionInfo::kCachedCodeOffset));
2183 FixedArray* literals = FixedArray::cast(
2184 code_map->get(i + SharedFunctionInfo::kLiteralsOffset));
2185 Smi* ast_id =
2186 Smi::cast(code_map->get(i + SharedFunctionInfo::kOsrAstIdOffset));
// An entry survives only if all three heap-object fields were marked
// live; a white (unmarked) field means the whole entry is discarded by
// simply not copying it forward. At this point marking has finished,
// so every object is either white (dead) or black (live) — grey is
// impossible, which the DCHECKs assert.
2187 if (Marking::IsWhite(Marking::MarkBitFrom(context))) continue;
2188 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(context)));
2189 if (Marking::IsWhite(Marking::MarkBitFrom(code))) continue;
2190 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(code)));
2191 if (Marking::IsWhite(Marking::MarkBitFrom(literals))) continue;
2192 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(literals)));
2193 // Move every slot in the entry and record slots when needed.
// (When i == new_length these writes are no-ops in effect, but the
// slots are still re-recorded so the compactor can update them.)
2194 code_map->set(new_length + SharedFunctionInfo::kCachedCodeOffset, code);
2195 code_map->set(new_length + SharedFunctionInfo::kContextOffset, context);
2196 code_map->set(new_length + SharedFunctionInfo::kLiteralsOffset, literals);
// ast_id is a Smi, so it needs neither marking checks nor slot
// recording — it is copied verbatim.
2197 code_map->set(new_length + SharedFunctionInfo::kOsrAstIdOffset, ast_id);
2198 Object** code_slot = code_map->RawFieldOfElementAt(
2199 new_length + SharedFunctionInfo::kCachedCodeOffset);
2200 RecordSlot(code_map, code_slot, *code_slot);
2201 Object** context_slot = code_map->RawFieldOfElementAt(
2202 new_length + SharedFunctionInfo::kContextOffset);
2203 RecordSlot(code_map, context_slot, *context_slot);
2204 Object** literals_slot = code_map->RawFieldOfElementAt(
2205 new_length + SharedFunctionInfo::kLiteralsOffset);
2206 RecordSlot(code_map, literals_slot, *literals_slot);
2207 new_length += SharedFunctionInfo::kEntryLength;
2208 }
2209
2210 // Process context-independent entry in the optimized code map.
// This single slot holds either a Code object or undefined; dead code
// is cleared in place rather than compacted away.
2211 Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex);
2212 if (shared_object->IsCode()) {
2213 Code* shared_code = Code::cast(shared_object);
2214 if (Marking::IsWhite(Marking::MarkBitFrom(shared_code))) {
2215 code_map->set_undefined(SharedFunctionInfo::kSharedCodeIndex);
2216 } else {
2217 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(shared_code)));
2218 Object** slot =
2219 code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex);
2220 RecordSlot(code_map, slot, *slot);
2221 }
2222 }
2223
2224 // Trim the optimized code map if entries have been removed.
2225 if (new_length < old_length) {
2226 shared->TrimOptimizedCodeMap(old_length - new_length);
2227 }
2228 }
2229 }
2230
2231
2232 void MarkCompactCollector::ProcessWeakReferences() { 2159 void MarkCompactCollector::ProcessWeakReferences() {
2233 // This should be done before processing weak cells because it checks 2160 // This should be done before processing weak cells because it checks
2234 // mark bits of maps in weak cells. 2161 // mark bits of maps in weak cells.
2235 DependentCode* dependent_code_list = DependentCodeListFromNonLiveMaps(); 2162 DependentCode* dependent_code_list = DependentCodeListFromNonLiveMaps();
2236 2163
2237 // Process weak cells before MarkCodeForDeoptimization and 2164 // Process weak cells before MarkCodeForDeoptimization and
2238 // ClearNonLiveReferences so that weak cells in dependent code arrays are 2165 // ClearNonLiveReferences so that weak cells in dependent code arrays are
2239 // cleared or contain only live code objects. 2166 // cleared or contain only live code objects.
2240 ProcessAndClearWeakCells(); 2167 ProcessAndClearWeakCells();
2241 2168
(...skipping 2276 matching lines...) Expand 10 before | Expand all | Expand 10 after
4518 MarkBit mark_bit = Marking::MarkBitFrom(host); 4445 MarkBit mark_bit = Marking::MarkBitFrom(host);
4519 if (Marking::IsBlack(mark_bit)) { 4446 if (Marking::IsBlack(mark_bit)) {
4520 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); 4447 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
4521 RecordRelocSlot(&rinfo, target); 4448 RecordRelocSlot(&rinfo, target);
4522 } 4449 }
4523 } 4450 }
4524 } 4451 }
4525 4452
4526 } // namespace internal 4453 } // namespace internal
4527 } // namespace v8 4454 } // namespace v8
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698