Chromium Code Reviews
Unified Diff: src/mark-compact.cc

Issue 11028016: Move code flushing support into shared visitor. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed comments by Ulan Degenbaev. Created 8 years, 2 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 839 matching lines...)
 // overflow flag. When the overflow flag is set, we continue marking objects
 // reachable from the objects on the marking stack, but no longer push them on
 // the marking stack. Instead, we mark them as both marked and overflowed.
 // When the stack is in the overflowed state, objects marked as overflowed
 // have been reached and marked but their children have not been visited yet.
 // After emptying the marking stack, we clear the overflow flag and traverse
 // the heap looking for objects marked as overflowed, push them on the stack,
 // and continue with marking. This process repeats until all reachable
 // objects have been marked.

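[Editor's note] The comment above (unchanged by this patch) is a complete prose description of the marking-stack overflow handling. As an illustration only, here is a minimal self-contained C++ sketch of that scheme; all names (Obj, MarkingStack, ProcessMarkingStack) are toy stand-ins, not V8's real marking deque API:

#include <cstddef>
#include <vector>

// Toy heap object: a mark bit, an overflow bit, and outgoing edges.
struct Obj {
  bool marked;
  bool overflowed;
  std::vector<Obj*> children;
  Obj() : marked(false), overflowed(false) {}
};

// Fixed-capacity marking stack that remembers when a push was dropped.
class MarkingStack {
 public:
  MarkingStack() : overflowed_(false) {}
  bool Push(Obj* o) {
    if (stack_.size() >= kCapacity) { overflowed_ = true; return false; }
    stack_.push_back(o);
    return true;
  }
  Obj* Pop() { Obj* o = stack_.back(); stack_.pop_back(); return o; }
  bool IsEmpty() const { return stack_.empty(); }
  bool overflowed() const { return overflowed_; }
  void clear_overflowed() { overflowed_ = false; }
 private:
  static const size_t kCapacity = 64;
  std::vector<Obj*> stack_;
  bool overflowed_;
};

// Mark an object and push it; on overflow, remember the failure on the
// object itself so a later heap scan can find it again.
static void MarkAndPush(MarkingStack* stack, Obj* o) {
  if (o->marked) return;
  o->marked = true;
  if (!stack->Push(o)) o->overflowed = true;
}

static void ProcessMarkingStack(MarkingStack* stack, std::vector<Obj*>* heap) {
  do {
    // Drain: visit the children of everything on the stack.
    while (!stack->IsEmpty()) {
      Obj* o = stack->Pop();
      for (size_t i = 0; i < o->children.size(); i++) {
        MarkAndPush(stack, o->children[i]);
      }
    }
    // Recover: clear the flag and linearly rescan the heap for objects
    // that were marked as overflowed, re-pushing them.
    if (stack->overflowed()) {
      stack->clear_overflowed();
      for (size_t i = 0; i < heap->size(); i++) {
        Obj* o = (*heap)[i];
        if (!o->overflowed) continue;
        if (!stack->Push(o)) break;  // full again; o keeps its flag for the next round
        o->overflowed = false;
      }
    }
  } while (!stack->IsEmpty());
}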
-class CodeFlusher {
- public:
-  explicit CodeFlusher(Isolate* isolate)
-      : isolate_(isolate),
-        jsfunction_candidates_head_(NULL),
-        shared_function_info_candidates_head_(NULL) {}
-
-  void AddCandidate(SharedFunctionInfo* shared_info) {
-    SetNextCandidate(shared_info, shared_function_info_candidates_head_);
-    shared_function_info_candidates_head_ = shared_info;
-  }
-
-  void AddCandidate(JSFunction* function) {
-    ASSERT(function->code() == function->shared()->code());
-
-    SetNextCandidate(function, jsfunction_candidates_head_);
-    jsfunction_candidates_head_ = function;
-  }
-
-  void ProcessCandidates() {
-    ProcessSharedFunctionInfoCandidates();
-    ProcessJSFunctionCandidates();
-  }
-
- private:
-  void ProcessJSFunctionCandidates() {
-    Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
-
-    JSFunction* candidate = jsfunction_candidates_head_;
-    JSFunction* next_candidate;
-    while (candidate != NULL) {
-      next_candidate = GetNextCandidate(candidate);
-
-      SharedFunctionInfo* shared = candidate->shared();
-
-      Code* code = shared->code();
-      MarkBit code_mark = Marking::MarkBitFrom(code);
-      if (!code_mark.Get()) {
-        shared->set_code(lazy_compile);
-        candidate->set_code(lazy_compile);
-      } else {
-        candidate->set_code(shared->code());
-      }
-
-      // We are in the middle of a GC cycle so the write barrier in the code
-      // setter did not record the slot update and we have to do that manually.
-      Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
-      Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
-      isolate_->heap()->mark_compact_collector()->
-          RecordCodeEntrySlot(slot, target);
-
-      RecordSharedFunctionInfoCodeSlot(shared);
-
-      candidate = next_candidate;
-    }
-
-    jsfunction_candidates_head_ = NULL;
-  }
-
-
-  void ProcessSharedFunctionInfoCandidates() {
-    Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
-
-    SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
-    SharedFunctionInfo* next_candidate;
-    while (candidate != NULL) {
-      next_candidate = GetNextCandidate(candidate);
-      SetNextCandidate(candidate, NULL);
-
-      Code* code = candidate->code();
-      MarkBit code_mark = Marking::MarkBitFrom(code);
-      if (!code_mark.Get()) {
-        candidate->set_code(lazy_compile);
-      }
-
-      RecordSharedFunctionInfoCodeSlot(candidate);
-
-      candidate = next_candidate;
-    }
-
-    shared_function_info_candidates_head_ = NULL;
-  }
-
-  void RecordSharedFunctionInfoCodeSlot(SharedFunctionInfo* shared) {
-    Object** slot = HeapObject::RawField(shared,
-                                         SharedFunctionInfo::kCodeOffset);
-    isolate_->heap()->mark_compact_collector()->
-        RecordSlot(slot, slot, HeapObject::cast(*slot));
-  }
-
-  static JSFunction** GetNextCandidateField(JSFunction* candidate) {
-    return reinterpret_cast<JSFunction**>(
-        candidate->address() + JSFunction::kCodeEntryOffset);
-  }
-
-  static JSFunction* GetNextCandidate(JSFunction* candidate) {
-    return *GetNextCandidateField(candidate);
-  }
-
-  static void SetNextCandidate(JSFunction* candidate,
-                               JSFunction* next_candidate) {
-    *GetNextCandidateField(candidate) = next_candidate;
-  }
-
-  static SharedFunctionInfo** GetNextCandidateField(
-      SharedFunctionInfo* candidate) {
-    Code* code = candidate->code();
-    return reinterpret_cast<SharedFunctionInfo**>(
-        code->address() + Code::kGCMetadataOffset);
-  }
-
-  static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
-    return reinterpret_cast<SharedFunctionInfo*>(
-        candidate->code()->gc_metadata());
-  }
-
-  static void SetNextCandidate(SharedFunctionInfo* candidate,
-                               SharedFunctionInfo* next_candidate) {
-    candidate->code()->set_gc_metadata(next_candidate);
-  }
-
-  Isolate* isolate_;
-  JSFunction* jsfunction_candidates_head_;
-  SharedFunctionInfo* shared_function_info_candidates_head_;
-
-  DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
-};
+void CodeFlusher::ProcessJSFunctionCandidates() {
+  Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+
+  JSFunction* candidate = jsfunction_candidates_head_;
+  JSFunction* next_candidate;
+  while (candidate != NULL) {
+    next_candidate = GetNextCandidate(candidate);
+
+    SharedFunctionInfo* shared = candidate->shared();
+
+    Code* code = shared->code();
+    MarkBit code_mark = Marking::MarkBitFrom(code);
+    if (!code_mark.Get()) {
+      shared->set_code(lazy_compile);
+      candidate->set_code(lazy_compile);
+    } else {
+      candidate->set_code(shared->code());
+    }
+
+    // We are in the middle of a GC cycle so the write barrier in the code
+    // setter did not record the slot update and we have to do that manually.
+    Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
+    Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
+    isolate_->heap()->mark_compact_collector()->
+        RecordCodeEntrySlot(slot, target);
+
+    Object** shared_code_slot =
+        HeapObject::RawField(shared, SharedFunctionInfo::kCodeOffset);
+    isolate_->heap()->mark_compact_collector()->
+        RecordSlot(shared_code_slot, shared_code_slot, *shared_code_slot);
+
+    candidate = next_candidate;
+  }
+
+  jsfunction_candidates_head_ = NULL;
+}
+
+
+void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
+  Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+
+  SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
+  SharedFunctionInfo* next_candidate;
+  while (candidate != NULL) {
+    next_candidate = GetNextCandidate(candidate);
+    SetNextCandidate(candidate, NULL);
+
+    Code* code = candidate->code();
+    MarkBit code_mark = Marking::MarkBitFrom(code);
+    if (!code_mark.Get()) {
+      candidate->set_code(lazy_compile);
+    }
+
+    Object** code_slot =
+        HeapObject::RawField(candidate, SharedFunctionInfo::kCodeOffset);
+    isolate_->heap()->mark_compact_collector()->
+        RecordSlot(code_slot, code_slot, *code_slot);
+
+    candidate = next_candidate;
+  }
+
+  shared_function_info_candidates_head_ = NULL;
+}
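[Editor's note] One detail worth calling out in the removed class (whose declaration leaves this file; its methods above are now defined out of line): the candidate lists need "next" pointers during GC but must not allocate, so the GetNextCandidateField helpers thread the links through storage that is expendable at this point in the cycle — a JSFunction's code-entry word and a Code object's gc_metadata slot. That is also why ProcessJSFunctionCandidates reads GetNextCandidate(candidate) before calling set_code: writing the code pointer destroys the link. A toy illustration of the same intrusive-list trick, with hypothetical types:

#include <cstddef>

// Toy stand-in for JSFunction: code_entry normally points at code, but
// while an object sits on the candidate list it is reused as the list's
// "next" pointer (a real value is stored again when processing).
struct Function {
  void* code_entry;
  Function() : code_entry(NULL) {}
};

class CandidateList {
 public:
  CandidateList() : head_(NULL) {}

  void Add(Function* f) {
    f->code_entry = head_;  // borrow the field as a next link
    head_ = f;
  }

  // Walks the list, saving each element's next link before handing it to
  // visit(), which is then free to overwrite code_entry with real data.
  void Process(void (*visit)(Function*)) {
    Function* f = head_;
    while (f != NULL) {
      Function* next = static_cast<Function*>(f->code_entry);
      visit(f);
      f = next;
    }
    head_ = NULL;
  }

 private:
  Function* head_;
};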


 MarkCompactCollector::~MarkCompactCollector() {
   if (code_flusher_ != NULL) {
     delete code_flusher_;
     code_flusher_ = NULL;
   }
 }


(...skipping 159 matching lines...)
       if (!o->IsHeapObject()) continue;
       collector->RecordSlot(anchor_slot, p, o);
       HeapObject* obj = HeapObject::cast(o);
       MarkBit mark = Marking::MarkBitFrom(obj);
       if (mark.Get()) continue;
       VisitUnmarkedObject(collector, obj);
     }
     return true;
   }

+  INLINE(static void BeforeVisitingSharedFunctionInfo(HeapObject* object)) {
+    SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
+    shared->BeforeVisitingPointers();
+  }
+
   static void VisitJSWeakMap(Map* map, HeapObject* object) {
     MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
     JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object);

     // Enqueue weak map in linked list of encountered weak maps.
     if (weak_map->next() == Smi::FromInt(0)) {
       weak_map->set_next(collector->encountered_weak_maps());
       collector->set_encountered_weak_maps(weak_map);
     }

(...skipping 23 matching lines...)
     collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map()));
     ASSERT(MarkCompactCollector::IsMarked(table->map()));
   }

  private:
   template<int id>
   static inline void TrackObjectStatsAndVisit(Map* map, HeapObject* obj);

   // Code flushing support.

-  // How many collections a newly compiled code object will survive before
-  // being flushed.
-  static const int kCodeAgeThreshold = 5;
-
   static const int kRegExpCodeThreshold = 5;

-  inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
-    Object* undefined = heap->undefined_value();
-    return (info->script() != undefined) &&
-        (reinterpret_cast<Script*>(info->script())->source() != undefined);
-  }
-
-
-  inline static bool IsCompiled(JSFunction* function) {
-    return function->code() !=
-        function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
-  }
-
-  inline static bool IsCompiled(SharedFunctionInfo* function) {
-    return function->code() !=
-        function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
-  }
-
-  inline static bool IsFlushable(Heap* heap, JSFunction* function) {
-    SharedFunctionInfo* shared_info = function->unchecked_shared();
-
-    // Code is either on stack, in compilation cache or referenced
-    // by optimized version of function.
-    MarkBit code_mark = Marking::MarkBitFrom(function->code());
-    if (code_mark.Get()) {
-      if (!Marking::MarkBitFrom(shared_info).Get()) {
-        shared_info->set_code_age(0);
-      }
-      return false;
-    }
-
-    // We do not flush code for optimized functions.
-    if (function->code() != shared_info->code()) {
-      return false;
-    }
-
-    return IsFlushable(heap, shared_info);
-  }
-
-  inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) {
-    // Code is either on stack, in compilation cache or referenced
-    // by optimized version of function.
-    MarkBit code_mark =
-        Marking::MarkBitFrom(shared_info->code());
-    if (code_mark.Get()) {
-      return false;
-    }
-
-    // The function must be compiled and have the source code available,
-    // to be able to recompile it in case we need the function again.
-    if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
-      return false;
-    }
-
-    // We never flush code for Api functions.
-    Object* function_data = shared_info->function_data();
-    if (function_data->IsFunctionTemplateInfo()) {
-      return false;
-    }
-
-    // Only flush code for functions.
-    if (shared_info->code()->kind() != Code::FUNCTION) {
-      return false;
-    }
-
-    // Function must be lazy compilable.
-    if (!shared_info->allows_lazy_compilation()) {
-      return false;
-    }
-
-    // If this is a full script wrapped in a function we do not flush the code.
-    if (shared_info->is_toplevel()) {
-      return false;
-    }
-
-    // Age this shared function info.
-    if (shared_info->code_age() < kCodeAgeThreshold) {
-      shared_info->set_code_age(shared_info->code_age() + 1);
-      return false;
-    }
-
-    return true;
-  }
-
-
-  static bool FlushCodeForFunction(Heap* heap, JSFunction* function) {
-    if (!IsFlushable(heap, function)) return false;
-
-    // This function's code looks flushable. But we have to postpone the
-    // decision until we see all functions that point to the same
-    // SharedFunctionInfo because some of them might be optimized.
-    // That would make the nonoptimized version of the code nonflushable,
-    // because it is required for bailing out from optimized code.
-    heap->mark_compact_collector()->code_flusher()->AddCandidate(function);
-    return true;
-  }
-
-  static inline bool IsValidNotBuiltinContext(Object* ctx) {
-    return ctx->IsContext() &&
-        !Context::cast(ctx)->global_object()->IsJSBuiltinsObject();
-  }
-
-
-  static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) {
-    SharedFunctionInfo::cast(object)->BeforeVisitingPointers();
-
-    FixedBodyVisitor<MarkCompactMarkingVisitor,
-                     SharedFunctionInfo::BodyDescriptor,
-                     void>::Visit(map, object);
-  }
-
-
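[Editor's note] Since the flushability rules are easy to lose among the early returns being deleted above, here is a condensed restatement. It is a sketch over a toy struct whose fields mirror the accessors used above, not V8's API, and it omits the set_code_age side effect with which the real IsFlushable ages a function that is not yet old enough:

// Toy mirror of the SharedFunctionInfo queries used by IsFlushable above.
struct SharedInfoView {
  bool code_is_marked;          // code on stack, in cache, or otherwise live
  bool is_compiled;
  bool has_source_code;         // script and source still available
  bool is_api_function;         // function_data is a FunctionTemplateInfo
  bool code_kind_is_function;   // Code::FUNCTION, not a stub or builtin
  bool allows_lazy_compilation;
  bool is_toplevel;             // full script wrapped in a function
  int code_age;
};

static const int kCodeAgeThreshold = 5;

static bool IsFlushableSketch(const SharedInfoView& s) {
  return !s.code_is_marked &&
         s.is_compiled &&
         s.has_source_code &&
         !s.is_api_function &&
         s.code_kind_is_function &&
         s.allows_lazy_compilation &&
         !s.is_toplevel &&
         s.code_age >= kCodeAgeThreshold;
}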
   static void UpdateRegExpCodeAgeAndFlush(Heap* heap,
                                           JSRegExp* re,
                                           bool is_ascii) {
     // Make sure that the fixed array is in fact initialized on the RegExp.
     // We could potentially trigger a GC when initializing the RegExp.
     if (HeapObject::cast(re->data())->map()->instance_type() !=
             FIXED_ARRAY_TYPE) return;

     // Make sure this is a RegExp that actually contains code.
     if (re->TypeTagUnchecked() != JSRegExp::IRREGEXP) return;
(...skipping 53 matching lines...)
       return;
     }
     JSRegExp* re = reinterpret_cast<JSRegExp*>(object);
     // Flush code or set age on both ASCII and two byte code.
     UpdateRegExpCodeAgeAndFlush(heap, re, true);
     UpdateRegExpCodeAgeAndFlush(heap, re, false);
     // Visit the fields of the RegExp, including the updated FixedArray.
     VisitJSRegExp(map, object);
   }

-
-  static void VisitSharedFunctionInfoAndFlushCode(Map* map,
-                                                  HeapObject* object) {
-    Heap* heap = map->GetHeap();
-    SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
-    if (shared->ic_age() != heap->global_ic_age()) {
-      shared->ResetForNewContext(heap->global_ic_age());
-    }
-
-    MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
-    if (!collector->is_code_flushing_enabled()) {
-      VisitSharedFunctionInfoGeneric(map, object);
-      return;
-    }
-    VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false);
-  }
-
-
-  static void VisitSharedFunctionInfoAndFlushCodeGeneric(
-      Map* map, HeapObject* object, bool known_flush_code_candidate) {
-    Heap* heap = map->GetHeap();
-    SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
-
-    shared->BeforeVisitingPointers();
-
-    if (!known_flush_code_candidate) {
-      known_flush_code_candidate = IsFlushable(heap, shared);
-      if (known_flush_code_candidate) {
-        heap->mark_compact_collector()->code_flusher()->AddCandidate(shared);
-      }
-    }
-
-    VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);
-  }
-
-
-  static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
-    Heap* heap = map->GetHeap();
-    MarkCompactCollector* collector = heap->mark_compact_collector();
-    if (!collector->is_code_flushing_enabled()) {
-      VisitJSFunction(map, object);
-      return;
-    }
-
-    JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
-    // The function must have a valid context and not be a builtin.
-    bool flush_code_candidate = false;
-    if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
-      flush_code_candidate = FlushCodeForFunction(heap, jsfunction);
-    }
-
-    if (!flush_code_candidate) {
-      Code* code = jsfunction->shared()->code();
-      MarkBit code_mark = Marking::MarkBitFrom(code);
-      collector->MarkObject(code, code_mark);
-
-      if (jsfunction->code()->kind() == Code::OPTIMIZED_FUNCTION) {
-        collector->MarkInlinedFunctionsCode(jsfunction->code());
-      }
-    }
-
-    VisitJSFunctionFields(map,
-                          reinterpret_cast<JSFunction*>(object),
-                          flush_code_candidate);
-  }
-
-
-  static void VisitJSFunction(Map* map, HeapObject* object) {
-    VisitJSFunctionFields(map,
-                          reinterpret_cast<JSFunction*>(object),
-                          false);
-  }
-
-
-  static inline void VisitJSFunctionFields(Map* map,
-                                           JSFunction* object,
-                                           bool flush_code_candidate) {
-    Heap* heap = map->GetHeap();
-
-    Object** start_slot =
-        HeapObject::RawField(object, JSFunction::kPropertiesOffset);
-    Object** end_slot =
-        HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
-    VisitPointers(heap, start_slot, start_slot, end_slot);
-
-    if (!flush_code_candidate) {
-      VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
-    } else {
-      // Don't visit code object.
-
-      // Visit shared function info to avoid double checking of its
-      // flushability.
-      SharedFunctionInfo* shared_info = object->unchecked_shared();
-      MarkBit shared_info_mark = Marking::MarkBitFrom(shared_info);
-      if (!shared_info_mark.Get()) {
-        Map* shared_info_map = shared_info->map();
-        MarkBit shared_info_map_mark =
-            Marking::MarkBitFrom(shared_info_map);
-        heap->mark_compact_collector()->SetMark(shared_info, shared_info_mark);
-        heap->mark_compact_collector()->MarkObject(shared_info_map,
-                                                   shared_info_map_mark);
-        VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
-                                                   shared_info,
-                                                   true);
-      }
-    }
-
-    start_slot =
-        HeapObject::RawField(object,
-                             JSFunction::kCodeEntryOffset + kPointerSize);
-    end_slot =
-        HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
-    VisitPointers(heap, start_slot, start_slot, end_slot);
-  }
-
-
-  static void VisitSharedFunctionInfoFields(Heap* heap,
-                                            HeapObject* object,
-                                            bool flush_code_candidate) {
-    VisitPointer(heap,
-                 HeapObject::RawField(object, SharedFunctionInfo::kNameOffset));
-
-    if (!flush_code_candidate) {
-      VisitPointer(heap,
-                   HeapObject::RawField(object,
-                                        SharedFunctionInfo::kCodeOffset));
-    }
-
-    Object** start_slot =
-        HeapObject::RawField(object,
-                             SharedFunctionInfo::kOptimizedCodeMapOffset);
-    Object** end_slot =
-        HeapObject::RawField(object, SharedFunctionInfo::kSize);
-
-    VisitPointers(heap, start_slot, start_slot, end_slot);
-  }
-
   static VisitorDispatchTable<Callback> non_count_table_;
 };


 void MarkCompactMarkingVisitor::VisitHugeFixedArray(Heap* heap,
                                                     FixedArray* array,
                                                     int length) {
   MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());

   ASSERT(chunk->owner()->identity() == LO_SPACE);
(...skipping 137 matching lines...)
                               fixed_array->Size());
     }
     ObjectStatsVisitBase(kVisitFixedArray, map, obj);
   }
 };


 void MarkCompactMarkingVisitor::Initialize() {
   StaticMarkingVisitor<MarkCompactMarkingVisitor>::Initialize();

-  table_.Register(kVisitSharedFunctionInfo,
-                  &VisitSharedFunctionInfoAndFlushCode);
-
-  table_.Register(kVisitJSFunction,
-                  &VisitJSFunctionAndFlushCode);
-
   table_.Register(kVisitJSRegExp,
                   &VisitRegExpAndFlushCode);

   table_.Register(kVisitFixedArray,
                   &VisitFixedArray);

   if (FLAG_track_gc_object_stats) {
     // Copy the visitor table to make call-through possible.
     non_count_table_.CopyFrom(&table_);
 #define VISITOR_ID_COUNT_FUNCTION(id) \
(...skipping 58 matching lines...)
       collector_->MarkObject(shared->code(), code_mark);
       collector_->MarkObject(shared, shared_mark);
     }
   }

  private:
   MarkCompactCollector* collector_;
 };


-void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) {
-  // For optimized functions we should retain both the non-optimized version
-  // of its code and the non-optimized version of all inlined functions.
-  // This is required to support bailing out from inlined code.
-  DeoptimizationInputData* data =
-      DeoptimizationInputData::cast(code->deoptimization_data());
-
-  FixedArray* literals = data->LiteralArray();
-
-  for (int i = 0, count = data->InlinedFunctionCount()->value();
-       i < count;
-       i++) {
-    JSFunction* inlined = JSFunction::cast(literals->get(i));
-    Code* inlined_code = inlined->shared()->code();
-    MarkBit inlined_code_mark = Marking::MarkBitFrom(inlined_code);
-    MarkObject(inlined_code, inlined_code_mark);
-  }
-}
-
-
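[Editor's note] The deleted MarkInlinedFunctionsCode (re-homed on MarkCompactMarkingVisitor by this patch, as the next hunk shows) maintains one invariant: deoptimization can resume any inlined function in its unoptimized code, so that code must stay alive as long as the optimized code that inlined it. A toy restatement with hypothetical types:

#include <cstddef>
#include <vector>

struct UnoptimizedCode {
  bool marked;
  UnoptimizedCode() : marked(false) {}
};

struct OptimizedCode {
  // In V8 this list comes from the code's DeoptimizationInputData; here
  // it is simply stored directly.
  std::vector<UnoptimizedCode*> inlined_unoptimized;
};

// Keep the unoptimized code of every inlined function reachable for as
// long as the optimized code itself is marked live.
static void MarkInlinedFunctionsCodeSketch(OptimizedCode* code) {
  for (size_t i = 0; i < code->inlined_unoptimized.size(); i++) {
    code->inlined_unoptimized[i]->marked = true;
  }
}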
 void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
                                                         ThreadLocalTop* top) {
   for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
     // Note: for a frame that has a pending lazy deoptimization,
     // StackFrame::unchecked_code will return a non-optimized code object for
     // the outermost function and StackFrame::LookupCode will return the
     // actual optimized code object.
     StackFrame* frame = it.frame();
     Code* code = frame->unchecked_code();
     MarkBit code_mark = Marking::MarkBitFrom(code);
     MarkObject(code, code_mark);
     if (frame->is_optimized()) {
-      MarkInlinedFunctionsCode(frame->LookupCode());
+      MarkCompactMarkingVisitor::MarkInlinedFunctionsCode(heap(),
+                                                          frame->LookupCode());
     }
   }
 }


 void MarkCompactCollector::PrepareForCodeFlushing() {
   ASSERT(heap() == Isolate::Current()->heap());

   // TODO(1609) Currently incremental marker does not support code flushing.
   if (!FLAG_flush_code || was_marked_incrementally_) {
(...skipping 2321 matching lines...)
   while (buffer != NULL) {
     SlotsBuffer* next_buffer = buffer->next();
     DeallocateBuffer(buffer);
     buffer = next_buffer;
   }
   *buffer_address = NULL;
 }


 } }  // namespace v8::internal