Index: src/objects-inl.h
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 37c51d703db1679ba037990d411533734bfa9de5..fc2c77e2ca6173f92be23ee0557ae946830492da 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -3031,6 +3031,8 @@ ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
 #endif
 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
+ACCESSORS_GCSAFE(SharedFunctionInfo, optimized_code_map, Object,
+                 kOptimizedCodeMapOffset)
 ACCESSORS_GCSAFE(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
 ACCESSORS_GCSAFE(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
           kInstanceClassNameOffset)
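
The hunk above only declares accessors for the new optimized_code_map field via the ACCESSORS_GCSAFE macro. As a rough, self-contained sketch of the pattern such accessor macros expand to (a typed getter and setter over a word at a fixed byte offset inside the object), consider the following. It is not V8's actual macro expansion, and the names DEFINE_ACCESSORS, FakeSharedFunctionInfo, and Tagged are invented for illustration; the real macros also emit casts and write barriers.

// Sketch only: NOT V8's real ACCESSORS_GCSAFE expansion. It mimics the
// general shape of such macros: a typed getter and setter that read and
// write a tagged word at a fixed byte offset inside the object.
#include <cstdint>
#include <cstring>
#include <iostream>

// Stand-in for a tagged heap value (V8 uses Object* / Smi encodings).
using Tagged = std::intptr_t;

// Hypothetical macro for this example only.
#define DEFINE_ACCESSORS(holder, name, offset)                \
  Tagged holder::name() const {                               \
    Tagged value;                                             \
    std::memcpy(&value, fields_ + (offset), sizeof(Tagged));  \
    return value;                                             \
  }                                                           \
  void holder::set_##name(Tagged value) {                     \
    std::memcpy(fields_ + (offset), &value, sizeof(Tagged));  \
  }

class FakeSharedFunctionInfo {
 public:
  static const int kOptimizedCodeMapOffset = 0;

  Tagged optimized_code_map() const;
  void set_optimized_code_map(Tagged value);

 private:
  std::uint8_t fields_[sizeof(Tagged)] = {};
};

DEFINE_ACCESSORS(FakeSharedFunctionInfo, optimized_code_map,
                 FakeSharedFunctionInfo::kOptimizedCodeMapOffset)

int main() {
  FakeSharedFunctionInfo info;
  info.set_optimized_code_map(42);                 // pretend: cached code map
  std::cout << info.optimized_code_map() << "\n";  // prints 42
  return 0;
}
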
@@ -3189,6 +3191,17 @@ void SharedFunctionInfo::set_strict_mode(bool value) {
 }
+void SharedFunctionInfo::BeforeVisitingPointers() {
+  if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
+
+  // Flush optimized code map on major GC.
+  // Note: we may experiment with rebuilding it or retaining entries
+  // which should survive as we iterate through optimized functions
+  // anyway.
+  set_optimized_code_map(Smi::FromInt(0));
+}
+
+
 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
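
For context on how the new BeforeVisitingPointers() hook is meant to be used, here is a minimal sketch of the calling pattern under stated assumptions: a major-GC visitor gives each object a chance to drop cache-like state before its pointer fields are visited, so the optimized code map (cleared above to a Smi zero sentinel) does not keep otherwise-collectable code alive. This is not V8's marking visitor; FakeHeapObject and VisitForMajorGC are invented for the example.

// Sketch only: NOT V8's marking visitor. It shows the calling pattern the
// patch relies on: before a full-GC visitor walks an object's pointer
// fields, BeforeVisitingPointers() lets the object flush cache-like state,
// here by resetting the code-map slot to a zero sentinel (Smi::FromInt(0)
// in the real code). All names below are invented for the example.
#include <iostream>
#include <vector>

struct FakeHeapObject {
  long optimized_code_map = 1234;   // pretend this references cached code
  bool slack_tracking_in_progress = false;

  void BeforeVisitingPointers() {
    if (slack_tracking_in_progress) {
      // In the real code this is where DetachInitialMap() runs.
    }
    optimized_code_map = 0;  // flush the cache on major GC
  }
};

// Toy "major GC" pass: run the pre-visit hook, then (conceptually) mark
// each pointer field of the object.
void VisitForMajorGC(std::vector<FakeHeapObject>& heap) {
  for (FakeHeapObject& obj : heap) {
    obj.BeforeVisitingPointers();
    // ... a real visitor would now mark obj's pointer fields ...
  }
}

int main() {
  std::vector<FakeHeapObject> heap(2);
  VisitForMajorGC(heap);
  std::cout << heap[0].optimized_code_map << "\n";  // prints 0: cache flushed
  return 0;
}

Clearing the map wholesale is the simple choice taken in the patch; the in-code note above explicitly leaves open rebuilding it or retaining entries that survive the GC anyway.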