Index: src/objects-visiting-inl.h
diff --git a/src/objects-visiting-inl.h b/src/objects-visiting-inl.h
index ea5f1cadbbb5b2d62ca00ccc31134c3dd281b20c..886fd9a8b16b1d552c83d1575d6c67c1cc177246 100644
--- a/src/objects-visiting-inl.h
+++ b/src/objects-visiting-inl.h
@@ -138,9 +138,9 @@ void StaticMarkingVisitor<StaticVisitor>::Initialize() {
 
   table_.Register(kVisitCode, &VisitCode);
 
-  // Registration for kVisitSharedFunctionInfo is done by StaticVisitor.
+  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
 
-  // Registration for kVisitJSFunction is done by StaticVisitor.
+  table_.Register(kVisitJSFunction, &VisitJSFunction);
 
   // Registration for kVisitJSRegExp is done by StaticVisitor.
 
@@ -284,6 +284,76 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCode(
 
 
 template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
+    Map* map, HeapObject* object) {
+  Heap* heap = map->GetHeap();
+  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
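+  // The global IC age is bumped (e.g. on a context-disposal notification)
+  // when cached type feedback should be cleared; a mismatch means this
+  // function still carries inline-cache state from before that request.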
+  if (shared->ic_age() != heap->global_ic_age()) {
+    shared->ResetForNewContext(heap->global_ic_age());
+  }
+  MarkCompactCollector* collector = heap->mark_compact_collector();
+  if (collector->is_code_flushing_enabled()) {
+    if (IsFlushable(heap, shared)) {
+      // This function's code looks flushable. But we have to postpone
+      // the decision until we see all functions that point to the same
+      // SharedFunctionInfo because some of them might be optimized.
+      // That would also make the non-optimized version of the code
+      // non-flushable, because it is required for bailing out from
+      // optimized code.
+      collector->code_flusher()->AddCandidate(shared);
+      // Treat the reference to the code object weakly.
+      VisitSharedFunctionInfoWeakCode(heap, object);
+      return;
+    }
+  }
+  VisitSharedFunctionInfoStrongCode(heap, object);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
+    Map* map, HeapObject* object) {
+  Heap* heap = map->GetHeap();
+  JSFunction* function = JSFunction::cast(object);
+  MarkCompactCollector* collector = heap->mark_compact_collector();
+  if (collector->is_code_flushing_enabled()) {
+    if (IsFlushable(heap, function)) {
+      // This function's code looks flushable. But we have to postpone
+      // the decision until we see all functions that point to the same
+      // SharedFunctionInfo because some of them might be optimized.
+      // That would also make the non-optimized version of the code
+      // non-flushable, because it is required for bailing out from
+      // optimized code.
+      collector->code_flusher()->AddCandidate(function);
+      // Visit the shared function info immediately to avoid double-checking
+      // its flushability later. This is just an optimization because the
+      // shared function info would eventually be visited anyway.
+      SharedFunctionInfo* shared = function->unchecked_shared();
+      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
+        StaticVisitor::MarkObject(heap, shared->map());
+        VisitSharedFunctionInfoWeakCode(heap, shared);
+      }
+      // Treat the reference to the code object weakly.
+      VisitJSFunctionWeakCode(heap, object);
+      return;
+    } else {
+      // Visit all unoptimized code objects to prevent flushing them.
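+      // Marking the code object strongly here makes the mark-bit check in
+      // IsFlushable fail for it later, which vetoes flushing.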
+      StaticVisitor::MarkObject(heap, function->shared()->code());
+      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
+        MarkInlinedFunctionsCode(heap, function->code());
+      }
+    }
+  }
+  VisitJSFunctionStrongCode(heap, object);
+}
+
+
+template<typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
     Map* map, HeapObject* object) {
   int last_property_offset =
@@ -356,6 +426,217 @@ void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
 }
 
 
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
+    Heap* heap, Code* code) {
+  // For an optimized function we have to retain both the non-optimized
+  // version of its code and the non-optimized versions of all inlined
+  // functions. This is required to support bailing out from inlined code.
+  DeoptimizationInputData* data =
+      DeoptimizationInputData::cast(code->deoptimization_data());
+  FixedArray* literals = data->LiteralArray();
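+  // The first InlinedFunctionCount() entries of the literal array hold the
+  // JSFunctions that were inlined into this code object.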
+  for (int i = 0, count = data->InlinedFunctionCount()->value();
+       i < count;
+       i++) {
+    JSFunction* inlined = JSFunction::cast(literals->get(i));
+    StaticVisitor::MarkObject(heap, inlined->shared()->code());
+  }
+}
+
+
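+// A function whose global object is the builtins object is itself part of
+// the builtins and must never have its code flushed.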
+inline static bool IsValidNonBuiltinContext(Object* context) {
+  return context->IsContext() &&
+      !Context::cast(context)->global_object()->IsJSBuiltinsObject();
+}
+
+
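+// Flushed code can only be regenerated by lazy recompilation, which requires
+// the script and its source string to still be available.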
+inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
+  Object* undefined = heap->undefined_value();
+  return (info->script() != undefined) &&
+      (reinterpret_cast<Script*>(info->script())->source() != undefined);
+}
+
+
+template<typename StaticVisitor>
+bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
+    Heap* heap, JSFunction* function) {
+  SharedFunctionInfo* shared_info = function->unchecked_shared();
+
+  // The code is either on the stack, in the compilation cache, or
+  // referenced by an optimized version of the function.
+  MarkBit code_mark = Marking::MarkBitFrom(function->code());
+  if (code_mark.Get()) {
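+    // The code is alive regardless of this function, so keep the shared
+    // function info young unless it was already visited this cycle.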
+    if (!Marking::MarkBitFrom(shared_info).Get()) {
+      shared_info->set_code_age(0);
+    }
+    return false;
+  }
+
+  // The function must have a valid context and not be a builtin.
+  if (!IsValidNonBuiltinContext(function->unchecked_context())) {
+    return false;
+  }
+
+  // We do not flush code for optimized functions.
+  if (function->code() != shared_info->code()) {
+    return false;
+  }
+
+  return IsFlushable(heap, shared_info);
+}
+
+
+template<typename StaticVisitor>
+bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
+    Heap* heap, SharedFunctionInfo* shared_info) {
+  // The code is either on the stack, in the compilation cache, or
+  // referenced by an optimized version of the function.
+  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
+  if (code_mark.Get()) {
+    return false;
+  }
+
+  // The function must be compiled and its source code must be available,
+  // so that it can be recompiled in case we need it again.
+  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
+    return false;
+  }
+
+  // We never flush code for API functions.
+  Object* function_data = shared_info->function_data();
+  if (function_data->IsFunctionTemplateInfo()) {
+    return false;
+  }
+
+  // Only flush code for functions.
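+  // Other kinds (stubs, builtins, optimized code) are never flushed.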
+  if (shared_info->code()->kind() != Code::FUNCTION) {
+    return false;
+  }
+
+  // The function must be lazily compilable.
+  if (!shared_info->allows_lazy_compilation()) {
+    return false;
+  }
+
+  // If this is a full script wrapped in a function, we do not flush the code.
+  if (shared_info->is_toplevel()) {
+    return false;
+  }
+
+  // TODO(mstarzinger): The following will soon be replaced by a new way of
+  // aging code, that is based on an aging stub in the function prologue.
+
+  // The number of collections a newly compiled code object will survive
+  // before it is flushed.
+  static const int kCodeAgeThreshold = 5;
+
+  // Age this shared function info.
+  if (shared_info->code_age() < kCodeAgeThreshold) {
+    shared_info->set_code_age(shared_info->code_age() + 1);
+    return false;
+  }
+
+  return true;
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
+    Heap* heap, HeapObject* object) {
+  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
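+  // Unlike the weak-code variant below, this treats every field of the
+  // shared function info, including the code field, as a strong reference.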
+  Object** start_slot =
+      HeapObject::RawField(object,
+                           SharedFunctionInfo::BodyDescriptor::kStartOffset);
+  Object** end_slot =
+      HeapObject::RawField(object,
+                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
+  StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
+    Heap* heap, HeapObject* object) {
+  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
+  Object** name_slot =
+      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
+  StaticVisitor::VisitPointer(heap, name_slot);
+
+  // Skip visiting kCodeOffset as it is treated weakly here.
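+  // The asserts below pin down the field order that this split traversal
+  // relies on.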
+  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
+                SharedFunctionInfo::kCodeOffset);
+  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
+                SharedFunctionInfo::kOptimizedCodeMapOffset);
+
+  Object** start_slot =
+      HeapObject::RawField(object,
+                           SharedFunctionInfo::kOptimizedCodeMapOffset);
+  Object** end_slot =
+      HeapObject::RawField(object,
+                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
+  StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
+    Heap* heap, HeapObject* object) {
+  Object** start_slot =
+      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
+  Object** end_slot =
+      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
+  StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot);
+
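+  // The code entry is a raw instruction address rather than a tagged
+  // pointer, so it goes through the dedicated VisitCodeEntry hook.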
+  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
+  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
+                JSFunction::kPrototypeOrInitialMapOffset);
+
+  start_slot =
+      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
+  end_slot =
+      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
+  StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot);
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
+    Heap* heap, HeapObject* object) {
+  Object** start_slot =
+      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
+  Object** end_slot =
+      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
+  StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot);
+
+  // Skip visiting kCodeEntryOffset as it is treated weakly here.
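+  // The code flusher will later either restore this entry or redirect it
+  // to the lazy-compile stub if the code actually gets flushed.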
+  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
+                JSFunction::kPrototypeOrInitialMapOffset);
+
+  start_slot =
+      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
+  end_slot =
+      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
+  StaticVisitor::VisitPointers(heap, start_slot, start_slot, end_slot);
+}
+
+
 void Code::CodeIterateBody(ObjectVisitor* v) {
   int mode_mask = RelocInfo::kCodeTargetMask |
       RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |