Chromium Code Reviews — Index: src/objects.cc |
| diff --git a/src/objects.cc b/src/objects.cc |
| index f9e8bd22c15bc6e8820f717abd47071bac11b4ef..1f05b6d922f630df7dbeee2952190b47a51bcae7 100644 |
| --- a/src/objects.cc |
| +++ b/src/objects.cc |
| @@ -9907,6 +9907,43 @@ bool JSFunction::PassesFilter(const char* raw_filter) { |
| } |
| +void JSFunction::Optimize(const char* reason) { |
|
titzer
2014/02/18 09:24:48
Looks like you copied this right out of the compil
Hannes Payer (out of office)
2014/05/12 09:37:08
This code was part of runtime-profiler. It should
|
| + ASSERT(IsOptimizable()); |
| + |
| + if (FLAG_trace_opt && PassesFilter(FLAG_hydrogen_filter)) { |
| + PrintF("[marking "); |
| + ShortPrint(); |
| + PrintF(" for recompilation, reason: %s", reason); |
| + if (FLAG_type_info_threshold > 0) { |
| + int typeinfo, total, percentage; |
| + Code* code = shared()->code(); |
| + code->GetICCounts(&typeinfo, &total, &percentage); |
| + PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage); |
| + } |
| + PrintF("]\n"); |
| + } |
| + |
| + Isolate* isolate = GetIsolate(); |
| + |
| + if (isolate->concurrent_recompilation_enabled() && |
| + !isolate->bootstrapper()->IsActive()) { |
| + if (isolate->concurrent_osr_enabled() && |
| + isolate->optimizing_compiler_thread()->IsQueuedForOSR(this)) { |
| + // Do not attempt regular recompilation if we already queued this for OSR. |
| + // TODO(yangguo): This is necessary so that we don't install optimized |
| + // code on a function that is already optimized, since OSR and regular |
| + // recompilation race. This goes away as soon as OSR becomes one-shot. |
| + return; |
| + } |
| + ASSERT(!IsInOptimizationQueue()); |
| + MarkForConcurrentOptimization(); |
| + } else { |
| + // The next call to the function will trigger optimization. |
| + MarkForOptimization(); |
| + } |
| +} |
| + |
| + |
| MaybeObject* Oddball::Initialize(Heap* heap, |
| const char* to_string, |
| Object* to_number, |
| @@ -11268,6 +11305,23 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength( |
| } |
| +void Code::GetICCounts(int* ic_with_type_info_count, |
| + int* ic_total_count, |
| + int* percentage) { |
| + *ic_total_count = 0; |
| + *ic_with_type_info_count = 0; |
| + Object* raw_info = type_feedback_info(); |
| + if (raw_info->IsTypeFeedbackInfo()) { |
| + TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info); |
| + *ic_with_type_info_count = info->ic_with_type_info_count(); |
| + *ic_total_count = info->ic_total_count(); |
| + } |
| + *percentage = *ic_total_count > 0 |
| + ? 100 * *ic_with_type_info_count / *ic_total_count |
| + : 100; |
| +} |
| + |
| + |
| bool Code::IsWeakEmbeddedObject(Kind kind, Object* object) { |
| if (kind != Code::OPTIMIZED_FUNCTION) return false; |
| @@ -11753,7 +11807,8 @@ bool DependentCode::Contains(DependencyGroup group, Code* code) { |
| bool DependentCode::MarkCodeForDeoptimization( |
| Isolate* isolate, |
| - DependentCode::DependencyGroup group) { |
| + DependentCode::DependencyGroup group, |
| + bool reoptimize) { |
| DisallowHeapAllocation no_allocation_scope; |
| DependentCode::GroupStartIndexes starts(this); |
| int start = starts.at(group); |
| @@ -11769,6 +11824,7 @@ bool DependentCode::MarkCodeForDeoptimization( |
| if (!code->marked_for_deoptimization()) { |
| code->set_marked_for_deoptimization(true); |
| marked = true; |
| + if (reoptimize) code->set_marked_for_reoptimization(true); |
| } |
| } else { |
| CompilationInfo* info = compilation_info_at(i); |
| @@ -12739,7 +12795,7 @@ void JSObject::TransitionElementsKind(Handle<JSObject> object, |
| } |
| -const double AllocationSite::kPretenureRatio = 0.60; |
| +const double AllocationSite::kPretenureRatio = 0.50; |
| void AllocationSite::ResetPretenureDecision() { |