Chromium Code Reviews

Unified Diff: src/runtime.cc

Issue 23842004: Pass PC offset into runtime when compiling for OSR. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: addressed comments. Created 7 years, 3 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 8563 matching lines...)
 RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationCount) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
   return Smi::FromInt(function->shared()->opt_count());
 }

+static bool IsSuitableForOnStackReplacement(Isolate* isolate,
+                                            Handle<JSFunction> function,
+                                            Handle<Code> unoptimized) {
+  // Keep track of whether we've succeeded in optimizing.
+  if (!unoptimized->optimizable()) return false;
+  // If we are trying to do OSR when there are already optimized
+  // activations of the function, it means (a) the function is directly or
+  // indirectly recursive and (b) an optimized invocation has been
+  // deoptimized so that we are currently in an unoptimized activation.
+  // Check for optimized activations of this function.
+  for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
+    JavaScriptFrame* frame = it.frame();
+    if (frame->is_optimized() && frame->function() == *function) return false;
+  }
+
+  return true;
+}
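The helper added above gates OSR on two conditions: the unoptimized code must still be optimizable, and no optimized activation of the same function may be live on the stack, which per the comment would mean a recursive function whose optimized invocation has already deoptimized. A minimal stand-alone sketch of that predicate, with Function and Frame as simplified hypothetical stand-ins rather than the real V8 types:

#include <cstdio>
#include <vector>

// Hypothetical stand-ins for v8::internal::JSFunction and JavaScriptFrame.
struct Function { bool optimizable; };
struct Frame { const Function* function; bool optimized; };

// Mirrors IsSuitableForOnStackReplacement: refuse OSR if the code is no
// longer optimizable or an optimized activation of the function is live.
static bool IsSuitableForOsr(const std::vector<Frame>& stack,
                             const Function* function) {
  if (!function->optimizable) return false;
  for (const Frame& frame : stack) {
    if (frame.optimized && frame.function == function) return false;
  }
  return true;
}

int main() {
  Function fib = { true };
  // A recursive call chain whose outer activation is already optimized:
  std::vector<Frame> stack = { { &fib, true }, { &fib, false } };
  std::printf("suitable: %d\n", IsSuitableForOsr(stack, &fib));  // prints 0
  return 0;
}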
 RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
   HandleScope scope(isolate);
-  ASSERT(args.length() == 1);
+  ASSERT(args.length() == 2);
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
+  CONVERT_NUMBER_CHECKED(uint32_t, pc_offset, Uint32, args[1]);
+  Handle<Code> unoptimized(function->shared()->code(), isolate);
+
+#ifdef DEBUG
+  JavaScriptFrameIterator it(isolate);
+  JavaScriptFrame* frame = it.frame();
+  ASSERT_EQ(frame->function(), *function);
+  ASSERT_EQ(frame->LookupCode(), *unoptimized);
+  ASSERT(unoptimized->contains(frame->pc()));
+
+  ASSERT(pc_offset ==
+         static_cast<uint32_t>(frame->pc() - unoptimized->instruction_start()));
+#endif  // DEBUG

   // We're not prepared to handle a function with arguments object.
   ASSERT(!function->shared()->uses_arguments());

-  // If the optimization attempt succeeds, return the code object which
-  // the unoptimized code can jump into.
-  Handle<Code> code =
-      (FLAG_concurrent_recompilation && FLAG_concurrent_osr)
-          ? Compiler::CompileForConcurrentOSR(function)
-          : Compiler::CompileForOnStackReplacement(function);
-  if (!code.is_null()) {
-#if DEBUG
-    ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
+  Handle<Code> result = Handle<Code>::null();
+  BailoutId ast_id = BailoutId::None();
+
+  if (FLAG_concurrent_recompilation && FLAG_concurrent_osr) {
+    if (isolate->optimizing_compiler_thread()->
+            IsQueuedForOSR(function, pc_offset)) {
+      // Still waiting for the optimizing compiler thread to finish. Carry on.
+      if (FLAG_trace_osr) {
+        PrintF("[COSR - polling recompile tasks for ");
+        function->PrintName();
+        PrintF("]\n");
+      }
+      return NULL;
+    }
+
+    OptimizingCompiler* compiler = isolate->optimizing_compiler_thread()->
+        FindReadyOSRCandidate(function, pc_offset);
+
+    if (compiler == NULL) {
+      if (IsSuitableForOnStackReplacement(isolate, function, unoptimized) &&
+          Compiler::RecompileConcurrent(function, pc_offset)) {
+        if (function->IsMarkedForLazyRecompilation() ||
+            function->IsMarkedForConcurrentRecompilation()) {
+          // Prevent regular recompilation if we queue this for OSR.
+          // TODO(yangguo): remove this as soon as OSR becomes one-shot.
+          function->ReplaceCode(function->shared()->code());
+        }
+        return NULL;
+      }
+      // Fall through to the end in case of failure.
+    } else {
+      // TODO(titzer): don't install the OSR code into the function.
+      ast_id = compiler->info()->osr_ast_id();
+      result = Compiler::InstallOptimizedCode(compiler);
+    }
+  } else if (IsSuitableForOnStackReplacement(isolate, function, unoptimized)) {
+    ast_id = unoptimized->TranslatePcOffsetToAstId(pc_offset);
+    ASSERT(!ast_id.IsNone());
+    if (FLAG_trace_osr) {
+      PrintF("[OSR - replacing at AST id %d in ", ast_id.ToInt());
+      function->PrintName();
+      PrintF("]\n");
+    }
+    // Attempt OSR compilation.
+    result = JSFunction::CompileOsr(function, ast_id, CLEAR_EXCEPTION);
+  }
+
+  // Revert the patched interrupt now, regardless of whether OSR succeeds.
+  Deoptimizer::RevertInterruptCode(isolate, *unoptimized);
+
+  // Check whether we ended up with usable optimized code.
+  if (!result.is_null() && result->kind() == Code::OPTIMIZED_FUNCTION) {
     DeoptimizationInputData* data =
-        DeoptimizationInputData::cast(code->deoptimization_data());
-    ASSERT(!BailoutId(data->OsrAstId()->value()).IsNone());
-#endif
-    // TODO(titzer): this is a massive hack to make the deopt counts
-    // match. Fix heuristics for reenabling optimizations!
-    function->shared()->increment_deopt_count();
-    return *code;
-  } else {
-    if (function->IsMarkedForLazyRecompilation() ||
-        function->IsMarkedForConcurrentRecompilation()) {
-      function->ReplaceCode(function->shared()->code());
+        DeoptimizationInputData::cast(result->deoptimization_data());
+
+    if (data->OsrPcOffset()->value() >= 0) {
+      ASSERT(BailoutId(data->OsrAstId()->value()) == ast_id);
+      if (FLAG_trace_osr) {
+        PrintF("[OSR - entry at AST id %d, offset %d in optimized code]\n",
+               ast_id.ToInt(), data->OsrPcOffset()->value());
+      }
+      // TODO(titzer): this is a massive hack to make the deopt counts
+      // match. Fix heuristics for reenabling optimizations!
+      function->shared()->increment_deopt_count();
+      return *result;
     }
-    return NULL;
   }
+
+  if (FLAG_trace_osr) {
+    PrintF("[OSR - optimization failed for ");
+    function->PrintName();
+    PrintF("]\n");
+  }
+
+  if (function->IsMarkedForLazyRecompilation() ||
+      function->IsMarkedForConcurrentRecompilation()) {
+    function->ReplaceCode(function->shared()->code());
+  }
+  return NULL;
 }
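With the PC offset now passed in as the second argument, the synchronous path recovers the loop to enter via unoptimized->TranslatePcOffsetToAstId(pc_offset); the DEBUG block at the top checks that this offset is exactly frame->pc() - unoptimized->instruction_start(). A sketch of such a translation, assuming a simple table of back-edge entries (illustrative only; not V8's actual encoding inside the Code object):

#include <cstdint>
#include <cstdio>

// Illustrative back-edge table entry: a PC offset in the unoptimized code
// paired with the AST id of the loop it belongs to. (Hypothetical layout.)
struct BackEdgeEntry {
  uint32_t pc_offset;
  int ast_id;
};

static const int kNoAstId = -1;

// Conceptual analogue of TranslatePcOffsetToAstId: find the entry whose
// recorded back-edge offset matches the PC offset of the interrupt.
static int TranslatePcOffsetToAstId(const BackEdgeEntry* table, int length,
                                    uint32_t pc_offset) {
  for (int i = 0; i < length; i++) {
    if (table[i].pc_offset == pc_offset) return table[i].ast_id;
  }
  return kNoAstId;
}

int main() {
  const BackEdgeEntry table[] = { { 0x40, 3 }, { 0x9c, 7 } };
  // The caller computes pc_offset = frame_pc - instruction_start, as the
  // DEBUG assert in the patch verifies.
  uint32_t pc_offset = 0x9c;
  std::printf("ast id: %d\n", TranslatePcOffsetToAstId(table, 2, pc_offset));
  return 0;
}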
 RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAllocationTimeout) {
   SealHandleScope shs(isolate);
   ASSERT(args.length() == 2);
 #ifdef DEBUG
   CONVERT_SMI_ARG_CHECKED(interval, 0);
   CONVERT_SMI_ARG_CHECKED(timeout, 1);
   isolate->heap()->set_allocation_timeout(timeout);
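Taken as a whole, the rewritten Runtime_CompileForOnStackReplacement acts as a small state machine when the concurrent-recompilation and concurrent-OSR flags are on: keep running unoptimized code while a compile job for this function and PC offset is still queued, install a finished candidate, otherwise try to queue a new concurrent job, and only fall through to the failure path when none of that works. A self-contained model of that dispatch, with booleans standing in for the real queries on the optimizing compiler thread (hypothetical, for illustration):

#include <cstdio>

enum class OsrOutcome { kPolling, kInstalled, kQueued, kFailed };

// Models one entry into the runtime function under concurrent OSR. The
// three booleans stand in for IsQueuedForOSR, FindReadyOSRCandidate and
// the IsSuitableForOnStackReplacement/RecompileConcurrent pair.
static OsrOutcome HandleOsrRequest(bool queued_for_osr,
                                   bool candidate_ready,
                                   bool can_queue_new_job) {
  if (queued_for_osr) return OsrOutcome::kPolling;     // still compiling; carry on
  if (candidate_ready) return OsrOutcome::kInstalled;  // enter optimized code
  if (can_queue_new_job) return OsrOutcome::kQueued;   // start a background compile
  return OsrOutcome::kFailed;                          // revert interrupt, stay unoptimized
}

int main() {
  // First interrupt: nothing in flight yet, so a job gets queued.
  std::printf("%d\n", static_cast<int>(HandleOsrRequest(false, false, true)));
  // Subsequent interrupts poll while the job is still running.
  std::printf("%d\n", static_cast<int>(HandleOsrRequest(true, false, true)));
  // Once the job finishes, the optimized code is installed.
  std::printf("%d\n", static_cast<int>(HandleOsrRequest(false, true, true)));
  return 0;
}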
(...skipping 6050 matching lines...)
     // Handle last resort GC and make sure to allow future allocations
     // to grow the heap without causing GCs (if possible).
     isolate->counters()->gc_last_resort_from_js()->Increment();
     isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags,
                                        "Runtime::PerformGC");
   }
 }

 } }  // namespace v8::internal