OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 8170 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
8181 RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyRecompile) { | 8181 RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyRecompile) { |
8182 HandleScope scope(isolate); | 8182 HandleScope scope(isolate); |
8183 ASSERT(args.length() == 1); | 8183 ASSERT(args.length() == 1); |
8184 Handle<JSFunction> function = args.at<JSFunction>(0); | 8184 Handle<JSFunction> function = args.at<JSFunction>(0); |
8185 | 8185 |
8186 if (!AllowOptimization(isolate, function)) { | 8186 if (!AllowOptimization(isolate, function)) { |
8187 function->ReplaceCode(function->shared()->code()); | 8187 function->ReplaceCode(function->shared()->code()); |
8188 return function->code(); | 8188 return function->code(); |
8189 } | 8189 } |
8190 function->shared()->code()->set_profiler_ticks(0); | 8190 function->shared()->code()->set_profiler_ticks(0); |
8191 if (JSFunction::CompileOptimized(function, | 8191 if (JSFunction::CompileOptimized(function, CLEAR_EXCEPTION)) { |
8192 BailoutId::None(), | |
8193 CLEAR_EXCEPTION)) { | |
8194 return function->code(); | 8192 return function->code(); |
8195 } | 8193 } |
8196 if (FLAG_trace_opt) { | 8194 if (FLAG_trace_opt) { |
8197 PrintF("[failed to optimize "); | 8195 PrintF("[failed to optimize "); |
8198 function->PrintName(); | 8196 function->PrintName(); |
8199 PrintF(": optimized compilation failed]\n"); | 8197 PrintF(": optimized compilation failed]\n"); |
8200 } | 8198 } |
8201 function->ReplaceCode(function->shared()->code()); | 8199 function->ReplaceCode(function->shared()->code()); |
8202 return function->code(); | 8200 return function->code(); |
8203 } | 8201 } |
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
8276 | 8274 |
8277 // Make sure to materialize objects before causing any allocation. | 8275 // Make sure to materialize objects before causing any allocation. |
8278 JavaScriptFrameIterator it(isolate); | 8276 JavaScriptFrameIterator it(isolate); |
8279 deoptimizer->MaterializeHeapObjects(&it); | 8277 deoptimizer->MaterializeHeapObjects(&it); |
8280 delete deoptimizer; | 8278 delete deoptimizer; |
8281 | 8279 |
8282 JavaScriptFrame* frame = it.frame(); | 8280 JavaScriptFrame* frame = it.frame(); |
8283 RUNTIME_ASSERT(frame->function()->IsJSFunction()); | 8281 RUNTIME_ASSERT(frame->function()->IsJSFunction()); |
8284 Handle<JSFunction> function(frame->function(), isolate); | 8282 Handle<JSFunction> function(frame->function(), isolate); |
8285 Handle<Code> optimized_code(function->code()); | 8283 Handle<Code> optimized_code(function->code()); |
8286 RUNTIME_ASSERT((type != Deoptimizer::EAGER && | |
8287 type != Deoptimizer::SOFT) || function->IsOptimized()); | |
8288 | 8284 |
8289 // Avoid doing too much work when running with --always-opt and keep | 8285 // Avoid doing too much work when running with --always-opt and keep |
8290 // the optimized code around. | 8286 // the optimized code around. |
8291 if (FLAG_always_opt || type == Deoptimizer::LAZY) { | 8287 if (FLAG_always_opt || type == Deoptimizer::LAZY) { |
8292 return isolate->heap()->undefined_value(); | 8288 return isolate->heap()->undefined_value(); |
8293 } | 8289 } |
8294 | 8290 |
8295 // Find other optimized activations of the function or functions that | 8291 // Find other optimized activations of the function or functions that |
8296 // share the same optimized code. | 8292 // share the same optimized code. |
8297 bool has_other_activations = false; | 8293 bool has_other_activations = false; |
Michael Starzinger
2013/07/31 14:55:50
We should only look for activations of optimized c
Michael Starzinger
2013/07/31 16:29:33
As discussed offline: Yes, you are right, if func
| |
8298 while (!it.done()) { | 8294 while (!it.done()) { |
8299 JavaScriptFrame* frame = it.frame(); | 8295 JavaScriptFrame* frame = it.frame(); |
8300 JSFunction* other_function = frame->function(); | 8296 JSFunction* other_function = frame->function(); |
8301 if (frame->is_optimized() && other_function->code() == function->code()) { | 8297 if (frame->is_optimized() && other_function->code() == function->code()) { |
8302 has_other_activations = true; | 8298 has_other_activations = true; |
8303 break; | 8299 break; |
8304 } | 8300 } |
8305 it.Advance(); | 8301 it.Advance(); |
8306 } | 8302 } |
8307 | 8303 |
(...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
8481 // Check for optimized activations of this function. | 8477 // Check for optimized activations of this function. |
8482 JavaScriptFrameIterator it(isolate); | 8478 JavaScriptFrameIterator it(isolate); |
8483 while (succeeded && !it.done()) { | 8479 while (succeeded && !it.done()) { |
8484 JavaScriptFrame* frame = it.frame(); | 8480 JavaScriptFrame* frame = it.frame(); |
8485 succeeded = !frame->is_optimized() || frame->function() != *function; | 8481 succeeded = !frame->is_optimized() || frame->function() != *function; |
8486 it.Advance(); | 8482 it.Advance(); |
8487 } | 8483 } |
8488 } | 8484 } |
8489 | 8485 |
8490 BailoutId ast_id = BailoutId::None(); | 8486 BailoutId ast_id = BailoutId::None(); |
8487 Handle<Code> osr_code = Handle<Code>::null(); | |
8491 if (succeeded) { | 8488 if (succeeded) { |
8492 // The top JS function is this one, the PC is somewhere in the | 8489 // The top JS function is this one, the PC is somewhere in the |
8493 // unoptimized code. | 8490 // unoptimized code. |
8494 JavaScriptFrameIterator it(isolate); | 8491 JavaScriptFrameIterator it(isolate); |
8495 JavaScriptFrame* frame = it.frame(); | 8492 JavaScriptFrame* frame = it.frame(); |
8496 ASSERT(frame->function() == *function); | 8493 ASSERT(frame->function() == *function); |
8497 ASSERT(frame->LookupCode() == *unoptimized); | 8494 ASSERT(frame->LookupCode() == *unoptimized); |
8498 ASSERT(unoptimized->contains(frame->pc())); | 8495 ASSERT(unoptimized->contains(frame->pc())); |
8499 | 8496 |
8500 // Use linear search of the unoptimized code's back edge table to find | 8497 // Use linear search of the unoptimized code's back edge table to find |
(...skipping 15 matching lines...) Expand all Loading... | |
8516 table_cursor += FullCodeGenerator::kBackEdgeEntrySize; | 8513 table_cursor += FullCodeGenerator::kBackEdgeEntrySize; |
8517 } | 8514 } |
8518 ASSERT(!ast_id.IsNone()); | 8515 ASSERT(!ast_id.IsNone()); |
8519 if (FLAG_trace_osr) { | 8516 if (FLAG_trace_osr) { |
8520 PrintF("[replacing on-stack at AST id %d, loop depth %d in ", | 8517 PrintF("[replacing on-stack at AST id %d, loop depth %d in ", |
8521 ast_id.ToInt(), loop_depth); | 8518 ast_id.ToInt(), loop_depth); |
8522 function->PrintName(); | 8519 function->PrintName(); |
8523 PrintF("]\n"); | 8520 PrintF("]\n"); |
8524 } | 8521 } |
8525 | 8522 |
8526 // Try to compile the optimized code. A true return value from | 8523 // Try to compile the function for OSR. A non-null return value indicates |
8527 // CompileOptimized means that compilation succeeded, not necessarily | 8524 // the compilation succeeded for the given AST id. |
8528 // that optimization succeeded. | 8525 osr_code = JSFunction::CompileOsr(function, ast_id, CLEAR_EXCEPTION); |
8529 if (JSFunction::CompileOptimized(function, ast_id, CLEAR_EXCEPTION) && | 8526 |
8530 function->IsOptimized()) { | 8527 if (!osr_code.is_null() && |
8528 osr_code->kind() == Code::OPTIMIZED_FUNCTION) { | |
8531 DeoptimizationInputData* data = DeoptimizationInputData::cast( | 8529 DeoptimizationInputData* data = DeoptimizationInputData::cast( |
8532 function->code()->deoptimization_data()); | 8530 osr_code->deoptimization_data()); |
8533 if (data->OsrPcOffset()->value() >= 0) { | 8531 if (data->OsrPcOffset()->value() >= 0 |
8532 && BailoutId(data->OsrAstId()->value()) == ast_id) { | |
8534 if (FLAG_trace_osr) { | 8533 if (FLAG_trace_osr) { |
8535 PrintF("[on-stack replacement offset %d in optimized code]\n", | 8534 PrintF("[on-stack replacement offset %d in optimized code]\n", |
8536 data->OsrPcOffset()->value()); | 8535 data->OsrPcOffset()->value()); |
8537 } | 8536 } |
8538 ASSERT(BailoutId(data->OsrAstId()->value()) == ast_id); | |
8539 } else { | 8537 } else { |
8540 // We may never generate the desired OSR entry if we emit an | 8538 // The code we got back did not match our OSR compile request. |
8541 // early deoptimize. | 8539 osr_code = Handle<Code>::null(); |
8542 succeeded = false; | |
8543 } | 8540 } |
8544 } else { | 8541 } else { |
8545 succeeded = false; | 8542 osr_code = Handle<Code>::null(); |
8546 } | 8543 } |
8547 } | 8544 } |
8548 | 8545 |
8549 // Revert to the original interrupt calls in the original unoptimized code. | 8546 // Revert to the original interrupt calls in the original unoptimized code. |
8550 if (FLAG_trace_osr) { | 8547 if (FLAG_trace_osr) { |
8551 PrintF("[restoring original interrupt calls in "); | 8548 PrintF("[restoring original interrupt calls in "); |
8552 function->PrintName(); | 8549 function->PrintName(); |
8553 PrintF("]\n"); | 8550 PrintF("]\n"); |
8554 } | 8551 } |
8555 InterruptStub interrupt_stub; | 8552 InterruptStub interrupt_stub; |
8556 Handle<Code> interrupt_code = interrupt_stub.GetCode(isolate); | 8553 Handle<Code> interrupt_code = interrupt_stub.GetCode(isolate); |
8557 Handle<Code> replacement_code = isolate->builtins()->OnStackReplacement(); | 8554 Handle<Code> replacement_code = isolate->builtins()->OnStackReplacement(); |
8558 Deoptimizer::RevertInterruptCode(*unoptimized, | 8555 Deoptimizer::RevertInterruptCode(*unoptimized, |
8559 *interrupt_code, | 8556 *interrupt_code, |
8560 *replacement_code); | 8557 *replacement_code); |
8561 | 8558 |
8562 // If the optimization attempt succeeded, return the AST id tagged as a | 8559 // Return the code object to the calling builtin. If non-null, the builtin |
8563 // smi. This tells the builtin that we need to translate the unoptimized | 8560 // will jump directly to its OSR entrypoint. |
8564 // frame to an optimized one. | 8561 return osr_code.is_null() ? NULL: *osr_code; |
Michael Starzinger
2013/07/31 14:55:50
nit: Missing white-space in front of the colon.
| |
8565 if (succeeded) { | |
8566 ASSERT(function->code()->kind() == Code::OPTIMIZED_FUNCTION); | |
8567 return Smi::FromInt(ast_id.ToInt()); | |
8568 } else { | |
8569 if (function->IsMarkedForLazyRecompilation()) { | |
8570 function->ReplaceCode(function->shared()->code()); | |
8571 } | |
8572 return Smi::FromInt(-1); | |
8573 } | |
8574 } | 8562 } |
8575 | 8563 |
8576 | 8564 |
8577 RUNTIME_FUNCTION(MaybeObject*, Runtime_CheckIsBootstrapping) { | 8565 RUNTIME_FUNCTION(MaybeObject*, Runtime_CheckIsBootstrapping) { |
8578 SealHandleScope shs(isolate); | 8566 SealHandleScope shs(isolate); |
8579 RUNTIME_ASSERT(isolate->bootstrapper()->IsActive()); | 8567 RUNTIME_ASSERT(isolate->bootstrapper()->IsActive()); |
8580 return isolate->heap()->undefined_value(); | 8568 return isolate->heap()->undefined_value(); |
8581 } | 8569 } |
8582 | 8570 |
8583 | 8571 |
(...skipping 5323 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
13907 // Handle last resort GC and make sure to allow future allocations | 13895 // Handle last resort GC and make sure to allow future allocations |
13908 // to grow the heap without causing GCs (if possible). | 13896 // to grow the heap without causing GCs (if possible). |
13909 isolate->counters()->gc_last_resort_from_js()->Increment(); | 13897 isolate->counters()->gc_last_resort_from_js()->Increment(); |
13910 isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, | 13898 isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, |
13911 "Runtime::PerformGC"); | 13899 "Runtime::PerformGC"); |
13912 } | 13900 } |
13913 } | 13901 } |
13914 | 13902 |
13915 | 13903 |
13916 } } // namespace v8::internal | 13904 } } // namespace v8::internal |
OLD | NEW |