Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 945 matching lines...) | |
| 956 return InstallFullCode(info); | 956 return InstallFullCode(info); |
| 957 } | 957 } |
| 958 } | 958 } |
| 959 } | 959 } |
| 960 | 960 |
| 961 ASSERT(info->code().is_null()); | 961 ASSERT(info->code().is_null()); |
| 962 return false; | 962 return false; |
| 963 } | 963 } |
| 964 | 964 |
| 965 | 965 |
| 966 void Compiler::RecompileConcurrent(Handle<JSFunction> closure) { | 966 void Compiler::RecompileConcurrent(Handle<JSFunction> closure, |
| 967 ASSERT(closure->IsMarkedForConcurrentRecompilation()); | 967 uint32_t osr_pc_offset) { |
| 968 ASSERT(FLAG_concurrent_recompilation); | |

> titzer, 2013/09/02 17:03:35: Don't assert flags; they should only be used to tu…

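For readers outside V8, the point in the thread above is that a runtime flag configures behavior and may legitimately be off, so it should gate a code path rather than be asserted as an invariant; the new side of the patch simply drops the assert. A minimal, self-contained sketch of the preferred pattern — the flag variable and the fallback below are hypothetical stand-ins, not V8 code:

```cpp
#include <cstdio>

// Hypothetical stand-in for a --concurrent-recompilation style flag.
static bool FLAG_concurrent_recompilation = true;

void RecompileConcurrent(/* Handle<JSFunction> closure */) {
  // Anti-pattern the reviewer objects to: ASSERT(FLAG_concurrent_recompilation).
  // A flag only selects behavior, so if it is off, take the other path instead
  // of treating the configuration as a broken invariant.
  if (!FLAG_concurrent_recompilation) {
    std::printf("flag off: fall back to synchronous recompilation\n");
    return;
  }
  std::printf("queueing function for concurrent recompilation\n");
}

int main() {
  FLAG_concurrent_recompilation = false;
  RecompileConcurrent();  // falls back instead of aborting on an assert
  return 0;
}
```
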
| 969 ASSERT(osr_pc_offset != 0 || closure->IsMarkedForConcurrentRecompilation()); | |
| 968 | 970 |
| 969 Isolate* isolate = closure->GetIsolate(); | 971 Isolate* isolate = closure->GetIsolate(); |
| 970 // Here we prepare compile data for the concurrent recompilation thread, but | 972 // Here we prepare compile data for the concurrent recompilation thread, but |
| 971 // this still happens synchronously and interrupts execution. | 973 // this still happens synchronously and interrupts execution. |
| 972 Logger::TimerEventScope timer( | 974 Logger::TimerEventScope timer( |
| 973 isolate, Logger::TimerEventScope::v8_recompile_synchronous); | 975 isolate, Logger::TimerEventScope::v8_recompile_synchronous); |
| 974 | 976 |
| 975 if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) { | 977 if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) { |
| 976 if (FLAG_trace_concurrent_recompilation) { | 978 if (FLAG_trace_concurrent_recompilation) { |
| 977 PrintF(" ** Compilation queue full, will retry optimizing "); | 979 PrintF(" ** Compilation queue full, will retry optimizing "); |
| 978 closure->PrintName(); | 980 closure->PrintName(); |
| 979 PrintF(" on next run.\n"); | 981 PrintF(" on next run.\n"); |
| 980 } | 982 } |
| 981 return; | 983 return; |
| 982 } | 984 } |
| 983 | 985 |
| 984 SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(closure)); | 986 SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(closure)); |
| 987 Handle<SharedFunctionInfo> shared = info->shared_info(); | |
| 988 | |
| 989 if (osr_pc_offset != 0) { | |
| 990 ASSERT(FLAG_speculative_concurrent_osr); | |
| 991 // Translate pc offset into AST id. | |
| 992 DisallowHeapAllocation no_gc; | |
| 993 FullCodeGenerator::BackEdgeTableIterator back_edges(shared->code(), &no_gc); | |

> titzer, 2013/09/02 17:03:35: It seems weird to make a BackEdgeTableIterator and…
>
> Yang, 2013/09/03 08:50:10: Done.

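The thread above concerns the BackEdgeTableIterator constructed just above: the new code walks full-codegen's back edge table to translate the raw osr_pc_offset into the AST id (and loop depth) of the loop to optimize. A rough, self-contained sketch of that lookup, using simplified stand-in types rather than the real V8 structures:

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Simplified stand-in for an entry in full-codegen's back edge table: each
// back edge records the AST id and loop depth of its loop together with the
// pc offset of the interrupt check emitted for it.
struct BackEdgeEntry {
  int ast_id;
  int loop_depth;
  uint32_t pc_offset;
};

// Linear search in the spirit of FindPcOffset(): map the pc offset at which
// the interrupt fired back to the loop it belongs to. Returns nullptr when
// there is no match, which the caller treats as unreachable.
const BackEdgeEntry* FindByPcOffset(const std::vector<BackEdgeEntry>& table,
                                    uint32_t pc_offset) {
  for (const BackEdgeEntry& entry : table) {
    if (entry.pc_offset == pc_offset) return &entry;
  }
  return nullptr;
}

int main() {
  std::vector<BackEdgeEntry> table = {{3, 1, 0x40}, {7, 2, 0x9c}};
  const BackEdgeEntry* entry = FindByPcOffset(table, 0x9c);
  assert(entry != nullptr && entry->ast_id == 7 && entry->loop_depth == 2);
  return 0;
}
```
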
| 994 if (!back_edges.FindPcOffset(osr_pc_offset)) UNREACHABLE(); | |
| 995 info->SetOptimizing(back_edges.ast_id()); | |
| 996 | |
| 997 if (FLAG_trace_osr) { | |
| 998 PrintF("[COSR - attempt to queue "); | |
| 999 closure->PrintName(); | |
| 1000 PrintF(" for concurrent compilation at AST id %d, loop depth %d]\n", | |
| 1001 back_edges.ast_id().ToInt(), back_edges.loop_depth()); | |
| 1002 } | |
| 1003 } else { | |
| 1004 info->SetOptimizing(BailoutId::None()); | |
| 1005 } | |
| 1006 | |
| 985 VMState<COMPILER> state(isolate); | 1007 VMState<COMPILER> state(isolate); |
| 986 PostponeInterruptsScope postpone(isolate); | 1008 PostponeInterruptsScope postpone(isolate); |
| 987 | 1009 |
| 988 Handle<SharedFunctionInfo> shared = info->shared_info(); | |
| 989 int compiled_size = shared->end_position() - shared->start_position(); | 1010 int compiled_size = shared->end_position() - shared->start_position(); |
| 990 isolate->counters()->total_compile_size()->Increment(compiled_size); | 1011 isolate->counters()->total_compile_size()->Increment(compiled_size); |
| 991 info->SetOptimizing(BailoutId::None()); | |
| 992 | 1012 |
| 993 { | 1013 { |
| 994 CompilationHandleScope handle_scope(*info); | 1014 CompilationHandleScope handle_scope(*info); |
| 995 | 1015 |
| 996 if (InstallCodeFromOptimizedCodeMap(*info)) { | 1016 if (InstallCodeFromOptimizedCodeMap(*info)) { |
| 997 return; | 1017 return; |
| 998 } | 1018 } |
| 999 | 1019 |
| 1000 if (Parser::Parse(*info)) { | 1020 if (Parser::Parse(*info)) { |
| 1001 LanguageMode language_mode = info->function()->language_mode(); | 1021 LanguageMode language_mode = info->function()->language_mode(); |
| 1002 info->SetLanguageMode(language_mode); | 1022 info->SetLanguageMode(language_mode); |
| 1003 shared->set_language_mode(language_mode); | 1023 shared->set_language_mode(language_mode); |
| 1004 info->SaveHandles(); | 1024 info->SaveHandles(); |
| 1005 | 1025 |
| 1006 if (Rewriter::Rewrite(*info) && Scope::Analyze(*info)) { | 1026 if (Rewriter::Rewrite(*info) && Scope::Analyze(*info)) { |
| 1007 OptimizingCompiler* compiler = | 1027 OptimizingCompiler* compiler = |
| 1008 new(info->zone()) OptimizingCompiler(*info); | 1028 new(info->zone()) OptimizingCompiler(*info, osr_pc_offset); |
| 1009 OptimizingCompiler::Status status = compiler->CreateGraph(); | 1029 OptimizingCompiler::Status status = compiler->CreateGraph(); |
| 1010 if (status == OptimizingCompiler::SUCCEEDED) { | 1030 if (status == OptimizingCompiler::SUCCEEDED) { |
| 1011 info.Detach(); | 1031 info.Detach(); |
| 1012 shared->code()->set_profiler_ticks(0); | 1032 shared->code()->set_profiler_ticks(0); |
| 1013 isolate->optimizing_compiler_thread()->QueueForOptimization(compiler); | 1033 isolate->optimizing_compiler_thread()->QueueForOptimization(compiler); |
| 1014 } else if (status == OptimizingCompiler::BAILED_OUT) { | 1034 } else if (status == OptimizingCompiler::BAILED_OUT) { |
| 1015 isolate->clear_pending_exception(); | 1035 isolate->clear_pending_exception(); |
| 1016 InstallFullCode(*info); | 1036 InstallFullCode(*info); |
| 1017 } | 1037 } |
| 1018 } | 1038 } |
| 1019 } | 1039 } |
| 1020 } | 1040 } |
| 1021 | 1041 |
| 1022 if (shared->code()->back_edges_patched_for_osr()) { | 1042 // If we don't compile for on-stack replacement in the background thread, |
| 1023 // At this point we either put the function on recompilation queue or | 1043 // reset the OSR attempt to avoid recompilation being preempted by OSR. |
| 1024 // aborted optimization. In either case we want to continue executing | 1044 if (!FLAG_speculative_concurrent_osr && |
| 1025 // the unoptimized code without running into OSR. If the unoptimized | 1045 shared->code()->back_edges_patched_for_osr()) { |
| 1026 // code has been patched for OSR, unpatch it. | |
| 1027 Deoptimizer::RevertInterruptCode(isolate, shared->code()); | 1046 Deoptimizer::RevertInterruptCode(isolate, shared->code()); |
| 1028 } | 1047 } |
| 1029 | 1048 |
| 1030 if (isolate->has_pending_exception()) isolate->clear_pending_exception(); | 1049 if (isolate->has_pending_exception()) isolate->clear_pending_exception(); |
| 1031 } | 1050 } |
| 1032 | 1051 |
| 1033 | 1052 |
| 1034 void Compiler::InstallOptimizedCode(OptimizingCompiler* optimizing_compiler) { | 1053 void Compiler::InstallOptimizedCode(OptimizingCompiler* optimizing_compiler) { |
| 1035 SmartPointer<CompilationInfo> info(optimizing_compiler->info()); | 1054 SmartPointer<CompilationInfo> info(optimizing_compiler->info()); |
| 1036 // The function may have already been optimized by OSR. Simply continue. | 1055 // The function may have already been optimized by OSR. Simply continue. |
| (...skipping 50 matching lines...) | |
| 1087 info->AbortOptimization(); | 1106 info->AbortOptimization(); |
| 1088 InstallFullCode(*info); | 1107 InstallFullCode(*info); |
| 1089 } | 1108 } |
| 1090 // Optimized code is finally replacing unoptimized code. Reset the latter's | 1109 // Optimized code is finally replacing unoptimized code. Reset the latter's |
| 1091 // profiler ticks to prevent too soon re-opt after a deopt. | 1110 // profiler ticks to prevent too soon re-opt after a deopt. |
| 1092 info->shared_info()->code()->set_profiler_ticks(0); | 1111 info->shared_info()->code()->set_profiler_ticks(0); |
| 1093 ASSERT(!info->closure()->IsMarkedForInstallingRecompiledCode()); | 1112 ASSERT(!info->closure()->IsMarkedForInstallingRecompiledCode()); |
| 1094 } | 1113 } |
| 1095 | 1114 |
| 1096 | 1115 |
| 1116 static uint32_t CurrentPcOffset(Isolate* isolate, | |

> titzer, 2013/09/02 17:03:35: Why are you doing a search for the PC of the top f…
>
> Yang, 2013/09/03 08:50:10: Already thought of that. Let me work on that on a…

| 1117 Handle<JSFunction> function, | |
| 1118 Handle<Code> unoptimized) { | |
| 1119 JavaScriptFrameIterator it(isolate); | |
| 1120 JavaScriptFrame* frame = it.frame(); | |
| 1121 ASSERT(frame->function() == *function); | |
| 1122 ASSERT(frame->LookupCode() == *unoptimized); | |
| 1123 ASSERT(unoptimized->contains(frame->pc())); | |
| 1124 | |
| 1125 // Use linear search of the unoptimized code's back edge table to find | |
| 1126 // the AST id matching the PC. | |
| 1127 return static_cast<uint32_t>(frame->pc() - unoptimized->instruction_start()); | |
| 1128 } | |
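The thread attached to CurrentPcOffset above questions searching the top frame for the pc rather than passing it in, which Yang defers to a follow-up. What the helper actually computes is just the return address of the topmost unoptimized frame expressed as an offset into the code object's instructions. A minimal sketch of that arithmetic with hypothetical stand-in types (not the real Frame/Code interfaces):

```cpp
#include <cstdint>
#include <cstdio>

// Hypothetical stand-ins for the Code object and JavaScript frame; only the
// fields needed for the offset computation are modeled.
struct Code  { const uint8_t* instruction_start; };
struct Frame { const uint8_t* pc; };

// Equivalent of CurrentPcOffset()'s final expression:
// frame->pc() - unoptimized->instruction_start().
uint32_t PcOffset(const Frame& frame, const Code& unoptimized) {
  return static_cast<uint32_t>(frame.pc - unoptimized.instruction_start);
}

int main() {
  uint8_t instructions[256] = {};
  Code unoptimized = {instructions};
  Frame frame = {instructions + 0x9c};  // interrupt check fired at offset 0x9c
  std::printf("pc offset: 0x%x\n", PcOffset(frame, unoptimized));  // prints 0x9c
  return 0;
}
```
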
| 1129 | |
| 1130 | |
| 1131 static bool IsSuitableForOnStackReplacement(Isolate* isolate, | |
| 1132 Handle<JSFunction> function, | |
| 1133 Handle<Code> unoptimized) { | |
| 1134 // Keep track of whether we've succeeded in optimizing. | |
| 1135 if (!unoptimized->optimizable()) return false; | |
| 1136 // If we are trying to do OSR when there are already optimized | |
| 1137 // activations of the function, it means (a) the function is directly or | |
| 1138 // indirectly recursive and (b) an optimized invocation has been | |
| 1139 // deoptimized so that we are currently in an unoptimized activation. | |
| 1140 // Check for optimized activations of this function. | |
| 1141 for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) { | |
| 1142 JavaScriptFrame* frame = it.frame(); | |
| 1143 if (frame->is_optimized() && frame->function() == *function) return false; | |
| 1144 } | |
| 1145 | |
| 1146 return true; | |
| 1147 } | |
| 1148 | |
| 1149 | |
| 1150 bool Compiler::CompileForOnStackReplacement(Handle<JSFunction> function, | |
| 1151 BailoutId* ast_id) { | |
| 1152 Isolate* isolate = function->GetIsolate(); | |
| 1153 // We have hit a back edge in an unoptimized frame for a function that was | |
| 1154 // selected for on-stack replacement. Find the unoptimized code object. | |
| 1155 Handle<Code> unoptimized(function->shared()->code(), isolate); | |
| 1156 | |
| 1157 Deoptimizer::RevertInterruptCode(isolate, *unoptimized); | |
| 1158 if (FLAG_trace_osr) { | |
| 1159 PrintF("[OSR - restored original interrupt calls in "); | |
| 1160 function->PrintName(); | |
| 1161 PrintF("]\n"); | |
| 1162 } | |
| 1163 | |
| 1164 if (!IsSuitableForOnStackReplacement(isolate, function, unoptimized)) { | |
| 1165 return false; | |
| 1166 } | |
| 1167 | |
| 1168 uint32_t pc_offset = CurrentPcOffset(isolate, function, unoptimized); | |
| 1169 | |
| 1170 { DisallowHeapAllocation no_gc; | |
| 1171 FullCodeGenerator::BackEdgeTableIterator back_edges(*unoptimized, &no_gc); | |
| 1172 if (!back_edges.FindPcOffset(pc_offset)) UNREACHABLE(); | |
| 1173 *ast_id = back_edges.ast_id(); | |
| 1174 | |
| 1175 if (FLAG_trace_osr) { | |
| 1176 PrintF("[OSR - replacing at AST id %d, loop depth %d in ", | |
| 1177 ast_id->ToInt(), back_edges.loop_depth()); | |
| 1178 function->PrintName(); | |
| 1179 PrintF("]\n"); | |
| 1180 } | |
| 1181 } | |
| 1182 | |
| 1183 // Try to compile the optimized code. A true return value from | |
| 1184 // CompileOptimized means that compilation succeeded, not necessarily | |
| 1185 // that optimization succeeded. | |
| 1186 if (JSFunction::CompileOptimized(function, *ast_id, CLEAR_EXCEPTION) && | |
| 1187 function->IsOptimized()) { | |
| 1188 DeoptimizationInputData* data = DeoptimizationInputData::cast( | |
| 1189 function->code()->deoptimization_data()); | |
| 1190 if (data->OsrPcOffset()->value() >= 0) { | |
| 1191 if (FLAG_trace_osr) { | |
| 1192 PrintF("[OSR - entry, offset %d in optimized code]\n", | |
| 1193 data->OsrPcOffset()->value()); | |
| 1194 } | |
| 1195 ASSERT(BailoutId(data->OsrAstId()->value()) == *ast_id); | |
| 1196 return true; | |
| 1197 } | |
| 1198 } else { | |
| 1199 if (FLAG_trace_osr) { | |
| 1200 PrintF("[OSR - optimization failed for "); | |
| 1201 function->PrintName(); | |
| 1202 PrintF("]\n"); | |
| 1203 } | |
| 1204 } | |
| 1205 return false; | |
| 1206 } | |
| 1207 | |
| 1208 | |
| 1209 bool Compiler::CompileForConcurrentOSR(Handle<JSFunction> function, | |
| 1210 BailoutId* ast_id) { | |
| 1211 ASSERT(FLAG_concurrent_recompilation && FLAG_speculative_concurrent_osr); | |
| 1212 | |
| 1213 Isolate* isolate = function->GetIsolate(); | |
| 1214 Handle<Code> unoptimized(function->shared()->code(), isolate); | |
| 1215 | |
| 1216 uint32_t pc_offset = CurrentPcOffset(isolate, function, unoptimized); | |
| 1217 | |
| 1218 if (isolate->optimizing_compiler_thread()-> | |
| 1219 IsQueuedForOSR(function, pc_offset)) { | |
| 1220 // Still waiting for the optimizing compiler thread to finish. Carry on. | |
| 1221 if (FLAG_trace_osr) { | |
| 1222 PrintF("[COSR - polling recompile tasks for "); | |
| 1223 function->PrintName(); | |
| 1224 PrintF("]\n"); | |
| 1225 } | |
| 1226 return false; | |
| 1227 } | |
| 1228 | |
| 1229 OptimizingCompiler* compiler = isolate->optimizing_compiler_thread()-> | |
| 1230 FindReadyOSRCandidate(function, pc_offset); | |
| 1231 | |
| 1232 if (compiler != NULL) { | |
| 1233 if (FLAG_trace_osr) { | |
| 1234 PrintF("[COSR - optimization complete for "); | |
| 1235 function->PrintName(); | |
| 1236 PrintF(", restoring interrupt calls]\n"); | |
| 1237 } | |
| 1238 Deoptimizer::RevertInterruptCode(isolate, *unoptimized); | |
| 1239 | |
| 1240 *ast_id = compiler->info()->osr_ast_id(); | |
| 1241 | |
| 1242 InstallOptimizedCode(compiler); | |
| 1243 isolate->optimizing_compiler_thread()->RemoveStaleOSRCandidates(); | |
| 1244 | |
| 1245 if (!function->IsOptimized()) { | |
| 1246 if (FLAG_trace_osr) { | |
| 1247 PrintF("[COSR - optimization failed for "); | |
| 1248 function->PrintName(); | |
| 1249 PrintF("]\n"); | |
| 1250 } | |
| 1251 return false; | |
| 1252 } | |
| 1253 | |
| 1254 DeoptimizationInputData* data = DeoptimizationInputData::cast( | |
| 1255 function->code()->deoptimization_data()); | |
| 1256 | |
| 1257 if (data->OsrPcOffset()->value() >= 0) { | |
| 1258 ASSERT(BailoutId(data->OsrAstId()->value()) == *ast_id); | |
| 1259 if (FLAG_trace_osr) { | |
| 1260 PrintF("[COSR - entry at AST id %d, offset %d in optimized code]\n", | |
| 1261 ast_id->ToInt(), data->OsrPcOffset()->value()); | |
| 1262 } | |
| 1263 return true; | |
| 1264 } | |
| 1265 return false; | |
| 1266 } | |
| 1267 | |
| 1268 if (!IsSuitableForOnStackReplacement(isolate, function, unoptimized)) { | |
| 1269 if (FLAG_trace_osr) { | |
| 1270 PrintF("[COSR - "); | |
| 1271 function->PrintName(); | |
| 1272 PrintF(" is unsuitable, restoring interrupt calls]\n"); | |
| 1273 } | |
| 1274 Deoptimizer::RevertInterruptCode(isolate, *unoptimized); | |
| 1275 return false; | |
| 1276 } | |
| 1277 | |
| 1278 RecompileConcurrent(function, pc_offset); | |
| 1279 return false; | |
| 1280 } | |
| 1281 | |
| 1282 | |
| 1097 Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal, | 1283 Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal, |
| 1098 Handle<Script> script) { | 1284 Handle<Script> script) { |
| 1099 // Precondition: code has been parsed and scopes have been analyzed. | 1285 // Precondition: code has been parsed and scopes have been analyzed. |
| 1100 CompilationInfoWithZone info(script); | 1286 CompilationInfoWithZone info(script); |
| 1101 info.SetFunction(literal); | 1287 info.SetFunction(literal); |
| 1102 info.SetScope(literal->scope()); | 1288 info.SetScope(literal->scope()); |
| 1103 info.SetLanguageMode(literal->scope()->language_mode()); | 1289 info.SetLanguageMode(literal->scope()->language_mode()); |
| 1104 | 1290 |
| 1105 Isolate* isolate = info.isolate(); | 1291 Isolate* isolate = info.isolate(); |
| 1106 Factory* factory = isolate->factory(); | 1292 Factory* factory = isolate->factory(); |
| (...skipping 149 matching lines...) | |
| 1256 AllowHandleDereference allow_deref; | 1442 AllowHandleDereference allow_deref; |
| 1257 bool tracing_on = info()->IsStub() | 1443 bool tracing_on = info()->IsStub() |
| 1258 ? FLAG_trace_hydrogen_stubs | 1444 ? FLAG_trace_hydrogen_stubs |
| 1259 : (FLAG_trace_hydrogen && | 1445 : (FLAG_trace_hydrogen && |
| 1260 info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter)); | 1446 info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter)); |
| 1261 return (tracing_on && | 1447 return (tracing_on && |
| 1262 OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL); | 1448 OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL); |
| 1263 } | 1449 } |
| 1264 | 1450 |
| 1265 } } // namespace v8::internal | 1451 } } // namespace v8::internal |