// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/arguments.h"
#include "src/compiler.h"
#include "src/deoptimizer.h"
#include "src/frames.h"
#include "src/full-codegen.h"
#include "src/isolate.h"
#include "src/isolate-inl.h"
#include "src/runtime/runtime.h"
#include "src/runtime/runtime-utils.h"
#include "src/v8threads.h"
#include "src/vm-state.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {

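// Compiles the given function lazily and installs the resulting code on it.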
RUNTIME_FUNCTION(Runtime_CompileLazy) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
#ifdef DEBUG
  if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
    PrintF("[unoptimized: ");
    function->PrintName();
    PrintF("]\n");
  }
#endif

  // Compile the target function.
  DCHECK(function->shared()->allows_lazy_compilation());

  Handle<Code> code;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, code,
                                     Compiler::GetLazyCode(function));
  DCHECK(code->kind() == Code::FUNCTION ||
         code->kind() == Code::OPTIMIZED_FUNCTION);
  function->ReplaceCode(*code);
  return *code;
}


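// Compiles an optimized version of the given function, either synchronously
// or via the concurrent compilation queue. If optimization is not possible,
// the full-codegen (unoptimized) code stays installed.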
RUNTIME_FUNCTION(Runtime_CompileOptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 2);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  CONVERT_BOOLEAN_ARG_CHECKED(concurrent, 1);

  Handle<Code> unoptimized(function->shared()->code());
  if (!isolate->use_crankshaft() ||
      function->shared()->optimization_disabled() ||
      isolate->DebuggerHasBreakPoints()) {
    // If the function is not optimizable or the debugger is active, continue
    // using the code from the full compiler.
    if (FLAG_trace_opt) {
      PrintF("[failed to optimize ");
      function->PrintName();
      PrintF(": is code optimizable: %s, is debugger enabled: %s]\n",
             function->shared()->optimization_disabled() ? "F" : "T",
             isolate->DebuggerHasBreakPoints() ? "T" : "F");
    }
    function->ReplaceCode(*unoptimized);
    return function->code();
  }

  Compiler::ConcurrencyMode mode =
      concurrent ? Compiler::CONCURRENT : Compiler::NOT_CONCURRENT;
  Handle<Code> code;
  if (Compiler::GetOptimizedCode(function, unoptimized, mode).ToHandle(&code)) {
    function->ReplaceCode(*code);
  } else {
    function->ReplaceCode(function->shared()->code());
  }

  DCHECK(function->code()->kind() == Code::FUNCTION ||
         function->code()->kind() == Code::OPTIMIZED_FUNCTION ||
         function->IsInOptimizationQueue());
  return function->code();
}


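// Cleans up after a stub bailout by deleting the Deoptimizer instance that
// was set up for it.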
RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 0);
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());
  delete deoptimizer;
  return isolate->heap()->undefined_value();
}


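// Thread visitor that records whether any JavaScript frame is still
// executing the given code object.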
class ActivationsFinder : public ThreadVisitor {
 public:
  Code* code_;
  bool has_code_activations_;

  explicit ActivationsFinder(Code* code)
      : code_(code), has_code_activations_(false) {}

  void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
    JavaScriptFrameIterator it(isolate, top);
    VisitFrames(&it);
  }

  void VisitFrames(JavaScriptFrameIterator* it) {
    for (; !it->done(); it->Advance()) {
      JavaScriptFrame* frame = it->frame();
      if (code_->contains(frame->pc())) has_code_activations_ = true;
    }
  }
};


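// Finishes a deoptimization: materializes heap objects for the deoptimized
// frame and, if no activations of the optimized code remain, removes that
// code from the function and evicts it from the optimized code map.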
RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_SMI_ARG_CHECKED(type_arg, 0);
  Deoptimizer::BailoutType type =
      static_cast<Deoptimizer::BailoutType>(type_arg);
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());

  Handle<JSFunction> function = deoptimizer->function();
  Handle<Code> optimized_code = deoptimizer->compiled_code();

  DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
  DCHECK(type == deoptimizer->bailout_type());

  // Make sure to materialize objects before causing any allocation.
  JavaScriptFrameIterator it(isolate);
  deoptimizer->MaterializeHeapObjects(&it);
  delete deoptimizer;

  JavaScriptFrame* frame = it.frame();
  RUNTIME_ASSERT(frame->function()->IsJSFunction());
  DCHECK(frame->function() == *function);

  // Avoid doing too much work when running with --always-opt and keep
  // the optimized code around.
  if (FLAG_always_opt || type == Deoptimizer::LAZY) {
    return isolate->heap()->undefined_value();
  }

  // Search for other activations of the same function and code.
  ActivationsFinder activations_finder(*optimized_code);
  activations_finder.VisitFrames(&it);
  isolate->thread_manager()->IterateArchivedThreads(&activations_finder);

  if (!activations_finder.has_code_activations_) {
    if (function->code() == *optimized_code) {
      if (FLAG_trace_deopt) {
        PrintF("[removing optimized code for: ");
        function->PrintName();
        PrintF("]\n");
      }
      function->ReplaceCode(function->shared()->code());
      // Evict optimized code for this function from the cache so that it
      // doesn't get used for new closures.
      function->shared()->EvictFromOptimizedCodeMap(*optimized_code,
                                                    "notify deoptimized");
    }
  } else {
    // TODO(titzer): we should probably do DeoptimizeCodeList(code)
    // unconditionally if the code is not already marked for deoptimization.
    // If there is an index by shared function info, all the better.
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return isolate->heap()->undefined_value();
}


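// Returns whether OSR can currently be attempted for the given function.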
static bool IsSuitableForOnStackReplacement(Isolate* isolate,
                                            Handle<JSFunction> function,
                                            Handle<Code> current_code) {
  // OSR requires Crankshaft and caller code that is still optimizable.
  if (!isolate->use_crankshaft() || !current_code->optimizable()) return false;
  // If we are trying to do OSR when there are already optimized
  // activations of the function, it means (a) the function is directly or
  // indirectly recursive and (b) an optimized invocation has been
  // deoptimized so that we are currently in an unoptimized activation.
  // Check for optimized activations of this function.
  for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    if (frame->is_optimized() && frame->function() == *function) return false;
  }

  return true;
}


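// Attempts on-stack replacement: picks up a finished concurrent OSR job or
// compiles optimized code with an OSR entry for the current back edge.
// Returns the optimized code on success, and NULL while compilation is still
// queued or when OSR is not possible.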
RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  Handle<Code> caller_code(function->shared()->code());

  // We're not prepared to handle a function with an arguments object.
  DCHECK(!function->shared()->uses_arguments());

  RUNTIME_ASSERT(FLAG_use_osr);

  // Passing the PC in the JavaScript frame from the caller directly is
  // not GC safe, so we walk the stack to get it.
  JavaScriptFrameIterator it(isolate);
  JavaScriptFrame* frame = it.frame();
  if (!caller_code->contains(frame->pc())) {
    // Code on the stack may not be the code object referenced by the shared
    // function info. It may have been replaced to include deoptimization data.
    caller_code = Handle<Code>(frame->LookupCode());
  }

  uint32_t pc_offset =
      static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());

#ifdef DEBUG
  DCHECK_EQ(frame->function(), *function);
  DCHECK_EQ(frame->LookupCode(), *caller_code);
  DCHECK(caller_code->contains(frame->pc()));
#endif  // DEBUG

  BailoutId ast_id = caller_code->TranslatePcOffsetToAstId(pc_offset);
  DCHECK(!ast_id.IsNone());

  Compiler::ConcurrencyMode mode =
      isolate->concurrent_osr_enabled() &&
              (function->shared()->ast_node_count() > 512)
          ? Compiler::CONCURRENT
          : Compiler::NOT_CONCURRENT;
  Handle<Code> result = Handle<Code>::null();

  OptimizedCompileJob* job = NULL;
  if (mode == Compiler::CONCURRENT) {
    // Gate the OSR entry with a stack check.
    BackEdgeTable::AddStackCheck(caller_code, pc_offset);
    // Poll already queued compilation jobs.
    OptimizingCompilerThread* thread = isolate->optimizing_compiler_thread();
    if (thread->IsQueuedForOSR(function, ast_id)) {
      if (FLAG_trace_osr) {
        PrintF("[OSR - Still waiting for queued: ");
        function->PrintName();
        PrintF(" at AST id %d]\n", ast_id.ToInt());
      }
      return NULL;
    }

    job = thread->FindReadyOSRCandidate(function, ast_id);
  }

  if (job != NULL) {
    if (FLAG_trace_osr) {
      PrintF("[OSR - Found ready: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    result = Compiler::GetConcurrentlyOptimizedCode(job);
  } else if (IsSuitableForOnStackReplacement(isolate, function, caller_code)) {
    if (FLAG_trace_osr) {
      PrintF("[OSR - Compiling: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    MaybeHandle<Code> maybe_result =
        Compiler::GetOptimizedCode(function, caller_code, mode, ast_id);
    if (maybe_result.ToHandle(&result) &&
        result.is_identical_to(isolate->builtins()->InOptimizationQueue())) {
      // Optimization is queued. Return to check later.
      return NULL;
    }
  }

  // Revert the patched back edge table, regardless of whether OSR succeeds.
  BackEdgeTable::Revert(isolate, *caller_code);

  // Check whether we ended up with usable optimized code.
  if (!result.is_null() && result->kind() == Code::OPTIMIZED_FUNCTION) {
    DeoptimizationInputData* data =
        DeoptimizationInputData::cast(result->deoptimization_data());

    if (data->OsrPcOffset()->value() >= 0) {
      DCHECK(BailoutId(data->OsrAstId()->value()) == ast_id);
      if (FLAG_trace_osr) {
        PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
               ast_id.ToInt(), data->OsrPcOffset()->value());
      }
      // TODO(titzer): this is a massive hack to make the deopt counts
      // match. Fix heuristics for reenabling optimizations!
      function->shared()->increment_deopt_count();

      // TODO(titzer): Do not install code into the function.
      function->ReplaceCode(*result);
      return *result;
    }
  }

  // Failed.
  if (FLAG_trace_osr) {
    PrintF("[OSR - Failed: ");
    function->PrintName();
    PrintF(" at AST id %d]\n", ast_id.ToInt());
  }

  if (!function->IsOptimized()) {
    function->ReplaceCode(function->shared()->code());
  }
  return NULL;
}


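// Installs any optimized functions that the concurrent compiler thread has
// finished, unless this interrupt is actually a stack overflow.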
RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  // First check if this is a real stack overflow.
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed()) {
    SealHandleScope shs(isolate);
    return isolate->StackOverflow();
  }

  isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
  return (function->IsOptimized()) ? function->code()
                                   : function->shared()->code();
}


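// Asks the embedder's callback, if one is registered, whether code may be
// generated from strings in the given context.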
bool CodeGenerationFromStringsAllowed(Isolate* isolate,
                                      Handle<Context> context) {
  DCHECK(context->allow_code_gen_from_strings()->IsFalse());
  // Check with callback if set.
  AllowCodeGenerationFromStringsCallback callback =
      isolate->allow_code_gen_callback();
  if (callback == NULL) {
    // No callback set and code generation disallowed.
    return false;
  } else {
    // Callback set. Let it decide if code generation is allowed.
    VMState<EXTERNAL> state(isolate);
    return callback(v8::Utils::ToLocal(context));
  }
}


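// Compiles the given source string in the native context, subject to the
// context's code-generation-from-strings policy.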
RUNTIME_FUNCTION(Runtime_CompileString) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 2);
  CONVERT_ARG_HANDLE_CHECKED(String, source, 0);
  CONVERT_BOOLEAN_ARG_CHECKED(function_literal_only, 1);

  // Extract native context.
  Handle<Context> context(isolate->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (context->allow_code_gen_from_strings()->IsFalse() &&
      !CodeGenerationFromStringsAllowed(isolate, context)) {
    Handle<Object> error_message =
        context->ErrorMessageForCodeGenerationFromStrings();
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewEvalError("code_gen_from_strings",
                              HandleVector<Object>(&error_message, 1)));
  }

  // Compile source string in the native context.
  ParseRestriction restriction = function_literal_only
                                     ? ONLY_SINGLE_FUNCTION_LITERAL
                                     : NO_PARSE_RESTRICTION;
  Handle<JSFunction> fun;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, fun,
      Compiler::GetFunctionFromEval(source, context, SLOPPY, restriction,
                                    RelocInfo::kNoPosition));
  return *fun;
}


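// Compiles an eval source string in the current context and returns the
// compiled function together with the receiver as an ObjectPair.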
static ObjectPair CompileGlobalEval(Isolate* isolate, Handle<String> source,
                                    Handle<Object> receiver,
                                    StrictMode strict_mode,
                                    int scope_position) {
  Handle<Context> context = Handle<Context>(isolate->context());
  Handle<Context> native_context = Handle<Context>(context->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (native_context->allow_code_gen_from_strings()->IsFalse() &&
      !CodeGenerationFromStringsAllowed(isolate, native_context)) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    Handle<Object> error;
    MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
        "code_gen_from_strings", HandleVector<Object>(&error_message, 1));
    if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
    return MakePair(isolate->heap()->exception(), NULL);
  }

  // Deal with a normal eval call with a string argument. Compile it
  // and return the compiled function bound in the local context.
  static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
  Handle<JSFunction> compiled;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, compiled,
      Compiler::GetFunctionFromEval(source, context, strict_mode, restriction,
                                    scope_position),
      MakePair(isolate->heap()->exception(), NULL));
  return MakePair(*compiled, *receiver);
}


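// Determines whether an eval call is direct; if so, compiles the source for
// a direct eval, otherwise returns the callee so that execution falls back
// to an indirect call.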
RUNTIME_FUNCTION_RETURN_PAIR(Runtime_ResolvePossiblyDirectEval) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 5);

  Handle<Object> callee = args.at<Object>(0);

  // If "eval" didn't refer to the original GlobalEval, it's not a
  // direct call to eval.
  // (Even if it does, if the first argument isn't a string, just let
  // execution default to an indirect call to eval, which will also return
  // the first argument without doing anything.)
  if (*callee != isolate->native_context()->global_eval_fun() ||
      !args[1]->IsString()) {
    return MakePair(*callee, isolate->heap()->undefined_value());
  }

  DCHECK(args[3]->IsSmi());
  DCHECK(args.smi_at(3) == SLOPPY || args.smi_at(3) == STRICT);
  StrictMode strict_mode = static_cast<StrictMode>(args.smi_at(3));
  DCHECK(args[4]->IsSmi());
  return CompileGlobalEval(isolate, args.at<String>(1), args.at<Object>(2),
                           strict_mode, args.smi_at(4));
}
}
}  // namespace v8::internal