| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/disasm.h" | 10 #include "src/disasm.h" |
| (...skipping 335 matching lines...) |
| 346 static_cast<OptimizedFrame*>(it.frame())->function(); | 346 static_cast<OptimizedFrame*>(it.frame())->function(); |
| 347 CodeTracer::Scope scope(isolate->GetCodeTracer()); | 347 CodeTracer::Scope scope(isolate->GetCodeTracer()); |
| 348 PrintF(scope.file(), "[deoptimizer found activation of function: "); | 348 PrintF(scope.file(), "[deoptimizer found activation of function: "); |
| 349 function->PrintName(scope.file()); | 349 function->PrintName(scope.file()); |
| 350 PrintF(scope.file(), | 350 PrintF(scope.file(), |
| 351 " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); | 351 " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); |
| 352 } | 352 } |
| 353 SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc()); | 353 SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc()); |
| 354 int deopt_index = safepoint.deoptimization_index(); | 354 int deopt_index = safepoint.deoptimization_index(); |
| 355 // Turbofan deopt is checked when we are patching addresses on stack. | 355 // Turbofan deopt is checked when we are patching addresses on stack. |
| 356 bool turbofanned = code->is_turbofanned(); | 356 bool turbofanned = code->is_turbofanned() && !FLAG_turbo_deoptimization; |
| 357 bool safe_to_deopt = | 357 bool safe_to_deopt = |
| 358 deopt_index != Safepoint::kNoDeoptimizationIndex || turbofanned; | 358 deopt_index != Safepoint::kNoDeoptimizationIndex || turbofanned; |
| 359 CHECK(topmost_optimized_code == NULL || safe_to_deopt || turbofanned); | 359 CHECK(topmost_optimized_code == NULL || safe_to_deopt || turbofanned); |
| 360 if (topmost_optimized_code == NULL) { | 360 if (topmost_optimized_code == NULL) { |
| 361 topmost_optimized_code = code; | 361 topmost_optimized_code = code; |
| 362 safe_to_deopt_topmost_optimized_code = safe_to_deopt; | 362 safe_to_deopt_topmost_optimized_code = safe_to_deopt; |
| 363 } | 363 } |
| 364 } | 364 } |
| 365 } | 365 } |
| 366 #endif | 366 #endif |
| (...skipping 27 matching lines...) |
| 394 // Move the code to the _deoptimized_ code list. | 394 // Move the code to the _deoptimized_ code list. |
| 395 code->set_next_code_link(context->DeoptimizedCodeListHead()); | 395 code->set_next_code_link(context->DeoptimizedCodeListHead()); |
| 396 context->SetDeoptimizedCodeListHead(code); | 396 context->SetDeoptimizedCodeListHead(code); |
| 397 } else { | 397 } else { |
| 398 // Not marked; preserve this element. | 398 // Not marked; preserve this element. |
| 399 prev = code; | 399 prev = code; |
| 400 } | 400 } |
| 401 element = next; | 401 element = next; |
| 402 } | 402 } |
| 403 | 403 |
| 404 if (FLAG_turbo_deoptimization) { | |
| 405 PatchStackForMarkedCode(isolate); | |
| 406 } | |
| 407 | |
| 408 // TODO(titzer): we need a handle scope only because of the macro assembler, | 404 // TODO(titzer): we need a handle scope only because of the macro assembler, |
| 409 // which is only used in EnsureCodeForDeoptimizationEntry. | 405 // which is only used in EnsureCodeForDeoptimizationEntry. |
| 410 HandleScope scope(isolate); | 406 HandleScope scope(isolate); |
| 411 | 407 |
| 412 // Now patch all the codes for deoptimization. | 408 // Now patch all the codes for deoptimization. |
| 413 for (int i = 0; i < codes.length(); i++) { | 409 for (int i = 0; i < codes.length(); i++) { |
| 414 #ifdef DEBUG | 410 #ifdef DEBUG |
| 415 if (codes[i] == topmost_optimized_code) { | 411 if (codes[i] == topmost_optimized_code) { |
| 416 DCHECK(safe_to_deopt_topmost_optimized_code); | 412 DCHECK(safe_to_deopt_topmost_optimized_code); |
| 417 } | 413 } |
| 418 #endif | 414 #endif |
| 419 // It is finally time to die, code object. | 415 // It is finally time to die, code object. |
| 420 | 416 |
| 421 // Remove the code from optimized code map. | 417 // Remove the code from optimized code map. |
| 422 DeoptimizationInputData* deopt_data = | 418 DeoptimizationInputData* deopt_data = |
| 423 DeoptimizationInputData::cast(codes[i]->deoptimization_data()); | 419 DeoptimizationInputData::cast(codes[i]->deoptimization_data()); |
| 424 SharedFunctionInfo* shared = | 420 SharedFunctionInfo* shared = |
| 425 SharedFunctionInfo::cast(deopt_data->SharedFunctionInfo()); | 421 SharedFunctionInfo::cast(deopt_data->SharedFunctionInfo()); |
| 426 shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code"); | 422 shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code"); |
| 427 | 423 |
| 428 // Do platform-specific patching to force any activations to lazy deopt. | 424 // Do platform-specific patching to force any activations to lazy deopt. |
| 429 // | 425 if (!codes[i]->is_turbofanned() || FLAG_turbo_deoptimization) { |
| 430 // We skip patching Turbofan code - we patch return addresses on stack. | |
| 431 // TODO(jarin) We should still zap the code object (but we have to | |
| 432 // be careful not to zap the deoptimization block). | |
| 433 if (!codes[i]->is_turbofanned()) { | |
| 434 PatchCodeForDeoptimization(isolate, codes[i]); | 426 PatchCodeForDeoptimization(isolate, codes[i]); |
| 435 | 427 |
| 436 // We might be in the middle of incremental marking with compaction. | 428 // We might be in the middle of incremental marking with compaction. |
| 437 // Tell collector to treat this code object in a special way and | 429 // Tell collector to treat this code object in a special way and |
| 438 // ignore all slots that might have been recorded on it. | 430 // ignore all slots that might have been recorded on it. |
| 439 isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]); | 431 isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]); |
| 440 } | 432 } |
| 441 } | 433 } |
| 442 } | 434 } |
| 443 | 435 |
| 444 | 436 |
| 445 // For all marked Turbofanned code on stack, change the return address to go | |
| 446 // to the deoptimization block. | |
| 447 void Deoptimizer::PatchStackForMarkedCode(Isolate* isolate) { | |
| 448 // TODO(jarin) We should tolerate missing patch entry for the topmost frame. | |
| 449 for (StackFrameIterator it(isolate, isolate->thread_local_top()); !it.done(); | |
| 450 it.Advance()) { | |
| 451 StackFrame::Type type = it.frame()->type(); | |
| 452 if (type == StackFrame::OPTIMIZED) { | |
| 453 Code* code = it.frame()->LookupCode(); | |
| 454 if (code->is_turbofanned() && code->marked_for_deoptimization()) { | |
| 455 JSFunction* function = | |
| 456 static_cast<OptimizedFrame*>(it.frame())->function(); | |
| 457 Address* pc_address = it.frame()->pc_address(); | |
| 458 int pc_offset = | |
| 459 static_cast<int>(*pc_address - code->instruction_start()); | |
| 460 SafepointEntry safepoint_entry = code->GetSafepointEntry(*pc_address); | |
| 461 unsigned new_pc_offset = safepoint_entry.deoptimization_pc(); | |
| 462 | |
| 463 if (FLAG_trace_deopt) { | |
| 464 CodeTracer::Scope scope(isolate->GetCodeTracer()); | |
| 465 PrintF(scope.file(), "[patching stack address for function: "); | |
| 466 function->PrintName(scope.file()); | |
| 467 PrintF(scope.file(), " (Pc offset %i -> %i)]\n", pc_offset, | |
| 468 new_pc_offset); | |
| 469 } | |
| 470 | |
| 471 CHECK(new_pc_offset != Safepoint::kNoDeoptimizationPc); | |
| 472 *pc_address += static_cast<int>(new_pc_offset) - pc_offset; | |
| 473 } | |
| 474 } | |
| 475 } | |
| 476 } | |
| 477 | |
| 478 | |
| 479 void Deoptimizer::DeoptimizeAll(Isolate* isolate) { | 437 void Deoptimizer::DeoptimizeAll(Isolate* isolate) { |
| 480 if (FLAG_trace_deopt) { | 438 if (FLAG_trace_deopt) { |
| 481 CodeTracer::Scope scope(isolate->GetCodeTracer()); | 439 CodeTracer::Scope scope(isolate->GetCodeTracer()); |
| 482 PrintF(scope.file(), "[deoptimize all code in all contexts]\n"); | 440 PrintF(scope.file(), "[deoptimize all code in all contexts]\n"); |
| 483 } | 441 } |
| 484 DisallowHeapAllocation no_allocation; | 442 DisallowHeapAllocation no_allocation; |
| 485 // For all contexts, mark all code, then deoptimize. | 443 // For all contexts, mark all code, then deoptimize. |
| 486 Object* context = isolate->heap()->native_contexts_list(); | 444 Object* context = isolate->heap()->native_contexts_list(); |
| 487 while (!context->IsUndefined()) { | 445 while (!context->IsUndefined()) { |
| 488 Context* native_context = Context::cast(context); | 446 Context* native_context = Context::cast(context); |
| (...skipping 3181 matching lines...) |
| 3670 | 3628 |
| 3671 | 3629 |
| 3672 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 3630 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { |
| 3673 v->VisitPointer(bit_cast<Object**>(&function_)); | 3631 v->VisitPointer(bit_cast<Object**>(&function_)); |
| 3674 v->VisitPointer(&context_); | 3632 v->VisitPointer(&context_); |
| 3675 v->VisitPointers(parameters_, parameters_ + parameters_count_); | 3633 v->VisitPointers(parameters_, parameters_ + parameters_count_); |
| 3676 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 3634 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); |
| 3677 } | 3635 } |
| 3678 | 3636 |
| 3679 } } // namespace v8::internal | 3637 } } // namespace v8::internal |