OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
9 #include "src/cpu-profiler.h" | 9 #include "src/cpu-profiler.h" |
10 #include "src/deoptimizer.h" | 10 #include "src/deoptimizer.h" |
(...skipping 324 matching lines...)
335 Code* topmost_optimized_code = NULL; | 335 Code* topmost_optimized_code = NULL; |
336 bool safe_to_deopt_topmost_optimized_code = false; | 336 bool safe_to_deopt_topmost_optimized_code = false; |
337 // Make sure all activations of optimized code can deopt at their current PC. | 337 // Make sure all activations of optimized code can deopt at their current PC. |
338 // The topmost optimized code has special handling because it cannot be | 338 // The topmost optimized code has special handling because it cannot be |
339 // deoptimized due to weak object dependency. | 339 // deoptimized due to weak object dependency. |
340 for (StackFrameIterator it(isolate, isolate->thread_local_top()); | 340 for (StackFrameIterator it(isolate, isolate->thread_local_top()); |
341 !it.done(); it.Advance()) { | 341 !it.done(); it.Advance()) { |
342 StackFrame::Type type = it.frame()->type(); | 342 StackFrame::Type type = it.frame()->type(); |
343 if (type == StackFrame::OPTIMIZED) { | 343 if (type == StackFrame::OPTIMIZED) { |
344 Code* code = it.frame()->LookupCode(); | 344 Code* code = it.frame()->LookupCode(); |
| 345 JSFunction* function = |
| 346 static_cast<OptimizedFrame*>(it.frame())->function(); |
345 if (FLAG_trace_deopt) { | 347 if (FLAG_trace_deopt) { |
346 JSFunction* function = | |
347 static_cast<OptimizedFrame*>(it.frame())->function(); | |
348 CodeTracer::Scope scope(isolate->GetCodeTracer()); | 348 CodeTracer::Scope scope(isolate->GetCodeTracer()); |
349 PrintF(scope.file(), "[deoptimizer found activation of function: "); | 349 PrintF(scope.file(), "[deoptimizer found activation of function: "); |
350 function->PrintName(scope.file()); | 350 function->PrintName(scope.file()); |
351 PrintF(scope.file(), | 351 PrintF(scope.file(), |
352 " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); | 352 " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); |
353 } | 353 } |
354 SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc()); | 354 SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc()); |
355 int deopt_index = safepoint.deoptimization_index(); | 355 int deopt_index = safepoint.deoptimization_index(); |
356 // Turbofan deopt is checked when we are patching addresses on stack. | 356 // Turbofan deopt is checked when we are patching addresses on stack. |
357 bool turbofanned = code->is_turbofanned() && !FLAG_turbo_deoptimization; | 357 bool turbofanned = code->is_turbofanned() && |
| 358 function->shared()->asm_function() && |
| 359 !FLAG_turbo_asm_deoptimization; |
358 bool safe_to_deopt = | 360 bool safe_to_deopt = |
359 deopt_index != Safepoint::kNoDeoptimizationIndex || turbofanned; | 361 deopt_index != Safepoint::kNoDeoptimizationIndex || turbofanned; |
360 CHECK(topmost_optimized_code == NULL || safe_to_deopt || turbofanned); | 362 CHECK(topmost_optimized_code == NULL || safe_to_deopt || turbofanned); |
361 if (topmost_optimized_code == NULL) { | 363 if (topmost_optimized_code == NULL) { |
362 topmost_optimized_code = code; | 364 topmost_optimized_code = code; |
363 safe_to_deopt_topmost_optimized_code = safe_to_deopt; | 365 safe_to_deopt_topmost_optimized_code = safe_to_deopt; |
364 } | 366 } |
365 } | 367 } |
366 } | 368 } |
367 #endif | 369 #endif |
368 | 370 |
369 // Move marked code from the optimized code list to the deoptimized | 371 // Move marked code from the optimized code list to the deoptimized |
370 // code list, collecting them into a ZoneList. | 372 // code list, collecting them into a ZoneList. |
371 Zone zone; | 373 Zone zone; |
372 ZoneList<Code*> codes(10, &zone); | 374 ZoneList<Code*> codes(10, &zone); |
373 | 375 |
374 // Walk over all optimized code objects in this native context. | 376 // Walk over all optimized code objects in this native context. |
375 Code* prev = NULL; | 377 Code* prev = NULL; |
376 Object* element = context->OptimizedCodeListHead(); | 378 Object* element = context->OptimizedCodeListHead(); |
377 while (!element->IsUndefined()) { | 379 while (!element->IsUndefined()) { |
378 Code* code = Code::cast(element); | 380 Code* code = Code::cast(element); |
379 CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION); | 381 CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION); |
380 Object* next = code->next_code_link(); | 382 Object* next = code->next_code_link(); |
381 | 383 |
382 if (code->marked_for_deoptimization()) { | 384 if (code->marked_for_deoptimization()) { |
383 DCHECK(!code->is_turbofanned() || FLAG_turbo_deoptimization); | |
384 // Put the code into the list for later patching. | 385 // Put the code into the list for later patching. |
385 codes.Add(code, &zone); | 386 codes.Add(code, &zone); |
386 | 387 |
387 if (prev != NULL) { | 388 if (prev != NULL) { |
388 // Skip this code in the optimized code list. | 389 // Skip this code in the optimized code list. |
389 prev->set_next_code_link(next); | 390 prev->set_next_code_link(next); |
390 } else { | 391 } else { |
391 // There was no previous node, the next node is the new head. | 392 // There was no previous node, the next node is the new head. |
392 context->SetOptimizedCodeListHead(next); | 393 context->SetOptimizedCodeListHead(next); |
393 } | 394 } |
(...skipping 22 matching lines...)
416 // It is finally time to die, code object. | 417 // It is finally time to die, code object. |
417 | 418 |
418 // Remove the code from optimized code map. | 419 // Remove the code from optimized code map. |
419 DeoptimizationInputData* deopt_data = | 420 DeoptimizationInputData* deopt_data = |
420 DeoptimizationInputData::cast(codes[i]->deoptimization_data()); | 421 DeoptimizationInputData::cast(codes[i]->deoptimization_data()); |
421 SharedFunctionInfo* shared = | 422 SharedFunctionInfo* shared = |
422 SharedFunctionInfo::cast(deopt_data->SharedFunctionInfo()); | 423 SharedFunctionInfo::cast(deopt_data->SharedFunctionInfo()); |
423 shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code"); | 424 shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code"); |
424 | 425 |
425 // Do platform-specific patching to force any activations to lazy deopt. | 426 // Do platform-specific patching to force any activations to lazy deopt. |
426 if (!codes[i]->is_turbofanned() || FLAG_turbo_deoptimization) { | 427 PatchCodeForDeoptimization(isolate, codes[i]); |
427 PatchCodeForDeoptimization(isolate, codes[i]); | |
428 | 428 |
429 // We might be in the middle of incremental marking with compaction. | 429 // We might be in the middle of incremental marking with compaction. |
430 // Tell collector to treat this code object in a special way and | 430 // Tell collector to treat this code object in a special way and |
431 // ignore all slots that might have been recorded on it. | 431 // ignore all slots that might have been recorded on it. |
432 isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]); | 432 isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]); |
433 } | |
434 } | 433 } |
435 } | 434 } |
436 | 435 |
437 | 436 |
438 void Deoptimizer::DeoptimizeAll(Isolate* isolate) { | 437 void Deoptimizer::DeoptimizeAll(Isolate* isolate) { |
439 if (FLAG_trace_deopt) { | 438 if (FLAG_trace_deopt) { |
440 CodeTracer::Scope scope(isolate->GetCodeTracer()); | 439 CodeTracer::Scope scope(isolate->GetCodeTracer()); |
441 PrintF(scope.file(), "[deoptimize all code in all contexts]\n"); | 440 PrintF(scope.file(), "[deoptimize all code in all contexts]\n"); |
442 } | 441 } |
443 DisallowHeapAllocation no_allocation; | 442 DisallowHeapAllocation no_allocation; |
(...skipping 3363 matching lines...)
3807 int raw_position = static_cast<int>(info->data()); | 3806 int raw_position = static_cast<int>(info->data()); |
3808 last_position = raw_position ? SourcePosition::FromRaw(raw_position) | 3807 last_position = raw_position ? SourcePosition::FromRaw(raw_position) |
3809 : SourcePosition::Unknown(); | 3808 : SourcePosition::Unknown(); |
3810 } else if (info->rmode() == RelocInfo::DEOPT_REASON) { | 3809 } else if (info->rmode() == RelocInfo::DEOPT_REASON) { |
3811 last_reason = static_cast<Deoptimizer::DeoptReason>(info->data()); | 3810 last_reason = static_cast<Deoptimizer::DeoptReason>(info->data()); |
3812 } | 3811 } |
3813 } | 3812 } |
3814 return DeoptInfo(SourcePosition::Unknown(), NULL, Deoptimizer::kNoReason); | 3813 return DeoptInfo(SourcePosition::Unknown(), NULL, Deoptimizer::kNoReason); |
3815 } | 3814 } |
3816 } } // namespace v8::internal | 3815 } } // namespace v8::internal |
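
The behavioral core of this change is the revised safe-to-deopt predicate: in the NEW column, TurboFan code is exempted from needing a deoptimization index at its safepoint only when it belongs to an asm.js function (function->shared()->asm_function()) and --turbo-asm-deoptimization is off, replacing the old blanket !FLAG_turbo_deoptimization escape hatch. The standalone sketch below models that decision with plain bools standing in for the real Code*, JSFunction*, and flag objects; the function and parameter names are hypothetical, not V8 API.

    // Sketch of the revised safe-to-deopt decision (hypothetical names,
    // plain bools instead of V8's Code*, JSFunction*, and FLAG_* objects).
    #include <cstdio>

    bool SafeToDeopt(bool has_deopt_index, bool is_turbofanned,
                     bool is_asm_function, bool turbo_asm_deopt_flag) {
      // NEW predicate: only asm.js TurboFan code with
      // --turbo-asm-deoptimization off keeps the "turbofanned" exemption.
      bool turbofanned =
          is_turbofanned && is_asm_function && !turbo_asm_deopt_flag;
      return has_deopt_index || turbofanned;
    }

    int main() {
      // Code with a deoptimization index at its safepoint: safe to deopt.
      std::printf("%d\n", SafeToDeopt(true, true, false, false));   // 1
      // asm.js TurboFan code, flag off, no deopt index: still exempt.
      std::printf("%d\n", SafeToDeopt(false, true, true, false));   // 1
      // Non-asm TurboFan code without a deopt index: no longer exempt.
      std::printf("%d\n", SafeToDeopt(false, true, false, false));  // 0
      return 0;
    }

The same shift explains the later hunks: the DCHECK at old line 383 and the FLAG_turbo_deoptimization guard around PatchCodeForDeoptimization (old lines 426-433) relied on the blanket flag and are dropped, so marked TurboFan code is now patched and invalidated unconditionally.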