| OLD | NEW | 
|---|---|
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 374 matching lines...) | 
| 385       // Move the code to the _deoptimized_ code list. | 385       // Move the code to the _deoptimized_ code list. | 
| 386       code->set_next_code_link(context->DeoptimizedCodeListHead()); | 386       code->set_next_code_link(context->DeoptimizedCodeListHead()); | 
| 387       context->SetDeoptimizedCodeListHead(code); | 387       context->SetDeoptimizedCodeListHead(code); | 
| 388     } else { | 388     } else { | 
| 389       // Not marked; preserve this element. | 389       // Not marked; preserve this element. | 
| 390       prev = code; | 390       prev = code; | 
| 391     } | 391     } | 
| 392     element = next; | 392     element = next; | 
| 393   } | 393   } | 
| 394 | 394 | 
|  | 395 #ifdef DEBUG | 
|  | 396   // Make sure all activations of optimized code can deopt at their current PC. | 
|  | 397   for (StackFrameIterator it(isolate, isolate->thread_local_top()); | 
|  | 398        !it.done(); it.Advance()) { | 
|  | 399     StackFrame::Type type = it.frame()->type(); | 
|  | 400     if (type == StackFrame::OPTIMIZED) { | 
|  | 401       Code* code = it.frame()->LookupCode(); | 
|  | 402       if (FLAG_trace_deopt) { | 
|  | 403         JSFunction* function = | 
|  | 404             static_cast<OptimizedFrame*>(it.frame())->function(); | 
|  | 405         CodeTracer::Scope scope(isolate->GetCodeTracer()); | 
|  | 406         PrintF(scope.file(), "[deoptimizer patches for lazy deopt: "); | 
|  | 407         function->PrintName(scope.file()); | 
|  | 408         PrintF(scope.file(), | 
|  | 409                " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); | 
|  | 410       } | 
|  | 411       SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc()); | 
|  | 412       int deopt_index = safepoint.deoptimization_index(); | 
|  | 413       CHECK(deopt_index != Safepoint::kNoDeoptimizationIndex); | 
|  | 414     } | 
|  | 415   } | 
|  | 416 #endif | 
|  | 417 | 
| 395   // TODO(titzer): we need a handle scope only because of the macro assembler, | 418   // TODO(titzer): we need a handle scope only because of the macro assembler, | 
| 396   // which is only used in EnsureCodeForDeoptimizationEntry. | 419   // which is only used in EnsureCodeForDeoptimizationEntry. | 
| 397   HandleScope scope(isolate); | 420   HandleScope scope(isolate); | 
|  | 421 | 
| 398   // Now patch all the codes for deoptimization. | 422   // Now patch all the codes for deoptimization. | 
| 399   for (int i = 0; i < codes.length(); i++) { | 423   for (int i = 0; i < codes.length(); i++) { | 
| 400     // It is finally time to die, code object. | 424     // It is finally time to die, code object. | 
| 401     // Do platform-specific patching to force any activations to lazy deopt. | 425     // Do platform-specific patching to force any activations to lazy deopt. | 
| 402     PatchCodeForDeoptimization(isolate, codes[i]); | 426     PatchCodeForDeoptimization(isolate, codes[i]); | 
| 403 | 427 | 
| 404     // We might be in the middle of incremental marking with compaction. | 428     // We might be in the middle of incremental marking with compaction. | 
| 405     // Tell collector to treat this code object in a special way and | 429     // Tell collector to treat this code object in a special way and | 
| 406     // ignore all slots that might have been recorded on it. | 430     // ignore all slots that might have been recorded on it. | 
| 407     isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]); | 431     isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]); | 
| (...skipping 3067 matching lines...) | 
| 3475 | 3499 | 
| 3476 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 3500 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 
| 3477   v->VisitPointer(BitCast<Object**>(&function_)); | 3501   v->VisitPointer(BitCast<Object**>(&function_)); | 
| 3478   v->VisitPointers(parameters_, parameters_ + parameters_count_); | 3502   v->VisitPointers(parameters_, parameters_ + parameters_count_); | 
| 3479   v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 3503   v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 
| 3480 } | 3504 } | 
| 3481 | 3505 | 
| 3482 #endif  // ENABLE_DEBUGGER_SUPPORT | 3506 #endif  // ENABLE_DEBUGGER_SUPPORT | 
| 3483 | 3507 | 
| 3484 } }  // namespace v8::internal | 3508 } }  // namespace v8::internal | 
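
The DEBUG block added above (new lines 395-416) walks every OPTIMIZED stack frame and checks that the safepoint recorded at the frame's current pc carries a deoptimization index, i.e. that each live activation can still be lazily deoptimized before the code objects are patched. Below is a minimal, self-contained sketch of that invariant check; `Frame`, `SafepointTable`, and `kNoDeoptimizationIndex` here are toy stand-ins for illustration, not V8's real `StackFrameIterator`/`SafepointEntry` API.

```cpp
// Sketch only: toy types standing in for V8 internals.
#include <cassert>
#include <cstdint>
#include <unordered_map>
#include <vector>

constexpr int kNoDeoptimizationIndex = -1;  // stand-in for Safepoint::kNoDeoptimizationIndex

struct Frame {
  bool optimized;  // true for StackFrame::OPTIMIZED-style frames
  uintptr_t pc;    // current pc of the activation
};

// Maps a pc inside optimized code to the deoptimization index recorded
// at that safepoint (kNoDeoptimizationIndex if none was recorded).
struct SafepointTable {
  std::unordered_map<uintptr_t, int> deopt_index_at_pc;

  int DeoptimizationIndex(uintptr_t pc) const {
    auto it = deopt_index_at_pc.find(pc);
    return it == deopt_index_at_pc.end() ? kNoDeoptimizationIndex : it->second;
  }
};

// The invariant the new DEBUG code asserts: every live optimized
// activation must have deopt info at its current pc.
void CheckActivationsCanDeopt(const std::vector<Frame>& stack,
                              const SafepointTable& table) {
  for (const Frame& frame : stack) {
    if (!frame.optimized) continue;
    int deopt_index = table.DeoptimizationIndex(frame.pc);
    assert(deopt_index != kNoDeoptimizationIndex &&
           "optimized frame has no deopt info at its current pc");
    (void)deopt_index;  // keep release builds warning-free
  }
}

int main() {
  SafepointTable table;
  table.deopt_index_at_pc[0x1000] = 7;  // an optimized pc with a deopt index
  std::vector<Frame> stack = {{true, 0x1000}, {false, 0x2000}};
  CheckActivationsCanDeopt(stack, table);  // passes: index 7 found for 0x1000
  return 0;
}
```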