OLD | NEW |
---|---|
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 38 matching lines...)
49 NOT_EXECUTABLE, | 49 NOT_EXECUTABLE, |
50 #else | 50 #else |
51 EXECUTABLE, | 51 EXECUTABLE, |
52 #endif | 52 #endif |
53 NULL); | 53 NULL); |
54 } | 54 } |
55 | 55 |
56 | 56 |
57 DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator) | 57 DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator) |
58 : allocator_(allocator), | 58 : allocator_(allocator), |
59 current_(NULL), | |
60 #ifdef ENABLE_DEBUGGER_SUPPORT | 59 #ifdef ENABLE_DEBUGGER_SUPPORT |
61 deoptimized_frame_info_(NULL), | 60 deoptimized_frame_info_(NULL), |
62 #endif | 61 #endif |
63 deoptimizing_code_list_(NULL) { | 62 current_(NULL) { |
64 for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) { | 63 for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) { |
65 deopt_entry_code_entries_[i] = -1; | 64 deopt_entry_code_entries_[i] = -1; |
66 deopt_entry_code_[i] = AllocateCodeChunk(allocator); | 65 deopt_entry_code_[i] = AllocateCodeChunk(allocator); |
67 } | 66 } |
68 } | 67 } |
69 | 68 |
70 | 69 |
71 DeoptimizerData::~DeoptimizerData() { | 70 DeoptimizerData::~DeoptimizerData() { |
72 for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) { | 71 for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) { |
73 allocator_->Free(deopt_entry_code_[i]); | 72 allocator_->Free(deopt_entry_code_[i]); |
74 deopt_entry_code_[i] = NULL; | 73 deopt_entry_code_[i] = NULL; |
75 } | 74 } |
76 | |
77 DeoptimizingCodeListNode* current = deoptimizing_code_list_; | |
78 while (current != NULL) { | |
79 DeoptimizingCodeListNode* prev = current; | |
80 current = current->next(); | |
81 delete prev; | |
82 } | |
83 deoptimizing_code_list_ = NULL; | |
84 } | 75 } |
85 | 76 |
86 | 77 |
87 #ifdef ENABLE_DEBUGGER_SUPPORT | 78 #ifdef ENABLE_DEBUGGER_SUPPORT |
88 void DeoptimizerData::Iterate(ObjectVisitor* v) { | 79 void DeoptimizerData::Iterate(ObjectVisitor* v) { |
89 if (deoptimized_frame_info_ != NULL) { | 80 if (deoptimized_frame_info_ != NULL) { |
90 deoptimized_frame_info_->Iterate(v); | 81 deoptimized_frame_info_->Iterate(v); |
91 } | 82 } |
92 } | 83 } |
93 #endif | 84 #endif |
94 | 85 |
95 | 86 |
96 Code* DeoptimizerData::FindDeoptimizingCode(Address addr) { | 87 Code* Deoptimizer::FindDeoptimizingCode(Address addr) { |
97 for (DeoptimizingCodeListNode* node = deoptimizing_code_list_; | 88 if (function_->IsHeapObject()) { |
98 node != NULL; | 89 // Search all deoptimizing code in the native context of the function. |
99 node = node->next()) { | 90 Context* native_context = function_->context()->native_context(); |
100 if (node->code()->contains(addr)) return *node->code(); | 91 Object* element = native_context->DeoptimizedCodeListHead(); |
92 while (!element->IsUndefined()) { | |
93 Code* code = Code::cast(element); | |
94 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); | |
95 if (code->contains(addr)) return code; | |
96 element = code->next_code_link(); | |
97 } | |
101 } | 98 } |
102 return NULL; | 99 return NULL; |
103 } | 100 } |
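The new FindDeoptimizingCode above replaces the per-isolate DeoptimizingCodeListNode walk with a traversal of the native context's intrusive deoptimized-code list. A minimal, self-contained sketch of that traversal pattern, using hypothetical stand-in types (CodeStub, with next_code_link modeled as a plain pointer) rather than V8's real heap objects:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical stand-in for a Code object; V8 threads the list through
    // a next_code_link slot on the object itself (an intrusive list).
    struct CodeStub {
      std::uintptr_t start;
      std::uintptr_t size;
      CodeStub* next_code_link;  // nullptr plays the role of undefined
      bool contains(std::uintptr_t addr) const {
        return addr >= start && addr < start + size;
      }
    };

    // Walk the singly linked deoptimized-code list until a code object
    // containing the address is found, as the new method does per context.
    CodeStub* FindDeoptimizingCode(CodeStub* head, std::uintptr_t addr) {
      for (CodeStub* code = head; code != nullptr; code = code->next_code_link) {
        if (code->contains(addr)) return code;
      }
      return nullptr;
    }

    int main() {
      CodeStub b{0x2000, 0x100, nullptr};
      CodeStub a{0x1000, 0x100, &b};
      std::printf("found: %p\n",
                  static_cast<void*>(FindDeoptimizingCode(&a, 0x2040)));
      return 0;
    }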
104 | 101 |
105 | 102 |
106 void DeoptimizerData::RemoveDeoptimizingCode(Code* code) { | |
107 for (DeoptimizingCodeListNode *prev = NULL, *cur = deoptimizing_code_list_; | |
108 cur != NULL; | |
109 prev = cur, cur = cur->next()) { | |
110 if (*cur->code() == code) { | |
111 if (prev == NULL) { | |
112 deoptimizing_code_list_ = cur->next(); | |
113 } else { | |
114 prev->set_next(cur->next()); | |
115 } | |
116 delete cur; | |
117 return; | |
118 } | |
119 } | |
120 // Deoptimizing code is removed through a weak callback. Each object is expected | |
121 // to be removed once and only once. | |
122 UNREACHABLE(); | |
123 } | |
124 | |
125 | |
126 // We rely on this function not causing a GC. It is called from generated code | 103 // We rely on this function not causing a GC. It is called from generated code |
127 // without having a real stack frame in place. | 104 // without having a real stack frame in place. |
128 Deoptimizer* Deoptimizer::New(JSFunction* function, | 105 Deoptimizer* Deoptimizer::New(JSFunction* function, |
129 BailoutType type, | 106 BailoutType type, |
130 unsigned bailout_id, | 107 unsigned bailout_id, |
131 Address from, | 108 Address from, |
132 int fp_to_sp_delta, | 109 int fp_to_sp_delta, |
133 Isolate* isolate) { | 110 Isolate* isolate) { |
134 Deoptimizer* deoptimizer = new Deoptimizer(isolate, | 111 Deoptimizer* deoptimizer = new Deoptimizer(isolate, |
135 function, | 112 function, |
(...skipping 146 matching lines...)
282 void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm, | 259 void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm, |
283 int count, | 260 int count, |
284 BailoutType type) { | 261 BailoutType type) { |
285 TableEntryGenerator generator(masm, type, count); | 262 TableEntryGenerator generator(masm, type, count); |
286 generator.Generate(); | 263 generator.Generate(); |
287 } | 264 } |
288 | 265 |
289 | 266 |
290 void Deoptimizer::VisitAllOptimizedFunctionsForContext( | 267 void Deoptimizer::VisitAllOptimizedFunctionsForContext( |
291 Context* context, OptimizedFunctionVisitor* visitor) { | 268 Context* context, OptimizedFunctionVisitor* visitor) { |
292 Isolate* isolate = context->GetIsolate(); | |
293 Zone zone(isolate); | |
294 DisallowHeapAllocation no_allocation; | 269 DisallowHeapAllocation no_allocation; |
295 | 270 |
296 ASSERT(context->IsNativeContext()); | 271 ASSERT(context->IsNativeContext()); |
297 | 272 |
298 visitor->EnterContext(context); | 273 visitor->EnterContext(context); |
299 | 274 |
300 // Create a snapshot of the optimized functions list. This is needed because | 275 // Visit the list of optimized functions, removing elements that |
301 // visitors might remove more than one link from the list at once. | 276 // no longer refer to optimized code. |
302 ZoneList<JSFunction*> snapshot(1, &zone); | 277 JSFunction* prev = NULL; |
303 Object* element = context->OptimizedFunctionsListHead(); | 278 Object* element = context->OptimizedFunctionsListHead(); |
304 while (!element->IsUndefined()) { | 279 while (!element->IsUndefined()) { |
305 JSFunction* element_function = JSFunction::cast(element); | 280 JSFunction* function = JSFunction::cast(element); |
306 snapshot.Add(element_function, &zone); | 281 Object* next = function->next_function_link(); |
307 element = element_function->next_function_link(); | 282 if (function->code()->kind() != Code::OPTIMIZED_FUNCTION || |
308 } | 283 (visitor->VisitFunction(function), |
309 | 284 function->code()->kind() != Code::OPTIMIZED_FUNCTION)) { |
310 // Run through the snapshot of optimized functions and visit them. | 285 // The function no longer refers to optimized code, or the visitor |
311 for (int i = 0; i < snapshot.length(); ++i) { | 286 // changed the code to which it refers to no longer be optimized code. |
312 visitor->VisitFunction(snapshot.at(i)); | 287 // Remove the function from this list. |
288 if (prev != NULL) { | |
289 prev->set_next_function_link(next); | |
290 } else { | |
291 context->SetOptimizedFunctionsListHead(next); | |
292 } | |
293 // The visitor should not alter the link directly. | |
294 ASSERT(function->next_function_link() == next); | |
295 // Set the next function link to undefined to indicate it is no longer | |
296 // in the optimized functions list. | |
297 function->set_next_function_link(context->GetHeap()->undefined_value()); | |
298 } else { | |
299 // The visitor should not alter the link directly. | |
300 ASSERT(function->next_function_link() == next); | |
301 // Preserve this element. |
302 prev = function; | |
303 } | |
304 element = next; | |
313 } | 305 } |
314 | 306 |
315 visitor->LeaveContext(context); | 307 visitor->LeaveContext(context); |
316 } | 308 } |
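The rewritten visitor loop above deliberately folds unlinking into the traversal instead of first snapshotting the list into a ZoneList. A sketch of that trailing-pointer unlink pattern, under simplified hypothetical types (FunctionNode standing in for JSFunction and its next_function_link):

    #include <cstdio>

    struct FunctionNode {
      int id;
      bool optimized;      // stands in for code()->kind() == OPTIMIZED_FUNCTION
      FunctionNode* next;  // stands in for next_function_link
    };

    // Visit each node once; if the visitor leaves it unoptimized, splice it
    // out using the trailing `prev` pointer, otherwise keep it linked.
    void VisitAndUnlink(FunctionNode** head, void (*visit)(FunctionNode*)) {
      FunctionNode* prev = nullptr;
      FunctionNode* node = *head;
      while (node != nullptr) {
        FunctionNode* next = node->next;  // grab the link before visiting
        visit(node);                      // may clear node->optimized
        if (!node->optimized) {
          if (prev != nullptr) prev->next = next; else *head = next;
          node->next = nullptr;  // mirrors setting the link to undefined
        } else {
          prev = node;           // preserve this element
        }
        node = next;
      }
    }

    int main() {
      FunctionNode c{3, true, nullptr}, b{2, false, &c}, a{1, true, &b};
      FunctionNode* head = &a;
      VisitAndUnlink(&head, [](FunctionNode* f) { std::printf("visit %d\n", f->id); });
      for (FunctionNode* n = head; n != nullptr; n = n->next) {
        std::printf("kept %d\n", n->id);
      }
      return 0;
    }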
317 | 309 |
318 | 310 |
319 void Deoptimizer::VisitAllOptimizedFunctions( | 311 void Deoptimizer::VisitAllOptimizedFunctions( |
320 Isolate* isolate, | 312 Isolate* isolate, |
321 OptimizedFunctionVisitor* visitor) { | 313 OptimizedFunctionVisitor* visitor) { |
322 DisallowHeapAllocation no_allocation; | 314 DisallowHeapAllocation no_allocation; |
323 | 315 |
324 // Run through the list of all native contexts and deoptimize. | 316 // Run through the list of all native contexts. |
325 Object* context = isolate->heap()->native_contexts_list(); | 317 Object* context = isolate->heap()->native_contexts_list(); |
326 while (!context->IsUndefined()) { | 318 while (!context->IsUndefined()) { |
327 VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor); | 319 VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor); |
328 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 320 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
329 } | 321 } |
330 } | 322 } |
331 | 323 |
332 | 324 |
333 // Removes the functions selected by the given filter from the optimized | 325 // Unlink functions referring to code marked for deoptimization, then move |
334 // function list of the given context and adds their code to the list of | 326 // marked code from the optimized code list to the deoptimized code list, |
335 // code objects to be deoptimized. | 327 // and patch code for lazy deopt. |
336 static void SelectCodeToDeoptimize(Context* context, | 328 void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) { |
337 OptimizedFunctionFilter* filter, | |
338 ZoneList<Code*>* codes, | |
339 Zone* zone, | |
340 Object* undefined) { | |
341 DisallowHeapAllocation no_allocation; | 329 DisallowHeapAllocation no_allocation; |
342 Object* current = context->get(Context::OPTIMIZED_FUNCTIONS_LIST); | |
343 Object* remainder_head = undefined; | |
344 Object* remainder_tail = undefined; | |
345 | 330 |
346 // TODO(titzer): rewrite to not modify unselected functions. | 331 // A "closure" that unlinks optimized code that is going to be |
347 while (current != undefined) { | 332 // deoptimized from the functions that refer to it. |
348 JSFunction* function = JSFunction::cast(current); | 333 class SelectedCodeUnlinker: public OptimizedFunctionVisitor { |
349 current = function->next_function_link(); | 334 public: |
350 if (filter->TakeFunction(function)) { | 335 virtual void EnterContext(Context* context) { } // Don't care. |
351 // Extract this function from the context's list and remember the code. | 336 virtual void LeaveContext(Context* context) { } // Don't care. |
337 virtual void VisitFunction(JSFunction* function) { | |
352 Code* code = function->code(); | 338 Code* code = function->code(); |
353 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); | 339 if (!code->marked_for_deoptimization()) return; |
354 if (code->marked_for_deoptimization()) { | 340 |
355 ASSERT(codes->Contains(code)); | 341 // Unlink this function and evict from optimized code map. |
356 } else { | |
357 code->set_marked_for_deoptimization(true); | |
358 codes->Add(code, zone); | |
359 } | |
360 SharedFunctionInfo* shared = function->shared(); | 342 SharedFunctionInfo* shared = function->shared(); |
361 // Replace the function's code with the shared code. | |
362 function->set_code(shared->code()); | 343 function->set_code(shared->code()); |
363 // Evict the code from the optimized code map. | |
364 shared->EvictFromOptimizedCodeMap(code, "deoptimized function"); | 344 shared->EvictFromOptimizedCodeMap(code, "deoptimized function"); |
365 // Remove the function from the optimized functions list. | |
366 function->set_next_function_link(undefined); | |
367 | 345 |
368 if (FLAG_trace_deopt) { | 346 if (FLAG_trace_deopt) { |
369 PrintF("[forced deoptimization: "); | 347 PrintF("[deoptimizer unlinked: "); |
370 function->PrintName(); | 348 function->PrintName(); |
371 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); | 349 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); |
372 } | 350 } |
351 } | |
352 }; | |
353 | |
354 // Unlink all functions that refer to marked code. | |
355 SelectedCodeUnlinker unlinker; | |
356 VisitAllOptimizedFunctionsForContext(context, &unlinker); | |
357 | |
358 // Move marked code from the optimized code list to the deoptimized | |
359 // code list, collecting them into a ZoneList. | |
360 Isolate* isolate = context->GetHeap()->isolate(); | |
361 Zone zone(isolate); | |
362 HandleScope scope(isolate); | |
363 ZoneList<Handle<Code> > codes(10, &zone); | |
364 | |
365 // Walk over all optimized code objects in this native context. | |
366 Code* prev = NULL; | |
367 Object* element = context->OptimizedCodeListHead(); | |
368 while (!element->IsUndefined()) { | |
369 Code* code = Code::cast(element); | |
370 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); | |
371 Object* next = code->next_code_link(); | |
372 if (code->marked_for_deoptimization()) { | |
373 // Put the code into the list for later patching. | |
374 codes.Add(Handle<Code>(code), &zone); | |
375 | |
376 if (prev != NULL) { | |
377 // Skip this code in the optimized code list. | |
378 prev->set_next_code_link(next); | |
379 } else { | |
380 // There was no previous node, the next node is the new head. | |
381 context->SetOptimizedCodeListHead(next); | |
382 } | |
383 | |
384 // Move the code to the _deoptimized_ code list. | |
385 code->set_next_code_link(context->DeoptimizedCodeListHead()); | |
386 context->SetDeoptimizedCodeListHead(code); | |
373 } else { | 387 } else { |
374 // Don't select this function; link it back into the list. | 388 // Not marked; preserve this element. |
375 if (remainder_head == undefined) { | 389 prev = code; |
376 remainder_head = function; | |
377 } else { | |
378 JSFunction::cast(remainder_tail)->set_next_function_link(function); | |
379 } | |
380 remainder_tail = function; | |
381 } | 390 } |
391 element = next; | |
382 } | 392 } |
383 if (remainder_tail != undefined) { | 393 |
384 JSFunction::cast(remainder_tail)->set_next_function_link(undefined); | 394 // Now patch all the codes for deoptimization. |
395 for (int i = 0; i < codes.length(); i++) { | |
396 // It is finally time to die, code object. | |
397 // Do platform-specific patching to force any activations to lazy deopt. | |
398 // (This may actually cause a GC due to a stub being generated.) | |
Michael Starzinger (2013/09/03 22:03:47): Is this actually true? Can this cause a GC, I cann…
titzer (2013/09/04 11:19:04): Right...fixed. No GC tolerated.
399 PatchCodeForDeoptimization(isolate, *codes[i]); | |
400 | |
401 // We might be in the middle of incremental marking with compaction. | |
402 // Tell collector to treat this code object in a special way and | |
403 // ignore all slots that might have been recorded on it. | |
404 isolate->heap()->mark_compact_collector()->InvalidateCode(*codes[i]); | |
385 } | 405 } |
386 context->set(Context::OPTIMIZED_FUNCTIONS_LIST, remainder_head); | |
387 } | 406 } |
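DeoptimizeMarkedCodeForContext above performs two pieces of list surgery: marked code is spliced out of the optimized-code list and pushed onto the head of the deoptimized-code list, then collected for patching. A compact sketch of that splice-and-move step, again with illustrative stand-in types rather than V8's Code/Context classes:

    #include <cstdio>
    #include <vector>

    struct CodeNode {
      int id;
      bool marked_for_deoptimization;
      CodeNode* next;  // stands in for next_code_link
    };

    // Move every marked node from the optimized list to the front of the
    // deoptimized list, collecting it so it can be patched afterwards.
    void MoveMarkedCode(CodeNode** optimized_head, CodeNode** deoptimized_head,
                        std::vector<CodeNode*>* to_patch) {
      CodeNode* prev = nullptr;
      CodeNode* node = *optimized_head;
      while (node != nullptr) {
        CodeNode* next = node->next;
        if (node->marked_for_deoptimization) {
          to_patch->push_back(node);
          // Skip this node in the optimized list (head moves if no prev).
          if (prev != nullptr) prev->next = next; else *optimized_head = next;
          // Push it onto the deoptimized list.
          node->next = *deoptimized_head;
          *deoptimized_head = node;
        } else {
          prev = node;  // not marked; preserve this element
        }
        node = next;
      }
    }

    int main() {
      CodeNode c{3, true, nullptr}, b{2, false, &c}, a{1, true, &b};
      CodeNode* optimized = &a;
      CodeNode* deoptimized = nullptr;
      std::vector<CodeNode*> to_patch;
      MoveMarkedCode(&optimized, &deoptimized, &to_patch);
      // The real code would now call PatchCodeForDeoptimization on each
      // collected entry and invalidate it for the GC.
      std::printf("to patch: %zu\n", to_patch.size());
      return 0;
    }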
388 | 407 |
389 | 408 |
390 class DeoptimizeAllFilter : public OptimizedFunctionFilter { | 409 void Deoptimizer::DeoptimizeAll(Isolate* isolate) { |
391 public: | 410 if (FLAG_trace_deopt) { |
392 virtual bool TakeFunction(JSFunction* function) { | 411 PrintF("[deoptimize all code in all contexts]\n"); |
393 return true; | |
394 } | 412 } |
395 }; | 413 DisallowHeapAllocation no_allocation; |
414 // For all contexts, mark all code, then deoptimize. | |
415 Object* context = isolate->heap()->native_contexts_list(); | |
416 while (!context->IsUndefined()) { | |
417 Context* native_context = Context::cast(context); | |
418 MarkAllCodeForContext(native_context); | |
419 DeoptimizeMarkedCodeForContext(native_context); | |
Michael Starzinger (2013/09/03 21:52:31): Since DeoptimizeMarkedCodeForContext() might cause…
titzer (2013/09/04 11:19:04): So; you were right, we don't tolerate GC here--tha…
420 context = native_context->get(Context::NEXT_CONTEXT_LINK); | |
421 } | |
422 } | |
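DeoptimizeAll above is a two-phase pass repeated per native context: mark everything on the optimized-code list, then run the shared sweep. A minimal model of that phase structure, with hypothetical ContextNode/CodeNode types in place of V8's heap-allocated chains (the sweep here only counts; the real one unlinks and patches):

    #include <cstdio>

    struct CodeNode { bool marked; CodeNode* next; };
    struct ContextNode {
      CodeNode* optimized_code_head;   // stands in for OptimizedCodeListHead()
      ContextNode* next_context_link;  // stands in for NEXT_CONTEXT_LINK
    };

    void MarkAllCodeForContext(ContextNode* context) {
      for (CodeNode* c = context->optimized_code_head; c != nullptr; c = c->next) {
        c->marked = true;
      }
    }

    void DeoptimizeMarkedCodeForContext(ContextNode* context) {
      int count = 0;
      for (CodeNode* c = context->optimized_code_head; c != nullptr; c = c->next) {
        if (c->marked) count++;  // the real pass unlinks and patches here
      }
      std::printf("would deoptimize %d code objects\n", count);
    }

    // Mark-then-sweep over every native context in the chain.
    void DeoptimizeAll(ContextNode* contexts) {
      for (ContextNode* ctx = contexts; ctx != nullptr; ctx = ctx->next_context_link) {
        MarkAllCodeForContext(ctx);
        DeoptimizeMarkedCodeForContext(ctx);
      }
    }

    int main() {
      CodeNode c2{false, nullptr};
      CodeNode c1{false, &c2};
      ContextNode context{&c1, nullptr};
      DeoptimizeAll(&context);
      return 0;
    }

Splitting the phases is what lets DeoptimizeMarkedCode below reuse the same sweep without the marking step.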
396 | 423 |
397 | 424 |
398 class DeoptimizeWithMatchingCodeFilter : public OptimizedFunctionFilter { | 425 void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) { |
399 public: | 426 if (FLAG_trace_deopt) { |
400 explicit DeoptimizeWithMatchingCodeFilter(Code* code) : code_(code) {} | 427 PrintF("[deoptimize marked code in all contexts]\n"); |
401 virtual bool TakeFunction(JSFunction* function) { | |
402 return function->code() == code_; | |
403 } | 428 } |
404 private: | 429 DisallowHeapAllocation no_allocation; |
405 Code* code_; | 430 // For all contexts, deoptimize code already marked. |
406 }; | 431 Object* context = isolate->heap()->native_contexts_list(); |
407 | 432 while (!context->IsUndefined()) { |
408 | 433 Context* native_context = Context::cast(context); |
409 class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter { | 434 DeoptimizeMarkedCodeForContext(native_context); |
Michael Starzinger (2013/09/03 21:52:31): Likewise.
410 public: | 435 context = native_context->get(Context::NEXT_CONTEXT_LINK); |
411 virtual bool TakeFunction(JSFunction* function) { | |
412 return function->code()->marked_for_deoptimization(); | |
413 } | 436 } |
414 }; | |
415 | |
416 | |
417 void Deoptimizer::DeoptimizeAll(Isolate* isolate) { | |
418 DisallowHeapAllocation no_allocation; | |
419 | |
420 if (FLAG_trace_deopt) { | |
421 PrintF("[deoptimize all contexts]\n"); | |
422 } | |
423 | |
424 DeoptimizeAllFilter filter; | |
425 DeoptimizeAllFunctionsWith(isolate, &filter); | |
426 } | 437 } |
427 | 438 |
428 | 439 |
429 void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) { | 440 void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) { |
430 DisallowHeapAllocation no_allocation; | 441 if (FLAG_trace_deopt) { |
431 DeoptimizeAllFilter filter; | 442 PrintF("[deoptimize global object @ 0x%08" V8PRIxPTR "]\n", |
443 reinterpret_cast<intptr_t>(object)); | |
444 } | |
432 if (object->IsJSGlobalProxy()) { | 445 if (object->IsJSGlobalProxy()) { |
433 Object* proto = object->GetPrototype(); | 446 Object* proto = object->GetPrototype(); |
434 ASSERT(proto->IsJSGlobalObject()); | 447 ASSERT(proto->IsJSGlobalObject()); |
435 DeoptimizeAllFunctionsForContext( | 448 Context* native_context = GlobalObject::cast(proto)->native_context(); |
436 GlobalObject::cast(proto)->native_context(), &filter); | 449 MarkAllCodeForContext(native_context); |
450 DeoptimizeMarkedCodeForContext(native_context); | |
Michael Starzinger (2013/09/03 21:52:31): Likewise.
437 } else if (object->IsGlobalObject()) { | 451 } else if (object->IsGlobalObject()) { |
438 DeoptimizeAllFunctionsForContext( | 452 Context* native_context = GlobalObject::cast(object)->native_context(); |
439 GlobalObject::cast(object)->native_context(), &filter); | 453 MarkAllCodeForContext(native_context); |
454 DeoptimizeMarkedCodeForContext(native_context); | |
455 } | |
456 } | |
457 | |
458 | |
459 void Deoptimizer::MarkAllCodeForContext(Context* context) { | |
460 Object* element = context->OptimizedCodeListHead(); | |
461 while (!element->IsUndefined()) { | |
462 Code* code = Code::cast(element); | |
463 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); | |
464 code->set_marked_for_deoptimization(true); | |
465 element = code->next_code_link(); | |
440 } | 466 } |
441 } | 467 } |
442 | 468 |
443 | 469 |
444 void Deoptimizer::DeoptimizeFunction(JSFunction* function) { | 470 void Deoptimizer::DeoptimizeFunction(JSFunction* function) { |
445 Code* code = function->code(); | 471 Code* code = function->code(); |
446 if (code->kind() != Code::OPTIMIZED_FUNCTION) return; | 472 if (code->kind() == Code::OPTIMIZED_FUNCTION) { |
447 DeoptimizeWithMatchingCodeFilter filter(code); | 473 // Mark the code for deoptimization and unlink any functions that also |
448 DeoptimizeAllFunctionsForContext( | 474 // refer to that code. The code cannot be shared across native contexts, |
449 function->context()->native_context(), &filter); | 475 // so we only need to search one. |
450 } | 476 code->set_marked_for_deoptimization(true); |
451 | 477 DeoptimizeMarkedCodeForContext(function->context()->native_context()); |
Michael Starzinger (2013/09/03 21:52:31): Likewise.
452 | |
453 void Deoptimizer::DeoptimizeAllFunctionsForContext( | |
454 Context* context, OptimizedFunctionFilter* filter) { | |
455 ASSERT(context->IsNativeContext()); | |
456 Isolate* isolate = context->GetIsolate(); | |
457 Object* undefined = isolate->heap()->undefined_value(); | |
458 Zone zone(isolate); | |
459 ZoneList<Code*> codes(4, &zone); | |
460 SelectCodeToDeoptimize(context, filter, &codes, &zone, undefined); | |
461 for (int i = 0; i < codes.length(); i++) { | |
462 DeoptimizeCode(isolate, codes.at(i)); | |
463 } | 478 } |
464 } | 479 } |
465 | 480 |
466 | 481 |
467 void Deoptimizer::DeoptimizeAllFunctionsWith(Isolate* isolate, | |
468 OptimizedFunctionFilter* filter) { | |
469 DisallowHeapAllocation no_allocation; | |
470 | |
471 // Run through the list of all native contexts and deoptimize. | |
472 Object* context = isolate->heap()->native_contexts_list(); | |
473 while (!context->IsUndefined()) { | |
474 DeoptimizeAllFunctionsForContext(Context::cast(context), filter); | |
475 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | |
476 } | |
477 } | |
478 | |
479 | |
480 void Deoptimizer::DeoptimizeCodeList(Isolate* isolate, ZoneList<Code*>* codes) { | |
481 if (codes->length() == 0) return; // Nothing to do. | |
482 | |
483 // Mark the code; any functions referring to this code will be selected. | |
484 for (int i = 0; i < codes->length(); i++) { | |
485 ASSERT(!codes->at(i)->marked_for_deoptimization()); | |
486 codes->at(i)->set_marked_for_deoptimization(true); | |
487 } | |
488 | |
489 // For all contexts, remove optimized functions that refer to the selected | |
490 // code from the optimized function lists. | |
491 Object* undefined = isolate->heap()->undefined_value(); | |
492 Zone zone(isolate); | |
493 Object* list = isolate->heap()->native_contexts_list(); | |
494 DeoptimizeMarkedCodeFilter filter; | |
495 while (!list->IsUndefined()) { | |
496 Context* context = Context::cast(list); | |
497 // Note that selecting code unlinks the functions that refer to it. | |
498 SelectCodeToDeoptimize(context, &filter, codes, &zone, undefined); | |
499 list = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | |
500 } | |
501 | |
502 // Now deoptimize all the code. | |
503 for (int i = 0; i < codes->length(); i++) { | |
504 DeoptimizeCode(isolate, codes->at(i)); | |
505 } | |
506 } | |
507 | |
508 | |
509 void Deoptimizer::DeoptimizeCode(Isolate* isolate, Code* code) { | |
510 HandleScope scope(isolate); | |
511 DisallowHeapAllocation nha; | |
512 | |
513 // Do platform-specific patching of the optimized code. | |
514 PatchCodeForDeoptimization(isolate, code); | |
515 | |
516 // Add the deoptimizing code to the list. | |
517 DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code); | |
518 DeoptimizerData* data = isolate->deoptimizer_data(); | |
519 node->set_next(data->deoptimizing_code_list_); | |
520 data->deoptimizing_code_list_ = node; | |
521 | |
522 // We might be in the middle of incremental marking with compaction. | |
523 // Tell collector to treat this code object in a special way and | |
524 // ignore all slots that might have been recorded on it. | |
525 isolate->heap()->mark_compact_collector()->InvalidateCode(code); | |
526 } | |
527 | |
528 | |
529 void Deoptimizer::HandleWeakDeoptimizedCode(v8::Isolate* isolate, | |
530 v8::Persistent<v8::Value>* obj, | |
531 void* parameter) { | |
532 DeoptimizingCodeListNode* node = | |
533 reinterpret_cast<DeoptimizingCodeListNode*>(parameter); | |
534 DeoptimizerData* data = | |
535 reinterpret_cast<Isolate*>(isolate)->deoptimizer_data(); | |
536 data->RemoveDeoptimizingCode(*node->code()); | |
537 #ifdef DEBUG | |
538 for (DeoptimizingCodeListNode* current = data->deoptimizing_code_list_; | |
539 current != NULL; | |
540 current = current->next()) { | |
541 ASSERT(current != node); | |
542 } | |
543 #endif | |
544 } | |
545 | |
546 | |
547 void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) { | 482 void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) { |
548 deoptimizer->DoComputeOutputFrames(); | 483 deoptimizer->DoComputeOutputFrames(); |
549 } | 484 } |
550 | 485 |
551 | 486 |
552 bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type, | 487 bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type, |
553 StackFrame::Type frame_type) { | 488 StackFrame::Type frame_type) { |
554 switch (deopt_type) { | 489 switch (deopt_type) { |
555 case EAGER: | 490 case EAGER: |
556 case SOFT: | 491 case SOFT: |
(...skipping 83 matching lines...)
640 input_->SetFrameType(frame_type); | 575 input_->SetFrameType(frame_type); |
641 } | 576 } |
642 | 577 |
643 | 578 |
644 Code* Deoptimizer::FindOptimizedCode(JSFunction* function, | 579 Code* Deoptimizer::FindOptimizedCode(JSFunction* function, |
645 Code* optimized_code) { | 580 Code* optimized_code) { |
646 switch (bailout_type_) { | 581 switch (bailout_type_) { |
647 case Deoptimizer::SOFT: | 582 case Deoptimizer::SOFT: |
648 case Deoptimizer::EAGER: | 583 case Deoptimizer::EAGER: |
649 case Deoptimizer::LAZY: { | 584 case Deoptimizer::LAZY: { |
650 Code* compiled_code = | 585 Code* compiled_code = FindDeoptimizingCode(from_); |
651 isolate_->deoptimizer_data()->FindDeoptimizingCode(from_); | |
652 return (compiled_code == NULL) | 586 return (compiled_code == NULL) |
653 ? static_cast<Code*>(isolate_->FindCodeObject(from_)) | 587 ? static_cast<Code*>(isolate_->FindCodeObject(from_)) |
654 : compiled_code; | 588 : compiled_code; |
655 } | 589 } |
656 case Deoptimizer::OSR: { | 590 case Deoptimizer::OSR: { |
657 // The function has already been optimized and we're transitioning | 591 // The function has already been optimized and we're transitioning |
658 // from the unoptimized shared version to the optimized one in the | 592 // from the unoptimized shared version to the optimized one in the |
659 // function. The return address (from_) points to unoptimized code. | 593 // function. The return address (from_) points to unoptimized code. |
660 Code* compiled_code = function->code(); | 594 Code* compiled_code = function->code(); |
661 ASSERT(compiled_code->kind() == Code::OPTIMIZED_FUNCTION); | 595 ASSERT(compiled_code->kind() == Code::OPTIMIZED_FUNCTION); |
(...skipping 96 matching lines...)
758 shared->SourceCodePrint(&stream, -1); | 692 shared->SourceCodePrint(&stream, -1); |
759 PrintF("[source:\n%s\n]", *stream.ToCString()); | 693 PrintF("[source:\n%s\n]", *stream.ToCString()); |
760 | 694 |
761 FATAL("unable to find pc offset during deoptimization"); | 695 FATAL("unable to find pc offset during deoptimization"); |
762 return -1; | 696 return -1; |
763 } | 697 } |
764 | 698 |
765 | 699 |
766 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) { | 700 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) { |
767 int length = 0; | 701 int length = 0; |
768 DeoptimizingCodeListNode* node = | 702 // count all entries in the deoptimizing code list of every context. |
Michael Starzinger (2013/09/03 21:52:31): nit: Capitalize sentence.
titzer (2013/09/04 11:19:04): Done.
769 isolate->deoptimizer_data()->deoptimizing_code_list_; | 703 Object* context = isolate->heap()->native_contexts_list(); |
770 while (node != NULL) { | 704 while (!context->IsUndefined()) { |
771 length++; | 705 Context* native_context = Context::cast(context); |
772 node = node->next(); | 706 Object* element = native_context->DeoptimizedCodeListHead(); |
707 while (!element->IsUndefined()) { | |
708 Code* code = Code::cast(element); | |
709 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); | |
710 length++; | |
711 element = code->next_code_link(); | |
712 } | |
713 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | |
773 } | 714 } |
774 return length; | 715 return length; |
775 } | 716 } |
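The counting logic above becomes a nested walk: an outer loop over the native-context chain and an inner loop over each context's deoptimized-code list. A self-contained sketch with the same kind of simplified stand-in types used in the earlier examples:

    #include <cstdio>

    struct CodeNode { CodeNode* next_code_link; };
    struct ContextNode {
      CodeNode* deoptimized_code_head;  // stands in for DeoptimizedCodeListHead()
      ContextNode* next_context_link;
    };

    // Sum the lengths of every context's deoptimized-code list.
    int GetDeoptimizedCodeCount(ContextNode* contexts) {
      int length = 0;
      for (ContextNode* ctx = contexts; ctx != nullptr; ctx = ctx->next_context_link) {
        for (CodeNode* code = ctx->deoptimized_code_head; code != nullptr;
             code = code->next_code_link) {
          length++;
        }
      }
      return length;
    }

    int main() {
      CodeNode b{nullptr};
      CodeNode a{&b};
      ContextNode context{&a, nullptr};
      std::printf("deoptimized code count: %d\n", GetDeoptimizedCodeCount(&context));
      return 0;
    }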
776 | 717 |
777 | 718 |
778 // We rely on this function not causing a GC. It is called from generated code | 719 // We rely on this function not causing a GC. It is called from generated code |
779 // without having a real stack frame in place. | 720 // without having a real stack frame in place. |
780 void Deoptimizer::DoComputeOutputFrames() { | 721 void Deoptimizer::DoComputeOutputFrames() { |
781 if (bailout_type_ == OSR) { | 722 if (bailout_type_ == OSR) { |
782 DoComputeOsrOutputFrame(); | 723 DoComputeOsrOutputFrame(); |
(...skipping 2313 matching lines...)
3096 case CAPTURED_OBJECT: | 3037 case CAPTURED_OBJECT: |
3097 return "CAPTURED_OBJECT"; | 3038 return "CAPTURED_OBJECT"; |
3098 } | 3039 } |
3099 UNREACHABLE(); | 3040 UNREACHABLE(); |
3100 return ""; | 3041 return ""; |
3101 } | 3042 } |
3102 | 3043 |
3103 #endif | 3044 #endif |
3104 | 3045 |
3105 | 3046 |
3106 DeoptimizingCodeListNode::DeoptimizingCodeListNode(Code* code): next_(NULL) { | |
3107 GlobalHandles* global_handles = code->GetIsolate()->global_handles(); | |
3108 // Globalize the code object and make it weak. | |
3109 code_ = Handle<Code>::cast(global_handles->Create(code)); | |
3110 global_handles->MakeWeak(reinterpret_cast<Object**>(code_.location()), | |
3111 this, | |
3112 Deoptimizer::HandleWeakDeoptimizedCode); | |
3113 } | |
3114 | |
3115 | |
3116 DeoptimizingCodeListNode::~DeoptimizingCodeListNode() { | |
3117 GlobalHandles* global_handles = code_->GetIsolate()->global_handles(); | |
3118 global_handles->Destroy(reinterpret_cast<Object**>(code_.location())); | |
3119 } | |
3120 | |
3121 | |
3122 // We can't intermix stack decoding and allocations because | 3047 // We can't intermix stack decoding and allocations because |
3123 // deoptimization infrastructure is not GC safe. | 3048 // deoptimization infrastructure is not GC safe. |
3124 // Thus we build a temporary structure in malloced space. | 3049 // Thus we build a temporary structure in malloced space. |
3125 SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator, | 3050 SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator, |
3126 DeoptimizationInputData* data, | 3051 DeoptimizationInputData* data, |
3127 JavaScriptFrame* frame) { | 3052 JavaScriptFrame* frame) { |
3128 Translation::Opcode opcode = | 3053 Translation::Opcode opcode = |
3129 static_cast<Translation::Opcode>(iterator->Next()); | 3054 static_cast<Translation::Opcode>(iterator->Next()); |
3130 | 3055 |
3131 switch (opcode) { | 3056 switch (opcode) { |
(...skipping 177 matching lines...)
3309 | 3234 |
3310 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 3235 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { |
3311 v->VisitPointer(BitCast<Object**>(&function_)); | 3236 v->VisitPointer(BitCast<Object**>(&function_)); |
3312 v->VisitPointers(parameters_, parameters_ + parameters_count_); | 3237 v->VisitPointers(parameters_, parameters_ + parameters_count_); |
3313 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 3238 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); |
3314 } | 3239 } |
3315 | 3240 |
3316 #endif // ENABLE_DEBUGGER_SUPPORT | 3241 #endif // ENABLE_DEBUGGER_SUPPORT |
3317 | 3242 |
3318 } } // namespace v8::internal | 3243 } } // namespace v8::internal |