OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 38 matching lines...)
49 NOT_EXECUTABLE, | 49 NOT_EXECUTABLE, |
50 #else | 50 #else |
51 EXECUTABLE, | 51 EXECUTABLE, |
52 #endif | 52 #endif |
53 NULL); | 53 NULL); |
54 } | 54 } |
55 | 55 |
56 | 56 |
57 DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator) | 57 DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator) |
58 : allocator_(allocator), | 58 : allocator_(allocator), |
59 current_(NULL), | |
60 #ifdef ENABLE_DEBUGGER_SUPPORT | 59 #ifdef ENABLE_DEBUGGER_SUPPORT |
61 deoptimized_frame_info_(NULL), | 60 deoptimized_frame_info_(NULL), |
62 #endif | 61 #endif |
63 deoptimizing_code_list_(NULL) { | 62 current_(NULL) { |
64 for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) { | 63 for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) { |
65 deopt_entry_code_entries_[i] = -1; | 64 deopt_entry_code_entries_[i] = -1; |
66 deopt_entry_code_[i] = AllocateCodeChunk(allocator); | 65 deopt_entry_code_[i] = AllocateCodeChunk(allocator); |
67 } | 66 } |
68 } | 67 } |
69 | 68 |
70 | 69 |
71 DeoptimizerData::~DeoptimizerData() { | 70 DeoptimizerData::~DeoptimizerData() { |
72 for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) { | 71 for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) { |
73 allocator_->Free(deopt_entry_code_[i]); | 72 allocator_->Free(deopt_entry_code_[i]); |
74 deopt_entry_code_[i] = NULL; | 73 deopt_entry_code_[i] = NULL; |
75 } | 74 } |
76 | |
77 DeoptimizingCodeListNode* current = deoptimizing_code_list_; | |
78 while (current != NULL) { | |
79 DeoptimizingCodeListNode* prev = current; | |
80 current = current->next(); | |
81 delete prev; | |
82 } | |
83 deoptimizing_code_list_ = NULL; | |
84 } | 75 } |
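An aside for readers of the left column: the deleted destructor tail is the classic advance-before-delete walk over a heap-allocated node list. Once the list is threaded through the code objects themselves (see the new FindDeoptimizingCode below), there is nothing left for the destructor to free. A minimal standalone sketch of that idiom, with an invented ListNode type rather than DeoptimizingCodeListNode:

    #include <cstddef>  // NULL

    struct ListNode {
      ListNode* next;
    };

    // Advance before deleting: "prev" holds the node to free while "current"
    // has already moved on, so freed memory is never dereferenced.
    void DeleteList(ListNode* current) {
      while (current != NULL) {
        ListNode* prev = current;
        current = current->next;
        delete prev;
      }
    }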
85 | 76 |
86 | 77 |
87 #ifdef ENABLE_DEBUGGER_SUPPORT | 78 #ifdef ENABLE_DEBUGGER_SUPPORT |
88 void DeoptimizerData::Iterate(ObjectVisitor* v) { | 79 void DeoptimizerData::Iterate(ObjectVisitor* v) { |
89 if (deoptimized_frame_info_ != NULL) { | 80 if (deoptimized_frame_info_ != NULL) { |
90 deoptimized_frame_info_->Iterate(v); | 81 deoptimized_frame_info_->Iterate(v); |
91 } | 82 } |
92 } | 83 } |
93 #endif | 84 #endif |
94 | 85 |
95 | 86 |
96 Code* DeoptimizerData::FindDeoptimizingCode(Address addr) { | 87 Code* Deoptimizer::FindDeoptimizingCode(Address addr) { |
97 for (DeoptimizingCodeListNode* node = deoptimizing_code_list_; | 88 if (function_->IsHeapObject()) { |
98 node != NULL; | 89 // Search all deoptimizing code in the native context of the function. |
99 node = node->next()) { | 90 Context* native_context = function_->context()->native_context(); |
100 if (node->code()->contains(addr)) return *node->code(); | 91 Object* element = native_context->DeoptimizedCodeListHead(); |
| 92 while (!element->IsUndefined()) { |
| 93 Code* code = Code::cast(element); |
| 94 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); |
| 95 if (code->contains(addr)) return code; |
| 96 element = code->next_code_link(); |
| 97 } |
101 } | 98 } |
102 return NULL; | 99 return NULL; |
103 } | 100 } |
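The new FindDeoptimizingCode walks an intrusive singly-linked list threaded through the code objects themselves (via next_code_link) instead of a separately allocated node list. As a reading aid, here is a standalone sketch of that lookup pattern; CodeNode and FindContaining are stand-ins invented for illustration, not V8 types:

    #include <cstddef>  // NULL, size_t

    // Stand-in for a code object: the "next" pointer lives in the object
    // itself (an intrusive list), so no separate list nodes are allocated.
    struct CodeNode {
      const char* start;    // first byte covered by this code object
      size_t size;          // number of bytes covered
      CodeNode* next_link;  // analogous to Code::next_code_link()

      bool Contains(const char* addr) const {
        return addr >= start && addr < start + size;
      }
    };

    // Same shape as the new lookup: scan the list and return the first
    // entry whose range contains addr, or NULL if none does.
    CodeNode* FindContaining(CodeNode* head, const char* addr) {
      for (CodeNode* node = head; node != NULL; node = node->next_link) {
        if (node->Contains(addr)) return node;
      }
      return NULL;
    }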
104 | 101 |
105 | 102 |
106 void DeoptimizerData::RemoveDeoptimizingCode(Code* code) { | |
107 for (DeoptimizingCodeListNode *prev = NULL, *cur = deoptimizing_code_list_; | |
108 cur != NULL; | |
109 prev = cur, cur = cur->next()) { | |
110 if (*cur->code() == code) { | |
111 if (prev == NULL) { | |
112 deoptimizing_code_list_ = cur->next(); | |
113 } else { | |
114 prev->set_next(cur->next()); | |
115 } | |
116 delete cur; | |
117 return; | |
118 } | |
119 } | |
120 // Deoptimizing code is removed through weak callback. Each object is expected | |
121 // to be removed once and only once. | |
122 UNREACHABLE(); | |
123 } | |
124 | |
125 | |
126 // We rely on this function not causing a GC. It is called from generated code | 103 // We rely on this function not causing a GC. It is called from generated code |
127 // without having a real stack frame in place. | 104 // without having a real stack frame in place. |
128 Deoptimizer* Deoptimizer::New(JSFunction* function, | 105 Deoptimizer* Deoptimizer::New(JSFunction* function, |
129 BailoutType type, | 106 BailoutType type, |
130 unsigned bailout_id, | 107 unsigned bailout_id, |
131 Address from, | 108 Address from, |
132 int fp_to_sp_delta, | 109 int fp_to_sp_delta, |
133 Isolate* isolate) { | 110 Isolate* isolate) { |
134 Deoptimizer* deoptimizer = new Deoptimizer(isolate, | 111 Deoptimizer* deoptimizer = new Deoptimizer(isolate, |
135 function, | 112 function, |
(...skipping 146 matching lines...)
282 void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm, | 259 void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm, |
283 int count, | 260 int count, |
284 BailoutType type) { | 261 BailoutType type) { |
285 TableEntryGenerator generator(masm, type, count); | 262 TableEntryGenerator generator(masm, type, count); |
286 generator.Generate(); | 263 generator.Generate(); |
287 } | 264 } |
288 | 265 |
289 | 266 |
290 void Deoptimizer::VisitAllOptimizedFunctionsForContext( | 267 void Deoptimizer::VisitAllOptimizedFunctionsForContext( |
291 Context* context, OptimizedFunctionVisitor* visitor) { | 268 Context* context, OptimizedFunctionVisitor* visitor) { |
292 Isolate* isolate = context->GetIsolate(); | |
293 Zone zone(isolate); | |
294 DisallowHeapAllocation no_allocation; | 269 DisallowHeapAllocation no_allocation; |
295 | 270 |
296 ASSERT(context->IsNativeContext()); | 271 ASSERT(context->IsNativeContext()); |
297 | 272 |
298 visitor->EnterContext(context); | 273 visitor->EnterContext(context); |
299 | 274 |
300 // Create a snapshot of the optimized functions list. This is needed because | 275 // Visit the list of optimized functions, removing elements that |
301 // visitors might remove more than one link from the list at once. | 276 // no longer refer to optimized code. |
302 ZoneList<JSFunction*> snapshot(1, &zone); | 277 JSFunction* prev = NULL; |
303 Object* element = context->OptimizedFunctionsListHead(); | 278 Object* element = context->OptimizedFunctionsListHead(); |
304 while (!element->IsUndefined()) { | 279 while (!element->IsUndefined()) { |
305 JSFunction* element_function = JSFunction::cast(element); | 280 JSFunction* function = JSFunction::cast(element); |
306 snapshot.Add(element_function, &zone); | 281 Object* next = function->next_function_link(); |
307 element = element_function->next_function_link(); | 282 if (function->code()->kind() != Code::OPTIMIZED_FUNCTION || |
308 } | 283 (visitor->VisitFunction(function), |
309 | 284 function->code()->kind() != Code::OPTIMIZED_FUNCTION)) { |
310 // Run through the snapshot of optimized functions and visit them. | 285 // The function no longer refers to optimized code, or the visitor |
311 for (int i = 0; i < snapshot.length(); ++i) { | 286 // changed the code to which it refers to no longer be optimized code. |
312 visitor->VisitFunction(snapshot.at(i)); | 287 // Remove the function from this list. |
| 288 if (prev != NULL) { |
| 289 prev->set_next_function_link(next); |
| 290 } else { |
| 291 context->SetOptimizedFunctionsListHead(next); |
| 292 } |
| 293 // The visitor should not alter the link directly. |
| 294 ASSERT(function->next_function_link() == next); |
| 295 // Set the next function link to undefined to indicate it is no longer |
| 296 // in the optimized functions list. |
| 297 function->set_next_function_link(context->GetHeap()->undefined_value()); |
| 298 } else { |
| 299 // The visitor should not alter the link directly. |
| 300 ASSERT(function->next_function_link() == next); |
| 301 // Preserve this element. |
| 302 prev = function; |
| 303 } |
| 304 element = next; |
313 } | 305 } |
314 | 306 |
315 visitor->LeaveContext(context); | 307 visitor->LeaveContext(context); |
316 } | 308 } |
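The rewritten loop above unlinks entries in place during a single traversal, which is why the old snapshot-into-a-ZoneList step (and its Zone) can go away. A standalone sketch of the prev-pointer unlinking idiom it relies on, with a plain Node type and a "keep" flag standing in for the post-visit "still refers to optimized code" check:

    #include <cstddef>  // NULL

    struct Node {
      Node* next;
      bool keep;  // stand-in for "still refers to optimized code"
    };

    // Single pass: prev trails the cursor so that either the predecessor's
    // next pointer or the list head can be rewired when a node is dropped.
    Node* RemoveUnkept(Node* head) {
      Node* prev = NULL;
      Node* element = head;
      while (element != NULL) {
        Node* next = element->next;
        if (!element->keep) {
          if (prev != NULL) {
            prev->next = next;   // splice out of the middle
          } else {
            head = next;         // dropping the current head
          }
          element->next = NULL;  // detached node no longer points in
        } else {
          prev = element;        // kept node becomes the new predecessor
        }
        element = next;
      }
      return head;
    }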
317 | 309 |
318 | 310 |
319 void Deoptimizer::VisitAllOptimizedFunctions( | 311 void Deoptimizer::VisitAllOptimizedFunctions( |
320 Isolate* isolate, | 312 Isolate* isolate, |
321 OptimizedFunctionVisitor* visitor) { | 313 OptimizedFunctionVisitor* visitor) { |
322 DisallowHeapAllocation no_allocation; | 314 DisallowHeapAllocation no_allocation; |
323 | 315 |
324 // Run through the list of all native contexts and deoptimize. | 316 // Run through the list of all native contexts. |
325 Object* context = isolate->heap()->native_contexts_list(); | 317 Object* context = isolate->heap()->native_contexts_list(); |
326 while (!context->IsUndefined()) { | 318 while (!context->IsUndefined()) { |
327 VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor); | 319 VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor); |
328 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 320 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
329 } | 321 } |
330 } | 322 } |
331 | 323 |
332 | 324 |
333 // Removes the functions selected by the given filter from the optimized | 325 // Unlink functions referring to code marked for deoptimization, then move |
334 // function list of the given context and adds their code to the list of | 326 // marked code from the optimized code list to the deoptimized code list, |
335 // code objects to be deoptimized. | 327 // and patch code for lazy deopt. |
336 static void SelectCodeToDeoptimize(Context* context, | 328 void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) { |
337 OptimizedFunctionFilter* filter, | |
338 ZoneList<Code*>* codes, | |
339 Zone* zone, | |
340 Object* undefined) { | |
341 DisallowHeapAllocation no_allocation; | 329 DisallowHeapAllocation no_allocation; |
342 Object* current = context->get(Context::OPTIMIZED_FUNCTIONS_LIST); | |
343 Object* remainder_head = undefined; | |
344 Object* remainder_tail = undefined; | |
345 | 330 |
346 // TODO(titzer): rewrite to not modify unselected functions. | 331 // A "closure" that unlinks optimized code that is going to be |
347 while (current != undefined) { | 332 // deoptimized from the functions that refer to it. |
348 JSFunction* function = JSFunction::cast(current); | 333 class SelectedCodeUnlinker: public OptimizedFunctionVisitor { |
349 current = function->next_function_link(); | 334 public: |
350 if (filter->TakeFunction(function)) { | 335 virtual void EnterContext(Context* context) { } // Don't care. |
351 // Extract this function from the context's list and remember the code. | 336 virtual void LeaveContext(Context* context) { } // Don't care. |
| 337 virtual void VisitFunction(JSFunction* function) { |
352 Code* code = function->code(); | 338 Code* code = function->code(); |
353 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); | 339 if (!code->marked_for_deoptimization()) return; |
354 if (code->marked_for_deoptimization()) { | 340 |
355 ASSERT(codes->Contains(code)); | 341 // Unlink this function and evict from optimized code map. |
356 } else { | |
357 code->set_marked_for_deoptimization(true); | |
358 codes->Add(code, zone); | |
359 } | |
360 SharedFunctionInfo* shared = function->shared(); | 342 SharedFunctionInfo* shared = function->shared(); |
361 // Replace the function's code with the shared code. | |
362 function->set_code(shared->code()); | 343 function->set_code(shared->code()); |
363 // Evict the code from the optimized code map. | |
364 shared->EvictFromOptimizedCodeMap(code, "deoptimized function"); | 344 shared->EvictFromOptimizedCodeMap(code, "deoptimized function"); |
365 // Remove the function from the optimized functions list. | |
366 function->set_next_function_link(undefined); | |
367 | 345 |
368 if (FLAG_trace_deopt) { | 346 if (FLAG_trace_deopt) { |
369 PrintF("[forced deoptimization: "); | 347 PrintF("[deoptimizer unlinked: "); |
370 function->PrintName(); | 348 function->PrintName(); |
371 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); | 349 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); |
372 } | 350 } |
| 351 } |
| 352 }; |
| 353 |
| 354 // Unlink all functions that refer to marked code. |
| 355 SelectedCodeUnlinker unlinker; |
| 356 VisitAllOptimizedFunctionsForContext(context, &unlinker); |
| 357 |
| 358 // Move marked code from the optimized code list to the deoptimized |
| 359 // code list, collecting them into a ZoneList. |
| 360 Isolate* isolate = context->GetHeap()->isolate(); |
| 361 Zone zone(isolate); |
| 362 ZoneList<Code*> codes(10, &zone); |
| 363 |
| 364 // Walk over all optimized code objects in this native context. |
| 365 Code* prev = NULL; |
| 366 Object* element = context->OptimizedCodeListHead(); |
| 367 while (!element->IsUndefined()) { |
| 368 Code* code = Code::cast(element); |
| 369 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); |
| 370 Object* next = code->next_code_link(); |
| 371 if (code->marked_for_deoptimization()) { |
| 372 // Put the code into the list for later patching. |
| 373 codes.Add(code, &zone); |
| 374 |
| 375 if (prev != NULL) { |
| 376 // Skip this code in the optimized code list. |
| 377 prev->set_next_code_link(next); |
| 378 } else { |
| 379 // There was no previous node, the next node is the new head. |
| 380 context->SetOptimizedCodeListHead(next); |
| 381 } |
| 382 |
| 383 // Move the code to the _deoptimized_ code list. |
| 384 code->set_next_code_link(context->DeoptimizedCodeListHead()); |
| 385 context->SetDeoptimizedCodeListHead(code); |
373 } else { | 386 } else { |
374 // Don't select this function; link it back into the list. | 387 // Not marked; preserve this element. |
375 if (remainder_head == undefined) { | 388 prev = code; |
376 remainder_head = function; | |
377 } else { | |
378 JSFunction::cast(remainder_tail)->set_next_function_link(function); | |
379 } | |
380 remainder_tail = function; | |
381 } | 389 } |
| 390 element = next; |
382 } | 391 } |
383 if (remainder_tail != undefined) { | 392 |
384 JSFunction::cast(remainder_tail)->set_next_function_link(undefined); | 393 // TODO(titzer): we need a handle scope only because of the macro assembler, |
| 394 // which is only used in EnsureCodeForDeoptimizationEntry. |
| 395 HandleScope scope(isolate); |
| 396 // Now patch all the codes for deoptimization. |
| 397 for (int i = 0; i < codes.length(); i++) { |
| 398 // It is finally time to die, code object. |
| 399 // Do platform-specific patching to force any activations to lazy deopt. |
| 400 PatchCodeForDeoptimization(isolate, codes[i]); |
| 401 |
| 402 // We might be in the middle of incremental marking with compaction. |
| 403 // Tell collector to treat this code object in a special way and |
| 404 // ignore all slots that might have been recorded on it. |
| 405 isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]); |
385 } | 406 } |
386 context->set(Context::OPTIMIZED_FUNCTIONS_LIST, remainder_head); | |
387 } | 407 } |
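DeoptimizeMarkedCodeForContext combines the same single-pass unlinking with a push onto a second list: marked code is spliced out of the optimized code list and pushed onto the front of the deoptimized code list before patching. A standalone sketch of that splice-and-push step, using invented stand-in types rather than V8's Code and Context:

    #include <cstddef>  // NULL

    struct Node {
      Node* next;
      bool marked;
    };

    // One pass over *from_head: marked nodes are unlinked and pushed onto
    // the front of *to_head, mirroring the optimized -> deoptimized move.
    void MoveMarked(Node** from_head, Node** to_head) {
      Node* prev = NULL;
      Node* element = *from_head;
      while (element != NULL) {
        Node* next = element->next;
        if (element->marked) {
          if (prev != NULL) {
            prev->next = next;
          } else {
            *from_head = next;
          }
          element->next = *to_head;  // push onto destination front
          *to_head = element;
        } else {
          prev = element;
        }
        element = next;
      }
    }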
388 | 408 |
389 | 409 |
390 class DeoptimizeAllFilter : public OptimizedFunctionFilter { | 410 void Deoptimizer::DeoptimizeAll(Isolate* isolate) { |
391 public: | 411 if (FLAG_trace_deopt) { |
392 virtual bool TakeFunction(JSFunction* function) { | 412 PrintF("[deoptimize all code in all contexts]\n"); |
393 return true; | |
394 } | 413 } |
395 }; | 414 DisallowHeapAllocation no_allocation; |
| 415 // For all contexts, mark all code, then deoptimize. |
| 416 Object* context = isolate->heap()->native_contexts_list(); |
| 417 while (!context->IsUndefined()) { |
| 418 Context* native_context = Context::cast(context); |
| 419 MarkAllCodeForContext(native_context); |
| 420 DeoptimizeMarkedCodeForContext(native_context); |
| 421 context = native_context->get(Context::NEXT_CONTEXT_LINK); |
| 422 } |
| 423 } |
396 | 424 |
397 | 425 |
398 class DeoptimizeWithMatchingCodeFilter : public OptimizedFunctionFilter { | 426 void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) { |
399 public: | 427 if (FLAG_trace_deopt) { |
400 explicit DeoptimizeWithMatchingCodeFilter(Code* code) : code_(code) {} | 428 PrintF("[deoptimize marked code in all contexts]\n"); |
401 virtual bool TakeFunction(JSFunction* function) { | |
402 return function->code() == code_; | |
403 } | 429 } |
404 private: | 430 DisallowHeapAllocation no_allocation; |
405 Code* code_; | 431 // For all contexts, deoptimize code already marked. |
406 }; | 432 Object* context = isolate->heap()->native_contexts_list(); |
407 | 433 while (!context->IsUndefined()) { |
408 | 434 Context* native_context = Context::cast(context); |
409 class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter { | 435 DeoptimizeMarkedCodeForContext(native_context); |
410 public: | 436 context = native_context->get(Context::NEXT_CONTEXT_LINK); |
411 virtual bool TakeFunction(JSFunction* function) { | |
412 return function->code()->marked_for_deoptimization(); | |
413 } | 437 } |
414 }; | |
415 | |
416 | |
417 void Deoptimizer::DeoptimizeAll(Isolate* isolate) { | |
418 DisallowHeapAllocation no_allocation; | |
419 | |
420 if (FLAG_trace_deopt) { | |
421 PrintF("[deoptimize all contexts]\n"); | |
422 } | |
423 | |
424 DeoptimizeAllFilter filter; | |
425 DeoptimizeAllFunctionsWith(isolate, &filter); | |
426 } | 438 } |
427 | 439 |
428 | 440 |
429 void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) { | 441 void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) { |
430 DisallowHeapAllocation no_allocation; | 442 if (FLAG_trace_deopt) { |
431 DeoptimizeAllFilter filter; | 443 PrintF("[deoptimize global object @ 0x%08" V8PRIxPTR "]\n", |
| 444 reinterpret_cast<intptr_t>(object)); |
| 445 } |
432 if (object->IsJSGlobalProxy()) { | 446 if (object->IsJSGlobalProxy()) { |
433 Object* proto = object->GetPrototype(); | 447 Object* proto = object->GetPrototype(); |
434 ASSERT(proto->IsJSGlobalObject()); | 448 ASSERT(proto->IsJSGlobalObject()); |
435 DeoptimizeAllFunctionsForContext( | 449 Context* native_context = GlobalObject::cast(proto)->native_context(); |
436 GlobalObject::cast(proto)->native_context(), &filter); | 450 MarkAllCodeForContext(native_context); |
| 451 DeoptimizeMarkedCodeForContext(native_context); |
437 } else if (object->IsGlobalObject()) { | 452 } else if (object->IsGlobalObject()) { |
438 DeoptimizeAllFunctionsForContext( | 453 Context* native_context = GlobalObject::cast(object)->native_context(); |
439 GlobalObject::cast(object)->native_context(), &filter); | 454 MarkAllCodeForContext(native_context); |
| 455 DeoptimizeMarkedCodeForContext(native_context); |
| 456 } |
| 457 } |
| 458 |
| 459 |
| 460 void Deoptimizer::MarkAllCodeForContext(Context* context) { |
| 461 Object* element = context->OptimizedCodeListHead(); |
| 462 while (!element->IsUndefined()) { |
| 463 Code* code = Code::cast(element); |
| 464 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); |
| 465 code->set_marked_for_deoptimization(true); |
| 466 element = code->next_code_link(); |
440 } | 467 } |
441 } | 468 } |
442 | 469 |
443 | 470 |
444 void Deoptimizer::DeoptimizeFunction(JSFunction* function) { | 471 void Deoptimizer::DeoptimizeFunction(JSFunction* function) { |
445 Code* code = function->code(); | 472 Code* code = function->code(); |
446 if (code->kind() != Code::OPTIMIZED_FUNCTION) return; | 473 if (code->kind() == Code::OPTIMIZED_FUNCTION) { |
447 DeoptimizeWithMatchingCodeFilter filter(code); | 474 // Mark the code for deoptimization and unlink any functions that also |
448 DeoptimizeAllFunctionsForContext( | 475 // refer to that code. The code cannot be shared across native contexts, |
449 function->context()->native_context(), &filter); | 476 // so we only need to search one. |
450 } | 477 code->set_marked_for_deoptimization(true); |
451 | 478 DeoptimizeMarkedCodeForContext(function->context()->native_context()); |
452 | |
453 void Deoptimizer::DeoptimizeAllFunctionsForContext( | |
454 Context* context, OptimizedFunctionFilter* filter) { | |
455 ASSERT(context->IsNativeContext()); | |
456 Isolate* isolate = context->GetIsolate(); | |
457 Object* undefined = isolate->heap()->undefined_value(); | |
458 Zone zone(isolate); | |
459 ZoneList<Code*> codes(4, &zone); | |
460 SelectCodeToDeoptimize(context, filter, &codes, &zone, undefined); | |
461 for (int i = 0; i < codes.length(); i++) { | |
462 DeoptimizeCode(isolate, codes.at(i)); | |
463 } | 479 } |
464 } | 480 } |
465 | 481 |
466 | 482 |
467 void Deoptimizer::DeoptimizeAllFunctionsWith(Isolate* isolate, | |
468 OptimizedFunctionFilter* filter) { | |
469 DisallowHeapAllocation no_allocation; | |
470 | |
471 // Run through the list of all native contexts and deoptimize. | |
472 Object* context = isolate->heap()->native_contexts_list(); | |
473 while (!context->IsUndefined()) { | |
474 DeoptimizeAllFunctionsForContext(Context::cast(context), filter); | |
475 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | |
476 } | |
477 } | |
478 | |
479 | |
480 void Deoptimizer::DeoptimizeCodeList(Isolate* isolate, ZoneList<Code*>* codes) { | |
481 if (codes->length() == 0) return; // Nothing to do. | |
482 | |
483 // Mark the code; any functions referring to this code will be selected. |
484 for (int i = 0; i < codes->length(); i++) { | |
485 ASSERT(!codes->at(i)->marked_for_deoptimization()); | |
486 codes->at(i)->set_marked_for_deoptimization(true); | |
487 } | |
488 | |
489 // For all contexts, remove optimized functions that refer to the selected | |
490 // code from the optimized function lists. | |
491 Object* undefined = isolate->heap()->undefined_value(); | |
492 Zone zone(isolate); | |
493 Object* list = isolate->heap()->native_contexts_list(); | |
494 DeoptimizeMarkedCodeFilter filter; | |
495 while (!list->IsUndefined()) { | |
496 Context* context = Context::cast(list); | |
497 // Note that selecting code unlinks the functions that refer to it. | |
498 SelectCodeToDeoptimize(context, &filter, codes, &zone, undefined); | |
499 list = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | |
500 } | |
501 | |
502 // Now deoptimize all the code. | |
503 for (int i = 0; i < codes->length(); i++) { | |
504 DeoptimizeCode(isolate, codes->at(i)); | |
505 } | |
506 } | |
507 | |
508 | |
509 void Deoptimizer::DeoptimizeCode(Isolate* isolate, Code* code) { | |
510 HandleScope scope(isolate); | |
511 DisallowHeapAllocation nha; | |
512 | |
513 // Do platform-specific patching of the optimized code. | |
514 PatchCodeForDeoptimization(isolate, code); | |
515 | |
516 // Add the deoptimizing code to the list. | |
517 DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code); | |
518 DeoptimizerData* data = isolate->deoptimizer_data(); | |
519 node->set_next(data->deoptimizing_code_list_); | |
520 data->deoptimizing_code_list_ = node; | |
521 | |
522 // We might be in the middle of incremental marking with compaction. | |
523 // Tell collector to treat this code object in a special way and | |
524 // ignore all slots that might have been recorded on it. | |
525 isolate->heap()->mark_compact_collector()->InvalidateCode(code); | |
526 } | |
527 | |
528 | |
529 void Deoptimizer::HandleWeakDeoptimizedCode(v8::Isolate* isolate, | |
530 v8::Persistent<v8::Value>* obj, | |
531 void* parameter) { | |
532 DeoptimizingCodeListNode* node = | |
533 reinterpret_cast<DeoptimizingCodeListNode*>(parameter); | |
534 DeoptimizerData* data = | |
535 reinterpret_cast<Isolate*>(isolate)->deoptimizer_data(); | |
536 data->RemoveDeoptimizingCode(*node->code()); | |
537 #ifdef DEBUG | |
538 for (DeoptimizingCodeListNode* current = data->deoptimizing_code_list_; | |
539 current != NULL; | |
540 current = current->next()) { | |
541 ASSERT(current != node); | |
542 } | |
543 #endif | |
544 } | |
545 | |
546 | |
547 void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) { | 483 void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) { |
548 deoptimizer->DoComputeOutputFrames(); | 484 deoptimizer->DoComputeOutputFrames(); |
549 } | 485 } |
550 | 486 |
551 | 487 |
552 bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type, | 488 bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type, |
553 StackFrame::Type frame_type) { | 489 StackFrame::Type frame_type) { |
554 switch (deopt_type) { | 490 switch (deopt_type) { |
555 case EAGER: | 491 case EAGER: |
556 case SOFT: | 492 case SOFT: |
(...skipping 83 matching lines...)
640 input_->SetFrameType(frame_type); | 576 input_->SetFrameType(frame_type); |
641 } | 577 } |
642 | 578 |
643 | 579 |
644 Code* Deoptimizer::FindOptimizedCode(JSFunction* function, | 580 Code* Deoptimizer::FindOptimizedCode(JSFunction* function, |
645 Code* optimized_code) { | 581 Code* optimized_code) { |
646 switch (bailout_type_) { | 582 switch (bailout_type_) { |
647 case Deoptimizer::SOFT: | 583 case Deoptimizer::SOFT: |
648 case Deoptimizer::EAGER: | 584 case Deoptimizer::EAGER: |
649 case Deoptimizer::LAZY: { | 585 case Deoptimizer::LAZY: { |
650 Code* compiled_code = | 586 Code* compiled_code = FindDeoptimizingCode(from_); |
651 isolate_->deoptimizer_data()->FindDeoptimizingCode(from_); | |
652 return (compiled_code == NULL) | 587 return (compiled_code == NULL) |
653 ? static_cast<Code*>(isolate_->FindCodeObject(from_)) | 588 ? static_cast<Code*>(isolate_->FindCodeObject(from_)) |
654 : compiled_code; | 589 : compiled_code; |
655 } | 590 } |
656 case Deoptimizer::OSR: { | 591 case Deoptimizer::OSR: { |
657 // The function has already been optimized and we're transitioning | 592 // The function has already been optimized and we're transitioning |
658 // from the unoptimized shared version to the optimized one in the | 593 // from the unoptimized shared version to the optimized one in the |
659 // function. The return address (from_) points to unoptimized code. | 594 // function. The return address (from_) points to unoptimized code. |
660 Code* compiled_code = function->code(); | 595 Code* compiled_code = function->code(); |
661 ASSERT(compiled_code->kind() == Code::OPTIMIZED_FUNCTION); | 596 ASSERT(compiled_code->kind() == Code::OPTIMIZED_FUNCTION); |
(...skipping 96 matching lines...)
758 shared->SourceCodePrint(&stream, -1); | 693 shared->SourceCodePrint(&stream, -1); |
759 PrintF("[source:\n%s\n]", *stream.ToCString()); | 694 PrintF("[source:\n%s\n]", *stream.ToCString()); |
760 | 695 |
761 FATAL("unable to find pc offset during deoptimization"); | 696 FATAL("unable to find pc offset during deoptimization"); |
762 return -1; | 697 return -1; |
763 } | 698 } |
764 | 699 |
765 | 700 |
766 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) { | 701 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) { |
767 int length = 0; | 702 int length = 0; |
768 DeoptimizingCodeListNode* node = | 703 // Count all entries in the deoptimizing code list of every context. |
769 isolate->deoptimizer_data()->deoptimizing_code_list_; | 704 Object* context = isolate->heap()->native_contexts_list(); |
770 while (node != NULL) { | 705 while (!context->IsUndefined()) { |
771 length++; | 706 Context* native_context = Context::cast(context); |
772 node = node->next(); | 707 Object* element = native_context->DeoptimizedCodeListHead(); |
| 708 while (!element->IsUndefined()) { |
| 709 Code* code = Code::cast(element); |
| 710 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); |
| 711 length++; |
| 712 element = code->next_code_link(); |
| 713 } |
| 714 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
773 } | 715 } |
774 return length; | 716 return length; |
775 } | 717 } |
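With the global node list gone, GetDeoptimizedCodeCount becomes a nested walk: an outer loop over the native contexts list, and an inner loop over each context's deoptimized code list. The equivalent counting shape on minimal stand-in types:

    #include <cstddef>  // NULL

    struct Node { Node* next; };
    struct Ctx  { Node* deoptimized_head; Ctx* next_context; };

    // Sum of list lengths across all contexts; O(total deoptimized entries).
    int CountDeoptimized(Ctx* contexts) {
      int length = 0;
      for (Ctx* c = contexts; c != NULL; c = c->next_context) {
        for (Node* n = c->deoptimized_head; n != NULL; n = n->next) {
          ++length;
        }
      }
      return length;
    }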
776 | 718 |
777 | 719 |
778 // We rely on this function not causing a GC. It is called from generated code | 720 // We rely on this function not causing a GC. It is called from generated code |
779 // without having a real stack frame in place. | 721 // without having a real stack frame in place. |
780 void Deoptimizer::DoComputeOutputFrames() { | 722 void Deoptimizer::DoComputeOutputFrames() { |
781 if (bailout_type_ == OSR) { | 723 if (bailout_type_ == OSR) { |
782 DoComputeOsrOutputFrame(); | 724 DoComputeOsrOutputFrame(); |
(...skipping 2331 matching lines...)
3114 case CAPTURED_OBJECT: | 3056 case CAPTURED_OBJECT: |
3115 return "CAPTURED_OBJECT"; | 3057 return "CAPTURED_OBJECT"; |
3116 } | 3058 } |
3117 UNREACHABLE(); | 3059 UNREACHABLE(); |
3118 return ""; | 3060 return ""; |
3119 } | 3061 } |
3120 | 3062 |
3121 #endif | 3063 #endif |
3122 | 3064 |
3123 | 3065 |
3124 DeoptimizingCodeListNode::DeoptimizingCodeListNode(Code* code): next_(NULL) { | |
3125 GlobalHandles* global_handles = code->GetIsolate()->global_handles(); | |
3126 // Globalize the code object and make it weak. | |
3127 code_ = Handle<Code>::cast(global_handles->Create(code)); | |
3128 global_handles->MakeWeak(reinterpret_cast<Object**>(code_.location()), | |
3129 this, | |
3130 Deoptimizer::HandleWeakDeoptimizedCode); | |
3131 } | |
3132 | |
3133 | |
3134 DeoptimizingCodeListNode::~DeoptimizingCodeListNode() { | |
3135 GlobalHandles* global_handles = code_->GetIsolate()->global_handles(); | |
3136 global_handles->Destroy(reinterpret_cast<Object**>(code_.location())); | |
3137 } | |
3138 | |
3139 | |
3140 // We can't intermix stack decoding and allocations because | 3066 // We can't intermix stack decoding and allocations because |
3141 // deoptimization infrastructure is not GC safe. | 3067 // deoptimization infrastructure is not GC safe. |
3142 // Thus we build a temporary structure in malloced space. | 3068 // Thus we build a temporary structure in malloced space. |
3143 SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator, | 3069 SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator, |
3144 DeoptimizationInputData* data, | 3070 DeoptimizationInputData* data, |
3145 JavaScriptFrame* frame) { | 3071 JavaScriptFrame* frame) { |
3146 Translation::Opcode opcode = | 3072 Translation::Opcode opcode = |
3147 static_cast<Translation::Opcode>(iterator->Next()); | 3073 static_cast<Translation::Opcode>(iterator->Next()); |
3148 | 3074 |
3149 switch (opcode) { | 3075 switch (opcode) { |
(...skipping 177 matching lines...)
3327 | 3253 |
3328 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 3254 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { |
3329 v->VisitPointer(BitCast<Object**>(&function_)); | 3255 v->VisitPointer(BitCast<Object**>(&function_)); |
3330 v->VisitPointers(parameters_, parameters_ + parameters_count_); | 3256 v->VisitPointers(parameters_, parameters_ + parameters_count_); |
3331 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 3257 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); |
3332 } | 3258 } |
3333 | 3259 |
3334 #endif // ENABLE_DEBUGGER_SUPPORT | 3260 #endif // ENABLE_DEBUGGER_SUPPORT |
3335 | 3261 |
3336 } } // namespace v8::internal | 3262 } } // namespace v8::internal |