| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 616 matching lines...) |
| 627 for (int i = 0; i < output_count_; ++i) { | 627 for (int i = 0; i < output_count_; ++i) { |
| 628 if (output_[i] != input_) delete output_[i]; | 628 if (output_[i] != input_) delete output_[i]; |
| 629 } | 629 } |
| 630 delete[] output_; | 630 delete[] output_; |
| 631 input_ = NULL; | 631 input_ = NULL; |
| 632 output_ = NULL; | 632 output_ = NULL; |
| 633 ASSERT(!HEAP->allow_allocation(true)); | 633 ASSERT(!HEAP->allow_allocation(true)); |
| 634 } | 634 } |
| 635 | 635 |
| 636 | 636 |
| 637 Address Deoptimizer::GetDeoptimizationEntry(int id, | 637 Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate, |
| | 638 int id, |
| 638 BailoutType type, | 639 BailoutType type, |
| 639 GetEntryMode mode) { | 640 GetEntryMode mode) { |
| 640 ASSERT(id >= 0); | 641 ASSERT(id >= 0); |
| 641 if (id >= kMaxNumberOfEntries) return NULL; | 642 if (id >= kMaxNumberOfEntries) return NULL; |
| 642 MemoryChunk* base = NULL; | 643 MemoryChunk* base = NULL; |
| 643 if (mode == ENSURE_ENTRY_CODE) { | 644 if (mode == ENSURE_ENTRY_CODE) { |
| 644 EnsureCodeForDeoptimizationEntry(type, id); | 645 EnsureCodeForDeoptimizationEntry(isolate, type, id); |
| 645 } else { | 646 } else { |
| 646 ASSERT(mode == CALCULATE_ENTRY_ADDRESS); | 647 ASSERT(mode == CALCULATE_ENTRY_ADDRESS); |
| 647 } | 648 } |
| 648 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); | 649 DeoptimizerData* data = isolate->deoptimizer_data(); |
| 649 if (type == EAGER) { | 650 if (type == EAGER) { |
| 650 base = data->eager_deoptimization_entry_code_; | 651 base = data->eager_deoptimization_entry_code_; |
| 651 } else { | 652 } else { |
| 652 base = data->lazy_deoptimization_entry_code_; | 653 base = data->lazy_deoptimization_entry_code_; |
| 653 } | 654 } |
| 654 return base->area_start() + (id * table_entry_size_); | 655 return base->area_start() + (id * table_entry_size_); |
| 655 } | 656 } |
| 656 | 657 |
| 657 | 658 |
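Reviewer note: with the isolate now threaded through explicitly, call sites change roughly as sketched below. This is a hypothetical caller for illustration only (the surrounding variables are assumptions, not part of this patch); the enum values EAGER and ENSURE_ENTRY_CODE are taken from the code above.

    // Hypothetical call site, for illustration only (not from this patch).
    // Before this change the isolate was fetched internally via Isolate::Current();
    // now the caller passes it explicitly as the first argument.
    Address entry = Deoptimizer::GetDeoptimizationEntry(
        isolate,                          // Isolate* already in scope at the caller
        bailout_id,                       // 0 <= bailout_id < kMaxNumberOfEntries
        Deoptimizer::EAGER,               // or Deoptimizer::LAZY
        Deoptimizer::ENSURE_ENTRY_CODE);  // build the entry table if it is missing or too small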
| 658 int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) { | 659 int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) { |
| (...skipping 902 matching lines...) |
| 1561 } | 1562 } |
| 1562 | 1563 |
| 1563 | 1564 |
| 1564 void Deoptimizer::AddDoubleValue(intptr_t slot_address, double value) { | 1565 void Deoptimizer::AddDoubleValue(intptr_t slot_address, double value) { |
| 1565 HeapNumberMaterializationDescriptor value_desc( | 1566 HeapNumberMaterializationDescriptor value_desc( |
| 1566 reinterpret_cast<Address>(slot_address), value); | 1567 reinterpret_cast<Address>(slot_address), value); |
| 1567 deferred_heap_numbers_.Add(value_desc); | 1568 deferred_heap_numbers_.Add(value_desc); |
| 1568 } | 1569 } |
| 1569 | 1570 |
| 1570 | 1571 |
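Reviewer note: AddDoubleValue above only records the slot/value pair; the heap numbers are boxed later, once allocation is permitted again. A minimal standalone sketch of that defer-then-materialize pattern (names and types here are illustrative, not V8's):

    // Illustrative sketch only, not V8 code.
    #include <vector>

    struct DeferredDouble {
      void* slot;    // frame slot that will eventually hold the boxed number
      double value;  // unboxed value captured while allocation is forbidden
    };

    struct Materializer {
      std::vector<DeferredDouble> deferred;

      // Record now (no allocation is allowed during frame translation).
      void AddDoubleValue(void* slot, double value) {
        deferred.push_back(DeferredDouble{slot, value});
      }

      // Later, with allocation allowed again, box each value into its slot.
      template <typename BoxFn>
      void MaterializeAll(BoxFn box) {
        for (const DeferredDouble& d : deferred) box(d.slot, d.value);
        deferred.clear();
      }
    };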
| 1571 void Deoptimizer::EnsureCodeForDeoptimizationEntry(BailoutType type, | 1572 void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate, |
| | 1573 BailoutType type, |
| 1572 int max_entry_id) { | 1574 int max_entry_id) { |
| 1573 // We cannot run this if the serializer is enabled because this will | 1575 // We cannot run this if the serializer is enabled because this will |
| 1574 // cause us to emit relocation information for the external | 1576 // cause us to emit relocation information for the external |
| 1575 // references. This is fine because the deoptimizer's code section | 1577 // references. This is fine because the deoptimizer's code section |
| 1576 // isn't meant to be serialized at all. | 1578 // isn't meant to be serialized at all. |
| 1577 ASSERT(type == EAGER || type == LAZY); | 1579 ASSERT(type == EAGER || type == LAZY); |
| 1578 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); | 1580 DeoptimizerData* data = isolate->deoptimizer_data(); |
| 1579 int entry_count = (type == EAGER) | 1581 int entry_count = (type == EAGER) |
| 1580 ? data->eager_deoptimization_entry_code_entries_ | 1582 ? data->eager_deoptimization_entry_code_entries_ |
| 1581 : data->lazy_deoptimization_entry_code_entries_; | 1583 : data->lazy_deoptimization_entry_code_entries_; |
| 1582 if (max_entry_id < entry_count) return; | 1584 if (max_entry_id < entry_count) return; |
| 1583 entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries); | 1585 entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries); |
| 1584 while (max_entry_id >= entry_count) entry_count *= 2; | 1586 while (max_entry_id >= entry_count) entry_count *= 2; |
| 1585 ASSERT(entry_count <= Deoptimizer::kMaxNumberOfEntries); | 1587 ASSERT(entry_count <= Deoptimizer::kMaxNumberOfEntries); |
| 1586 | 1588 |
| 1587 MacroAssembler masm(Isolate::Current(), NULL, 16 * KB); | 1589 MacroAssembler masm(isolate, NULL, 16 * KB); |
| 1588 masm.set_emit_debug_code(false); | 1590 masm.set_emit_debug_code(false); |
| 1589 GenerateDeoptimizationEntries(&masm, entry_count, type); | 1591 GenerateDeoptimizationEntries(&masm, entry_count, type); |
| 1590 CodeDesc desc; | 1592 CodeDesc desc; |
| 1591 masm.GetCode(&desc); | 1593 masm.GetCode(&desc); |
| 1592 ASSERT(!RelocInfo::RequiresRelocation(desc)); | 1594 ASSERT(!RelocInfo::RequiresRelocation(desc)); |
| 1593 | 1595 |
| 1594 MemoryChunk* chunk = type == EAGER | 1596 MemoryChunk* chunk = type == EAGER |
| 1595 ? data->eager_deoptimization_entry_code_ | 1597 ? data->eager_deoptimization_entry_code_ |
| 1596 : data->lazy_deoptimization_entry_code_; | 1598 : data->lazy_deoptimization_entry_code_; |
| 1597 ASSERT(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >= | 1599 ASSERT(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >= |
| (...skipping 539 matching lines...) |
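Reviewer note: the table-sizing rule in EnsureCodeForDeoptimizationEntry grows geometrically from a floor. A standalone sketch of that computation (the constant here is a placeholder; the real kMinNumberOfEntries and kMaxNumberOfEntries are declared in deoptimizer.h):

    // Illustrative helper mirroring the sizing logic above, not V8 code.
    static int ComputeEntryCount(int current_entries, int max_entry_id) {
      const int kMinNumberOfEntries = 16;  // placeholder floor, not V8's value
      int count = current_entries < kMinNumberOfEntries ? kMinNumberOfEntries
                                                        : current_entries;
      while (max_entry_id >= count) count *= 2;  // double until the id fits
      return count;
    }

For example, with current_entries == 0 and max_entry_id == 70 this yields 128 (16 -> 32 -> 64 -> 128).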
| 2137 | 2139 |
| 2138 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 2140 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { |
| 2139 v->VisitPointer(BitCast<Object**>(&function_)); | 2141 v->VisitPointer(BitCast<Object**>(&function_)); |
| 2140 v->VisitPointers(parameters_, parameters_ + parameters_count_); | 2142 v->VisitPointers(parameters_, parameters_ + parameters_count_); |
| 2141 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 2143 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); |
| 2142 } | 2144 } |
| 2143 | 2145 |
| 2144 #endif // ENABLE_DEBUGGER_SUPPORT | 2146 #endif // ENABLE_DEBUGGER_SUPPORT |
| 2145 | 2147 |
| 2146 } } // namespace v8::internal | 2148 } } // namespace v8::internal |