| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 311 matching lines...) |
| 322 while (true) { | 322 while (true) { |
| 323 SafeJavaScriptFrameIterator::Advance(); | 323 SafeJavaScriptFrameIterator::Advance(); |
| 324 if (done()) return; | 324 if (done()) return; |
| 325 if (frame()->is_java_script()) return; | 325 if (frame()->is_java_script()) return; |
| 326 } | 326 } |
| 327 } | 327 } |
| 328 #endif | 328 #endif |
| 329 | 329 |
| 330 | 330 |
| 331 Code* StackFrame::GetSafepointData(Address pc, | 331 Code* StackFrame::GetSafepointData(Address pc, |
| 332 uint8_t** safepoint_entry, | 332 SafepointEntry* safepoint_entry, |
| 333 unsigned* stack_slots) { | 333 unsigned* stack_slots) { |
| 334 PcToCodeCache::PcToCodeCacheEntry* entry = PcToCodeCache::GetCacheEntry(pc); | 334 PcToCodeCache::PcToCodeCacheEntry* entry = PcToCodeCache::GetCacheEntry(pc); |
| 335 uint8_t* cached_safepoint_entry = entry->safepoint_entry; | 335 SafepointEntry cached_safepoint_entry = entry->safepoint_entry; |
| 336 if (cached_safepoint_entry == NULL) { | 336 if (!entry->safepoint_entry.is_valid()) { |
| 337 cached_safepoint_entry = entry->code->GetSafepointEntry(pc); | 337 entry->safepoint_entry = entry->code->GetSafepointEntry(pc); |
| 338 ASSERT(cached_safepoint_entry != NULL); // No safepoint found. | 338 ASSERT(entry->safepoint_entry.is_valid()); |
| 339 entry->safepoint_entry = cached_safepoint_entry; | |
| 340 } else { | 339 } else { |
| 341 ASSERT(cached_safepoint_entry == entry->code->GetSafepointEntry(pc)); | 340 ASSERT(entry->safepoint_entry.Equals(entry->code->GetSafepointEntry(pc))); |
| 342 } | 341 } |
| 343 | 342 |
| 344 // Fill in the results and return the code. | 343 // Fill in the results and return the code. |
| 345 Code* code = entry->code; | 344 Code* code = entry->code; |
| 346 *safepoint_entry = cached_safepoint_entry; | 345 *safepoint_entry = entry->safepoint_entry; |
| 347 *stack_slots = code->stack_slots(); | 346 *stack_slots = code->stack_slots(); |
| 348 return code; | 347 return code; |
| 349 } | 348 } |
| 350 | 349 |
| 351 | 350 |
| 352 bool StackFrame::HasHandler() const { | 351 bool StackFrame::HasHandler() const { |
| 353 StackHandlerIterator it(this, top_handler()); | 352 StackHandlerIterator it(this, top_handler()); |
| 354 return !it.done(); | 353 return !it.done(); |
| 355 } | 354 } |
| 356 | 355 |
| (...skipping 172 matching lines...) |
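
The hunks in this change replace the raw `uint8_t*` safepoint bit pointer with a `SafepointEntry` value object that knows whether it is valid and carries its own bitmap. The real definition lives in src/safepoint-table.h and is not part of this diff; the sketch below is only an assumption of roughly what such a type provides, inferred from the calls made in this file (`is_valid()`, `Reset()`, `Equals()`, `bits()`, `deoptimization_index()`).

```cpp
#include <stdint.h>
#include <stddef.h>

// Hypothetical sketch of a SafepointEntry-like value type; the actual V8
// class packs more information and uses V8-specific macros and constants.
class SafepointEntry {
 public:
  // A default-constructed entry is invalid, matching the cache-reset path.
  SafepointEntry() : deopt_index_(-1), bits_(NULL) {}
  SafepointEntry(int deopt_index, uint8_t* bits)
      : deopt_index_(deopt_index), bits_(bits) {}

  bool is_valid() const { return bits_ != NULL; }
  void Reset() { bits_ = NULL; }

  bool Equals(const SafepointEntry& other) const {
    return deopt_index_ == other.deopt_index_ && bits_ == other.bits_;
  }

  // Index into the deoptimization data, or -1 for call sites without a
  // registered lazy deopt.
  int deoptimization_index() const { return deopt_index_; }

  // Bit vector: register bits first, then one bit per stack slot.
  uint8_t* bits() const { return bits_; }

 private:
  int deopt_index_;
  uint8_t* bits_;
};
```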
| 529 StackHandlerIterator it(this, top_handler()); | 528 StackHandlerIterator it(this, top_handler()); |
| 530 ASSERT(it.done()); | 529 ASSERT(it.done()); |
| 531 #endif | 530 #endif |
| 532 | 531 |
| 533 // Make sure that we're not doing "safe" stack frame iteration. We cannot | 532 // Make sure that we're not doing "safe" stack frame iteration. We cannot |
| 534 // possibly find pointers in optimized frames in that state. | 533 // possibly find pointers in optimized frames in that state. |
| 535 ASSERT(!SafeStackFrameIterator::is_active()); | 534 ASSERT(!SafeStackFrameIterator::is_active()); |
| 536 | 535 |
| 537 // Compute the safepoint information. | 536 // Compute the safepoint information. |
| 538 unsigned stack_slots = 0; | 537 unsigned stack_slots = 0; |
| 539 uint8_t* safepoint_entry = NULL; | 538 SafepointEntry safepoint_entry; |
| 540 Code* code = StackFrame::GetSafepointData( | 539 Code* code = StackFrame::GetSafepointData( |
| 541 pc(), &safepoint_entry, &stack_slots); | 540 pc(), &safepoint_entry, &stack_slots); |
| 542 unsigned slot_space = stack_slots * kPointerSize; | 541 unsigned slot_space = stack_slots * kPointerSize; |
| 543 | 542 |
| 544 // Visit the outgoing parameters. This is usually dealt with by the | 543 // Visit the outgoing parameters. This is usually dealt with by the |
| 545 // callee, but while GC'ing we artificially lower the number of | 544 // callee, but while GC'ing we artificially lower the number of |
| 546 // arguments to zero and let the caller deal with it. | 545 // arguments to zero and let the caller deal with it. |
| 547 Object** parameters_base = &Memory::Object_at(sp()); | 546 Object** parameters_base = &Memory::Object_at(sp()); |
| 548 Object** parameters_limit = &Memory::Object_at( | 547 Object** parameters_limit = &Memory::Object_at( |
| 549 fp() + JavaScriptFrameConstants::kFunctionOffset - slot_space); | 548 fp() + JavaScriptFrameConstants::kFunctionOffset - slot_space); |
| 550 | 549 |
| 550 // Visit the parameters that may be on top of the saved registers. |
| 551 if (safepoint_entry.argument_count() > 0) { |
| 552 v->VisitPointers(parameters_base, |
| 553 parameters_base + safepoint_entry.argument_count()); |
| 554 parameters_base += safepoint_entry.argument_count(); |
| 555 } |
| 556 |
| 557 // Skip saved double registers. |
| 558 if (safepoint_entry.has_doubles()) { |
| 559 parameters_base += DoubleRegister::kNumAllocatableRegisters * |
| 560 kDoubleSize / kPointerSize; |
| 561 } |
| 562 |
| 551 // Visit the registers that contain pointers if any. | 563 // Visit the registers that contain pointers if any. |
| 552 if (SafepointTable::HasRegisters(safepoint_entry)) { | 564 if (safepoint_entry.HasRegisters()) { |
| 553 for (int i = kNumSafepointRegisters - 1; i >=0; i--) { | 565 for (int i = kNumSafepointRegisters - 1; i >=0; i--) { |
| 554 if (SafepointTable::HasRegisterAt(safepoint_entry, i)) { | 566 if (safepoint_entry.HasRegisterAt(i)) { |
| 555 int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i); | 567 int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i); |
| 556 v->VisitPointer(parameters_base + reg_stack_index); | 568 v->VisitPointer(parameters_base + reg_stack_index); |
| 557 } | 569 } |
| 558 } | 570 } |
| 559 // Skip the words containing the register values. | 571 // Skip the words containing the register values. |
| 560 parameters_base += kNumSafepointRegisters; | 572 parameters_base += kNumSafepointRegisters; |
| 561 } | 573 } |
| 562 | 574 |
| 563 // We're done dealing with the register bits. | 575 // We're done dealing with the register bits. |
| 564 safepoint_entry += kNumSafepointRegisters >> kBitsPerByteLog2; | 576 uint8_t* safepoint_bits = safepoint_entry.bits(); |
| 577 safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2; |
| 565 | 578 |
| 566 // Visit the rest of the parameters. | 579 // Visit the rest of the parameters. |
| 567 v->VisitPointers(parameters_base, parameters_limit); | 580 v->VisitPointers(parameters_base, parameters_limit); |
| 568 | 581 |
| 569 // Visit pointer spill slots and locals. | 582 // Visit pointer spill slots and locals. |
| 570 for (unsigned index = 0; index < stack_slots; index++) { | 583 for (unsigned index = 0; index < stack_slots; index++) { |
| 571 int byte_index = index >> kBitsPerByteLog2; | 584 int byte_index = index >> kBitsPerByteLog2; |
| 572 int bit_index = index & (kBitsPerByte - 1); | 585 int bit_index = index & (kBitsPerByte - 1); |
| 573 if ((safepoint_entry[byte_index] & (1U << bit_index)) != 0) { | 586 if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) { |
| 574 v->VisitPointer(parameters_limit + index); | 587 v->VisitPointer(parameters_limit + index); |
| 575 } | 588 } |
| 576 } | 589 } |
| 577 | 590 |
| 578 // Visit the context and the function. | 591 // Visit the context and the function. |
| 579 Object** fixed_base = &Memory::Object_at( | 592 Object** fixed_base = &Memory::Object_at( |
| 580 fp() + JavaScriptFrameConstants::kFunctionOffset); | 593 fp() + JavaScriptFrameConstants::kFunctionOffset); |
| 581 Object** fixed_limit = &Memory::Object_at(fp()); | 594 Object** fixed_limit = &Memory::Object_at(fp()); |
| 582 v->VisitPointers(fixed_base, fixed_limit); | 595 v->VisitPointers(fixed_base, fixed_limit); |
| 583 | 596 |
| (...skipping 91 matching lines...) |
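
The frame-iteration hunk above walks the bit vector returned by `safepoint_entry.bits()`: the first `kNumSafepointRegisters` bits flag registers that hold tagged pointers, and the bits after them flag tagged spill slots. A minimal sketch of that decoding follows; the constants are illustrative, since the real values come from the per-architecture port headers.

```cpp
#include <stdint.h>

// Illustrative constants; the real values are architecture dependent.
static const int kNumSafepointRegisters = 8;
static const int kBitsPerByte = 8;
static const int kBitsPerByteLog2 = 3;

// Returns true if stack slot 'index' is tagged (holds a pointer the GC must
// visit). 'entry_bits' is what safepoint_entry.bits() would return: register
// bits first, then one bit per stack slot.
static bool StackSlotIsTagged(const uint8_t* entry_bits, unsigned index) {
  // Skip past the bytes that encode the register bits, exactly as the hunk
  // above does with kNumSafepointRegisters >> kBitsPerByteLog2.
  const uint8_t* slot_bits =
      entry_bits + (kNumSafepointRegisters >> kBitsPerByteLog2);
  int byte_index = index >> kBitsPerByteLog2;
  int bit_index = index & (kBitsPerByte - 1);
  return (slot_bits[byte_index] & (1u << bit_index)) != 0;
}
```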
| 675 if (code_->kind() == Code::FUNCTION) PrintF(" NON-OPT"); | 688 if (code_->kind() == Code::FUNCTION) PrintF(" NON-OPT"); |
| 676 if (code_->kind() == Code::OPTIMIZED_FUNCTION) PrintF(" OPT"); | 689 if (code_->kind() == Code::OPTIMIZED_FUNCTION) PrintF(" OPT"); |
| 677 PrintF("\npc: %d\n", offset_); | 690 PrintF("\npc: %d\n", offset_); |
| 678 } | 691 } |
| 679 | 692 |
| 680 | 693 |
| 681 void OptimizedFrame::Summarize(List<FrameSummary>* frames) { | 694 void OptimizedFrame::Summarize(List<FrameSummary>* frames) { |
| 682 ASSERT(frames->length() == 0); | 695 ASSERT(frames->length() == 0); |
| 683 ASSERT(is_optimized()); | 696 ASSERT(is_optimized()); |
| 684 | 697 |
| 685 int deopt_index = AstNode::kNoNumber; | 698 int deopt_index = Safepoint::kNoDeoptimizationIndex; |
| 686 DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index); | 699 DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index); |
| 687 | 700 |
| 688 // BUG(3243555): Since we don't have a lazy-deopt registered at | 701 // BUG(3243555): Since we don't have a lazy-deopt registered at |
| 689 // throw-statements, we can't use the translation at the call-site of | 702 // throw-statements, we can't use the translation at the call-site of |
| 690 // throw. An entry with no deoptimization index indicates a call-site | 703 // throw. An entry with no deoptimization index indicates a call-site |
| 691 // without a lazy-deopt. As a consequence we are not allowed to inline | 704 // without a lazy-deopt. As a consequence we are not allowed to inline |
| 692 // functions containing throw. | 705 // functions containing throw. |
| 693 if (deopt_index == Safepoint::kNoDeoptimizationIndex) { | 706 if (deopt_index == Safepoint::kNoDeoptimizationIndex) { |
| 694 JavaScriptFrame::Summarize(frames); | 707 JavaScriptFrame::Summarize(frames); |
| 695 return; | 708 return; |
| (...skipping 75 matching lines...) |
| 771 | 784 |
| 772 // The code object may have been replaced by lazy deoptimization. Fall | 785 // The code object may have been replaced by lazy deoptimization. Fall |
| 773 // back to a slow search in this case to find the original optimized | 786 // back to a slow search in this case to find the original optimized |
| 774 // code object. | 787 // code object. |
| 775 if (!code->contains(pc())) { | 788 if (!code->contains(pc())) { |
| 776 code = PcToCodeCache::GcSafeFindCodeForPc(pc()); | 789 code = PcToCodeCache::GcSafeFindCodeForPc(pc()); |
| 777 } | 790 } |
| 778 ASSERT(code != NULL); | 791 ASSERT(code != NULL); |
| 779 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); | 792 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); |
| 780 | 793 |
| 781 SafepointTable table(code); | 794 SafepointEntry safepoint_entry = code->GetSafepointEntry(pc()); |
| 782 unsigned pc_offset = static_cast<unsigned>(pc() - code->instruction_start()); | 795 *deopt_index = safepoint_entry.deoptimization_index(); |
| 783 for (unsigned i = 0; i < table.length(); i++) { | 796 ASSERT(*deopt_index != Safepoint::kNoDeoptimizationIndex); |
| 784 if (table.GetPcOffset(i) == pc_offset) { | |
| 785 *deopt_index = table.GetDeoptimizationIndex(i); | |
| 786 break; | |
| 787 } | |
| 788 } | |
| 789 ASSERT(*deopt_index != AstNode::kNoNumber); | |
| 790 | 797 |
| 791 return DeoptimizationInputData::cast(code->deoptimization_data()); | 798 return DeoptimizationInputData::cast(code->deoptimization_data()); |
| 792 } | 799 } |
| 793 | 800 |
| 794 | 801 |
| 795 void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) { | 802 void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) { |
| 796 ASSERT(functions->length() == 0); | 803 ASSERT(functions->length() == 0); |
| 797 ASSERT(is_optimized()); | 804 ASSERT(is_optimized()); |
| 798 | 805 |
| 799 int deopt_index = AstNode::kNoNumber; | 806 int deopt_index = Safepoint::kNoDeoptimizationIndex; |
| 800 DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index); | 807 DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index); |
| 801 | 808 |
| 802 TranslationIterator it(data->TranslationByteArray(), | 809 TranslationIterator it(data->TranslationByteArray(), |
| 803 data->TranslationIndex(deopt_index)->value()); | 810 data->TranslationIndex(deopt_index)->value()); |
| 804 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next()); | 811 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next()); |
| 805 ASSERT(opcode == Translation::BEGIN); | 812 ASSERT(opcode == Translation::BEGIN); |
| 806 int frame_count = it.Next(); | 813 int frame_count = it.Next(); |
| 807 | 814 |
| 808 // We insert the frames in reverse order because the frames | 815 // We insert the frames in reverse order because the frames |
| 809 // in the deoptimization translation are ordered bottom-to-top. | 816 // in the deoptimization translation are ordered bottom-to-top. |
| (...skipping 333 matching lines...) |
| 1143 PcToCodeCacheEntry* entry = cache(index); | 1150 PcToCodeCacheEntry* entry = cache(index); |
| 1144 if (entry->pc == pc) { | 1151 if (entry->pc == pc) { |
| 1145 Counters::pc_to_code_cached.Increment(); | 1152 Counters::pc_to_code_cached.Increment(); |
| 1146 ASSERT(entry->code == GcSafeFindCodeForPc(pc)); | 1153 ASSERT(entry->code == GcSafeFindCodeForPc(pc)); |
| 1147 } else { | 1154 } else { |
| 1148 // Because this code may be interrupted by a profiling signal that | 1155 // Because this code may be interrupted by a profiling signal that |
| 1149 // also queries the cache, we cannot update pc before the code has | 1156 // also queries the cache, we cannot update pc before the code has |
| 1150 // been set. Otherwise, we risk trying to use a cache entry before | 1157 // been set. Otherwise, we risk trying to use a cache entry before |
| 1151 // the code has been computed. | 1158 // the code has been computed. |
| 1152 entry->code = GcSafeFindCodeForPc(pc); | 1159 entry->code = GcSafeFindCodeForPc(pc); |
| 1153 entry->safepoint_entry = NULL; | 1160 entry->safepoint_entry.Reset(); |
| 1154 entry->pc = pc; | 1161 entry->pc = pc; |
| 1155 } | 1162 } |
| 1156 return entry; | 1163 return entry; |
| 1157 } | 1164 } |
| 1158 | 1165 |
| 1159 | 1166 |
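
The GetCacheEntry hunk above keeps the comment about profiling signals: a signal handler that queries the cache treats a matching `pc` as proof that `code` (and now the reset `safepoint_entry`) is already consistent, so `pc` must be written last. A simplified sketch of that ordering, with placeholder types rather than the real PcToCodeCacheEntry layout:

```cpp
// On a cache miss the entry is repopulated; writing 'pc' last means a
// profiling signal that interrupts this code either misses the cache or
// sees a fully initialized entry.
struct Entry {
  void* pc;
  void* code;
  bool safepoint_valid;
};

void Repopulate(Entry* entry, void* pc, void* code) {
  entry->code = code;              // 1. publish the code object first
  entry->safepoint_valid = false;  // 2. drop the stale safepoint entry
  entry->pc = pc;                  // 3. only now will lookups on 'pc' hit
}
```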
| 1160 // ------------------------------------------------------------------------- | 1167 // ------------------------------------------------------------------------- |
| 1161 | 1168 |
| 1162 int NumRegs(RegList reglist) { | 1169 int NumRegs(RegList reglist) { |
| 1163 int n = 0; | 1170 int n = 0; |
| (...skipping 52 matching lines...) |
| 1216 ZoneList<StackFrame*> list(10); | 1223 ZoneList<StackFrame*> list(10); |
| 1217 for (StackFrameIterator it; !it.done(); it.Advance()) { | 1224 for (StackFrameIterator it; !it.done(); it.Advance()) { |
| 1218 StackFrame* frame = AllocateFrameCopy(it.frame()); | 1225 StackFrame* frame = AllocateFrameCopy(it.frame()); |
| 1219 list.Add(frame); | 1226 list.Add(frame); |
| 1220 } | 1227 } |
| 1221 return list.ToVector(); | 1228 return list.ToVector(); |
| 1222 } | 1229 } |
| 1223 | 1230 |
| 1224 | 1231 |
| 1225 } } // namespace v8::internal | 1232 } } // namespace v8::internal |