| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/disasm.h" | 10 #include "src/disasm.h" |
| (...skipping 394 matching lines...) | |
| 405 } | 405 } |
| 406 | 406 |
| 407 // TODO(titzer): we need a handle scope only because of the macro assembler, | 407 // TODO(titzer): we need a handle scope only because of the macro assembler, |
| 408 // which is only used in EnsureCodeForDeoptimizationEntry. | 408 // which is only used in EnsureCodeForDeoptimizationEntry. |
| 409 HandleScope scope(isolate); | 409 HandleScope scope(isolate); |
| 410 | 410 |
| 411 // Now patch all the codes for deoptimization. | 411 // Now patch all the codes for deoptimization. |
| 412 for (int i = 0; i < codes.length(); i++) { | 412 for (int i = 0; i < codes.length(); i++) { |
| 413 #ifdef DEBUG | 413 #ifdef DEBUG |
| 414 if (codes[i] == topmost_optimized_code) { | 414 if (codes[i] == topmost_optimized_code) { |
| 415 ASSERT(safe_to_deopt_topmost_optimized_code); | 415 DCHECK(safe_to_deopt_topmost_optimized_code); |
| 416 } | 416 } |
| 417 #endif | 417 #endif |
| 418 // It is finally time to die, code object. | 418 // It is finally time to die, code object. |
| 419 | 419 |
| 420 // Remove the code from optimized code map. | 420 // Remove the code from optimized code map. |
| 421 DeoptimizationInputData* deopt_data = | 421 DeoptimizationInputData* deopt_data = |
| 422 DeoptimizationInputData::cast(codes[i]->deoptimization_data()); | 422 DeoptimizationInputData::cast(codes[i]->deoptimization_data()); |
| 423 SharedFunctionInfo* shared = | 423 SharedFunctionInfo* shared = |
| 424 SharedFunctionInfo::cast(deopt_data->SharedFunctionInfo()); | 424 SharedFunctionInfo::cast(deopt_data->SharedFunctionInfo()); |
| 425 shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code"); | 425 shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code"); |
| (...skipping 200 matching lines...) | |
| 626 materialized_values_(NULL), | 626 materialized_values_(NULL), |
| 627 materialized_objects_(NULL), | 627 materialized_objects_(NULL), |
| 628 materialization_value_index_(0), | 628 materialization_value_index_(0), |
| 629 materialization_object_index_(0), | 629 materialization_object_index_(0), |
| 630 trace_scope_(NULL) { | 630 trace_scope_(NULL) { |
| 631 // For COMPILED_STUBs called from builtins, the function pointer is a SMI | 631 // For COMPILED_STUBs called from builtins, the function pointer is a SMI |
| 632 // indicating an internal frame. | 632 // indicating an internal frame. |
| 633 if (function->IsSmi()) { | 633 if (function->IsSmi()) { |
| 634 function = NULL; | 634 function = NULL; |
| 635 } | 635 } |
| 636 ASSERT(from != NULL); | 636 DCHECK(from != NULL); |
| 637 if (function != NULL && function->IsOptimized()) { | 637 if (function != NULL && function->IsOptimized()) { |
| 638 function->shared()->increment_deopt_count(); | 638 function->shared()->increment_deopt_count(); |
| 639 if (bailout_type_ == Deoptimizer::SOFT) { | 639 if (bailout_type_ == Deoptimizer::SOFT) { |
| 640 isolate->counters()->soft_deopts_executed()->Increment(); | 640 isolate->counters()->soft_deopts_executed()->Increment(); |
| 641 // Soft deopts shouldn't count against the overall re-optimization count | 641 // Soft deopts shouldn't count against the overall re-optimization count |
| 642 // that can eventually lead to disabling optimization for a function. | 642 // that can eventually lead to disabling optimization for a function. |
| 643 int opt_count = function->shared()->opt_count(); | 643 int opt_count = function->shared()->opt_count(); |
| 644 if (opt_count > 0) opt_count--; | 644 if (opt_count > 0) opt_count--; |
| 645 function->shared()->set_opt_count(opt_count); | 645 function->shared()->set_opt_count(opt_count); |
| 646 } | 646 } |
| 647 } | 647 } |
| 648 compiled_code_ = FindOptimizedCode(function, optimized_code); | 648 compiled_code_ = FindOptimizedCode(function, optimized_code); |
| 649 | 649 |
| 650 #if DEBUG | 650 #if DEBUG |
| 651 ASSERT(compiled_code_ != NULL); | 651 DCHECK(compiled_code_ != NULL); |
| 652 if (type == EAGER || type == SOFT || type == LAZY) { | 652 if (type == EAGER || type == SOFT || type == LAZY) { |
| 653 ASSERT(compiled_code_->kind() != Code::FUNCTION); | 653 DCHECK(compiled_code_->kind() != Code::FUNCTION); |
| 654 } | 654 } |
| 655 #endif | 655 #endif |
| 656 | 656 |
| 657 StackFrame::Type frame_type = function == NULL | 657 StackFrame::Type frame_type = function == NULL |
| 658 ? StackFrame::STUB | 658 ? StackFrame::STUB |
| 659 : StackFrame::JAVA_SCRIPT; | 659 : StackFrame::JAVA_SCRIPT; |
| 660 trace_scope_ = TraceEnabledFor(type, frame_type) ? | 660 trace_scope_ = TraceEnabledFor(type, frame_type) ? |
| 661 new CodeTracer::Scope(isolate->GetCodeTracer()) : NULL; | 661 new CodeTracer::Scope(isolate->GetCodeTracer()) : NULL; |
| 662 #ifdef DEBUG | 662 #ifdef DEBUG |
| 663 CHECK(AllowHeapAllocation::IsAllowed()); | 663 CHECK(AllowHeapAllocation::IsAllowed()); |
| (...skipping 10 matching lines...) | |
| 674 switch (bailout_type_) { | 674 switch (bailout_type_) { |
| 675 case Deoptimizer::SOFT: | 675 case Deoptimizer::SOFT: |
| 676 case Deoptimizer::EAGER: | 676 case Deoptimizer::EAGER: |
| 677 case Deoptimizer::LAZY: { | 677 case Deoptimizer::LAZY: { |
| 678 Code* compiled_code = FindDeoptimizingCode(from_); | 678 Code* compiled_code = FindDeoptimizingCode(from_); |
| 679 return (compiled_code == NULL) | 679 return (compiled_code == NULL) |
| 680 ? static_cast<Code*>(isolate_->FindCodeObject(from_)) | 680 ? static_cast<Code*>(isolate_->FindCodeObject(from_)) |
| 681 : compiled_code; | 681 : compiled_code; |
| 682 } | 682 } |
| 683 case Deoptimizer::DEBUGGER: | 683 case Deoptimizer::DEBUGGER: |
| 684 ASSERT(optimized_code->contains(from_)); | 684 DCHECK(optimized_code->contains(from_)); |
| 685 return optimized_code; | 685 return optimized_code; |
| 686 } | 686 } |
| 687 FATAL("Could not find code for optimized function"); | 687 FATAL("Could not find code for optimized function"); |
| 688 return NULL; | 688 return NULL; |
| 689 } | 689 } |
| 690 | 690 |
| 691 | 691 |
| 692 void Deoptimizer::PrintFunctionName() { | 692 void Deoptimizer::PrintFunctionName() { |
| 693 if (function_->IsJSFunction()) { | 693 if (function_->IsJSFunction()) { |
| 694 function_->PrintName(trace_scope_->file()); | 694 function_->PrintName(trace_scope_->file()); |
| 695 } else { | 695 } else { |
| 696 PrintF(trace_scope_->file(), | 696 PrintF(trace_scope_->file(), |
| 697 "%s", Code::Kind2String(compiled_code_->kind())); | 697 "%s", Code::Kind2String(compiled_code_->kind())); |
| 698 } | 698 } |
| 699 } | 699 } |
| 700 | 700 |
| 701 | 701 |
| 702 Deoptimizer::~Deoptimizer() { | 702 Deoptimizer::~Deoptimizer() { |
| 703 ASSERT(input_ == NULL && output_ == NULL); | 703 DCHECK(input_ == NULL && output_ == NULL); |
| 704 ASSERT(disallow_heap_allocation_ == NULL); | 704 DCHECK(disallow_heap_allocation_ == NULL); |
| 705 delete trace_scope_; | 705 delete trace_scope_; |
| 706 } | 706 } |
| 707 | 707 |
| 708 | 708 |
| 709 void Deoptimizer::DeleteFrameDescriptions() { | 709 void Deoptimizer::DeleteFrameDescriptions() { |
| 710 delete input_; | 710 delete input_; |
| 711 for (int i = 0; i < output_count_; ++i) { | 711 for (int i = 0; i < output_count_; ++i) { |
| 712 if (output_[i] != input_) delete output_[i]; | 712 if (output_[i] != input_) delete output_[i]; |
| 713 } | 713 } |
| 714 delete[] output_; | 714 delete[] output_; |
| (...skipping 30 matching lines...) | |
| 745 Address addr, | 745 Address addr, |
| 746 BailoutType type) { | 746 BailoutType type) { |
| 747 DeoptimizerData* data = isolate->deoptimizer_data(); | 747 DeoptimizerData* data = isolate->deoptimizer_data(); |
| 748 MemoryChunk* base = data->deopt_entry_code_[type]; | 748 MemoryChunk* base = data->deopt_entry_code_[type]; |
| 749 Address start = base->area_start(); | 749 Address start = base->area_start(); |
| 750 if (base == NULL || | 750 if (base == NULL || |
| 751 addr < start || | 751 addr < start || |
| 752 addr >= start + (kMaxNumberOfEntries * table_entry_size_)) { | 752 addr >= start + (kMaxNumberOfEntries * table_entry_size_)) { |
| 753 return kNotDeoptimizationEntry; | 753 return kNotDeoptimizationEntry; |
| 754 } | 754 } |
| 755 ASSERT_EQ(0, | 755 DCHECK_EQ(0, |
| 756 static_cast<int>(addr - start) % table_entry_size_); | 756 static_cast<int>(addr - start) % table_entry_size_); |
| 757 return static_cast<int>(addr - start) / table_entry_size_; | 757 return static_cast<int>(addr - start) / table_entry_size_; |
| 758 } | 758 } |
| 759 | 759 |
| 760 | 760 |
| 761 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data, | 761 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data, |
| 762 BailoutId id, | 762 BailoutId id, |
| 763 SharedFunctionInfo* shared) { | 763 SharedFunctionInfo* shared) { |
| 764 // TODO(kasperl): For now, we do a simple linear search for the PC | 764 // TODO(kasperl): For now, we do a simple linear search for the PC |
| 765 // offset associated with the given node id. This should probably be | 765 // offset associated with the given node id. This should probably be |
| (...skipping 16 matching lines...) | |
| 782 | 782 |
| 783 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) { | 783 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) { |
| 784 int length = 0; | 784 int length = 0; |
| 785 // Count all entries in the deoptimizing code list of every context. | 785 // Count all entries in the deoptimizing code list of every context. |
| 786 Object* context = isolate->heap()->native_contexts_list(); | 786 Object* context = isolate->heap()->native_contexts_list(); |
| 787 while (!context->IsUndefined()) { | 787 while (!context->IsUndefined()) { |
| 788 Context* native_context = Context::cast(context); | 788 Context* native_context = Context::cast(context); |
| 789 Object* element = native_context->DeoptimizedCodeListHead(); | 789 Object* element = native_context->DeoptimizedCodeListHead(); |
| 790 while (!element->IsUndefined()) { | 790 while (!element->IsUndefined()) { |
| 791 Code* code = Code::cast(element); | 791 Code* code = Code::cast(element); |
| 792 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); | 792 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); |
| 793 length++; | 793 length++; |
| 794 element = code->next_code_link(); | 794 element = code->next_code_link(); |
| 795 } | 795 } |
| 796 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 796 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
| 797 } | 797 } |
| 798 return length; | 798 return length; |
| 799 } | 799 } |
| 800 | 800 |
| 801 | 801 |
| 802 // We rely on this function not causing a GC. It is called from generated code | 802 // We rely on this function not causing a GC. It is called from generated code |
| (...skipping 31 matching lines...) | |
| 834 | 834 |
| 835 BailoutId node_id = input_data->AstId(bailout_id_); | 835 BailoutId node_id = input_data->AstId(bailout_id_); |
| 836 ByteArray* translations = input_data->TranslationByteArray(); | 836 ByteArray* translations = input_data->TranslationByteArray(); |
| 837 unsigned translation_index = | 837 unsigned translation_index = |
| 838 input_data->TranslationIndex(bailout_id_)->value(); | 838 input_data->TranslationIndex(bailout_id_)->value(); |
| 839 | 839 |
| 840 // Do the input frame to output frame(s) translation. | 840 // Do the input frame to output frame(s) translation. |
| 841 TranslationIterator iterator(translations, translation_index); | 841 TranslationIterator iterator(translations, translation_index); |
| 842 Translation::Opcode opcode = | 842 Translation::Opcode opcode = |
| 843 static_cast<Translation::Opcode>(iterator.Next()); | 843 static_cast<Translation::Opcode>(iterator.Next()); |
| 844 ASSERT(Translation::BEGIN == opcode); | 844 DCHECK(Translation::BEGIN == opcode); |
| 845 USE(opcode); | 845 USE(opcode); |
| 846 // Read the number of output frames and allocate an array for their | 846 // Read the number of output frames and allocate an array for their |
| 847 // descriptions. | 847 // descriptions. |
| 848 int count = iterator.Next(); | 848 int count = iterator.Next(); |
| 849 iterator.Next(); // Drop JS frames count. | 849 iterator.Next(); // Drop JS frames count. |
| 850 ASSERT(output_ == NULL); | 850 DCHECK(output_ == NULL); |
| 851 output_ = new FrameDescription*[count]; | 851 output_ = new FrameDescription*[count]; |
| 852 for (int i = 0; i < count; ++i) { | 852 for (int i = 0; i < count; ++i) { |
| 853 output_[i] = NULL; | 853 output_[i] = NULL; |
| 854 } | 854 } |
| 855 output_count_ = count; | 855 output_count_ = count; |
| 856 | 856 |
| 857 Register fp_reg = JavaScriptFrame::fp_register(); | 857 Register fp_reg = JavaScriptFrame::fp_register(); |
| 858 stack_fp_ = reinterpret_cast<Address>( | 858 stack_fp_ = reinterpret_cast<Address>( |
| 859 input_->GetRegister(fp_reg.code()) + | 859 input_->GetRegister(fp_reg.code()) + |
| 860 has_alignment_padding_ * kPointerSize); | 860 has_alignment_padding_ * kPointerSize); |
| (...skipping 167 matching lines...) | |
| 1028 // pointer. | 1028 // pointer. |
| 1029 output_offset -= kFPOnStackSize; | 1029 output_offset -= kFPOnStackSize; |
| 1030 input_offset -= kFPOnStackSize; | 1030 input_offset -= kFPOnStackSize; |
| 1031 if (is_bottommost) { | 1031 if (is_bottommost) { |
| 1032 value = input_->GetFrameSlot(input_offset); | 1032 value = input_->GetFrameSlot(input_offset); |
| 1033 } else { | 1033 } else { |
| 1034 value = output_[frame_index - 1]->GetFp(); | 1034 value = output_[frame_index - 1]->GetFp(); |
| 1035 } | 1035 } |
| 1036 output_frame->SetCallerFp(output_offset, value); | 1036 output_frame->SetCallerFp(output_offset, value); |
| 1037 intptr_t fp_value = top_address + output_offset; | 1037 intptr_t fp_value = top_address + output_offset; |
| 1038 ASSERT(!is_bottommost || (input_->GetRegister(fp_reg.code()) + | 1038 DCHECK(!is_bottommost || (input_->GetRegister(fp_reg.code()) + |
| 1039 has_alignment_padding_ * kPointerSize) == fp_value); | 1039 has_alignment_padding_ * kPointerSize) == fp_value); |
| 1040 output_frame->SetFp(fp_value); | 1040 output_frame->SetFp(fp_value); |
| 1041 if (is_topmost) output_frame->SetRegister(fp_reg.code(), fp_value); | 1041 if (is_topmost) output_frame->SetRegister(fp_reg.code(), fp_value); |
| 1042 if (trace_scope_ != NULL) { | 1042 if (trace_scope_ != NULL) { |
| 1043 PrintF(trace_scope_->file(), | 1043 PrintF(trace_scope_->file(), |
| 1044 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" | 1044 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" |
| 1045 V8PRIxPTR " ; caller's fp\n", | 1045 V8PRIxPTR " ; caller's fp\n", |
| 1046 fp_value, output_offset, value); | 1046 fp_value, output_offset, value); |
| 1047 } | 1047 } |
| 1048 ASSERT(!is_bottommost || !has_alignment_padding_ || | 1048 DCHECK(!is_bottommost || !has_alignment_padding_ || |
| 1049 (fp_value & kPointerSize) != 0); | 1049 (fp_value & kPointerSize) != 0); |
| 1050 | 1050 |
| 1051 if (FLAG_enable_ool_constant_pool) { | 1051 if (FLAG_enable_ool_constant_pool) { |
| 1052 // For the bottommost output frame the constant pool pointer can be gotten | 1052 // For the bottommost output frame the constant pool pointer can be gotten |
| 1053 // from the input frame. For subsequent output frames, it can be read from | 1053 // from the input frame. For subsequent output frames, it can be read from |
| 1054 // the previous frame. | 1054 // the previous frame. |
| 1055 output_offset -= kPointerSize; | 1055 output_offset -= kPointerSize; |
| 1056 input_offset -= kPointerSize; | 1056 input_offset -= kPointerSize; |
| 1057 if (is_bottommost) { | 1057 if (is_bottommost) { |
| 1058 value = input_->GetFrameSlot(input_offset); | 1058 value = input_->GetFrameSlot(input_offset); |
| (...skipping 28 matching lines...) | |
| 1087 V8PRIxPTR "; context\n", | 1087 V8PRIxPTR "; context\n", |
| 1088 top_address + output_offset, output_offset, value); | 1088 top_address + output_offset, output_offset, value); |
| 1089 } | 1089 } |
| 1090 | 1090 |
| 1091 // The function was mentioned explicitly in the BEGIN_FRAME. | 1091 // The function was mentioned explicitly in the BEGIN_FRAME. |
| 1092 output_offset -= kPointerSize; | 1092 output_offset -= kPointerSize; |
| 1093 input_offset -= kPointerSize; | 1093 input_offset -= kPointerSize; |
| 1094 value = reinterpret_cast<intptr_t>(function); | 1094 value = reinterpret_cast<intptr_t>(function); |
| 1095 // The function for the bottommost output frame should also agree with the | 1095 // The function for the bottommost output frame should also agree with the |
| 1096 // input frame. | 1096 // input frame. |
| 1097 ASSERT(!is_bottommost || input_->GetFrameSlot(input_offset) == value); | 1097 DCHECK(!is_bottommost || input_->GetFrameSlot(input_offset) == value); |
| 1098 output_frame->SetFrameSlot(output_offset, value); | 1098 output_frame->SetFrameSlot(output_offset, value); |
| 1099 if (trace_scope_ != NULL) { | 1099 if (trace_scope_ != NULL) { |
| 1100 PrintF(trace_scope_->file(), | 1100 PrintF(trace_scope_->file(), |
| 1101 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" | 1101 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" |
| 1102 V8PRIxPTR "; function\n", | 1102 V8PRIxPTR "; function\n", |
| 1103 top_address + output_offset, output_offset, value); | 1103 top_address + output_offset, output_offset, value); |
| 1104 } | 1104 } |
| 1105 | 1105 |
| 1106 // Translate the rest of the frame. | 1106 // Translate the rest of the frame. |
| 1107 for (unsigned i = 0; i < height; ++i) { | 1107 for (unsigned i = 0; i < height; ++i) { |
| (...skipping 145 matching lines...) | |
| 1253 output_offset -= kPointerSize; | 1253 output_offset -= kPointerSize; |
| 1254 value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1)); | 1254 value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1)); |
| 1255 output_frame->SetFrameSlot(output_offset, value); | 1255 output_frame->SetFrameSlot(output_offset, value); |
| 1256 if (trace_scope_ != NULL) { | 1256 if (trace_scope_ != NULL) { |
| 1257 PrintF(trace_scope_->file(), | 1257 PrintF(trace_scope_->file(), |
| 1258 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" | 1258 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" |
| 1259 V8PRIxPTR " ; argc (%d)\n", | 1259 V8PRIxPTR " ; argc (%d)\n", |
| 1260 top_address + output_offset, output_offset, value, height - 1); | 1260 top_address + output_offset, output_offset, value, height - 1); |
| 1261 } | 1261 } |
| 1262 | 1262 |
| 1263 ASSERT(0 == output_offset); | 1263 DCHECK(0 == output_offset); |
| 1264 | 1264 |
| 1265 Builtins* builtins = isolate_->builtins(); | 1265 Builtins* builtins = isolate_->builtins(); |
| 1266 Code* adaptor_trampoline = | 1266 Code* adaptor_trampoline = |
| 1267 builtins->builtin(Builtins::kArgumentsAdaptorTrampoline); | 1267 builtins->builtin(Builtins::kArgumentsAdaptorTrampoline); |
| 1268 intptr_t pc_value = reinterpret_cast<intptr_t>( | 1268 intptr_t pc_value = reinterpret_cast<intptr_t>( |
| 1269 adaptor_trampoline->instruction_start() + | 1269 adaptor_trampoline->instruction_start() + |
| 1270 isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value()); | 1270 isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value()); |
| 1271 output_frame->SetPc(pc_value); | 1271 output_frame->SetPc(pc_value); |
| 1272 if (FLAG_enable_ool_constant_pool) { | 1272 if (FLAG_enable_ool_constant_pool) { |
| 1273 intptr_t constant_pool_value = | 1273 intptr_t constant_pool_value = |
| (...skipping 17 matching lines...) | |
| 1291 | 1291 |
| 1292 unsigned fixed_frame_size = ConstructFrameConstants::kFrameSize; | 1292 unsigned fixed_frame_size = ConstructFrameConstants::kFrameSize; |
| 1293 unsigned output_frame_size = height_in_bytes + fixed_frame_size; | 1293 unsigned output_frame_size = height_in_bytes + fixed_frame_size; |
| 1294 | 1294 |
| 1295 // Allocate and store the output frame description. | 1295 // Allocate and store the output frame description. |
| 1296 FrameDescription* output_frame = | 1296 FrameDescription* output_frame = |
| 1297 new(output_frame_size) FrameDescription(output_frame_size, function); | 1297 new(output_frame_size) FrameDescription(output_frame_size, function); |
| 1298 output_frame->SetFrameType(StackFrame::CONSTRUCT); | 1298 output_frame->SetFrameType(StackFrame::CONSTRUCT); |
| 1299 | 1299 |
| 1300 // Construct stub can not be topmost or bottommost. | 1300 // Construct stub can not be topmost or bottommost. |
| 1301 ASSERT(frame_index > 0 && frame_index < output_count_ - 1); | 1301 DCHECK(frame_index > 0 && frame_index < output_count_ - 1); |
| 1302 ASSERT(output_[frame_index] == NULL); | 1302 DCHECK(output_[frame_index] == NULL); |
| 1303 output_[frame_index] = output_frame; | 1303 output_[frame_index] = output_frame; |
| 1304 | 1304 |
| 1305 // The top address of the frame is computed from the previous | 1305 // The top address of the frame is computed from the previous |
| 1306 // frame's top and this frame's size. | 1306 // frame's top and this frame's size. |
| 1307 intptr_t top_address; | 1307 intptr_t top_address; |
| 1308 top_address = output_[frame_index - 1]->GetTop() - output_frame_size; | 1308 top_address = output_[frame_index - 1]->GetTop() - output_frame_size; |
| 1309 output_frame->SetTop(top_address); | 1309 output_frame->SetTop(top_address); |
| 1310 | 1310 |
| 1311 // Compute the incoming parameter translation. | 1311 // Compute the incoming parameter translation. |
| 1312 int parameter_count = height; | 1312 int parameter_count = height; |
| (...skipping 506 matching lines...) | |
| 1819 CopyDoubleRegisters(output_frame); | 1819 CopyDoubleRegisters(output_frame); |
| 1820 | 1820 |
| 1821 // Fill registers containing handler and number of parameters. | 1821 // Fill registers containing handler and number of parameters. |
| 1822 SetPlatformCompiledStubRegisters(output_frame, descriptor); | 1822 SetPlatformCompiledStubRegisters(output_frame, descriptor); |
| 1823 | 1823 |
| 1824 // Compute this frame's PC, state, and continuation. | 1824 // Compute this frame's PC, state, and continuation. |
| 1825 Code* trampoline = NULL; | 1825 Code* trampoline = NULL; |
| 1826 StubFunctionMode function_mode = descriptor->function_mode(); | 1826 StubFunctionMode function_mode = descriptor->function_mode(); |
| 1827 StubFailureTrampolineStub(isolate_, | 1827 StubFailureTrampolineStub(isolate_, |
| 1828 function_mode).FindCodeInCache(&trampoline); | 1828 function_mode).FindCodeInCache(&trampoline); |
| 1829 ASSERT(trampoline != NULL); | 1829 DCHECK(trampoline != NULL); |
| 1830 output_frame->SetPc(reinterpret_cast<intptr_t>( | 1830 output_frame->SetPc(reinterpret_cast<intptr_t>( |
| 1831 trampoline->instruction_start())); | 1831 trampoline->instruction_start())); |
| 1832 if (FLAG_enable_ool_constant_pool) { | 1832 if (FLAG_enable_ool_constant_pool) { |
| 1833 Register constant_pool_reg = | 1833 Register constant_pool_reg = |
| 1834 StubFailureTrampolineFrame::constant_pool_pointer_register(); | 1834 StubFailureTrampolineFrame::constant_pool_pointer_register(); |
| 1835 intptr_t constant_pool_value = | 1835 intptr_t constant_pool_value = |
| 1836 reinterpret_cast<intptr_t>(trampoline->constant_pool()); | 1836 reinterpret_cast<intptr_t>(trampoline->constant_pool()); |
| 1837 output_frame->SetConstantPool(constant_pool_value); | 1837 output_frame->SetConstantPool(constant_pool_value); |
| 1838 output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value); | 1838 output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value); |
| 1839 } | 1839 } |
| (...skipping 26 matching lines...) | |
| 1866 for (int i = 0; i < length; ++i) { | 1866 for (int i = 0; i < length; ++i) { |
| 1867 MaterializeNextValue(); | 1867 MaterializeNextValue(); |
| 1868 } | 1868 } |
| 1869 } else if (desc.is_arguments()) { | 1869 } else if (desc.is_arguments()) { |
| 1870 // Construct an arguments object and copy the parameters to a newly | 1870 // Construct an arguments object and copy the parameters to a newly |
| 1871 // allocated arguments object backing store. | 1871 // allocated arguments object backing store. |
| 1872 Handle<JSFunction> function = ArgumentsObjectFunction(object_index); | 1872 Handle<JSFunction> function = ArgumentsObjectFunction(object_index); |
| 1873 Handle<JSObject> arguments = | 1873 Handle<JSObject> arguments = |
| 1874 isolate_->factory()->NewArgumentsObject(function, length); | 1874 isolate_->factory()->NewArgumentsObject(function, length); |
| 1875 Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length); | 1875 Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length); |
| 1876 ASSERT_EQ(array->length(), length); | 1876 DCHECK_EQ(array->length(), length); |
| 1877 arguments->set_elements(*array); | 1877 arguments->set_elements(*array); |
| 1878 materialized_objects_->Add(arguments); | 1878 materialized_objects_->Add(arguments); |
| 1879 for (int i = 0; i < length; ++i) { | 1879 for (int i = 0; i < length; ++i) { |
| 1880 Handle<Object> value = MaterializeNextValue(); | 1880 Handle<Object> value = MaterializeNextValue(); |
| 1881 array->set(i, *value); | 1881 array->set(i, *value); |
| 1882 } | 1882 } |
| 1883 } else { | 1883 } else { |
| 1884 // Dispatch on the instance type of the object to be materialized. | 1884 // Dispatch on the instance type of the object to be materialized. |
| 1885 // We also need to make sure that the representation of all fields | 1885 // We also need to make sure that the representation of all fields |
| 1886 // in the given object are general enough to hold a tagged value. | 1886 // in the given object are general enough to hold a tagged value. |
| (...skipping 70 matching lines...) | |
| 1957 HeapNumber::cast(*value)->set_map(isolate_->heap()->heap_number_map()); | 1957 HeapNumber::cast(*value)->set_map(isolate_->heap()->heap_number_map()); |
| 1958 } | 1958 } |
| 1959 if (*value == isolate_->heap()->arguments_marker()) { | 1959 if (*value == isolate_->heap()->arguments_marker()) { |
| 1960 value = MaterializeNextHeapObject(); | 1960 value = MaterializeNextHeapObject(); |
| 1961 } | 1961 } |
| 1962 return value; | 1962 return value; |
| 1963 } | 1963 } |
| 1964 | 1964 |
| 1965 | 1965 |
| 1966 void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) { | 1966 void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) { |
| 1967 ASSERT_NE(DEBUGGER, bailout_type_); | 1967 DCHECK_NE(DEBUGGER, bailout_type_); |
| 1968 | 1968 |
| 1969 MaterializedObjectStore* materialized_store = | 1969 MaterializedObjectStore* materialized_store = |
| 1970 isolate_->materialized_object_store(); | 1970 isolate_->materialized_object_store(); |
| 1971 previously_materialized_objects_ = materialized_store->Get(stack_fp_); | 1971 previously_materialized_objects_ = materialized_store->Get(stack_fp_); |
| 1972 prev_materialized_count_ = previously_materialized_objects_.is_null() ? | 1972 prev_materialized_count_ = previously_materialized_objects_.is_null() ? |
| 1973 0 : previously_materialized_objects_->length(); | 1973 0 : previously_materialized_objects_->length(); |
| 1974 | 1974 |
| 1975 // Walk all JavaScript output frames with the given frame iterator. | 1975 // Walk all JavaScript output frames with the given frame iterator. |
| 1976 for (int frame_index = 0; frame_index < jsframe_count(); ++frame_index) { | 1976 for (int frame_index = 0; frame_index < jsframe_count(); ++frame_index) { |
| 1977 if (frame_index != 0) it->Advance(); | 1977 if (frame_index != 0) it->Advance(); |
| (...skipping 32 matching lines...) | |
| 2010 HeapNumberMaterializationDescriptor<int> d = | 2010 HeapNumberMaterializationDescriptor<int> d = |
| 2011 deferred_objects_double_values_[i]; | 2011 deferred_objects_double_values_[i]; |
| 2012 Handle<Object> num = isolate_->factory()->NewNumber(d.value()); | 2012 Handle<Object> num = isolate_->factory()->NewNumber(d.value()); |
| 2013 if (trace_scope_ != NULL) { | 2013 if (trace_scope_ != NULL) { |
| 2014 PrintF(trace_scope_->file(), | 2014 PrintF(trace_scope_->file(), |
| 2015 "Materialized a new heap number %p [%e] for object at %d\n", | 2015 "Materialized a new heap number %p [%e] for object at %d\n", |
| 2016 reinterpret_cast<void*>(*num), | 2016 reinterpret_cast<void*>(*num), |
| 2017 d.value(), | 2017 d.value(), |
| 2018 d.destination()); | 2018 d.destination()); |
| 2019 } | 2019 } |
| 2020 ASSERT(values.at(d.destination())->IsTheHole()); | 2020 DCHECK(values.at(d.destination())->IsTheHole()); |
| 2021 values.Set(d.destination(), num); | 2021 values.Set(d.destination(), num); |
| 2022 } | 2022 } |
| 2023 | 2023 |
| 2024 // Play it safe and clear all object double values before we continue. | 2024 // Play it safe and clear all object double values before we continue. |
| 2025 deferred_objects_double_values_.Clear(); | 2025 deferred_objects_double_values_.Clear(); |
| 2026 | 2026 |
| 2027 // Materialize arguments/captured objects. | 2027 // Materialize arguments/captured objects. |
| 2028 if (!deferred_objects_.is_empty()) { | 2028 if (!deferred_objects_.is_empty()) { |
| 2029 List<Handle<Object> > materialized_objects(deferred_objects_.length()); | 2029 List<Handle<Object> > materialized_objects(deferred_objects_.length()); |
| 2030 materialized_objects_ = &materialized_objects; | 2030 materialized_objects_ = &materialized_objects; |
| (...skipping 813 matching lines...) | |
| 2844 if (max_entry_id < entry_count) return; | 2844 if (max_entry_id < entry_count) return; |
| 2845 entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries); | 2845 entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries); |
| 2846 while (max_entry_id >= entry_count) entry_count *= 2; | 2846 while (max_entry_id >= entry_count) entry_count *= 2; |
| 2847 CHECK(entry_count <= Deoptimizer::kMaxNumberOfEntries); | 2847 CHECK(entry_count <= Deoptimizer::kMaxNumberOfEntries); |
| 2848 | 2848 |
| 2849 MacroAssembler masm(isolate, NULL, 16 * KB); | 2849 MacroAssembler masm(isolate, NULL, 16 * KB); |
| 2850 masm.set_emit_debug_code(false); | 2850 masm.set_emit_debug_code(false); |
| 2851 GenerateDeoptimizationEntries(&masm, entry_count, type); | 2851 GenerateDeoptimizationEntries(&masm, entry_count, type); |
| 2852 CodeDesc desc; | 2852 CodeDesc desc; |
| 2853 masm.GetCode(&desc); | 2853 masm.GetCode(&desc); |
| 2854 ASSERT(!RelocInfo::RequiresRelocation(desc)); | 2854 DCHECK(!RelocInfo::RequiresRelocation(desc)); |
| 2855 | 2855 |
| 2856 MemoryChunk* chunk = data->deopt_entry_code_[type]; | 2856 MemoryChunk* chunk = data->deopt_entry_code_[type]; |
| 2857 CHECK(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >= | 2857 CHECK(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >= |
| 2858 desc.instr_size); | 2858 desc.instr_size); |
| 2859 chunk->CommitArea(desc.instr_size); | 2859 chunk->CommitArea(desc.instr_size); |
| 2860 CopyBytes(chunk->area_start(), desc.buffer, | 2860 CopyBytes(chunk->area_start(), desc.buffer, |
| 2861 static_cast<size_t>(desc.instr_size)); | 2861 static_cast<size_t>(desc.instr_size)); |
| 2862 CpuFeatures::FlushICache(chunk->area_start(), desc.instr_size); | 2862 CpuFeatures::FlushICache(chunk->area_start(), desc.instr_size); |
| 2863 | 2863 |
| 2864 data->deopt_entry_code_entries_[type] = entry_count; | 2864 data->deopt_entry_code_entries_[type] = entry_count; |
| (...skipping 73 matching lines...) | |
| 2938 | 2938 |
| 2939 | 2939 |
| 2940 unsigned FrameDescription::GetExpressionCount() { | 2940 unsigned FrameDescription::GetExpressionCount() { |
| 2941 CHECK_EQ(StackFrame::JAVA_SCRIPT, type_); | 2941 CHECK_EQ(StackFrame::JAVA_SCRIPT, type_); |
| 2942 unsigned size = GetFrameSize() - ComputeFixedSize(); | 2942 unsigned size = GetFrameSize() - ComputeFixedSize(); |
| 2943 return size / kPointerSize; | 2943 return size / kPointerSize; |
| 2944 } | 2944 } |
| 2945 | 2945 |
| 2946 | 2946 |
| 2947 Object* FrameDescription::GetExpression(int index) { | 2947 Object* FrameDescription::GetExpression(int index) { |
| 2948 ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_); | 2948 DCHECK_EQ(StackFrame::JAVA_SCRIPT, type_); |
| 2949 unsigned offset = GetOffsetFromSlotIndex(index); | 2949 unsigned offset = GetOffsetFromSlotIndex(index); |
| 2950 return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset)); | 2950 return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset)); |
| 2951 } | 2951 } |
| 2952 | 2952 |
| 2953 | 2953 |
| 2954 void TranslationBuffer::Add(int32_t value, Zone* zone) { | 2954 void TranslationBuffer::Add(int32_t value, Zone* zone) { |
| 2955 // Encode the sign bit in the least significant bit. | 2955 // Encode the sign bit in the least significant bit. |
| 2956 bool is_negative = (value < 0); | 2956 bool is_negative = (value < 0); |
| 2957 uint32_t bits = ((is_negative ? -value : value) << 1) | | 2957 uint32_t bits = ((is_negative ? -value : value) << 1) | |
| 2958 static_cast<int32_t>(is_negative); | 2958 static_cast<int32_t>(is_negative); |
| 2959 // Encode the individual bytes using the least significant bit of | 2959 // Encode the individual bytes using the least significant bit of |
| 2960 // each byte to indicate whether or not more bytes follow. | 2960 // each byte to indicate whether or not more bytes follow. |
| 2961 do { | 2961 do { |
| 2962 uint32_t next = bits >> 7; | 2962 uint32_t next = bits >> 7; |
| 2963 contents_.Add(((bits << 1) & 0xFF) | (next != 0), zone); | 2963 contents_.Add(((bits << 1) & 0xFF) | (next != 0), zone); |
| 2964 bits = next; | 2964 bits = next; |
| 2965 } while (bits != 0); | 2965 } while (bits != 0); |
| 2966 } | 2966 } |
| 2967 | 2967 |
| 2968 | 2968 |
| 2969 int32_t TranslationIterator::Next() { | 2969 int32_t TranslationIterator::Next() { |
| 2970 // Run through the bytes until we reach one with a least significant | 2970 // Run through the bytes until we reach one with a least significant |
| 2971 // bit of zero (marks the end). | 2971 // bit of zero (marks the end). |
| 2972 uint32_t bits = 0; | 2972 uint32_t bits = 0; |
| 2973 for (int i = 0; true; i += 7) { | 2973 for (int i = 0; true; i += 7) { |
| 2974 ASSERT(HasNext()); | 2974 DCHECK(HasNext()); |
| 2975 uint8_t next = buffer_->get(index_++); | 2975 uint8_t next = buffer_->get(index_++); |
| 2976 bits |= (next >> 1) << i; | 2976 bits |= (next >> 1) << i; |
| 2977 if ((next & 1) == 0) break; | 2977 if ((next & 1) == 0) break; |
| 2978 } | 2978 } |
| 2979 // The bits encode the sign in the least significant bit. | 2979 // The bits encode the sign in the least significant bit. |
| 2980 bool is_negative = (bits & 1) == 1; | 2980 bool is_negative = (bits & 1) == 1; |
| 2981 int32_t result = bits >> 1; | 2981 int32_t result = bits >> 1; |
| 2982 return is_negative ? -result : result; | 2982 return is_negative ? -result : result; |
| 2983 } | 2983 } |
| 2984 | 2984 |
| (...skipping 685 matching lines...) | |
| 3670 } | 3670 } |
| 3671 | 3671 |
| 3672 | 3672 |
| 3673 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 3673 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { |
| 3674 v->VisitPointer(BitCast<Object**>(&function_)); | 3674 v->VisitPointer(BitCast<Object**>(&function_)); |
| 3675 v->VisitPointers(parameters_, parameters_ + parameters_count_); | 3675 v->VisitPointers(parameters_, parameters_ + parameters_count_); |
| 3676 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 3676 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); |
| 3677 } | 3677 } |
| 3678 | 3678 |
| 3679 } } // namespace v8::internal | 3679 } } // namespace v8::internal |
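Aside from the mechanical ASSERT→DCHECK conversion, the densest algorithmic region visible in this diff is the translation buffer's variable-length integer codec (old lines 2954–2982): the sign is folded into the least significant bit of the magnitude, and each emitted byte carries 7 payload bits plus a low continuation flag, so the decoder can stop without a length prefix. The sketch below is a minimal standalone rendering of that scheme, assuming `std::vector<uint8_t>` in place of V8's ZoneList/ByteArray; the free functions `Add`/`Next` and the round-trip check are illustrative only, not V8 API.

```cpp
// Minimal standalone sketch of the TranslationBuffer encoding shown above.
// Not V8 code: std::vector<uint8_t> stands in for ZoneList/ByteArray.
#include <cassert>
#include <cstdint>
#include <vector>

// Append |value| to |contents|: sign in the least significant bit of the
// magnitude, then 7 payload bits per byte with a low "more follows" flag.
void Add(std::vector<uint8_t>* contents, int32_t value) {
  bool is_negative = (value < 0);
  uint32_t bits =
      (static_cast<uint32_t>(is_negative ? -value : value) << 1) |
      (is_negative ? 1u : 0u);
  do {
    uint32_t next = bits >> 7;
    contents->push_back(
        static_cast<uint8_t>(((bits << 1) & 0xFF) | (next != 0)));
    bits = next;
  } while (bits != 0);
}

// Read the next value starting at |*index|, advancing the index past it.
int32_t Next(const std::vector<uint8_t>& buffer, size_t* index) {
  uint32_t bits = 0;
  for (int i = 0; true; i += 7) {
    uint8_t next = buffer[(*index)++];
    bits |= static_cast<uint32_t>(next >> 1) << i;
    if ((next & 1) == 0) break;  // Low bit clear marks the last byte.
  }
  bool is_negative = (bits & 1) == 1;
  int32_t result = static_cast<int32_t>(bits >> 1);
  return is_negative ? -result : result;
}

int main() {
  const std::vector<int32_t> samples = {0, 1, -1, 63, -64, 1000, -123456};
  std::vector<uint8_t> buf;
  for (int32_t v : samples) Add(&buf, v);
  size_t index = 0;
  for (int32_t v : samples) assert(Next(buf, &index) == v);
  return 0;
}
```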