| OLD | NEW | 
|---|---|
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 942 matching lines...) | 
| 953       chunk()->inlined_closures(); | 953       chunk()->inlined_closures(); | 
| 954 | 954 | 
| 955   for (int i = 0, length = inlined_closures->length(); i < length; i++) { | 955   for (int i = 0, length = inlined_closures->length(); i < length; i++) { | 
| 956     DefineDeoptimizationLiteral(inlined_closures->at(i)); | 956     DefineDeoptimizationLiteral(inlined_closures->at(i)); | 
| 957   } | 957   } | 
| 958 | 958 | 
| 959   inlined_function_count_ = deoptimization_literals_.length(); | 959   inlined_function_count_ = deoptimization_literals_.length(); | 
| 960 } | 960 } | 
| 961 | 961 | 
| 962 | 962 | 
| 963 Deoptimizer::BailoutType LCodeGen::DeoptimizeHeader( | 963 void LCodeGen::DeoptimizeBranch( | 
| 964     LEnvironment* environment, | 964     LEnvironment* environment, | 
|  | 965     BranchType branch_type, Register reg, int bit, | 
| 965     Deoptimizer::BailoutType* override_bailout_type) { | 966     Deoptimizer::BailoutType* override_bailout_type) { | 
| 966   RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 967   RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 
|  | 968   Deoptimizer::BailoutType bailout_type = | 
|  | 969     info()->IsStub() ? Deoptimizer::LAZY : Deoptimizer::EAGER; | 
|  | 970 | 
|  | 971   if (override_bailout_type != NULL) { | 
|  | 972     bailout_type = *override_bailout_type; | 
|  | 973   } | 
|  | 974 | 
| 967   ASSERT(environment->HasBeenRegistered()); | 975   ASSERT(environment->HasBeenRegistered()); | 
| 968   ASSERT(info()->IsOptimizing() || info()->IsStub()); | 976   ASSERT(info()->IsOptimizing() || info()->IsStub()); | 
| 969   int id = environment->deoptimization_index(); | 977   int id = environment->deoptimization_index(); | 
| 970   Deoptimizer::BailoutType bailout_type = |  | 
| 971       info()->IsStub() ? Deoptimizer::LAZY : Deoptimizer::EAGER; |  | 
| 972   if (override_bailout_type) bailout_type = *override_bailout_type; |  | 
| 973   Address entry = | 978   Address entry = | 
| 974       Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 979       Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 
| 975 | 980 | 
| 976   if (entry == NULL) { | 981   if (entry == NULL) { | 
| 977     Abort(kBailoutWasNotPrepared); | 982     Abort(kBailoutWasNotPrepared); | 
| 978     return bailout_type; |  | 
| 979   } | 983   } | 
| 980 | 984 | 
| 981   if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) { | 985   if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) { | 
| 982     Label not_zero; | 986     Label not_zero; | 
| 983     ExternalReference count = ExternalReference::stress_deopt_count(isolate()); | 987     ExternalReference count = ExternalReference::stress_deopt_count(isolate()); | 
| 984 | 988 | 
| 985     __ Push(x0, x1, x2); | 989     __ Push(x0, x1, x2); | 
| 986     __ Mrs(x2, NZCV); | 990     __ Mrs(x2, NZCV); | 
| 987     __ Mov(x0, Operand(count)); | 991     __ Mov(x0, Operand(count)); | 
| 988     __ Ldr(w1, MemOperand(x0)); | 992     __ Ldr(w1, MemOperand(x0)); | 
| 989     __ Subs(x1, x1, 1); | 993     __ Subs(x1, x1, 1); | 
| 990     __ B(gt, &not_zero); | 994     __ B(gt, &not_zero); | 
| 991     __ Mov(w1, FLAG_deopt_every_n_times); | 995     __ Mov(w1, FLAG_deopt_every_n_times); | 
| 992     __ Str(w1, MemOperand(x0)); | 996     __ Str(w1, MemOperand(x0)); | 
| 993     __ Pop(x0, x1, x2); | 997     __ Pop(x0, x1, x2); | 
| 994     ASSERT(frame_is_built_); | 998     ASSERT(frame_is_built_); | 
| 995     __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 999     __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 
| 996     __ Unreachable(); | 1000     __ Unreachable(); | 
| 997 | 1001 | 
| 998     __ Bind(&not_zero); | 1002     __ Bind(&not_zero); | 
| 999     __ Str(w1, MemOperand(x0)); | 1003     __ Str(w1, MemOperand(x0)); | 
| 1000     __ Msr(NZCV, x2); | 1004     __ Msr(NZCV, x2); | 
| 1001     __ Pop(x0, x1, x2); | 1005     __ Pop(x0, x1, x2); | 
| 1002   } | 1006   } | 
| 1003 | 1007 | 
| 1004   return bailout_type; |  | 
| 1005 } |  | 
| 1006 |  | 
| 1007 |  | 
| 1008 void LCodeGen::Deoptimize(LEnvironment* environment, |  | 
| 1009                           Deoptimizer::BailoutType bailout_type) { |  | 
| 1010   ASSERT(environment->HasBeenRegistered()); |  | 
| 1011   ASSERT(info()->IsOptimizing() || info()->IsStub()); |  | 
| 1012   int id = environment->deoptimization_index(); |  | 
| 1013   Address entry = |  | 
| 1014       Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |  | 
| 1015 |  | 
| 1016   if (info()->ShouldTrapOnDeopt()) { | 1008   if (info()->ShouldTrapOnDeopt()) { | 
|  | 1009     Label dont_trap; | 
|  | 1010     __ B(&dont_trap, InvertBranchType(branch_type), reg, bit); | 
| 1017     __ Debug("trap_on_deopt", __LINE__, BREAK); | 1011     __ Debug("trap_on_deopt", __LINE__, BREAK); | 
|  | 1012     __ Bind(&dont_trap); | 
| 1018   } | 1013   } | 
| 1019 | 1014 | 
| 1020   ASSERT(info()->IsStub() || frame_is_built_); | 1015   ASSERT(info()->IsStub() || frame_is_built_); | 
| 1021   // Go through jump table if we need to build frame, or restore caller doubles. | 1016   // Go through jump table if we need to build frame, or restore caller doubles. | 
| 1022   if (frame_is_built_ && !info()->saves_caller_doubles()) { | 1017   if (frame_is_built_ && !info()->saves_caller_doubles()) { | 
|  | 1018     Label dont_deopt; | 
|  | 1019     __ B(&dont_deopt, InvertBranchType(branch_type), reg, bit); | 
| 1023     __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 1020     __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 
|  | 1021     __ Bind(&dont_deopt); | 
| 1024   } else { | 1022   } else { | 
| 1025     // We often have several deopts to the same entry, reuse the last | 1023     // We often have several deopts to the same entry, reuse the last | 
| 1026     // jump entry if this is the case. | 1024     // jump entry if this is the case. | 
| 1027     if (deopt_jump_table_.is_empty() || | 1025     if (deopt_jump_table_.is_empty() || | 
| 1028         (deopt_jump_table_.last().address != entry) || | 1026         (deopt_jump_table_.last().address != entry) || | 
| 1029         (deopt_jump_table_.last().bailout_type != bailout_type) || | 1027         (deopt_jump_table_.last().bailout_type != bailout_type) || | 
| 1030         (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { | 1028         (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { | 
| 1031       Deoptimizer::JumpTableEntry table_entry(entry, | 1029       Deoptimizer::JumpTableEntry table_entry(entry, | 
| 1032                                               bailout_type, | 1030                                               bailout_type, | 
| 1033                                               !frame_is_built_); | 1031                                               !frame_is_built_); | 
| 1034       deopt_jump_table_.Add(table_entry, zone()); | 1032       deopt_jump_table_.Add(table_entry, zone()); | 
| 1035     } | 1033     } | 
| 1036     __ B(&deopt_jump_table_.last().label); | 1034     __ B(&deopt_jump_table_.last().label, | 
|  | 1035          branch_type, reg, bit); | 
| 1037   } | 1036   } | 
| 1038 } | 1037 } | 
| 1039 | 1038 | 
| 1040 | 1039 | 
| 1041 void LCodeGen::Deoptimize(LEnvironment* environment) { | 1040 void LCodeGen::Deoptimize(LEnvironment* environment, | 
| 1042   Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL); | 1041                           Deoptimizer::BailoutType* override_bailout_type) { | 
| 1043   Deoptimize(environment, bailout_type); | 1042   DeoptimizeBranch(environment, always, NoReg, -1, override_bailout_type); | 
| 1044 } | 1043 } | 
| 1045 | 1044 | 
| 1046 | 1045 | 
| 1047 void LCodeGen::DeoptimizeIf(Condition cond, LEnvironment* environment) { | 1046 void LCodeGen::DeoptimizeIf(Condition cond, LEnvironment* environment) { | 
| 1048   Label dont_deopt; | 1047   DeoptimizeBranch(environment, static_cast<BranchType>(cond)); | 
| 1049   Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL); |  | 
| 1050   __ B(InvertCondition(cond), &dont_deopt); |  | 
| 1051   Deoptimize(environment, bailout_type); |  | 
| 1052   __ Bind(&dont_deopt); |  | 
| 1053 } | 1048 } | 
| 1054 | 1049 | 
| 1055 | 1050 | 
| 1056 void LCodeGen::DeoptimizeIfZero(Register rt, LEnvironment* environment) { | 1051 void LCodeGen::DeoptimizeIfZero(Register rt, LEnvironment* environment) { | 
| 1057   Label dont_deopt; | 1052   DeoptimizeBranch(environment, reg_zero, rt); | 
| 1058   Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL); |  | 
| 1059   __ Cbnz(rt, &dont_deopt); |  | 
| 1060   Deoptimize(environment, bailout_type); |  | 
| 1061   __ Bind(&dont_deopt); |  | 
| 1062 } | 1053 } | 
| 1063 | 1054 | 
| 1064 | 1055 | 
| 1065 void LCodeGen::DeoptimizeIfNegative(Register rt, LEnvironment* environment) { | 1056 void LCodeGen::DeoptimizeIfNegative(Register rt, LEnvironment* environment) { | 
| 1066   Label dont_deopt; | 1057   int sign_bit = rt.Is64Bits() ? kXSignBit : kWSignBit; | 
| 1067   Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL); | 1058   DeoptimizeBranch(environment, reg_bit_set, rt, sign_bit); | 
| 1068   __ Tbz(rt, rt.Is64Bits() ? kXSignBit : kWSignBit, &dont_deopt); |  | 
| 1069   Deoptimize(environment, bailout_type); |  | 
| 1070   __ Bind(&dont_deopt); |  | 
| 1071 } | 1059 } | 
| 1072 | 1060 | 
| 1073 | 1061 | 
| 1074 void LCodeGen::DeoptimizeIfSmi(Register rt, | 1062 void LCodeGen::DeoptimizeIfSmi(Register rt, | 
| 1075                                LEnvironment* environment) { | 1063                                LEnvironment* environment) { | 
| 1076   Label dont_deopt; | 1064   DeoptimizeBranch(environment, reg_bit_clear, rt, MaskToBit(kSmiTagMask)); | 
| 1077   Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL); |  | 
| 1078   __ JumpIfNotSmi(rt, &dont_deopt); |  | 
| 1079   Deoptimize(environment, bailout_type); |  | 
| 1080   __ Bind(&dont_deopt); |  | 
| 1081 } | 1065 } | 
| 1082 | 1066 | 
| 1083 | 1067 | 
| 1084 void LCodeGen::DeoptimizeIfNotSmi(Register rt, LEnvironment* environment) { | 1068 void LCodeGen::DeoptimizeIfNotSmi(Register rt, LEnvironment* environment) { | 
| 1085   Label dont_deopt; | 1069   DeoptimizeBranch(environment, reg_bit_set, rt, MaskToBit(kSmiTagMask)); | 
| 1086   Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL); |  | 
| 1087   __ JumpIfSmi(rt, &dont_deopt); |  | 
| 1088   Deoptimize(environment, bailout_type); |  | 
| 1089   __ Bind(&dont_deopt); |  | 
| 1090 } | 1070 } | 
| 1091 | 1071 | 
| 1092 | 1072 | 
| 1093 void LCodeGen::DeoptimizeIfRoot(Register rt, | 1073 void LCodeGen::DeoptimizeIfRoot(Register rt, | 
| 1094                                 Heap::RootListIndex index, | 1074                                 Heap::RootListIndex index, | 
| 1095                                 LEnvironment* environment) { | 1075                                 LEnvironment* environment) { | 
| 1096   Label dont_deopt; | 1076   __ CompareRoot(rt, index); | 
| 1097   Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL); | 1077   DeoptimizeIf(eq, environment); | 
| 1098   __ JumpIfNotRoot(rt, index, &dont_deopt); |  | 
| 1099   Deoptimize(environment, bailout_type); |  | 
| 1100   __ Bind(&dont_deopt); |  | 
| 1101 } | 1078 } | 
| 1102 | 1079 | 
| 1103 | 1080 | 
| 1104 void LCodeGen::DeoptimizeIfNotRoot(Register rt, | 1081 void LCodeGen::DeoptimizeIfNotRoot(Register rt, | 
| 1105                                    Heap::RootListIndex index, | 1082                                    Heap::RootListIndex index, | 
| 1106                                    LEnvironment* environment) { | 1083                                    LEnvironment* environment) { | 
| 1107   Label dont_deopt; | 1084   __ CompareRoot(rt, index); | 
| 1108   Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL); | 1085   DeoptimizeIf(ne, environment); | 
| 1109   __ JumpIfRoot(rt, index, &dont_deopt); |  | 
| 1110   Deoptimize(environment, bailout_type); |  | 
| 1111   __ Bind(&dont_deopt); |  | 
| 1112 } | 1086 } | 
| 1113 | 1087 | 
| 1114 | 1088 | 
| 1115 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { | 1089 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { | 
| 1116   if (!info()->IsStub()) { | 1090   if (!info()->IsStub()) { | 
| 1117     // Ensure that we have enough space after the previous lazy-bailout | 1091     // Ensure that we have enough space after the previous lazy-bailout | 
| 1118     // instruction for patching the code here. | 1092     // instruction for patching the code here. | 
| 1119     intptr_t current_pc = masm()->pc_offset(); | 1093     intptr_t current_pc = masm()->pc_offset(); | 
| 1120 | 1094 | 
| 1121     if (current_pc < (last_lazy_deopt_pc_ + space_needed)) { | 1095     if (current_pc < (last_lazy_deopt_pc_ + space_needed)) { | 
| (...skipping 997 matching lines...) | 
| 2119   } else { | 2093   } else { | 
| 2120     Deoptimize(instr->environment()); | 2094     Deoptimize(instr->environment()); | 
| 2121   } | 2095   } | 
| 2122 | 2096 | 
| 2123   __ Bind(&success); | 2097   __ Bind(&success); | 
| 2124 } | 2098 } | 
| 2125 | 2099 | 
| 2126 | 2100 | 
| 2127 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { | 2101 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { | 
| 2128   if (!instr->hydrogen()->value()->IsHeapObject()) { | 2102   if (!instr->hydrogen()->value()->IsHeapObject()) { | 
| 2129     // TODO(all): Depending of how we chose to implement the deopt, if we could |  | 
| 2130     // guarantee that we have a deopt handler reachable by a tbz instruction, |  | 
| 2131     // we could use tbz here and produce less code to support this instruction. |  | 
| 2132     DeoptimizeIfSmi(ToRegister(instr->value()), instr->environment()); | 2103     DeoptimizeIfSmi(ToRegister(instr->value()), instr->environment()); | 
| 2133   } | 2104   } | 
| 2134 } | 2105 } | 
| 2135 | 2106 | 
| 2136 | 2107 | 
| 2137 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 2108 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 
| 2138   Register value = ToRegister(instr->value()); | 2109   Register value = ToRegister(instr->value()); | 
| 2139   ASSERT(!instr->result() || ToRegister(instr->result()).Is(value)); | 2110   ASSERT(!instr->result() || ToRegister(instr->result()).Is(value)); | 
| 2140   // TODO(all): See DoCheckNonSmi for comments on use of tbz. |  | 
| 2141   DeoptimizeIfNotSmi(value, instr->environment()); | 2111   DeoptimizeIfNotSmi(value, instr->environment()); | 
| 2142 } | 2112 } | 
| 2143 | 2113 | 
| 2144 | 2114 | 
| 2145 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 2115 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 
| 2146   Register input = ToRegister(instr->value()); | 2116   Register input = ToRegister(instr->value()); | 
| 2147   Register scratch = ToRegister(instr->temp()); | 2117   Register scratch = ToRegister(instr->temp()); | 
| 2148 | 2118 | 
| 2149   __ Ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); | 2119   __ Ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); | 
| 2150   __ Ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); | 2120   __ Ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); | 
| (...skipping 419 matching lines...) | 
| 2570   Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 2540   Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 
| 2571   // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 2541   // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 
| 2572   // needed return address), even though the implementation of LAZY and EAGER is | 2542   // needed return address), even though the implementation of LAZY and EAGER is | 
| 2573   // now identical. When LAZY is eventually completely folded into EAGER, remove | 2543   // now identical. When LAZY is eventually completely folded into EAGER, remove | 
| 2574   // the special case below. | 2544   // the special case below. | 
| 2575   if (info()->IsStub() && (type == Deoptimizer::EAGER)) { | 2545   if (info()->IsStub() && (type == Deoptimizer::EAGER)) { | 
| 2576     type = Deoptimizer::LAZY; | 2546     type = Deoptimizer::LAZY; | 
| 2577   } | 2547   } | 
| 2578 | 2548 | 
| 2579   Comment(";;; deoptimize: %s", instr->hydrogen()->reason()); | 2549   Comment(";;; deoptimize: %s", instr->hydrogen()->reason()); | 
| 2580   DeoptimizeHeader(instr->environment(), &type); | 2550   Deoptimize(instr->environment(), &type); | 
| 2581   Deoptimize(instr->environment(), type); |  | 
| 2582 } | 2551 } | 
| 2583 | 2552 | 
| 2584 | 2553 | 
| 2585 void LCodeGen::DoDivI(LDivI* instr) { | 2554 void LCodeGen::DoDivI(LDivI* instr) { | 
| 2586   if (!instr->is_flooring() && instr->hydrogen()->RightIsPowerOf2()) { | 2555   if (!instr->is_flooring() && instr->hydrogen()->RightIsPowerOf2()) { | 
| 2587     HDiv* hdiv = instr->hydrogen(); | 2556     HDiv* hdiv = instr->hydrogen(); | 
| 2588     Register dividend = ToRegister32(instr->left()); | 2557     Register dividend = ToRegister32(instr->left()); | 
| 2589     int32_t divisor = hdiv->right()->GetInteger32Constant(); | 2558     int32_t divisor = hdiv->right()->GetInteger32Constant(); | 
| 2590     Register result = ToRegister32(instr->result()); | 2559     Register result = ToRegister32(instr->result()); | 
| 2591     ASSERT(!result.is(dividend)); | 2560     ASSERT(!result.is(dividend)); | 
| (...skipping 3098 matching lines...) | 
| 5690   __ Bind(&out_of_object); | 5659   __ Bind(&out_of_object); | 
| 5691   __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 5660   __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 
| 5692   // Index is equal to negated out of object property index plus 1. | 5661   // Index is equal to negated out of object property index plus 1. | 
| 5693   __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 5662   __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 
| 5694   __ Ldr(result, FieldMemOperand(result, | 5663   __ Ldr(result, FieldMemOperand(result, | 
| 5695                                  FixedArray::kHeaderSize - kPointerSize)); | 5664                                  FixedArray::kHeaderSize - kPointerSize)); | 
| 5696   __ Bind(&done); | 5665   __ Bind(&done); | 
| 5697 } | 5666 } | 
| 5698 | 5667 | 
| 5699 } }  // namespace v8::internal | 5668 } }  // namespace v8::internal | 
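For readers skimming the diff: the patch folds the old `DeoptimizeHeader` + `Deoptimize` pair into a single `DeoptimizeBranch` that takes a `BranchType` plus an optional register and bit, so each `DeoptimizeIf*` wrapper becomes a one-line call instead of emitting its own inverted branch around a deopt body. Below is a minimal, self-contained sketch of that dispatch pattern under assumed, simplified types; `EmitBranch`, the integer `reg` handles, and the wrapper bodies are hypothetical stand-ins, not the real V8 MacroAssembler API.

```cpp
#include <cstdio>

// Simplified stand-in for V8's BranchType: branch unconditionally, on a
// register being zero/non-zero, or on a single bit being set/clear.
enum BranchType { always, reg_zero, reg_not_zero, reg_bit_set, reg_bit_clear };

BranchType InvertBranchType(BranchType type) {
  switch (type) {
    case reg_zero:      return reg_not_zero;
    case reg_not_zero:  return reg_zero;
    case reg_bit_set:   return reg_bit_clear;
    case reg_bit_clear: return reg_bit_set;
    default:            return always;  // no meaningful inverse in this sketch
  }
}

// Hypothetical emitter standing in for the Cbz/Cbnz/Tbz/Tbnz selection the
// macro assembler performs for a (label, branch_type, reg, bit) branch.
void EmitBranch(const char* target, BranchType type, int reg, int bit) {
  std::printf("branch to %s: type=%d reg=%d bit=%d\n", target, type, reg, bit);
}

// One central helper replaces the old header/body split; the defaults mirror
// the unconditional call DeoptimizeBranch(environment, always, NoReg, -1, ...).
void DeoptimizeBranch(BranchType type, int reg = -1, int bit = -1) {
  EmitBranch("deopt_entry", type, reg, bit);
}

// The wrappers collapse to single calls, as in the NEW column of the diff.
void DeoptimizeIfZero(int reg)     { DeoptimizeBranch(reg_zero, reg); }
void DeoptimizeIfNegative(int reg) { DeoptimizeBranch(reg_bit_set, reg, 63); }
void DeoptimizeIfSmi(int reg)      { DeoptimizeBranch(reg_bit_clear, reg, 0); }

int main() {
  DeoptimizeIfZero(1);      // branch if x1 == 0
  DeoptimizeIfNegative(2);  // branch if the sign bit of x2 is set
  DeoptimizeIfSmi(3);       // branch if the smi tag bit of x3 is clear
  return 0;
}
```

One payoff of this shape is visible in the trap-on-deopt and frame-is-built paths of the new `DeoptimizeBranch`: when the deopt must be a call rather than a direct branch, the helper skips over the call with `InvertBranchType` once, instead of each caller hand-rolling its own inverted `Cbnz`/`Tbz` around `Deoptimize`.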
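The jump-table branch of `DeoptimizeBranch` keeps the comment "We often have several deopts to the same entry, reuse the last jump entry if this is the case." The reuse rule is just a three-field comparison against the most recent entry; here is a sketch under assumed simplified types, where the `JumpTableEntry` fields mirror the address/bailout_type/needs_frame triple checked in the diff and everything else is hypothetical.

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Simplified stand-in for Deoptimizer::JumpTableEntry in the diff.
struct JumpTableEntry {
  uintptr_t address;   // deoptimization entry this branch island targets
  int bailout_type;    // EAGER vs. LAZY in the real code
  bool needs_frame;    // !frame_is_built_ at the deopt site
};

// A new entry is appended only when any of the three fields differ from the
// last one; otherwise consecutive deopts share one out-of-line branch island.
bool CanReuseLast(const std::vector<JumpTableEntry>& table,
                  const JumpTableEntry& e) {
  return !table.empty() &&
         table.back().address == e.address &&
         table.back().bailout_type == e.bailout_type &&
         table.back().needs_frame == e.needs_frame;
}

int main() {
  std::vector<JumpTableEntry> table;
  JumpTableEntry e{0x1000, 0, true};
  if (!CanReuseLast(table, e)) table.push_back(e);
  // A second deopt to the same entry reuses the island instead of growing
  // the table, matching the is_empty()/last() checks in the diff.
  assert(CanReuseLast(table, e));
  return 0;
}
```

Since only the last entry is compared, the dedup is deliberately cheap: it catches the common back-to-back case without scanning the whole table.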