| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 80 matching lines...) |
| 91 info()->CommitDependencies(code); | 91 info()->CommitDependencies(code); |
| 92 } | 92 } |
| 93 | 93 |
| 94 | 94 |
| 95 void LCodeGen::Abort(BailoutReason reason) { | 95 void LCodeGen::Abort(BailoutReason reason) { |
| 96 info()->set_bailout_reason(reason); | 96 info()->set_bailout_reason(reason); |
| 97 status_ = ABORTED; | 97 status_ = ABORTED; |
| 98 } | 98 } |
| 99 | 99 |
| 100 | 100 |
| | 101 void LCodeGen::SaveCallerDoubles() { |
| | 102 ASSERT(info()->saves_caller_doubles()); |
| | 103 ASSERT(NeedsEagerFrame()); |
| | 104 Comment(";;; Save clobbered callee double registers"); |
| | 105 int count = 0; |
| | 106 BitVector* doubles = chunk()->allocated_double_registers(); |
| | 107 BitVector::Iterator save_iterator(doubles); |
| | 108 while (!save_iterator.Done()) { |
| | 109 __ vstr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()), |
| | 110 MemOperand(sp, count * kDoubleSize)); |
| | 111 save_iterator.Advance(); |
| | 112 count++; |
| | 113 } |
| | 114 } |
| | 115 |
| | 116 |
| | 117 void LCodeGen::RestoreCallerDoubles() { |
| | 118 ASSERT(info()->saves_caller_doubles()); |
| | 119 ASSERT(NeedsEagerFrame()); |
| | 120 Comment(";;; Restore clobbered callee double registers"); |
| | 121 BitVector* doubles = chunk()->allocated_double_registers(); |
| | 122 BitVector::Iterator save_iterator(doubles); |
| | 123 int count = 0; |
| | 124 while (!save_iterator.Done()) { |
| | 125 __ vldr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()), |
| | 126 MemOperand(sp, count * kDoubleSize)); |
| | 127 save_iterator.Advance(); |
| | 128 count++; |
| | 129 } |
| | 130 } |
| | 131 |
| | 132 |
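Note on the two new helpers above: SaveCallerDoubles() and RestoreCallerDoubles() hoist the double-register spill and reload loops out of GeneratePrologue() and DoReturn() (see the hunks below) so the deopt jump table can reuse them. The invariant both loops rely on is that they walk chunk()->allocated_double_registers() in the same ascending order, so each register is reloaded from exactly the slot it was spilled to. A minimal standalone sketch of that slot assignment, using a std::set as a hypothetical stand-in for V8's BitVector:

    #include <cstdio>
    #include <set>

    const int kDoubleSize = 8;

    // Stand-in for chunk()->allocated_double_registers(): an ordered set of
    // double-register allocation indices (a simplification of V8's BitVector).
    typedef std::set<int> AllocatedDoubles;

    // The sp-relative byte offset that both vstr (save) and vldr (restore)
    // compute for a register: its position in the ascending iteration order
    // times kDoubleSize.
    int SlotOffsetFor(const AllocatedDoubles& doubles, int reg_index) {
      int count = 0;
      for (std::set<int>::const_iterator it = doubles.begin();
           it != doubles.end(); ++it, ++count) {
        if (*it == reg_index) return count * kDoubleSize;
      }
      return -1;  // register was never allocated
    }

    int main() {
      AllocatedDoubles doubles;
      doubles.insert(1);
      doubles.insert(4);
      doubles.insert(5);
      // d1 -> [sp + 0], d4 -> [sp + 8], d5 -> [sp + 16] on both paths.
      std::printf("d4 slot offset: %d\n", SlotOffsetFor(doubles, 4));
      return 0;
    }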
| 101 bool LCodeGen::GeneratePrologue() { | 133 bool LCodeGen::GeneratePrologue() { |
| 102 ASSERT(is_generating()); | 134 ASSERT(is_generating()); |
| 103 | 135 |
| 104 if (info()->IsOptimizing()) { | 136 if (info()->IsOptimizing()) { |
| 105 ProfileEntryHookStub::MaybeCallEntryHook(masm_); | 137 ProfileEntryHookStub::MaybeCallEntryHook(masm_); |
| 106 | 138 |
| 107 #ifdef DEBUG | 139 #ifdef DEBUG |
| 108 if (strlen(FLAG_stop_at) > 0 && | 140 if (strlen(FLAG_stop_at) > 0 && |
| 109 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { | 141 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { |
| 110 __ stop("stop_at"); | 142 __ stop("stop_at"); |
| (...skipping 40 matching lines...) |
| 151 __ cmp(r0, sp); | 183 __ cmp(r0, sp); |
| 152 __ b(ne, &loop); | 184 __ b(ne, &loop); |
| 153 __ pop(r1); | 185 __ pop(r1); |
| 154 __ pop(r0); | 186 __ pop(r0); |
| 155 } else { | 187 } else { |
| 156 __ sub(sp, sp, Operand(slots * kPointerSize)); | 188 __ sub(sp, sp, Operand(slots * kPointerSize)); |
| 157 } | 189 } |
| 158 } | 190 } |
| 159 | 191 |
| 160 if (info()->saves_caller_doubles()) { | 192 if (info()->saves_caller_doubles()) { |
| 161 Comment(";;; Save clobbered callee double registers"); | 193 SaveCallerDoubles(); |
| 162 int count = 0; | |
| 163 BitVector* doubles = chunk()->allocated_double_registers(); | |
| 164 BitVector::Iterator save_iterator(doubles); | |
| 165 while (!save_iterator.Done()) { | |
| 166 __ vstr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()), | |
| 167 MemOperand(sp, count * kDoubleSize)); | |
| 168 save_iterator.Advance(); | |
| 169 count++; | |
| 170 } | |
| 171 } | 194 } |
| 172 | 195 |
| 173 // Possibly allocate a local context. | 196 // Possibly allocate a local context. |
| 174 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 197 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
| 175 if (heap_slots > 0) { | 198 if (heap_slots > 0) { |
| 176 Comment(";;; Allocate local context"); | 199 Comment(";;; Allocate local context"); |
| 177 // Argument to NewContext is the function, which is in r1. | 200 // Argument to NewContext is the function, which is in r1. |
| 178 __ push(r1); | 201 __ push(r1); |
| 179 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 202 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
| 180 FastNewContextStub stub(heap_slots); | 203 FastNewContextStub stub(heap_slots); |
| (...skipping 125 matching lines...) |
| 306 __ bind(&deopt_jump_table_[i].label); | 329 __ bind(&deopt_jump_table_[i].label); |
| 307 Address entry = deopt_jump_table_[i].address; | 330 Address entry = deopt_jump_table_[i].address; |
| 308 Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type; | 331 Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type; |
| 309 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); | 332 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); |
| 310 if (id == Deoptimizer::kNotDeoptimizationEntry) { | 333 if (id == Deoptimizer::kNotDeoptimizationEntry) { |
| 311 Comment(";;; jump table entry %d.", i); | 334 Comment(";;; jump table entry %d.", i); |
| 312 } else { | 335 } else { |
| 313 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 336 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
| 314 } | 337 } |
| 315 if (deopt_jump_table_[i].needs_frame) { | 338 if (deopt_jump_table_[i].needs_frame) { |
| | 339 ASSERT(!info()->saves_caller_doubles()); |
| 316 __ mov(ip, Operand(ExternalReference::ForDeoptEntry(entry))); | 340 __ mov(ip, Operand(ExternalReference::ForDeoptEntry(entry))); |
| 317 if (needs_frame.is_bound()) { | 341 if (needs_frame.is_bound()) { |
| 318 __ b(&needs_frame); | 342 __ b(&needs_frame); |
| 319 } else { | 343 } else { |
| 320 __ bind(&needs_frame); | 344 __ bind(&needs_frame); |
| 321 __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit()); | 345 __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit()); |
| 322 // This variant of deopt can only be used with stubs. Since we don't | 346 // This variant of deopt can only be used with stubs. Since we don't |
| 323 // have a function pointer to install in the stack frame that we're | 347 // have a function pointer to install in the stack frame that we're |
| 324 // building, install a special marker there instead. | 348 // building, install a special marker there instead. |
| 325 ASSERT(info()->IsStub()); | 349 ASSERT(info()->IsStub()); |
| 326 __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); | 350 __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); |
| 327 __ push(scratch0()); | 351 __ push(scratch0()); |
| 328 __ add(fp, sp, Operand(2 * kPointerSize)); | 352 __ add(fp, sp, Operand(2 * kPointerSize)); |
| 329 __ mov(lr, Operand(pc), LeaveCC, al); | 353 __ mov(lr, Operand(pc), LeaveCC, al); |
| 330 __ mov(pc, ip); | 354 __ mov(pc, ip); |
| 331 } | 355 } |
| 332 } else { | 356 } else { |
| | 357 if (info()->saves_caller_doubles()) { |
| | 358 ASSERT(info()->IsStub()); |
| | 359 RestoreCallerDoubles(); |
| | 360 } |
| 333 __ mov(lr, Operand(pc), LeaveCC, al); | 361 __ mov(lr, Operand(pc), LeaveCC, al); |
| 334 __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry))); | 362 __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry))); |
| 335 } | 363 } |
| 336 masm()->CheckConstPool(false, false); | 364 masm()->CheckConstPool(false, false); |
| 337 } | 365 } |
| 338 | 366 |
| 339 // Force constant pool emission at the end of the deopt jump table to make | 367 // Force constant pool emission at the end of the deopt jump table to make |
| 340 // sure that no constant pools are emitted after. | 368 // sure that no constant pools are emitted after. |
| 341 masm()->CheckConstPool(true, false); | 369 masm()->CheckConstPool(true, false); |
| 342 | 370 |
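The jump-table hunk above adds two cases around the existing needs_frame split. An entry that must build a frame now asserts that no caller doubles were saved (the ASSERT encodes that the two situations never combine), while a frameless entry belonging to a stub reloads the saved doubles before tail-calling the deoptimizer entry. A hedged C++ sketch of that dispatch, with illustrative names rather than V8's types:

    #include <cassert>
    #include <cstdio>

    // Illustrative model of one deopt jump table entry (not V8's struct).
    struct JumpTableEntry {
      bool needs_frame;           // no frame was built at the deopt point
      bool saves_caller_doubles;  // the code object spilled caller doubles
      bool is_stub;
    };

    // Mirrors the branch structure emitted per entry above.
    void EmitEntry(const JumpTableEntry& e) {
      if (e.needs_frame) {
        assert(!e.saves_caller_doubles);  // the two cases never combine
        std::puts("build a STUB-marker frame, then jump to the deopt entry");
      } else {
        if (e.saves_caller_doubles) {
          assert(e.is_stub);  // per the ASSERT(info()->IsStub()) above
          std::puts("reload caller doubles from the spill area");
        }
        std::puts("jump to the deopt entry");
      }
    }

    int main() {
      JumpTableEntry entry = { false, true, true };
      EmitEntry(entry);
      return 0;
    }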
| (...skipping 478 matching lines...) |
| 821 condition = ne; | 849 condition = ne; |
| 822 __ cmp(scratch, Operand::Zero()); | 850 __ cmp(scratch, Operand::Zero()); |
| 823 } | 851 } |
| 824 } | 852 } |
| 825 | 853 |
| 826 if (info()->ShouldTrapOnDeopt()) { | 854 if (info()->ShouldTrapOnDeopt()) { |
| 827 __ stop("trap_on_deopt", condition); | 855 __ stop("trap_on_deopt", condition); |
| 828 } | 856 } |
| 829 | 857 |
| 830 ASSERT(info()->IsStub() || frame_is_built_); | 858 ASSERT(info()->IsStub() || frame_is_built_); |
| 831 if (condition == al && frame_is_built_) { | 859 // Go through jump table if we need to handle condition, build frame, or |
| | 860 // restore caller doubles. |
| | 861 if (condition == al && frame_is_built_ && |
| | 862 !info()->saves_caller_doubles()) { |
| 832 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 863 __ Call(entry, RelocInfo::RUNTIME_ENTRY); |
| 833 } else { | 864 } else { |
| 834 // We often have several deopts to the same entry, reuse the last | 865 // We often have several deopts to the same entry, reuse the last |
| 835 // jump entry if this is the case. | 866 // jump entry if this is the case. |
| 836 if (deopt_jump_table_.is_empty() || | 867 if (deopt_jump_table_.is_empty() || |
| 837 (deopt_jump_table_.last().address != entry) || | 868 (deopt_jump_table_.last().address != entry) || |
| 838 (deopt_jump_table_.last().bailout_type != bailout_type) || | 869 (deopt_jump_table_.last().bailout_type != bailout_type) || |
| 839 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { | 870 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { |
| 840 Deoptimizer::JumpTableEntry table_entry(entry, | 871 Deoptimizer::JumpTableEntry table_entry(entry, |
| 841 bailout_type, | 872 bailout_type, |
| (...skipping 2110 matching lines...) |
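In DeoptimizeIf() above, the direct-call fast path gains a third requirement: besides the deopt being unconditional (al) and a frame already being built, the code must not have saved caller doubles, because only the jump-table path emits the RestoreCallerDoubles() fixup. A small sketch of the updated test (the helper name is illustrative, not a V8 function):

    #include <cassert>

    // Mirrors the condition guarding __ Call(entry, RelocInfo::RUNTIME_ENTRY).
    bool CanCallDeoptEntryDirectly(bool unconditional,
                                   bool frame_is_built,
                                   bool saves_caller_doubles) {
      return unconditional && frame_is_built && !saves_caller_doubles;
    }

    int main() {
      // A conditional deopt, a missing frame, or saved caller doubles each
      // force the deopt through the jump table instead.
      assert(CanCallDeoptEntryDirectly(true, true, false));
      assert(!CanCallDeoptEntryDirectly(true, true, true));
      assert(!CanCallDeoptEntryDirectly(true, false, false));
      return 0;
    }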
| 2952 if (FLAG_trace && info()->IsOptimizing()) { | 2983 if (FLAG_trace && info()->IsOptimizing()) { |
| 2953 // Push the return value on the stack as the parameter. | 2984 // Push the return value on the stack as the parameter. |
| 2954 // Runtime::TraceExit returns its parameter in r0. We're leaving the code | 2985 // Runtime::TraceExit returns its parameter in r0. We're leaving the code |
| 2955 // managed by the register allocator and tearing down the frame, so it's | 2986 // managed by the register allocator and tearing down the frame, so it's |
| 2956 // safe to write to the context register. | 2987 // safe to write to the context register. |
| 2957 __ push(r0); | 2988 __ push(r0); |
| 2958 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2989 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2959 __ CallRuntime(Runtime::kTraceExit, 1); | 2990 __ CallRuntime(Runtime::kTraceExit, 1); |
| 2960 } | 2991 } |
| 2961 if (info()->saves_caller_doubles()) { | 2992 if (info()->saves_caller_doubles()) { |
| 2962 ASSERT(NeedsEagerFrame()); | 2993 RestoreCallerDoubles(); |
| 2963 BitVector* doubles = chunk()->allocated_double_registers(); | |
| 2964 BitVector::Iterator save_iterator(doubles); | |
| 2965 int count = 0; | |
| 2966 while (!save_iterator.Done()) { | |
| 2967 __ vldr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()), | |
| 2968 MemOperand(sp, count * kDoubleSize)); | |
| 2969 save_iterator.Advance(); | |
| 2970 count++; | |
| 2971 } | |
| 2972 } | 2994 } |
| 2973 int no_frame_start = -1; | 2995 int no_frame_start = -1; |
| 2974 if (NeedsEagerFrame()) { | 2996 if (NeedsEagerFrame()) { |
| 2975 __ mov(sp, fp); | 2997 __ mov(sp, fp); |
| 2976 no_frame_start = masm_->pc_offset(); | 2998 no_frame_start = masm_->pc_offset(); |
| 2977 __ ldm(ia_w, sp, fp.bit() | lr.bit()); | 2999 __ ldm(ia_w, sp, fp.bit() | lr.bit()); |
| 2978 } | 3000 } |
| 2979 if (instr->has_constant_parameter_count()) { | 3001 if (instr->has_constant_parameter_count()) { |
| 2980 int parameter_count = ToInteger32(instr->constant_parameter_count()); | 3002 int parameter_count = ToInteger32(instr->constant_parameter_count()); |
| 2981 int32_t sp_delta = (parameter_count + 1) * kPointerSize; | 3003 int32_t sp_delta = (parameter_count + 1) * kPointerSize; |
| (...skipping 2951 matching lines...) |
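In DoReturn() above, RestoreCallerDoubles() must run before the frame teardown that follows it, since the reload addresses are sp-relative into the spill area that mov sp, fp discards. A toy model of that ordering constraint (illustrative types, not V8 code):

    #include <cassert>
    #include <vector>

    // Toy frame: double spill slots addressed relative to sp, which become
    // unreachable once the epilogue resets sp to fp.
    struct ToyFrame {
      std::vector<double> spill_slots;
      bool torn_down;
      explicit ToyFrame(int n) : spill_slots(n, 0.0), torn_down(false) {}
      double Reload(int i) {
        assert(!torn_down);  // sp-relative slots are gone after teardown
        return spill_slots[i];
      }
      void TearDown() { torn_down = true; }  // models mov sp, fp
    };

    int main() {
      ToyFrame frame(2);
      frame.Reload(0);   // RestoreCallerDoubles() first...
      frame.TearDown();  // ...then the NeedsEagerFrame() teardown.
      return 0;
    }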
| 5933 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5955 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); |
| 5934 __ ldr(result, FieldMemOperand(scratch, | 5956 __ ldr(result, FieldMemOperand(scratch, |
| 5935 FixedArray::kHeaderSize - kPointerSize)); | 5957 FixedArray::kHeaderSize - kPointerSize)); |
| 5936 __ bind(&done); | 5958 __ bind(&done); |
| 5937 } | 5959 } |
| 5938 | 5960 |
| 5939 | 5961 |
| 5940 #undef __ | 5962 #undef __ |
| 5941 | 5963 |
| 5942 } } // namespace v8::internal | 5964 } } // namespace v8::internal |