OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/code-factory.h" | 7 #include "src/code-factory.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/cpu-profiler.h" | 9 #include "src/cpu-profiler.h" |
10 #include "src/hydrogen-osr.h" | 10 #include "src/hydrogen-osr.h" |
(...skipping 283 matching lines...)
294 // Deferred code is the last part of the instruction sequence. Mark | 294 // Deferred code is the last part of the instruction sequence. Mark |
295 // the generated code as done unless we bailed out. | 295 // the generated code as done unless we bailed out. |
296 if (!is_aborted()) status_ = DONE; | 296 if (!is_aborted()) status_ = DONE; |
297 return !is_aborted(); | 297 return !is_aborted(); |
298 } | 298 } |
299 | 299 |
300 | 300 |
301 bool LCodeGen::GenerateJumpTable() { | 301 bool LCodeGen::GenerateJumpTable() { |
302 if (jump_table_.length() > 0) { | 302 if (jump_table_.length() > 0) { |
303 Comment(";;; -------------------- Jump table --------------------"); | 303 Comment(";;; -------------------- Jump table --------------------"); |
| 304 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 305 Label table_start, call_deopt_entry; |
| 306 |
| 307 __ bind(&table_start); |
| 308 Label needs_frame; |
| 309 Address base = jump_table_[0]->address; |
| 310 for (int i = 0; i < jump_table_.length(); i++) { |
| 311 Deoptimizer::JumpTableEntry* table_entry = jump_table_[i]; |
| 312 __ bind(&table_entry->label); |
| 313 Address entry = table_entry->address; |
| 314 DeoptComment(table_entry->deopt_info); |
| 315 |
| 316 // Second-level deopt table entries are contiguous and small, so instead |
| 317 // of loading the full, absolute address of each one, load the base |
| 318 // address and add an immediate offset. |
| 319 if (is_int16(entry - base)) { |
| 320 if (table_entry->needs_frame) { |
| 321 DCHECK(!info()->saves_caller_doubles()); |
| 322 Comment(";;; call deopt with frame"); |
| 323 __ MultiPush(cp.bit() | fp.bit() | ra.bit()); |
| 324 __ BranchAndLink(&needs_frame, USE_DELAY_SLOT); |
| 325 __ li(t9, Operand(entry - base)); |
| 326 } else { |
| 327 __ BranchAndLink(&call_deopt_entry, USE_DELAY_SLOT); |
| 328 __ li(t9, Operand(entry - base)); |
| 329 } |
| 330 |
| 331 } else { |
| 332 __ li(t9, Operand(entry - base)); |
| 333 if (table_entry->needs_frame) { |
| 334 DCHECK(!info()->saves_caller_doubles()); |
| 335 Comment(";;; call deopt with frame"); |
| 336 __ MultiPush(cp.bit() | fp.bit() | ra.bit()); |
| 337 __ BranchAndLink(&needs_frame); |
| 338 } else { |
| 339 __ BranchAndLink(&call_deopt_entry); |
| 340 } |
| 341 } |
| 342 info()->LogDeoptCallPosition(masm()->pc_offset(), |
| 343 table_entry->deopt_info.inlining_id); |
| 344 } |
| 345 if (needs_frame.is_linked()) { |
| 346 __ bind(&needs_frame); |
| 347 // This variant of deopt can only be used with stubs. Since we don't |
| 348 // have a function pointer to install in the stack frame that we're |
| 349 // building, install a special marker there instead. |
| 350 DCHECK(info()->IsStub()); |
| 351 __ li(at, Operand(Smi::FromInt(StackFrame::STUB))); |
| 352 __ push(at); |
| 353 __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); |
| 354 } |
| 355 |
| 356 Comment(";;; call deopt"); |
| 357 __ bind(&call_deopt_entry); |
| 358 |
| 359 if (info()->saves_caller_doubles()) { |
| 360 DCHECK(info()->IsStub()); |
| 361 RestoreCallerDoubles(); |
| 362 } |
| 363 |
| 364 __ li(at, |
| 365 Operand(reinterpret_cast<int64_t>(base), RelocInfo::RUNTIME_ENTRY)); |
| 366 __ Daddu(t9, t9, Operand(at)); |
| 367 __ Jump(t9); |
304 } | 368 } |
305 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | |
306 Label table_start, call_deopt_entry; | |
307 __ bind(&table_start); | |
308 Label needs_frame; | |
309 for (int i = 0; i < jump_table_.length(); i++) { | |
310 Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i]; | |
311 __ bind(&table_entry->label); | |
312 Address entry = table_entry->address; | |
313 DeoptComment(table_entry->deopt_info); | |
314 __ li(t9, Operand(ExternalReference::ForDeoptEntry(entry))); | |
315 if (table_entry->needs_frame) { | |
316 DCHECK(!info()->saves_caller_doubles()); | |
317 Comment(";;; call deopt with frame"); | |
318 __ MultiPush(cp.bit() | fp.bit() | ra.bit()); | |
319 __ Call(&needs_frame); | |
320 } else { | |
321 __ Call(&call_deopt_entry); | |
322 } | |
323 info()->LogDeoptCallPosition(masm()->pc_offset(), | |
324 table_entry->deopt_info.inlining_id); | |
325 } | |
326 if (needs_frame.is_linked()) { | |
327 __ bind(&needs_frame); | |
328 // This variant of deopt can only be used with stubs. Since we don't | |
329 // have a function pointer to install in the stack frame that we're | |
330 // building, install a special marker there instead. | |
331 DCHECK(info()->IsStub()); | |
332 __ li(at, Operand(Smi::FromInt(StackFrame::STUB))); | |
333 __ push(at); | |
334 __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | |
335 } | |
336 | |
337 Comment(";;; call deopt"); | |
338 __ bind(&call_deopt_entry); | |
339 | |
340 if (info()->saves_caller_doubles()) { | |
341 DCHECK(info()->IsStub()); | |
342 RestoreCallerDoubles(); | |
343 } | |
344 __ Jump(t9); | |
345 | |
346 __ RecordComment("]"); | |
347 | |
348 // The deoptimization jump table is the last part of the instruction | 369 // The deoptimization jump table is the last part of the instruction |
349 // sequence. Mark the generated code as done unless we bailed out. | 370 // sequence. Mark the generated code as done unless we bailed out. |
350 if (!is_aborted()) status_ = DONE; | 371 if (!is_aborted()) status_ = DONE; |
351 return !is_aborted(); | 372 return !is_aborted(); |
352 } | 373 } |
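Note on the new GenerateJumpTable body: the comment about second-level deopt table entries being contiguous and small is the key to the change. The generated code now loads the table base address once and, for each entry whose distance from the base fits in a signed 16-bit immediate, materializes only that offset; entries that are too far away fall back to loading the full address. A minimal standalone sketch of the same encoding decision (plain C++, not V8 code; FitsInInt16, EncodedEntry and EncodeEntries are hypothetical names introduced for illustration):

#include <cstdint>
#include <limits>
#include <vector>

namespace {

// Hypothetical stand-in for the is_int16() check used in the change above.
bool FitsInInt16(int64_t value) {
  return value >= std::numeric_limits<int16_t>::min() &&
         value <= std::numeric_limits<int16_t>::max();
}

// Each entry is encoded either as a small offset from the table base
// (cheap: a single 16-bit immediate load) or as a full absolute address
// (expensive: a multi-instruction 64-bit constant load on MIPS64).
struct EncodedEntry {
  bool is_offset;
  int64_t value;  // Offset from base when is_offset, else absolute address.
};

std::vector<EncodedEntry> EncodeEntries(const std::vector<uintptr_t>& targets) {
  std::vector<EncodedEntry> encoded;
  if (targets.empty()) return encoded;
  const int64_t base = static_cast<int64_t>(targets[0]);
  for (uintptr_t target : targets) {
    const int64_t delta = static_cast<int64_t>(target) - base;
    if (FitsInInt16(delta)) {
      encoded.push_back({true, delta});
    } else {
      encoded.push_back({false, static_cast<int64_t>(target)});
    }
  }
  return encoded;
}

}  // namespace

The payoff is per-entry code size: a 16-bit immediate load is a single instruction on MIPS64, whereas a full 64-bit address would need a multi-instruction constant-load sequence, and the common deopt entry sequence (the li + BranchAndLink pair, with the li placed in the delay slot) stays short.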
353 | 374 |
354 | 375 |
355 bool LCodeGen::GenerateSafepointTable() { | 376 bool LCodeGen::GenerateSafepointTable() { |
356 DCHECK(is_done()); | 377 DCHECK(is_done()); |
357 safepoints_.Emit(masm(), GetStackSlotCount()); | 378 safepoints_.Emit(masm(), GetStackSlotCount()); |
(...skipping 459 matching lines...)
817 | 838 |
818 DCHECK(info()->IsStub() || frame_is_built_); | 839 DCHECK(info()->IsStub() || frame_is_built_); |
819 // Go through jump table if we need to handle condition, build frame, or | 840 // Go through jump table if we need to handle condition, build frame, or |
820 // restore caller doubles. | 841 // restore caller doubles. |
821 if (condition == al && frame_is_built_ && | 842 if (condition == al && frame_is_built_ && |
822 !info()->saves_caller_doubles()) { | 843 !info()->saves_caller_doubles()) { |
823 DeoptComment(deopt_info); | 844 DeoptComment(deopt_info); |
824 __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2); | 845 __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2); |
825 info()->LogDeoptCallPosition(masm()->pc_offset(), deopt_info.inlining_id); | 846 info()->LogDeoptCallPosition(masm()->pc_offset(), deopt_info.inlining_id); |
826 } else { | 847 } else { |
827 Deoptimizer::JumpTableEntry table_entry(entry, deopt_info, bailout_type, | 848 Deoptimizer::JumpTableEntry* table_entry = |
828 !frame_is_built_); | 849 new (zone()) Deoptimizer::JumpTableEntry( |
| 850 entry, deopt_info, bailout_type, !frame_is_built_); |
829 // We often have several deopts to the same entry, reuse the last | 851 // We often have several deopts to the same entry, reuse the last |
830 // jump entry if this is the case. | 852 // jump entry if this is the case. |
831 if (FLAG_trace_deopt || isolate()->cpu_profiler()->is_profiling() || | 853 if (FLAG_trace_deopt || isolate()->cpu_profiler()->is_profiling() || |
832 jump_table_.is_empty() || | 854 jump_table_.is_empty() || |
833 !table_entry.IsEquivalentTo(jump_table_.last())) { | 855 !table_entry->IsEquivalentTo(*jump_table_.last())) { |
834 jump_table_.Add(table_entry, zone()); | 856 jump_table_.Add(table_entry, zone()); |
835 } | 857 } |
836 __ Branch(&jump_table_.last().label, condition, src1, src2); | 858 __ Branch(&jump_table_.last()->label, condition, src1, src2); |
837 } | 859 } |
838 } | 860 } |
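Note on the DeoptimizeIf change: the "reuse the last jump entry" logic keeps the jump table compact now that entries are zone-allocated. A new entry is appended only when the pending deopt differs from the most recent one; otherwise the branch simply targets the existing entry's label. A minimal sketch of that dedup idea (plain C++, not V8 code; JumpEntry and AddOrReuse are hypothetical names introduced for illustration):

#include <cstddef>
#include <cstdint>
#include <vector>

namespace {

// Hypothetical, simplified version of Deoptimizer::JumpTableEntry.
struct JumpEntry {
  uintptr_t address;
  bool needs_frame;

  bool IsEquivalentTo(const JumpEntry& other) const {
    return address == other.address && needs_frame == other.needs_frame;
  }
};

// Appends a fresh entry only when the last one cannot be reused; returns the
// index of the entry the current deopt should branch to.
size_t AddOrReuse(std::vector<JumpEntry>* table, const JumpEntry& entry) {
  if (table->empty() || !entry.IsEquivalentTo(table->back())) {
    table->push_back(entry);
  }
  return table->size() - 1;
}

}  // namespace

As in the change above, reuse is deliberately skipped when FLAG_trace_deopt or the CPU profiler is active, so in those modes every deopt keeps its own table entry and logged call position.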
839 | 861 |
840 | 862 |
841 void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr, | 863 void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr, |
842 Deoptimizer::DeoptReason deopt_reason, | 864 Deoptimizer::DeoptReason deopt_reason, |
843 Register src1, const Operand& src2) { | 865 Register src1, const Operand& src2) { |
844 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 866 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
845 ? Deoptimizer::LAZY | 867 ? Deoptimizer::LAZY |
846 : Deoptimizer::EAGER; | 868 : Deoptimizer::EAGER; |
(...skipping 5178 matching lines...)
6025 __ li(at, scope_info); | 6047 __ li(at, scope_info); |
6026 __ Push(at, ToRegister(instr->function())); | 6048 __ Push(at, ToRegister(instr->function())); |
6027 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 6049 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
6028 RecordSafepoint(Safepoint::kNoLazyDeopt); | 6050 RecordSafepoint(Safepoint::kNoLazyDeopt); |
6029 } | 6051 } |
6030 | 6052 |
6031 | 6053 |
6032 #undef __ | 6054 #undef __ |
6033 | 6055 |
6034 } } // namespace v8::internal | 6056 } } // namespace v8::internal |