Chromium Code Reviews

Side by Side Diff: src/arm/lithium-codegen-arm.cc

Issue 22876009: Improve and simplify removal of unreachable code (Closed) | Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Add missing file | Created 7 years, 2 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 80 matching lines...)
91 info()->CommitDependencies(code); 91 info()->CommitDependencies(code);
92 } 92 }
93 93
94 94
95 void LCodeGen::Abort(BailoutReason reason) { 95 void LCodeGen::Abort(BailoutReason reason) {
96 info()->set_bailout_reason(reason); 96 info()->set_bailout_reason(reason);
97 status_ = ABORTED; 97 status_ = ABORTED;
98 } 98 }
99 99
100 100
101 void LCodeGen::Comment(const char* format, ...) {
102 if (!FLAG_code_comments) return;
103 char buffer[4 * KB];
104 StringBuilder builder(buffer, ARRAY_SIZE(buffer));
105 va_list arguments;
106 va_start(arguments, format);
107 builder.AddFormattedList(format, arguments);
108 va_end(arguments);
109
110 // Copy the string before recording it in the assembler to avoid
111 // issues when the stack allocated buffer goes out of scope.
112 size_t length = builder.position();
113 Vector<char> copy = Vector<char>::New(length + 1);
114 OS::MemCopy(copy.start(), builder.Finalize(), copy.length());
115 masm()->RecordComment(copy.start());
116 }
117
118
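
The removed Comment() helper above appears only in the left column, so this patch drops it from the ARM-specific file. Its comment points at a lifetime issue worth keeping in mind: the assembler stores the comment pointer, so the text has to be copied out of the stack buffer before it is recorded. A minimal stand-alone sketch of that copy-before-record pattern, using std::string in place of V8's StringBuilder/Vector (the names and buffer size here are illustrative, not V8 API):

#include <cstdarg>
#include <cstdio>
#include <string>
#include <vector>

// Stand-in for the assembler's comment table; it outlives any single call.
static std::vector<std::string> recorded_comments;

static void RecordFormattedComment(const char* format, ...) {
  char buffer[4096];                         // stack scratch buffer
  va_list args;
  va_start(args, format);
  vsnprintf(buffer, sizeof(buffer), format, args);
  va_end(args);
  // Copy into owned storage before recording: the stack buffer is gone once
  // this function returns, but the recorded comment must outlive it.
  recorded_comments.push_back(std::string(buffer));
}
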
119 bool LCodeGen::GeneratePrologue() { 101 bool LCodeGen::GeneratePrologue() {
120 ASSERT(is_generating()); 102 ASSERT(is_generating());
121 103
122 if (info()->IsOptimizing()) { 104 if (info()->IsOptimizing()) {
123 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 105 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
124 106
125 #ifdef DEBUG 107 #ifdef DEBUG
126 if (strlen(FLAG_stop_at) > 0 && 108 if (strlen(FLAG_stop_at) > 0 &&
127 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { 109 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
128 __ stop("stop_at"); 110 __ stop("stop_at");
(...skipping 135 matching lines...)
264 osr_pc_offset_ = masm()->pc_offset(); 246 osr_pc_offset_ = masm()->pc_offset();
265 247
266 // Adjust the frame size, subsuming the unoptimized frame into the 248 // Adjust the frame size, subsuming the unoptimized frame into the
267 // optimized frame. 249 // optimized frame.
268 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); 250 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots();
269 ASSERT(slots >= 0); 251 ASSERT(slots >= 0);
270 __ sub(sp, sp, Operand(slots * kPointerSize)); 252 __ sub(sp, sp, Operand(slots * kPointerSize));
271 } 253 }
272 254
273 255
274 bool LCodeGen::GenerateBody() {
275 ASSERT(is_generating());
276 bool emit_instructions = true;
277 for (current_instruction_ = 0;
278 !is_aborted() && current_instruction_ < instructions_->length();
279 current_instruction_++) {
280 LInstruction* instr = instructions_->at(current_instruction_);
281
282 // Don't emit code for basic blocks with a replacement.
283 if (instr->IsLabel()) {
284 emit_instructions = !LLabel::cast(instr)->HasReplacement();
285 }
286 if (!emit_instructions) continue;
287
288 if (FLAG_code_comments && instr->HasInterestingComment(this)) {
289 Comment(";;; <@%d,#%d> %s",
290 current_instruction_,
291 instr->hydrogen_value()->id(),
292 instr->Mnemonic());
293 }
294
295 RecordAndUpdatePosition(instr->position());
296
297 instr->CompileToNative(this);
298 }
299 EnsureSpaceForLazyDeopt();
300 last_lazy_deopt_pc_ = masm()->pc_offset();
301 return !is_aborted();
302 }
303
304
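
The GenerateBody() removed above (left column only) holds the pattern this issue is about: when an LLabel has a replacement, the whole basic block it starts is unreachable, so code emission is switched off until the next surviving label. A minimal sketch of that skip pattern, with simplified stand-in types rather than the real LInstruction/LLabel classes:

#include <vector>

struct FakeInstr {
  bool is_label;
  bool has_replacement;  // only meaningful when is_label is true
};

// Count (i.e. "emit") only instructions in blocks whose label survives.
int EmitReachable(const std::vector<FakeInstr>& instructions) {
  bool emit = true;
  int emitted = 0;
  for (const FakeInstr& instr : instructions) {
    if (instr.is_label) emit = !instr.has_replacement;
    if (!emit) continue;  // dead block: skip until the next emitted label
    ++emitted;            // stand-in for instr->CompileToNative(this)
  }
  return emitted;
}
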
305 bool LCodeGen::GenerateDeferredCode() { 256 bool LCodeGen::GenerateDeferredCode() {
306 ASSERT(is_generating()); 257 ASSERT(is_generating());
307 if (deferred_.length() > 0) { 258 if (deferred_.length() > 0) {
308 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { 259 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
309 LDeferredCode* code = deferred_[i]; 260 LDeferredCode* code = deferred_[i];
310 261
311 int pos = instructions_->at(code->instruction_index())->position(); 262 int pos = instructions_->at(code->instruction_index())->position();
312 RecordAndUpdatePosition(pos); 263 RecordAndUpdatePosition(pos);
313 264
314 Comment(";;; <@%d,#%d> " 265 Comment(";;; <@%d,#%d> "
(...skipping 407 matching lines...)
722 TargetAddressStorageMode storage_mode) { 673 TargetAddressStorageMode storage_mode) {
723 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, storage_mode); 674 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, storage_mode);
724 } 675 }
725 676
726 677
727 void LCodeGen::CallCodeGeneric(Handle<Code> code, 678 void LCodeGen::CallCodeGeneric(Handle<Code> code,
728 RelocInfo::Mode mode, 679 RelocInfo::Mode mode,
729 LInstruction* instr, 680 LInstruction* instr,
730 SafepointMode safepoint_mode, 681 SafepointMode safepoint_mode,
731 TargetAddressStorageMode storage_mode) { 682 TargetAddressStorageMode storage_mode) {
732 EnsureSpaceForLazyDeopt(); 683 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
733 ASSERT(instr != NULL); 684 ASSERT(instr != NULL);
734 // Block literal pool emission to ensure nop indicating no inlined smi code 685 // Block literal pool emission to ensure nop indicating no inlined smi code
735 // is in the correct position. 686 // is in the correct position.
736 Assembler::BlockConstPoolScope block_const_pool(masm()); 687 Assembler::BlockConstPoolScope block_const_pool(masm());
737 LPointerMap* pointers = instr->pointer_map(); 688 LPointerMap* pointers = instr->pointer_map();
738 RecordPosition(pointers->position()); 689 RecordPosition(pointers->position());
739 __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode); 690 __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode);
740 RecordSafepointWithLazyDeopt(instr, safepoint_mode); 691 RecordSafepointWithLazyDeopt(instr, safepoint_mode);
741 692
742 // Signal that we don't inline smi code before these stubs in the 693 // Signal that we don't inline smi code before these stubs in the
(...skipping 1437 matching lines...)
2180 2131
2181 BinaryOpStub stub(instr->op(), NO_OVERWRITE); 2132 BinaryOpStub stub(instr->op(), NO_OVERWRITE);
2182 // Block literal pool emission to ensure nop indicating no inlined smi code 2133 // Block literal pool emission to ensure nop indicating no inlined smi code
2183 // is in the correct position. 2134 // is in the correct position.
2184 Assembler::BlockConstPoolScope block_const_pool(masm()); 2135 Assembler::BlockConstPoolScope block_const_pool(masm());
2185 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 2136 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
2186 __ nop(); // Signals no inlined code. 2137 __ nop(); // Signals no inlined code.
2187 } 2138 }
2188 2139
2189 2140
2190 int LCodeGen::GetNextEmittedBlock() const {
2191 for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) {
2192 if (!chunk_->GetLabel(i)->HasReplacement()) return i;
2193 }
2194 return -1;
2195 }
2196
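
The removed GetNextEmittedBlock() above feeds the unchanged EmitBranch() below: a conditional branch can fall through instead of jumping whenever its target is the next block that will actually be emitted, i.e. the next label without a replacement. A hedged sketch of both halves of that decision, with plain ints for block ids and a bool vector standing in for chunk_->GetLabel(i)->HasReplacement():

#include <vector>

int NextEmittedBlock(const std::vector<bool>& has_replacement, int current) {
  for (int i = current + 1; i < static_cast<int>(has_replacement.size()); ++i) {
    if (!has_replacement[i]) return i;  // first block that still emits code
  }
  return -1;                            // no further blocks will be emitted
}

// EmitBranch-style choice: emit a jump only when the target is not the block
// laid out immediately after the current one.
bool NeedsExplicitJump(int target_block, int next_emitted_block) {
  return target_block != next_emitted_block;
}
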
2197 template<class InstrType> 2141 template<class InstrType>
2198 void LCodeGen::EmitBranch(InstrType instr, Condition condition) { 2142 void LCodeGen::EmitBranch(InstrType instr, Condition condition) {
2199 int left_block = instr->TrueDestination(chunk_); 2143 int left_block = instr->TrueDestination(chunk_);
2200 int right_block = instr->FalseDestination(chunk_); 2144 int right_block = instr->FalseDestination(chunk_);
2201 2145
2202 int next_block = GetNextEmittedBlock(); 2146 int next_block = GetNextEmittedBlock();
2203 2147
2204 if (right_block == left_block || condition == al) { 2148 if (right_block == left_block || condition == al) {
2205 EmitGoto(left_block); 2149 EmitGoto(left_block);
2206 } else if (left_block == next_block) { 2150 } else if (left_block == next_block) {
(...skipping 3419 matching lines...)
5626 __ b(ne, &check_frame_marker); 5570 __ b(ne, &check_frame_marker);
5627 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset)); 5571 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));
5628 5572
5629 // Check the marker in the calling frame. 5573 // Check the marker in the calling frame.
5630 __ bind(&check_frame_marker); 5574 __ bind(&check_frame_marker);
5631 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset)); 5575 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
5632 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); 5576 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
5633 } 5577 }
5634 5578
5635 5579
5636 void LCodeGen::EnsureSpaceForLazyDeopt() { 5580 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
5637 if (info()->IsStub()) return; 5581 if (info()->IsStub()) return;
5638 // Ensure that we have enough space after the previous lazy-bailout 5582 // Ensure that we have enough space after the previous lazy-bailout
5639 // instruction for patching the code here. 5583 // instruction for patching the code here.
5640 int current_pc = masm()->pc_offset(); 5584 int current_pc = masm()->pc_offset();
5641 int patch_size = Deoptimizer::patch_size(); 5585 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
5642 if (current_pc < last_lazy_deopt_pc_ + patch_size) {
5643 // Block literal pool emission for duration of padding. 5586 // Block literal pool emission for duration of padding.
5644 Assembler::BlockConstPoolScope block_const_pool(masm()); 5587 Assembler::BlockConstPoolScope block_const_pool(masm());
5645 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; 5588 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
5646 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); 5589 ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
5647 while (padding_size > 0) { 5590 while (padding_size > 0) {
5648 __ nop(); 5591 __ nop();
5649 padding_size -= Assembler::kInstrSize; 5592 padding_size -= Assembler::kInstrSize;
5650 } 5593 }
5651 } 5594 }
5652 } 5595 }
5653 5596
5654 5597
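
The change to EnsureSpaceForLazyDeopt() above is an interface tweak: the caller now passes the required gap (every call site in this diff passes Deoptimizer::patch_size()) instead of the function looking it up itself. The padding arithmetic is unchanged; the sketch below restates it with illustrative numbers (kInstrSize is 4 bytes on ARM; the concrete values are made up, not taken from a real compile):

// How many nops the generator must emit so that the patcher has
// space_needed bytes after the last lazy-deopt point.
int NopsNeeded(int last_lazy_deopt_pc, int current_pc,
               int space_needed, int instr_size) {
  int padding = last_lazy_deopt_pc + space_needed - current_pc;
  if (padding <= 0) return 0;      // already enough room to patch
  return padding / instr_size;     // padding stays a multiple of instr_size
}

// Example: NopsNeeded(100, 104, 12, 4) == 2, so two nops push the current pc
// to 112 = 100 + 12, leaving exactly the requested number of bytes to patch.
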
5655 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { 5598 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
5656 EnsureSpaceForLazyDeopt(); 5599 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
5657 last_lazy_deopt_pc_ = masm()->pc_offset(); 5600 last_lazy_deopt_pc_ = masm()->pc_offset();
5658 ASSERT(instr->HasEnvironment()); 5601 ASSERT(instr->HasEnvironment());
5659 LEnvironment* env = instr->environment(); 5602 LEnvironment* env = instr->environment();
5660 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); 5603 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5661 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 5604 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5662 } 5605 }
5663 5606
5664 5607
5665 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { 5608 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
5666 Deoptimizer::BailoutType type = instr->hydrogen()->type(); 5609 Deoptimizer::BailoutType type = instr->hydrogen()->type();
(...skipping 49 matching lines...)
5716 Label done; 5659 Label done;
5717 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 5660 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
5718 __ cmp(sp, Operand(ip)); 5661 __ cmp(sp, Operand(ip));
5719 __ b(hs, &done); 5662 __ b(hs, &done);
5720 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); 5663 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
5721 ASSERT(instr->context()->IsRegister()); 5664 ASSERT(instr->context()->IsRegister());
5722 ASSERT(ToRegister(instr->context()).is(cp)); 5665 ASSERT(ToRegister(instr->context()).is(cp));
5723 CallCode(isolate()->builtins()->StackCheck(), 5666 CallCode(isolate()->builtins()->StackCheck(),
5724 RelocInfo::CODE_TARGET, 5667 RelocInfo::CODE_TARGET,
5725 instr); 5668 instr);
5726 EnsureSpaceForLazyDeopt(); 5669 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
5727 last_lazy_deopt_pc_ = masm()->pc_offset(); 5670 last_lazy_deopt_pc_ = masm()->pc_offset();
5728 __ bind(&done); 5671 __ bind(&done);
5729 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); 5672 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5730 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 5673 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5731 } else { 5674 } else {
5732 ASSERT(instr->hydrogen()->is_backwards_branch()); 5675 ASSERT(instr->hydrogen()->is_backwards_branch());
5733 // Perform stack overflow check if this goto needs it before jumping. 5676 // Perform stack overflow check if this goto needs it before jumping.
5734 DeferredStackCheck* deferred_stack_check = 5677 DeferredStackCheck* deferred_stack_check =
5735 new(zone()) DeferredStackCheck(this, instr); 5678 new(zone()) DeferredStackCheck(this, instr);
5736 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 5679 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
5737 __ cmp(sp, Operand(ip)); 5680 __ cmp(sp, Operand(ip));
5738 __ b(lo, deferred_stack_check->entry()); 5681 __ b(lo, deferred_stack_check->entry());
5739 EnsureSpaceForLazyDeopt(); 5682 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
5740 last_lazy_deopt_pc_ = masm()->pc_offset(); 5683 last_lazy_deopt_pc_ = masm()->pc_offset();
5741 __ bind(instr->done_label()); 5684 __ bind(instr->done_label());
5742 deferred_stack_check->SetExit(instr->done_label()); 5685 deferred_stack_check->SetExit(instr->done_label());
5743 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); 5686 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5744 // Don't record a deoptimization index for the safepoint here. 5687 // Don't record a deoptimization index for the safepoint here.
5745 // This will be done explicitly when emitting call and the safepoint in 5688 // This will be done explicitly when emitting call and the safepoint in
5746 // the deferred code. 5689 // the deferred code.
5747 } 5690 }
5748 } 5691 }
5749 5692
(...skipping 103 matching lines...)
5853 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); 5796 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index));
5854 __ ldr(result, FieldMemOperand(scratch, 5797 __ ldr(result, FieldMemOperand(scratch,
5855 FixedArray::kHeaderSize - kPointerSize)); 5798 FixedArray::kHeaderSize - kPointerSize));
5856 __ bind(&done); 5799 __ bind(&done);
5857 } 5800 }
5858 5801
5859 5802
5860 #undef __ 5803 #undef __
5861 5804
5862 } } // namespace v8::internal 5805 } } // namespace v8::internal
