Chromium Code Reviews

Side by Side Diff: src/ia32/lithium-codegen-ia32.cc

Issue 22876009: Improve and simplify removal of unreachable code (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Address review feedback Created 7 years, 2 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 102 matching lines...)
113 info()->CommitDependencies(code); 113 info()->CommitDependencies(code);
114 } 114 }
115 115
116 116
117 void LCodeGen::Abort(BailoutReason reason) { 117 void LCodeGen::Abort(BailoutReason reason) {
118 info()->set_bailout_reason(reason); 118 info()->set_bailout_reason(reason);
119 status_ = ABORTED; 119 status_ = ABORTED;
120 } 120 }
121 121
122 122
123 void LCodeGen::Comment(const char* format, ...) {
124 if (!FLAG_code_comments) return;
125 char buffer[4 * KB];
126 StringBuilder builder(buffer, ARRAY_SIZE(buffer));
127 va_list arguments;
128 va_start(arguments, format);
129 builder.AddFormattedList(format, arguments);
130 va_end(arguments);
131
132 // Copy the string before recording it in the assembler to avoid
133 // issues when the stack allocated buffer goes out of scope.
134 size_t length = builder.position();
135 Vector<char> copy = Vector<char>::New(length + 1);
136 OS::MemCopy(copy.start(), builder.Finalize(), copy.length());
137 masm()->RecordComment(copy.start());
138 }
139
140
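Note: the Comment() helper deleted above is presumably moved to shared code rather than dropped (an assumption; the destination file is not part of this diff). The pattern it implements is worth keeping in mind: format into a stack buffer, then duplicate the string on the heap before handing it to the assembler, because RecordComment() keeps the pointer alive after this frame is gone. A minimal standalone sketch of that copy-before-record idea, in plain C++ rather than V8's utilities:

    // Standalone sketch (not V8 API): format a comment, then duplicate it
    // on the heap so the recorded pointer outlives the stack buffer.
    #include <cstdarg>
    #include <cstdio>
    #include <cstring>

    const char* FormatComment(const char* format, ...) {
      char buffer[4096];                 // stack-allocated scratch space
      va_list arguments;
      va_start(arguments, format);
      vsnprintf(buffer, sizeof(buffer), format, arguments);
      va_end(arguments);
      // Copy before returning: the caller may hold this pointer
      // indefinitely, while 'buffer' dies when this frame is popped.
      char* copy = new char[strlen(buffer) + 1];
      strcpy(copy, buffer);
      return copy;                       // caller owns the allocation
    }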
141 #ifdef _MSC_VER 123 #ifdef _MSC_VER
142 void LCodeGen::MakeSureStackPagesMapped(int offset) { 124 void LCodeGen::MakeSureStackPagesMapped(int offset) {
143 const int kPageSize = 4 * KB; 125 const int kPageSize = 4 * KB;
144 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { 126 for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
145 __ mov(Operand(esp, offset), eax); 127 __ mov(Operand(esp, offset), eax);
146 } 128 }
147 } 129 }
148 #endif 130 #endif
149 131
150 132
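The MSVC-only helper above is unchanged, but its purpose is easy to miss: Windows commits stack memory lazily through a single guard page, so a frame larger than 4 KB must be touched one page at a time, starting from the already-committed end, before code can store at arbitrary offsets within it. A standalone illustration of the same probing idea in plain C++ (illustrative only; the real call site is in the elided prologue):

    // Touch each 4 KB page of a large region from high address to low, so
    // every write is at most one page beyond already-committed memory and
    // the OS guard page can commit the stack incrementally.
    #include <cstddef>

    void TouchPages(char* base, size_t size) {
      const size_t kPageSize = 4 * 1024;
      for (size_t offset = size; offset >= kPageSize; offset -= kPageSize) {
        base[offset - kPageSize] = 0;    // one store per page is enough
      }
    }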
(...skipping 226 matching lines...)
377 __ mov(alignment_loc, edx); 359 __ mov(alignment_loc, edx);
378 360
379 // Adjust the frame size, subsuming the unoptimized frame into the 361 // Adjust the frame size, subsuming the unoptimized frame into the
380 // optimized frame. 362 // optimized frame.
381 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); 363 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots();
382 ASSERT(slots >= 1); 364 ASSERT(slots >= 1);
383 __ sub(esp, Immediate((slots - 1) * kPointerSize)); 365 __ sub(esp, Immediate((slots - 1) * kPointerSize));
384 } 366 }
385 367
386 368
387 bool LCodeGen::GenerateBody() { 369 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
388 ASSERT(is_generating()); 370 if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr);
389 bool emit_instructions = true;
390 for (current_instruction_ = 0;
391 !is_aborted() && current_instruction_ < instructions_->length();
392 current_instruction_++) {
393 LInstruction* instr = instructions_->at(current_instruction_);
394
395 // Don't emit code for basic blocks with a replacement.
396 if (instr->IsLabel()) {
397 emit_instructions = !LLabel::cast(instr)->HasReplacement();
398 }
399 if (!emit_instructions) continue;
400
401 if (FLAG_code_comments && instr->HasInterestingComment(this)) {
402 Comment(";;; <@%d,#%d> %s",
403 current_instruction_,
404 instr->hydrogen_value()->id(),
405 instr->Mnemonic());
406 }
407
408 if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr);
409
410 RecordAndUpdatePosition(instr->position());
411
412 instr->CompileToNative(this);
413
414 if (!CpuFeatures::IsSupported(SSE2)) {
415 if (instr->IsGoto()) {
416 x87_stack_.LeavingBlock(current_block_, LGoto::cast(instr));
417 } else if (FLAG_debug_code && FLAG_enable_slow_asserts &&
418 !instr->IsGap() && !instr->IsReturn()) {
419 if (instr->ClobbersDoubleRegisters()) {
420 if (instr->HasDoubleRegisterResult()) {
421 ASSERT_EQ(1, x87_stack_.depth());
422 } else {
423 ASSERT_EQ(0, x87_stack_.depth());
424 }
425 }
426 __ VerifyX87StackDepth(x87_stack_.depth());
427 }
428 }
429 }
430 EnsureSpaceForLazyDeopt();
431 return !is_aborted();
432 } 371 }
433 372
434 373
374 void LCodeGen::GenerateBodyInstructionPost(LInstruction* instr) {
375 if (!CpuFeatures::IsSupported(SSE2)) {
376 if (instr->IsGoto()) {
377 x87_stack_.LeavingBlock(current_block_, LGoto::cast(instr));
378 } else if (FLAG_debug_code && FLAG_enable_slow_asserts &&
379 !instr->IsGap() && !instr->IsReturn()) {
380 if (instr->ClobbersDoubleRegisters()) {
381 if (instr->HasDoubleRegisterResult()) {
382 ASSERT_EQ(1, x87_stack_.depth());
383 } else {
384 ASSERT_EQ(0, x87_stack_.depth());
385 }
386 }
387 __ VerifyX87StackDepth(x87_stack_.depth());
388 }
389 }
390 }
391
392
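This is the heart of the patch on ia32: the old GenerateBody() loop above disappears from this file, and only its platform-specific pieces survive as the GenerateBodyInstructionPre/Post hooks. The driving loop, including the skip of label-replaced (unreachable) basic blocks, presumably moves to a shared base class; that file is not in this diff, so the following reconstruction of the shared loop is an assumption pieced together from the deleted code:

    // Reconstruction (assumed, not shown in this diff) of the shared loop
    // that replaces the deleted GenerateBody() above; only the platform
    // hooks remain in this file.
    bool LCodeGenBase::GenerateBody() {
      ASSERT(is_generating());
      bool emit_instructions = true;
      for (current_instruction_ = 0;
           !is_aborted() && current_instruction_ < instructions_->length();
           current_instruction_++) {
        LInstruction* instr = instructions_->at(current_instruction_);
        // Unreachable code removal: a label with a replacement marks a
        // basic block that will never be entered, so emit nothing until
        // the next live label.
        if (instr->IsLabel()) {
          emit_instructions = !LLabel::cast(instr)->HasReplacement();
        }
        if (!emit_instructions) continue;
        // (code-comment emission and position recording elided)
        GenerateBodyInstructionPre(instr);   // ia32: flush x87 stack
        instr->CompileToNative(this);
        GenerateBodyInstructionPost(instr);  // ia32: x87 depth asserts
      }
      return !is_aborted();
    }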
435 bool LCodeGen::GenerateJumpTable() { 393 bool LCodeGen::GenerateJumpTable() {
436 Label needs_frame; 394 Label needs_frame;
437 if (jump_table_.length() > 0) { 395 if (jump_table_.length() > 0) {
438 Comment(";;; -------------------- Jump table --------------------"); 396 Comment(";;; -------------------- Jump table --------------------");
439 } 397 }
440 for (int i = 0; i < jump_table_.length(); i++) { 398 for (int i = 0; i < jump_table_.length(); i++) {
441 __ bind(&jump_table_[i].label); 399 __ bind(&jump_table_[i].label);
442 Address entry = jump_table_[i].address; 400 Address entry = jump_table_[i].address;
443 Deoptimizer::BailoutType type = jump_table_[i].bailout_type; 401 Deoptimizer::BailoutType type = jump_table_[i].bailout_type;
444 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); 402 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
(...skipping 1890 matching lines...)
2335 ASSERT(ToRegister(instr->left()).is(edx)); 2293 ASSERT(ToRegister(instr->left()).is(edx));
2336 ASSERT(ToRegister(instr->right()).is(eax)); 2294 ASSERT(ToRegister(instr->right()).is(eax));
2337 ASSERT(ToRegister(instr->result()).is(eax)); 2295 ASSERT(ToRegister(instr->result()).is(eax));
2338 2296
2339 BinaryOpStub stub(instr->op(), NO_OVERWRITE); 2297 BinaryOpStub stub(instr->op(), NO_OVERWRITE);
2340 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 2298 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
2341 __ nop(); // Signals no inlined code. 2299 __ nop(); // Signals no inlined code.
2342 } 2300 }
2343 2301
2344 2302
2345 int LCodeGen::GetNextEmittedBlock() const {
2346 for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) {
2347 if (!chunk_->GetLabel(i)->HasReplacement()) return i;
2348 }
2349 return -1;
2350 }
2351
2352
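GetNextEmittedBlock() also leaves this file, again presumably for shared code. Its role matters for the EmitBranch() logic that follows: a conditional branch can fall through only when its target is the next block that actually emits code, and blocks whose labels have a replacement emit nothing. A hedged sketch of the full fall-through decision (only the first two cases appear verbatim below; the rest is cut off by the elided lines and assumed from the usual codegen pattern):

    // Sketch of the complete branch-emission decision; the cases past
    // 'left_block == next_block' are assumed, not shown in this diff.
    template<class InstrType>
    void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
      int left_block = instr->TrueDestination(chunk_);
      int right_block = instr->FalseDestination(chunk_);
      int next_block = GetNextEmittedBlock();  // skips replaced labels

      if (right_block == left_block || cc == no_condition) {
        EmitGoto(left_block);                  // both arms agree: plain jump
      } else if (left_block == next_block) {
        // True target falls through; branch to the false target on the
        // negated condition instead.
        __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
      } else if (right_block == next_block) {
        __ j(cc, chunk_->GetAssemblyLabel(left_block));
      } else {
        __ j(cc, chunk_->GetAssemblyLabel(left_block));
        __ jmp(chunk_->GetAssemblyLabel(right_block));
      }
    }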
2353 template<class InstrType> 2303 template<class InstrType>
2354 void LCodeGen::EmitBranch(InstrType instr, Condition cc) { 2304 void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
2355 int left_block = instr->TrueDestination(chunk_); 2305 int left_block = instr->TrueDestination(chunk_);
2356 int right_block = instr->FalseDestination(chunk_); 2306 int right_block = instr->FalseDestination(chunk_);
2357 2307
2358 int next_block = GetNextEmittedBlock(); 2308 int next_block = GetNextEmittedBlock();
2359 2309
2360 if (right_block == left_block || cc == no_condition) { 2310 if (right_block == left_block || cc == no_condition) {
2361 EmitGoto(left_block); 2311 EmitGoto(left_block);
2362 } else if (left_block == next_block) { 2312 } else if (left_block == next_block) {
(...skipping 3824 matching lines...)
6187 __ j(not_equal, &check_frame_marker, Label::kNear); 6137 __ j(not_equal, &check_frame_marker, Label::kNear);
6188 __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); 6138 __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));
6189 6139
6190 // Check the marker in the calling frame. 6140 // Check the marker in the calling frame.
6191 __ bind(&check_frame_marker); 6141 __ bind(&check_frame_marker);
6192 __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), 6142 __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
6193 Immediate(Smi::FromInt(StackFrame::CONSTRUCT))); 6143 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
6194 } 6144 }
6195 6145
6196 6146
6197 void LCodeGen::EnsureSpaceForLazyDeopt() { 6147 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
6198 if (!info()->IsStub()) { 6148 if (!info()->IsStub()) {
6199 // Ensure that we have enough space after the previous lazy-bailout 6149 // Ensure that we have enough space after the previous lazy-bailout
6200 // instruction for patching the code here. 6150 // instruction for patching the code here.
6201 int current_pc = masm()->pc_offset(); 6151 int current_pc = masm()->pc_offset();
6202 int patch_size = Deoptimizer::patch_size(); 6152 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
6203 if (current_pc < last_lazy_deopt_pc_ + patch_size) { 6153 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
6204 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
6205 __ Nop(padding_size); 6154 __ Nop(padding_size);
6206 } 6155 }
6207 } 6156 }
6208 last_lazy_deopt_pc_ = masm()->pc_offset(); 6157 last_lazy_deopt_pc_ = masm()->pc_offset();
6209 } 6158 }
6210 6159
6211 6160
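The signature change above pushes the size decision to the callers: instead of hard-coding Deoptimizer::patch_size(), EnsureSpaceForLazyDeopt() now pads to whatever space_needed the caller requests, and the call sites in this file pass patch_size() explicitly. A worked example of the padding arithmetic (all numbers illustrative, not from a real build):

    int last_lazy_deopt_pc = 100;  // pc offset recorded after previous call
    int space_needed = 5;          // e.g. a call-sized patch region (assumed)
    int current_pc = 102;          // only 2 bytes emitted since then
    if (current_pc < last_lazy_deopt_pc + space_needed) {
      int padding_size = last_lazy_deopt_pc + space_needed - current_pc;
      // padding_size == 3: emit three 1-byte NOPs so the deoptimizer can
      // later overwrite a full 5-byte region without clobbering the next
      // real instruction.
    }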
6212 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { 6161 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
6213 EnsureSpaceForLazyDeopt(); 6162 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
6214 ASSERT(instr->HasEnvironment()); 6163 ASSERT(instr->HasEnvironment());
6215 LEnvironment* env = instr->environment(); 6164 LEnvironment* env = instr->environment();
6216 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); 6165 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
6217 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 6166 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
6218 } 6167 }
6219 6168
6220 6169
6221 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { 6170 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
6222 Deoptimizer::BailoutType type = instr->hydrogen()->type(); 6171 Deoptimizer::BailoutType type = instr->hydrogen()->type();
6223 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the 6172 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the
(...skipping 50 matching lines...)
6274 ExternalReference stack_limit = 6223 ExternalReference stack_limit =
6275 ExternalReference::address_of_stack_limit(isolate()); 6224 ExternalReference::address_of_stack_limit(isolate());
6276 __ cmp(esp, Operand::StaticVariable(stack_limit)); 6225 __ cmp(esp, Operand::StaticVariable(stack_limit));
6277 __ j(above_equal, &done, Label::kNear); 6226 __ j(above_equal, &done, Label::kNear);
6278 6227
6279 ASSERT(instr->context()->IsRegister()); 6228 ASSERT(instr->context()->IsRegister());
6280 ASSERT(ToRegister(instr->context()).is(esi)); 6229 ASSERT(ToRegister(instr->context()).is(esi));
6281 CallCode(isolate()->builtins()->StackCheck(), 6230 CallCode(isolate()->builtins()->StackCheck(),
6282 RelocInfo::CODE_TARGET, 6231 RelocInfo::CODE_TARGET,
6283 instr); 6232 instr);
6284 EnsureSpaceForLazyDeopt(); 6233 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
6285 __ bind(&done); 6234 __ bind(&done);
6286 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); 6235 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
6287 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 6236 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
6288 } else { 6237 } else {
6289 ASSERT(instr->hydrogen()->is_backwards_branch()); 6238 ASSERT(instr->hydrogen()->is_backwards_branch());
6290 // Perform stack overflow check if this goto needs it before jumping. 6239 // Perform stack overflow check if this goto needs it before jumping.
6291 DeferredStackCheck* deferred_stack_check = 6240 DeferredStackCheck* deferred_stack_check =
6292 new(zone()) DeferredStackCheck(this, instr, x87_stack_); 6241 new(zone()) DeferredStackCheck(this, instr, x87_stack_);
6293 ExternalReference stack_limit = 6242 ExternalReference stack_limit =
6294 ExternalReference::address_of_stack_limit(isolate()); 6243 ExternalReference::address_of_stack_limit(isolate());
6295 __ cmp(esp, Operand::StaticVariable(stack_limit)); 6244 __ cmp(esp, Operand::StaticVariable(stack_limit));
6296 __ j(below, deferred_stack_check->entry()); 6245 __ j(below, deferred_stack_check->entry());
6297 EnsureSpaceForLazyDeopt(); 6246 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
6298 __ bind(instr->done_label()); 6247 __ bind(instr->done_label());
6299 deferred_stack_check->SetExit(instr->done_label()); 6248 deferred_stack_check->SetExit(instr->done_label());
6300 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); 6249 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
6301 // Don't record a deoptimization index for the safepoint here. 6250 // Don't record a deoptimization index for the safepoint here.
6302 // This will be done explicitly when emitting call and the safepoint in 6251 // This will be done explicitly when emitting call and the safepoint in
6303 // the deferred code. 6252 // the deferred code.
6304 } 6253 }
6305 } 6254 }
6306 6255
6307 6256
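Both arms of DoStackCheck() above now pass Deoptimizer::patch_size() to EnsureSpaceForLazyDeopt(), reserving patchable space right after the check so a lazy deopt can later be wired in at that return address. The check itself is a two-instruction idiom: compare esp against the isolate's stack limit and take the slow path only when the stack has grown past it. A plain-C++ analog of the idiom (names here are illustrative, not V8 API; in the generated code the limit lives at a fixed external address and the slow path is the StackCheck builtin):

    #include <cstdint>
    #include <cstdio>

    static uintptr_t stack_limit = 0;  // a real isolate lowers this value
                                       // to request an interrupt

    static void HandleStackGuard() {   // hypothetical slow path
      std::puts("stack limit hit: overflow/interrupt handling runs here");
    }

    inline void StackCheck() {
      int probe;                       // its address approximates esp
      uintptr_t sp = reinterpret_cast<uintptr_t>(&probe);
      if (sp < stack_limit) HandleStackGuard();  // cf. __ j(below, ...)
    }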
(...skipping 97 matching lines...)
6405 FixedArray::kHeaderSize - kPointerSize)); 6354 FixedArray::kHeaderSize - kPointerSize));
6406 __ bind(&done); 6355 __ bind(&done);
6407 } 6356 }
6408 6357
6409 6358
6410 #undef __ 6359 #undef __
6411 6360
6412 } } // namespace v8::internal 6361 } } // namespace v8::internal
6413 6362
6414 #endif // V8_TARGET_ARCH_IA32 6363 #endif // V8_TARGET_ARCH_IA32