Index: src/compiler/code-generator.cc
diff --git a/src/compiler/code-generator.cc b/src/compiler/code-generator.cc
index 712cfe0b2da9407cfb3109bde003add32ad8977d..d1957336f2c4a6df106056273a7673ebedec395a 100644
--- a/src/compiler/code-generator.cc
+++ b/src/compiler/code-generator.cc
@@ -31,7 +31,6 @@ class CodeGenerator::JumpTable final : public ZoneObject {
   size_t const target_count_;
 };
 
-
 CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
                              InstructionSequence* code, CompilationInfo* info)
     : frame_access_state_(new (code->zone()) FrameAccessState(frame)),
@@ -45,6 +44,7 @@ CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
       resolver_(this),
       safepoints_(code->zone()),
       handlers_(code->zone()),
+      deoptimization_exits_(code->zone()),
       deoptimization_states_(code->zone()),
       deoptimization_literals_(code->zone()),
       inlined_function_count_(0),
@@ -158,6 +158,12 @@ Handle<Code> CodeGenerator::GenerateCode() {
     }
   }
 
+  // Assemble all eager deoptimization exits.
+  for (DeoptimizationExit* exit : deoptimization_exits_) {
+    masm()->bind(exit->label());
+    AssembleDeoptimizerCall(exit->deoptimization_id(), Deoptimizer::EAGER);
+  }
+
   // Ensure there is space for lazy deoptimization in the code.
   if (info->ShouldEnsureSpaceForLazyDeopt()) {
     int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
@@ -291,34 +297,59 @@ void CodeGenerator::AssembleInstruction(Instruction* instr) {
 
   FlagsMode mode = FlagsModeField::decode(instr->opcode());
   FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
-  if (mode == kFlags_branch) {
-    // Assemble a branch after this instruction.
-    InstructionOperandConverter i(this, instr);
-    RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
-    RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);
-
-    if (true_rpo == false_rpo) {
-      // redundant branch.
-      if (!IsNextInAssemblyOrder(true_rpo)) {
-        AssembleArchJump(true_rpo);
+  switch (mode) {
+    case kFlags_branch: {
+      // Assemble a branch after this instruction.
+      InstructionOperandConverter i(this, instr);
+      RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
+      RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);
+
+      if (true_rpo == false_rpo) {
+        // redundant branch.
+        if (!IsNextInAssemblyOrder(true_rpo)) {
+          AssembleArchJump(true_rpo);
+        }
+        return;
       }
-      return;
+      if (IsNextInAssemblyOrder(true_rpo)) {
+        // true block is next, can fall through if condition negated.
+        std::swap(true_rpo, false_rpo);
+        condition = NegateFlagsCondition(condition);
+      }
+      BranchInfo branch;
+      branch.condition = condition;
+      branch.true_label = GetLabel(true_rpo);
+      branch.false_label = GetLabel(false_rpo);
+      branch.fallthru = IsNextInAssemblyOrder(false_rpo);
+      // Assemble architecture-specific branch.
+      AssembleArchBranch(instr, &branch);
+      break;
     }
-    if (IsNextInAssemblyOrder(true_rpo)) {
-      // true block is next, can fall through if condition negated.
-      std::swap(true_rpo, false_rpo);
-      condition = NegateFlagsCondition(condition);
+    case kFlags_deoptimize: {
+      // Assemble a conditional eager deoptimization after this instruction.
+      InstructionOperandConverter i(this, instr);
+      size_t frame_state_offset = MiscField::decode(instr->opcode());
+      DeoptimizationExit* const exit =
+          AddDeoptimizationExit(instr, frame_state_offset);
+      Label continue_label;
+      BranchInfo branch;
+      branch.condition = condition;
+      branch.true_label = exit->label();
+      branch.false_label = &continue_label;
+      branch.fallthru = true;
+      // Assemble architecture-specific branch.
+      AssembleArchBranch(instr, &branch);
+      masm()->bind(&continue_label);
+      break;
+    }
+    case kFlags_set: {
+      // Assemble a boolean materialization after this instruction.
+      AssembleArchBoolean(instr, condition);
+      break;
+    }
+    case kFlags_none: {
+      break;
+    }
     }
-    BranchInfo branch;
-    branch.condition = condition;
-    branch.true_label = GetLabel(true_rpo);
-    branch.false_label = GetLabel(false_rpo);
-    branch.fallthru = IsNextInAssemblyOrder(false_rpo);
-    // Assemble architecture-specific branch.
-    AssembleArchBranch(instr, &branch);
-  } else if (mode == kFlags_set) {
-    // Assemble a boolean materialization after this instruction.
-    AssembleArchBoolean(instr, condition);
   }
 }
 
@@ -714,6 +745,15 @@ void CodeGenerator::MarkLazyDeoptSite() {
   last_lazy_deopt_pc_ = masm()->pc_offset();
 }
 
+DeoptimizationExit* CodeGenerator::AddDeoptimizationExit(
+    Instruction* instr, size_t frame_state_offset) {
+  int const deoptimization_id = BuildTranslation(
+      instr, -1, frame_state_offset, OutputFrameStateCombine::Ignore());
+  DeoptimizationExit* const exit =
+      new (zone()) DeoptimizationExit(deoptimization_id);
+  deoptimization_exits_.push_back(exit);
+  return exit;
+}
 
 int CodeGenerator::TailCallFrameStackSlotDelta(int stack_param_delta) {
   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
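Note: this hunk only uses the DeoptimizationExit type; its declaration lives in src/compiler/code-generator.h and is not part of the diff above. As a rough sketch of what the uses here assume (a zone-allocated record pairing a deoptimization id with a label that is bound later), the shape would be roughly the following; the actual header may differ:

  // Sketch only, inferred from the uses in this patch (exit->label(),
  // exit->deoptimization_id(), new (zone()) DeoptimizationExit(id)).
  class DeoptimizationExit final : public ZoneObject {
   public:
    explicit DeoptimizationExit(int deoptimization_id)
        : deoptimization_id_(deoptimization_id) {}

    int deoptimization_id() const { return deoptimization_id_; }
    Label* label() { return &label_; }

   private:
    int const deoptimization_id_;
    Label label_;
  };

With a record like that (and deoptimization_exits_ as a zone container of pointers to it), each kFlags_deoptimize instruction can branch to exit->label() inline, while GenerateCode() emits the actual Deoptimizer::EAGER calls out of line in the loop added at the top of this CL.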