Index: src/crankshaft/s390/lithium-codegen-s390.cc
diff --git a/src/crankshaft/s390/lithium-codegen-s390.cc b/src/crankshaft/s390/lithium-codegen-s390.cc
index bb5286d8e7257a396754097822777d71c4d423b4..32784a167f180dc1f6b74ad4f370cb70b0dbaf0a 100644
--- a/src/crankshaft/s390/lithium-codegen-s390.cc
+++ b/src/crankshaft/s390/lithium-codegen-s390.cc
@@ -113,7 +113,7 @@ bool LCodeGen::GeneratePrologue() {
info()->set_prologue_offset(prologue_offset);
if (NeedsEagerFrame()) {
if (info()->IsStub()) {
- __ StubPrologue(ip, prologue_offset);
+ __ StubPrologue(StackFrame::STUB, ip, prologue_offset);
} else {
__ Prologue(info()->GeneratePreagedPrologue(), ip, prologue_offset);
}
@@ -251,16 +251,14 @@ bool LCodeGen::GenerateDeferredCode() {
DCHECK(info()->IsStub());
frame_is_built_ = true;
__ LoadSmiLiteral(scratch0(), Smi::FromInt(StackFrame::STUB));
- __ PushFixedFrame(scratch0());
- __ la(fp,
- MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp));
+ __ PushCommonFrame(scratch0());
Comment(";;; Deferred code");
}
code->Generate();
if (NeedsDeferredFrame()) {
Comment(";;; Destroy frame");
DCHECK(frame_is_built_);
- __ PopFixedFrame(ip);
+ __ PopCommonFrame(scratch0());
frame_is_built_ = false;
}
__ b(code->exit());
@@ -308,7 +306,7 @@ bool LCodeGen::GenerateJumpTable() {
if (table_entry->needs_frame) {
DCHECK(!info()->saves_caller_doubles());
Comment(";;; call deopt with frame");
- __ PushFixedFrame();
+ __ PushCommonFrame();
__ b(r14, &needs_frame);
} else {
__ b(r14, &call_deopt_entry);
@@ -325,7 +323,7 @@ bool LCodeGen::GenerateJumpTable() {
DCHECK(info()->IsStub());
__ LoadSmiLiteral(ip, Smi::FromInt(StackFrame::STUB));
__ push(ip);
- __ lay(fp, MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp));
+ DCHECK(info()->IsStub());
}
Comment(";;; call deopt");
@@ -2108,29 +2106,30 @@ void LCodeGen::DoBranch(LBranch* instr) {
__ CmpP(ip, Operand::Zero());
EmitBranch(instr, ne);
} else {
- ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
+ ToBooleanICStub::Types expected =
+ instr->hydrogen()->expected_input_types();
// Avoid deopts in the case where we've never executed this path before.
- if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic();
+ if (expected.IsEmpty()) expected = ToBooleanICStub::Types::Generic();
- if (expected.Contains(ToBooleanStub::UNDEFINED)) {
+ if (expected.Contains(ToBooleanICStub::UNDEFINED)) {
// undefined -> false.
__ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
__ beq(instr->FalseLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::BOOLEAN)) {
+ if (expected.Contains(ToBooleanICStub::BOOLEAN)) {
// Boolean -> its value.
__ CompareRoot(reg, Heap::kTrueValueRootIndex);
__ beq(instr->TrueLabel(chunk_));
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
__ beq(instr->FalseLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
+ if (expected.Contains(ToBooleanICStub::NULL_TYPE)) {
// 'null' -> false.
__ CompareRoot(reg, Heap::kNullValueRootIndex);
__ beq(instr->FalseLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::SMI)) {
+ if (expected.Contains(ToBooleanICStub::SMI)) {
// Smis: 0 -> false, all other -> true.
__ CmpP(reg, Operand::Zero());
__ beq(instr->FalseLabel(chunk_));
@@ -2153,13 +2152,13 @@ void LCodeGen::DoBranch(LBranch* instr) {
}
}
- if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
+ if (expected.Contains(ToBooleanICStub::SPEC_OBJECT)) {
// spec object -> true.
__ CompareInstanceType(map, ip, FIRST_JS_RECEIVER_TYPE);
__ bge(instr->TrueLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::STRING)) {
+ if (expected.Contains(ToBooleanICStub::STRING)) {
// String value -> false iff empty.
Label not_string;
__ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
@@ -2171,20 +2170,20 @@ void LCodeGen::DoBranch(LBranch* instr) {
__ bind(&not_string);
}
- if (expected.Contains(ToBooleanStub::SYMBOL)) {
+ if (expected.Contains(ToBooleanICStub::SYMBOL)) {
// Symbol value -> true.
__ CompareInstanceType(map, ip, SYMBOL_TYPE);
__ beq(instr->TrueLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::SIMD_VALUE)) {
+ if (expected.Contains(ToBooleanICStub::SIMD_VALUE)) {
// SIMD value -> true.
Label not_simd;
__ CompareInstanceType(map, ip, SIMD128_VALUE_TYPE);
__ beq(instr->TrueLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
+ if (expected.Contains(ToBooleanICStub::HEAP_NUMBER)) {
// heap number -> false iff +0, -0, or NaN.
Label not_heap_number;
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
@@ -2417,11 +2416,10 @@ void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
DCHECK(ToRegister(instr->left()).is(r3));
DCHECK(ToRegister(instr->right()).is(r2));
- Handle<Code> code = CodeFactory::StringCompare(isolate()).code();
+ Handle<Code> code = CodeFactory::StringCompare(isolate(), instr->op()).code();
CallCode(code, RelocInfo::CODE_TARGET, instr);
- __ CmpP(r2, Operand::Zero());
-
- EmitBranch(instr, ComputeCompareCondition(instr->op()));
+ __ CompareRoot(r2, Heap::kTrueValueRootIndex);
+ EmitBranch(instr, eq);
}
static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
@@ -3162,8 +3160,9 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
// Check if the calling frame is an arguments adaptor frame.
Label done, adapted;
__ LoadP(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ LoadP(result,
- MemOperand(scratch, StandardFrameConstants::kContextOffset));
+ __ LoadP(
+ result,
+ MemOperand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset));
__ CmpSmiLiteral(result, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
// Result is the frame pointer for the frame if not adapted and for the real
@@ -3287,14 +3286,26 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
__ BranchOnCount(length, &loop);
__ bind(&invoke);
+
+ InvokeFlag flag = CALL_FUNCTION;
+ if (instr->hydrogen()->tail_call_mode() == TailCallMode::kAllow) {
+ DCHECK(!info()->saves_caller_doubles());
+ // TODO(ishell): drop current frame before pushing arguments to the stack.
+ flag = JUMP_FUNCTION;
+ ParameterCount actual(r2);
+ // It is safe to use r5, r6 and r7 as scratch registers here given that
+ // 1) we are not going to return to caller function anyway,
+ // 2) r5 (new.target) will be initialized below.
+ PrepareForTailCall(actual, r5, r6, r7);
+ }
+
DCHECK(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
// The number of arguments is stored in receiver which is r2, as expected
// by InvokeFunction.
ParameterCount actual(receiver);
- __ InvokeFunction(function, no_reg, actual, CALL_FUNCTION,
- safepoint_generator);
+ __ InvokeFunction(function, no_reg, actual, flag, safepoint_generator);
}
void LCodeGen::DoPushArgument(LPushArgument* instr) {
@@ -3336,7 +3347,7 @@ void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
int formal_parameter_count, int arity,
- LInstruction* instr) {
+ bool is_tail_call, LInstruction* instr) {
bool dont_adapt_arguments =
formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel;
bool can_invoke_directly =
@@ -3358,19 +3369,31 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
// Invoke function.
if (is_self_call) {
- __ CallSelf();
+ Handle<Code> self(reinterpret_cast<Code**>(__ CodeObject().location()));
+ if (is_tail_call) {
+ __ Jump(self, RelocInfo::CODE_TARGET);
+ } else {
+ __ Call(self, RelocInfo::CODE_TARGET);
+ }
} else {
__ LoadP(ip, FieldMemOperand(function_reg, JSFunction::kCodeEntryOffset));
- __ CallJSEntry(ip);
+ if (is_tail_call) {
+ __ JumpToJSEntry(ip);
+ } else {
+ __ CallJSEntry(ip);
+ }
}
- // Set up deoptimization.
- RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
+ if (!is_tail_call) {
+ // Set up deoptimization.
+ RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
+ }
} else {
SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
- ParameterCount count(arity);
+ ParameterCount actual(arity);
ParameterCount expected(formal_parameter_count);
- __ InvokeFunction(function_reg, expected, count, CALL_FUNCTION, generator);
+ InvokeFlag flag = is_tail_call ? JUMP_FUNCTION : CALL_FUNCTION;
+ __ InvokeFunction(function_reg, expected, actual, flag, generator);
}
}
@@ -3689,21 +3712,77 @@ void LCodeGen::DoMathClz32(LMathClz32* instr) {
__ bind(&done);
}
+void LCodeGen::PrepareForTailCall(const ParameterCount& actual,
+ Register scratch1, Register scratch2,
+ Register scratch3) {
+#if DEBUG
+ if (actual.is_reg()) {
+ DCHECK(!AreAliased(actual.reg(), scratch1, scratch2, scratch3));
+ } else {
+ DCHECK(!AreAliased(scratch1, scratch2, scratch3));
+ }
+#endif
+ if (FLAG_code_comments) {
+ if (actual.is_reg()) {
+ Comment(";;; PrepareForTailCall, actual: %s {", actual.reg().ToString());
+ } else {
+ Comment(";;; PrepareForTailCall, actual: %d {", actual.immediate());
+ }
+ }
+
+ // Check if next frame is an arguments adaptor frame.
+ Register caller_args_count_reg = scratch1;
+ Label no_arguments_adaptor, formal_parameter_count_loaded;
+ __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+ __ LoadP(scratch3,
+ MemOperand(scratch2, StandardFrameConstants::kContextOffset));
+ __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
+ __ bne(&no_arguments_adaptor);
+
+ // Drop current frame and load arguments count from arguments adaptor frame.
+ __ LoadRR(fp, scratch2);
+ __ LoadP(caller_args_count_reg,
+ MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ SmiUntag(caller_args_count_reg);
+ __ b(&formal_parameter_count_loaded);
+
+ __ bind(&no_arguments_adaptor);
+ // Load caller's formal parameter count
+ __ mov(caller_args_count_reg, Operand(info()->literal()->parameter_count()));
+
+ __ bind(&formal_parameter_count_loaded);
+ __ PrepareForTailCall(actual, caller_args_count_reg, scratch2, scratch3);
+
+ Comment(";;; }");
+}
+
void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
+ HInvokeFunction* hinstr = instr->hydrogen();
DCHECK(ToRegister(instr->context()).is(cp));
DCHECK(ToRegister(instr->function()).is(r3));
DCHECK(instr->HasPointerMap());
- Handle<JSFunction> known_function = instr->hydrogen()->known_function();
+ bool is_tail_call = hinstr->tail_call_mode() == TailCallMode::kAllow;
+
+ if (is_tail_call) {
+ DCHECK(!info()->saves_caller_doubles());
+ ParameterCount actual(instr->arity());
+ // It is safe to use r5, r6 and r7 as scratch registers here given that
+ // 1) we are not going to return to caller function anyway,
+ // 2) r5 (new.target) will be initialized below.
+ PrepareForTailCall(actual, r5, r6, r7);
+ }
+
+ Handle<JSFunction> known_function = hinstr->known_function();
if (known_function.is_null()) {
LPointerMap* pointers = instr->pointer_map();
SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
- ParameterCount count(instr->arity());
- __ InvokeFunction(r3, no_reg, count, CALL_FUNCTION, generator);
+ ParameterCount actual(instr->arity());
+ InvokeFlag flag = is_tail_call ? JUMP_FUNCTION : CALL_FUNCTION;
+ __ InvokeFunction(r3, no_reg, actual, flag, generator);
} else {
- CallKnownFunction(known_function,
- instr->hydrogen()->formal_parameter_count(),
- instr->arity(), instr);
+ CallKnownFunction(known_function, hinstr->formal_parameter_count(),
+ instr->arity(), is_tail_call, instr);
}
}