Chromium Code Reviews| Index: src/code-stubs.cc |
| diff --git a/src/code-stubs.cc b/src/code-stubs.cc |
| index c60cc344166060cbc745f70b4cfbd07b4c403306..2d3cf2669e4e50f4e26aefd745af17451b5d7141 100644 |
| --- a/src/code-stubs.cc |
| +++ b/src/code-stubs.cc |
| @@ -882,6 +882,47 @@ compiler::Node* AddStub::Generate(CodeStubAssembler* assembler, |
| } |
| // static |
| +compiler::Node* AddSmiStub::Generate(CodeStubAssembler* assembler, |
| +                                     compiler::Node* left, |
| +                                     compiler::Node* right, |
| +                                     compiler::Node* context) { |
| +  typedef CodeStubAssembler::Label Label; |
| +  typedef compiler::Node Node; |
| +  typedef CodeStubAssembler::Variable Variable; |
| + |
| +  // Holds the tagged result of the addition (Smi on the fast path, |
| +  // whatever the generic Add stub returns on the slow path). |
| +  Variable var_result(assembler, MachineRepresentation::kTagged); |
| +  // The slow path is marked deferred so it is laid out out-of-line. |
| +  Label fastpath(assembler), slowpath(assembler, Label::kDeferred), |
| +      end(assembler); |
| + |
| +  // {right} is known to be a Smi. |
|
rmcilroy
2016/07/04 12:35:32
Maybe add some code under FLAG_debug_code which ab
oth
2016/07/04 14:56:24
Done. CodeStubAssembler::Assert is conditional on
|
| +  // Check if {left} is also a Smi; if so, take the fast path. |
| +  assembler->BranchIf(assembler->WordIsSmi(left), &fastpath, &slowpath); |
| +  assembler->Bind(&fastpath); |
| +  { |
| +    // Try fast Smi addition first. |
| +    Node* pair = assembler->SmiAddWithOverflow(left, right); |
| +    Node* overflow = assembler->Projection(1, pair); |
| + |
| +    // Check if the Smi addition overflowed. |
| +    Label if_notoverflow(assembler); |
| +    assembler->BranchIf(overflow, &slowpath, &if_notoverflow); |
| +    assembler->Bind(&if_notoverflow); |
| +    { |
| +      var_result.Bind(assembler->Projection(0, pair)); |
| +      assembler->Goto(&end); |
| +    } |
| +  } |
| +  assembler->Bind(&slowpath); |
|
rmcilroy
2016/07/04 12:35:32
I wonder if it is worth doing the check for heap-n
oth
2016/07/04 14:56:24
Yes, I tried this. For x64 the stub size increases
|
| +  { |
| +    // Non-Smi {left} or Smi overflow: fall back to the generic Add stub. |
| +    Callable callable = CodeFactory::Add(assembler->isolate()); |
| +    var_result.Bind(assembler->CallStub(callable, context, left, right)); |
| +    assembler->Goto(&end); |
| +  } |
| +  assembler->Bind(&end); |
| +  return var_result.value(); |
| +} |
| + |
| +// static |
| compiler::Node* SubtractStub::Generate(CodeStubAssembler* assembler, |
| compiler::Node* left, |
| compiler::Node* right, |
| @@ -1057,6 +1098,47 @@ compiler::Node* SubtractStub::Generate(CodeStubAssembler* assembler, |
| } |
| // static |
| +compiler::Node* SubtractSmiStub::Generate(CodeStubAssembler* assembler, |
| +                                          compiler::Node* left, |
| +                                          compiler::Node* right, |
| +                                          compiler::Node* context) { |
| +  typedef CodeStubAssembler::Label Label; |
| +  typedef compiler::Node Node; |
| +  typedef CodeStubAssembler::Variable Variable; |
| + |
| +  // Holds the tagged result of the subtraction (Smi on the fast path, |
| +  // whatever the generic Subtract stub returns on the slow path). |
| +  Variable var_result(assembler, MachineRepresentation::kTagged); |
| +  // The slow path is marked deferred so it is laid out out-of-line. |
| +  Label fastpath(assembler), slowpath(assembler, Label::kDeferred), |
| +      end(assembler); |
| + |
| +  // {right} is known to be a Smi. |
| +  // Check if {left} is also a Smi; if so, take the fast path. |
| +  assembler->BranchIf(assembler->WordIsSmi(left), &fastpath, &slowpath); |
| +  assembler->Bind(&fastpath); |
| +  { |
| +    // Try fast Smi subtraction first. |
| +    Node* pair = assembler->SmiSubWithOverflow(left, right); |
| +    Node* overflow = assembler->Projection(1, pair); |
| + |
| +    // Check if the Smi subtraction overflowed. |
| +    Label if_notoverflow(assembler); |
| +    assembler->BranchIf(overflow, &slowpath, &if_notoverflow); |
| +    assembler->Bind(&if_notoverflow); |
| +    { |
| +      var_result.Bind(assembler->Projection(0, pair)); |
| +      assembler->Goto(&end); |
| +    } |
| +  } |
| +  assembler->Bind(&slowpath); |
| +  { |
| +    // Non-Smi {left} or Smi overflow: fall back to the generic stub. |
| +    Callable callable = CodeFactory::Subtract(assembler->isolate()); |
| +    var_result.Bind(assembler->CallStub(callable, context, left, right)); |
| +    assembler->Goto(&end); |
| +  } |
| +  assembler->Bind(&end); |
| +  return var_result.value(); |
| +} |
| + |
| +// static |
| compiler::Node* MultiplyStub::Generate(CodeStubAssembler* assembler, |
| compiler::Node* left, |
| compiler::Node* right, |
| @@ -1593,6 +1675,22 @@ compiler::Node* ShiftLeftStub::Generate(CodeStubAssembler* assembler, |
| } |
| // static |
| +compiler::Node* ShiftLeftSmiStub::Generate(CodeStubAssembler* assembler, |
| +                                           compiler::Node* left, |
| +                                           compiler::Node* right, |
| +                                           compiler::Node* context) { |
| +  using compiler::Node; |
| + |
| +  // {right} is known to be a Smi; {left} may be any tagged value and is |
| +  // truncated to a 32-bit integer ({context} is presumably needed in case |
| +  // a user-visible conversion has to run -- TODO confirm). |
| +  Node* lhs_value = assembler->TruncateTaggedToWord32(context, left); |
| +  Node* rhs_value = assembler->SmiToWord32(right); |
| +  // Per JS shift semantics, only the low five bits of the count are used. |
| +  Node* shift_count = |
| +      assembler->Word32And(rhs_value, assembler->Int32Constant(0x1f)); |
| +  Node* value = assembler->Word32Shl(lhs_value, shift_count); |
| +  // Re-tag the 32-bit result as a tagged value. |
| +  Node* result = assembler->ChangeInt32ToTagged(value); |
| +  return result; |
| +} |
| + |
| +// static |
| compiler::Node* ShiftRightStub::Generate(CodeStubAssembler* assembler, |
| compiler::Node* left, |
| compiler::Node* right, |
| @@ -1609,6 +1707,22 @@ compiler::Node* ShiftRightStub::Generate(CodeStubAssembler* assembler, |
| } |
| // static |
| +compiler::Node* ShiftRightSmiStub::Generate(CodeStubAssembler* assembler, |
| +                                            compiler::Node* left, |
| +                                            compiler::Node* right, |
| +                                            compiler::Node* context) { |
| +  using compiler::Node; |
| + |
| +  // {right} is known to be a Smi; {left} may be any tagged value and is |
| +  // truncated to a 32-bit integer ({context} is presumably needed in case |
| +  // a user-visible conversion has to run -- TODO confirm). |
| +  Node* lhs_value = assembler->TruncateTaggedToWord32(context, left); |
| +  Node* rhs_value = assembler->SmiToWord32(right); |
| +  // Per JS shift semantics, only the low five bits of the count are used. |
| +  Node* shift_count = |
| +      assembler->Word32And(rhs_value, assembler->Int32Constant(0x1f)); |
| +  // Word32Sar: arithmetic (sign-propagating) right shift, as required by |
| +  // the signed >> operator. |
| +  Node* value = assembler->Word32Sar(lhs_value, shift_count); |
| +  // Re-tag the 32-bit result as a tagged value. |
| +  Node* result = assembler->ChangeInt32ToTagged(value); |
| +  return result; |
| +} |
| + |
| +// static |
| compiler::Node* ShiftRightLogicalStub::Generate(CodeStubAssembler* assembler, |
| compiler::Node* left, |
| compiler::Node* right, |
| @@ -1639,6 +1753,20 @@ compiler::Node* BitwiseAndStub::Generate(CodeStubAssembler* assembler, |
| } |
| // static |
| +compiler::Node* BitwiseAndSmiStub::Generate(CodeStubAssembler* assembler, |
| +                                            compiler::Node* left, |
| +                                            compiler::Node* right, |
| +                                            compiler::Node* context) { |
| +  using compiler::Node; |
| + |
| +  // {right} is known to be a Smi; {left} may be any tagged value and is |
| +  // truncated to a 32-bit integer ({context} is presumably needed in case |
| +  // a user-visible conversion has to run -- TODO confirm). |
| +  Node* lhs_value = assembler->TruncateTaggedToWord32(context, left); |
| +  Node* rhs_value = assembler->SmiToWord32(right); |
| +  Node* value = assembler->Word32And(lhs_value, rhs_value); |
| +  // Re-tag the 32-bit result as a tagged value. |
| +  Node* result = assembler->ChangeInt32ToTagged(value); |
| +  return result; |
| +} |
| + |
| +// static |
| compiler::Node* BitwiseOrStub::Generate(CodeStubAssembler* assembler, |
| compiler::Node* left, |
| compiler::Node* right, |
| @@ -1653,6 +1781,20 @@ compiler::Node* BitwiseOrStub::Generate(CodeStubAssembler* assembler, |
| } |
| // static |
| +compiler::Node* BitwiseOrSmiStub::Generate(CodeStubAssembler* assembler, |
| +                                           compiler::Node* left, |
| +                                           compiler::Node* right, |
| +                                           compiler::Node* context) { |
| +  using compiler::Node; |
| + |
| +  // {right} is known to be a Smi; {left} may be any tagged value and is |
| +  // truncated to a 32-bit integer ({context} is presumably needed in case |
| +  // a user-visible conversion has to run -- TODO confirm). |
| +  Node* lhs_value = assembler->TruncateTaggedToWord32(context, left); |
| +  Node* rhs_value = assembler->SmiToWord32(right); |
| +  Node* value = assembler->Word32Or(lhs_value, rhs_value); |
| +  // Re-tag the 32-bit result as a tagged value. |
| +  Node* result = assembler->ChangeInt32ToTagged(value); |
| +  return result; |
| +} |
| + |
| +// static |
| compiler::Node* BitwiseXorStub::Generate(CodeStubAssembler* assembler, |
| compiler::Node* left, |
| compiler::Node* right, |
| @@ -1698,7 +1840,7 @@ compiler::Node* IncStub::Generate(CodeStubAssembler* assembler, |
| Node* pair = assembler->SmiAddWithOverflow(value, one); |
| Node* overflow = assembler->Projection(1, pair); |
| - // Check if the Smi additon overflowed. |
| + // Check if the Smi addition overflowed. |
| Label if_overflow(assembler), if_notoverflow(assembler); |
| assembler->Branch(overflow, &if_overflow, &if_notoverflow); |