Index: runtime/vm/intrinsifier_arm64.cc |
diff --git a/runtime/vm/intrinsifier_arm64.cc b/runtime/vm/intrinsifier_arm64.cc |
index f8611a1cee4f987d6bfc03342991d161b71f6ef5..a5738820386a80726cce20c581368a8ac0cb515c 100644 |
--- a/runtime/vm/intrinsifier_arm64.cc |
+++ b/runtime/vm/intrinsifier_arm64.cc |
@@ -1384,11 +1384,12 @@ void Intrinsifier::Double_lessEqualThan(Assembler* assembler) { |
// Expects left argument to be double (receiver). Right argument is unknown. |
// Both arguments are on stack. |
static void DoubleArithmeticOperations(Assembler* assembler, Token::Kind kind) { |
- Label fall_through; |
+ Label fall_through, is_smi, double_op; |
- TestLastArgumentIsDouble(assembler, &fall_through, &fall_through); |
+ TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); |
// Both arguments are double, right operand is in R0. |
__ LoadDFieldFromOffset(V1, R0, Double::value_offset()); |
+ __ Bind(&double_op); |
__ ldr(R0, Address(SP, 1 * kWordSize)); // Left argument. |
__ LoadDFieldFromOffset(V0, R0, Double::value_offset()); |
switch (kind) { |
@@ -1403,6 +1404,12 @@ static void DoubleArithmeticOperations(Assembler* assembler, Token::Kind kind) { |
__ TryAllocate(double_class, &fall_through, R0, R1); |
__ StoreDFieldToOffset(V0, R0, Double::value_offset()); |
__ ret(); |
+ |
+ __ Bind(&is_smi); // Convert R0 to a double. |
+ __ SmiUntag(R0); |
+ __ scvtfdx(V1, R0); |
+ __ b(&double_op); // Then do the operation. |
zra
2016/03/22 17:12:21
comparison -> operation. For other platforms as well.
Florian Schneider
2016/03/25 16:09:47
Done.
|
+ |
__ Bind(&fall_through); |
} |