Index: runtime/vm/stub_code_arm64.cc
diff --git a/runtime/vm/stub_code_arm64.cc b/runtime/vm/stub_code_arm64.cc
index 4228a7f97cddc5b9d3613f3f523565cc27e08edb..bbedec96f14b20bac3501d5c059668f60ca08b31 100644
--- a/runtime/vm/stub_code_arm64.cc
+++ b/runtime/vm/stub_code_arm64.cc
@@ -1277,7 +1277,8 @@ void StubCode::GenerateUsageCounterIncrement(Assembler* assembler,
 static void EmitFastSmiOp(Assembler* assembler,
                           Token::Kind kind,
                           intptr_t num_args,
-                          Label* not_smi_or_overflow) {
+                          Label* not_smi_or_overflow,
+                          bool should_update_result_range) {
   if (FLAG_throw_on_javascript_int_overflow) {
     // The overflow check is more complex than implemented below.
     return;
@@ -1308,6 +1309,12 @@ static void EmitFastSmiOp(Assembler* assembler,
     default: UNIMPLEMENTED();
   }
+  if (should_update_result_range) {
+    Label done;
+    __ UpdateRangeFeedback(R0, 2, R5, R1, R6, &done);
+    __ Bind(&done);
+  }
+
   // R5: IC data object (preserved).
   __ LoadFieldFromOffset(R6, R5, ICData::ic_data_offset(), kNoPP);
   // R6: ic_data_array with check entries: classes and target functions.
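The hunks that follow thread a new range_collection_mode argument through GenerateNArgsCheckInlineCacheStub. The RangeCollectionMode type itself is declared outside this file; as a point of reference, a minimal sketch of the companion declaration this patch assumes (presumably in runtime/vm/stub_code.h, not shown in the diff):

// Assumed declaration, not part of this diff: selects whether an IC stub
// also records the observed integer ranges of its arguments and result.
enum RangeCollectionMode {
  kCollectRanges,
  kIgnoreRanges
};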
@@ -1353,7 +1360,8 @@ void StubCode::GenerateNArgsCheckInlineCacheStub(
     Assembler* assembler,
     intptr_t num_args,
     const RuntimeEntry& handle_ic_miss,
-    Token::Kind kind) {
+    Token::Kind kind,
+    RangeCollectionMode range_collection_mode) {
   ASSERT(num_args > 0);
 #if defined(DEBUG)
   { Label ok;
@@ -1379,11 +1387,25 @@ void StubCode::GenerateNArgsCheckInlineCacheStub(
   __ b(&stepping, NE);
   __ Bind(&done_stepping);
 
+  Label not_smi_or_overflow;
+  if (range_collection_mode == kCollectRanges) {
+    ASSERT((num_args == 1) || (num_args == 2));
+    if (num_args == 2) {
+      __ ldr(R0, Address(SP, 1 * kWordSize));
+      __ UpdateRangeFeedback(R0, 0, R5, R1, R4, &not_smi_or_overflow);
+    }
+
+    __ ldr(R0, Address(SP, 0 * kWordSize));
+    __ UpdateRangeFeedback(R0, num_args - 1, R5, R1, R4, &not_smi_or_overflow);
+  }
   if (kind != Token::kILLEGAL) {
-    Label not_smi_or_overflow;
-    EmitFastSmiOp(assembler, kind, num_args, &not_smi_or_overflow);
-    __ Bind(&not_smi_or_overflow);
+    EmitFastSmiOp(assembler,
+                  kind,
+                  num_args,
+                  &not_smi_or_overflow,
+                  (range_collection_mode == kCollectRanges));
   }
+  __ Bind(&not_smi_or_overflow);
 
   // Load arguments descriptor into R4.
   __ LoadFieldFromOffset(R4, R5, ICData::arguments_descriptor_offset(), kNoPP);
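UpdateRangeFeedback is an assembler macro that is not shown in this diff. As a rough, illustrative model of the bookkeeping it implies (the names and the encoding below are hypothetical, not the VM's actual ICData layout): each call site keeps one feedback slot per argument position plus one for the result (index 2), each slot accumulates a bitmask of the value-size classes observed for Smi operands, and non-Smi operands branch to not_smi_or_overflow so the regular IC miss path handles them.

#include <cstdint>
#include <cstdio>

// Illustrative sketch only: models the per-slot range feedback the stub
// accumulates via __ UpdateRangeFeedback(value, index, ic_data, ...).
// The bucket names and the encoding are hypothetical.
enum RangeBucket : uint8_t {
  kInt8Range  = 1 << 0,   // value fits in 8 bits
  kInt16Range = 1 << 1,   // value fits in 16 bits
  kInt32Range = 1 << 2,   // value fits in 32 bits
  kInt64Range = 1 << 3    // wider than 32 bits
};

struct RangeFeedback {
  uint8_t slots[3] = {0, 0, 0};  // slots 0/1: arguments, slot 2: result
};

// Returns false for a non-Smi value; the stub jumps to &not_smi_or_overflow
// in that case and leaves the feedback untouched.
bool UpdateRangeFeedback(RangeFeedback* feedback, int index, int64_t value,
                         bool is_smi) {
  if (!is_smi) return false;
  uint8_t bucket;
  if (value >= INT8_MIN && value <= INT8_MAX) {
    bucket = kInt8Range;
  } else if (value >= INT16_MIN && value <= INT16_MAX) {
    bucket = kInt16Range;
  } else if (value >= INT32_MIN && value <= INT32_MAX) {
    bucket = kInt32Range;
  } else {
    bucket = kInt64Range;
  }
  feedback->slots[index] |= bucket;  // accumulate over all observed calls
  return true;
}

int main() {
  RangeFeedback feedback;
  UpdateRangeFeedback(&feedback, 0, 7, true);       // receiver: 8-bit range
  UpdateRangeFeedback(&feedback, 1, 100000, true);  // argument: 32-bit range
  UpdateRangeFeedback(&feedback, 2, 100007, true);  // result: 32-bit range
  printf("%x %x %x\n", feedback.slots[0], feedback.slots[1], feedback.slots[2]);
  return 0;
}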
@@ -1497,7 +1519,28 @@ void StubCode::GenerateNArgsCheckInlineCacheStub(
   __ LoadFieldFromOffset(R2, R0, Function::instructions_offset(), kNoPP);
   __ AddImmediate(
       R2, R2, Instructions::HeaderSize() - kHeapObjectTag, kNoPP);
-  __ br(R2);
+  if (range_collection_mode == kCollectRanges) {
+    __ ldr(R1, Address(SP, 0 * kWordSize));
+    if (num_args == 2) {
+      __ ldr(R3, Address(SP, 1 * kWordSize));
+    }
+    __ EnterStubFrame();
+    __ Push(R5);
+    if (num_args == 2) {
+      __ Push(R3);
+    }
+    __ Push(R1);
+    __ blr(R2);
+
+    Label done;
+    __ ldr(R5, Address(FP, kFirstLocalSlotFromFp * kWordSize));
+    __ UpdateRangeFeedback(R0, 2, R5, R1, R4, &done);
+    __ Bind(&done);
+    __ LeaveStubFrame();
+    __ ret();
+  } else {
+    __ br(R2);
+  }
 
   __ Bind(&stepping);
   __ EnterStubFrame();
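With kIgnoreRanges the stub still tail-calls the resolved target (__ br(R2)), so control never returns here; with kCollectRanges it needs to see the result, so it calls the target from a stub frame (__ blr(R2)), records the result's range at feedback index 2, and returns normally. A small host-side model of that control-flow difference (illustrative only; the type and function names are invented):

#include <cstdint>
#include <cstdio>

// Stand-ins for the resolved target and for __ UpdateRangeFeedback(R0, 2, ...).
using Target = int64_t (*)(int64_t receiver, int64_t arg);

static void RecordResultRange(uint8_t* result_slot, int64_t result) {
  *result_slot |= (result >= INT32_MIN && result <= INT32_MAX) ? 1 : 2;
}

// kIgnoreRanges: effectively a tail call; the stub never sees the result.
static int64_t DispatchIgnoringRanges(Target target, int64_t receiver,
                                      int64_t arg) {
  return target(receiver, arg);  // __ br(R2)
}

// kCollectRanges: call, record the result's range, then return. This is why
// the stub enters a frame and uses __ blr/__ ret instead of __ br.
static int64_t DispatchCollectingRanges(Target target, int64_t receiver,
                                        int64_t arg, uint8_t* result_slot) {
  const int64_t result = target(receiver, arg);  // __ blr(R2) in a stub frame
  RecordResultRange(result_slot, result);        // feedback slot 2 = result
  return result;                                 // __ ret()
}

static int64_t Add(int64_t a, int64_t b) { return a + b; }

int main() {
  uint8_t result_slot = 0;
  printf("%lld\n", (long long)DispatchCollectingRanges(Add, 2, 3, &result_slot));
  printf("%lld %u\n", (long long)DispatchIgnoringRanges(Add, 40, 2),
         (unsigned)result_slot);
  return 0;
}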
@@ -1522,42 +1565,68 @@ void StubCode::GenerateNArgsCheckInlineCacheStub(
 void StubCode::GenerateOneArgCheckInlineCacheStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R6);
   GenerateNArgsCheckInlineCacheStub(assembler, 1,
-      kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL);
+      kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,
+      kIgnoreRanges);
 }
 
 
 void StubCode::GenerateTwoArgsCheckInlineCacheStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R6);
   GenerateNArgsCheckInlineCacheStub(assembler, 2,
-      kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL);
+      kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,
+      kIgnoreRanges);
 }
 
 
 void StubCode::GenerateThreeArgsCheckInlineCacheStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R6);
   GenerateNArgsCheckInlineCacheStub(assembler, 3,
-      kInlineCacheMissHandlerThreeArgsRuntimeEntry, Token::kILLEGAL);
+      kInlineCacheMissHandlerThreeArgsRuntimeEntry, Token::kILLEGAL,
+      kIgnoreRanges);
 }
 
 
 void StubCode::GenerateSmiAddInlineCacheStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R6);
   GenerateNArgsCheckInlineCacheStub(assembler, 2,
-      kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD);
+      kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD,
+      kCollectRanges);
 }
 
 
 void StubCode::GenerateSmiSubInlineCacheStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R6);
   GenerateNArgsCheckInlineCacheStub(assembler, 2,
-      kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kSUB);
+      kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kSUB,
+      kCollectRanges);
 }
 
 
 void StubCode::GenerateSmiEqualInlineCacheStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R6);
   GenerateNArgsCheckInlineCacheStub(assembler, 2,
-      kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ);
+      kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ,
+      kIgnoreRanges);
+}
+
+
+void StubCode::GenerateUnaryRangeCollectingInlineCacheStub(
+    Assembler* assembler) {
+  GenerateUsageCounterIncrement(assembler, R6);
+  GenerateNArgsCheckInlineCacheStub(assembler, 1,
+      kInlineCacheMissHandlerOneArgRuntimeEntry,
+      Token::kILLEGAL,
+      kCollectRanges);
+}
+
+
+void StubCode::GenerateBinaryRangeCollectingInlineCacheStub(
+    Assembler* assembler) {
+  GenerateUsageCounterIncrement(assembler, R6);
+  GenerateNArgsCheckInlineCacheStub(assembler, 2,
+      kInlineCacheMissHandlerTwoArgsRuntimeEntry,
+      Token::kILLEGAL,
+      kCollectRanges);
 }
@@ -1565,7 +1634,8 @@ void StubCode::GenerateOneArgOptimizedCheckInlineCacheStub(
     Assembler* assembler) {
   GenerateOptimizedUsageCounterIncrement(assembler);
   GenerateNArgsCheckInlineCacheStub(assembler, 1,
-      kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL);
+      kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,
+      kIgnoreRanges);
 }
@@ -1573,7 +1643,8 @@ void StubCode::GenerateTwoArgsOptimizedCheckInlineCacheStub(
     Assembler* assembler) {
   GenerateOptimizedUsageCounterIncrement(assembler);
   GenerateNArgsCheckInlineCacheStub(assembler, 2,
-      kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL);
+      kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,
+      kIgnoreRanges);
 }
@@ -1581,7 +1652,8 @@ void StubCode::GenerateThreeArgsOptimizedCheckInlineCacheStub(
     Assembler* assembler) {
   GenerateOptimizedUsageCounterIncrement(assembler);
   GenerateNArgsCheckInlineCacheStub(assembler, 3,
-      kInlineCacheMissHandlerThreeArgsRuntimeEntry, Token::kILLEGAL);
+      kInlineCacheMissHandlerThreeArgsRuntimeEntry, Token::kILLEGAL,
+      kIgnoreRanges);
 }
@@ -1652,14 +1724,16 @@ void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) {
 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R6);
   GenerateNArgsCheckInlineCacheStub(
-      assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL);
+      assembler, 1, kStaticCallMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,
+      kIgnoreRanges);
 }
 
 
 void StubCode::GenerateTwoArgsUnoptimizedStaticCallStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R6);
   GenerateNArgsCheckInlineCacheStub(assembler, 2,
-      kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL);
+      kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,
+      kIgnoreRanges);
 }