Index: runtime/vm/flow_graph_compiler_arm64.cc
diff --git a/runtime/vm/flow_graph_compiler_arm64.cc b/runtime/vm/flow_graph_compiler_arm64.cc
index 3a8310a89778ba27fdb203f1192c3f534445cbed..e51c0ab389f0abedfc5f830e1f08a6e69ea408f2 100644
--- a/runtime/vm/flow_graph_compiler_arm64.cc
+++ b/runtime/vm/flow_graph_compiler_arm64.cc
@@ -1224,16 +1224,14 @@ void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry,


 void FlowGraphCompiler::EmitMegamorphicInstanceCall(
-    const ICData& ic_data,
+    const String& name,
+    const Array& arguments_descriptor,
     intptr_t argument_count,
     intptr_t deopt_id,
     TokenPosition token_pos,
     LocationSummary* locs,
     intptr_t try_index,
     intptr_t slow_path_argument_count) {
-  const String& name = String::Handle(zone(), ic_data.target_name());
-  const Array& arguments_descriptor =
-      Array::ZoneHandle(zone(), ic_data.arguments_descriptor());
   ASSERT(!arguments_descriptor.IsNull() && (arguments_descriptor.Length() > 0));
   const MegamorphicCache& cache = MegamorphicCache::ZoneHandle(
       zone(),
@@ -1466,120 +1464,43 @@ void FlowGraphCompiler::ClobberDeadTempRegisters(LocationSummary* locs) {
 #endif


-void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
-                                        intptr_t argument_count,
-                                        const Array& argument_names,
-                                        Label* failed,
-                                        Label* match_found,
-                                        intptr_t deopt_id,
-                                        TokenPosition token_index,
-                                        LocationSummary* locs,
-                                        bool complete,
-                                        intptr_t total_ic_calls) {
-  ASSERT(is_optimizing());
+void FlowGraphCompiler::EmitTestAndCallLoadReceiver(
+    intptr_t argument_count,
+    const Array& arguments_descriptor) {
   __ Comment("EmitTestAndCall");
-  const Array& arguments_descriptor = Array::ZoneHandle(
-      zone(), ArgumentsDescriptor::New(argument_count, argument_names));
-
   // Load receiver into R0.
   __ LoadFromOffset(R0, SP, (argument_count - 1) * kWordSize);
   __ LoadObject(R4, arguments_descriptor);
+}

-  const bool kFirstCheckIsSmi = ic_data.GetReceiverClassIdAt(0) == kSmiCid;
-  const intptr_t num_checks = ic_data.NumberOfChecks();
-
-  ASSERT(!ic_data.IsNull() && (num_checks > 0));

-  Label after_smi_test;
-  if (kFirstCheckIsSmi) {
-    __ tsti(R0, Immediate(kSmiTagMask));
-    // Jump if receiver is not Smi.
-    if (num_checks == 1) {
-      __ b(failed, NE);
-    } else {
-      __ b(&after_smi_test, NE);
-    }
-    // Do not use the code from the function, but let the code be patched so
-    // that we can record the outgoing edges to other code.
-    const Function& function =
-        Function::ZoneHandle(zone(), ic_data.GetTargetAt(0));
-    GenerateStaticDartCall(deopt_id, token_index,
-                           *StubCode::CallStaticFunction_entry(),
-                           RawPcDescriptors::kOther, locs, function);
-    __ Drop(argument_count);
-    if (num_checks > 1) {
-      __ b(match_found);
-    }
-  } else {
-    // Receiver is Smi, but Smi is not a valid class therefore fail.
-    // (Smi class must be first in the list).
-    if (!complete) {
-      __ tsti(R0, Immediate(kSmiTagMask));
-      __ b(failed, EQ);
-    }
-  }
-  __ Bind(&after_smi_test);
-
-  ASSERT(!ic_data.IsNull() && (num_checks > 0));
-  GrowableArray<CidRangeTarget> sorted(num_checks);
-  SortICDataByCount(ic_data, &sorted, /* drop_smi = */ true);
+void FlowGraphCompiler::EmitTestAndCallSmiBranch(Label* label, bool if_smi) {
+  __ tsti(R0, Immediate(kSmiTagMask));
+  // Jump if receiver is not Smi.
+  __ b(label, if_smi ? EQ : NE);
+}

-  const intptr_t sorted_len = sorted.length();
-  // If sorted_len is 0 then only a Smi check was needed; the Smi check above
-  // will fail if there was only one check and receiver is not Smi.
-  if (sorted_len == 0) return;

-  // Value is not Smi,
+void FlowGraphCompiler::EmitTestAndCallLoadCid() {
   __ LoadClassId(R2, R0);
+}

-  bool add_megamorphic_call = false;
-  const int kMaxImmediateInInstruction = 256;
-  int bias =
-      ComputeGoodBiasForCidComparison(sorted, kMaxImmediateInInstruction);
-  if (bias != 0) __ AddImmediate(R2, R2, -bias);
-
-  for (intptr_t i = 0; i < sorted_len; i++) {
-    const bool is_last_check = (i == (sorted_len - 1));
-    int cid_start = sorted[i].cid_start;
-    int cid_end = sorted[i].cid_end;
-    int count = sorted[i].count;
-    if (!is_last_check && !complete && count < (total_ic_calls >> 5)) {
-      // This case is hit too rarely to be worth writing class-id checks inline
-      // for.
-      add_megamorphic_call = true;
-      break;
-    }
-    ASSERT(cid_start > kSmiCid || cid_end < kSmiCid);
-    Label next_test;
-    if (!complete || !is_last_check) {
-      Label* next_label = is_last_check ? failed : &next_test;
-      if (cid_start == cid_end) {
-        __ CompareImmediate(R2, cid_start - bias);
-        __ b(next_label, NE);
-      } else {
-        __ AddImmediate(R2, R2, bias - cid_start);
-        bias = cid_start;
-        __ CompareImmediate(R2, cid_end - cid_start);
-        __ b(next_label, HI);  // Unsigned higher.
-      }
-    }
-    // Do not use the code from the function, but let the code be patched so
-    // that we can record the outgoing edges to other code.
-    const Function& function = *sorted[i].target;
-    GenerateStaticDartCall(deopt_id, token_index,
-                           *StubCode::CallStaticFunction_entry(),
-                           RawPcDescriptors::kOther, locs, function);
-    __ Drop(argument_count);
-    if (!is_last_check) {
-      __ b(match_found);
-    }
-    __ Bind(&next_test);
-  }
-  if (add_megamorphic_call) {
-    int try_index = CatchClauseNode::kInvalidTryIndex;
-    EmitMegamorphicInstanceCall(ic_data, argument_count, deopt_id, token_index,
-                                locs, try_index, argument_count);
+
+int FlowGraphCompiler::EmitTestAndCallCheckCid(Label* next_label,
+                                               const CidRangeTarget& target,
+                                               int bias) {
+  intptr_t cid_start = target.cid_start;
+  intptr_t cid_end = target.cid_end;
+  if (cid_start == cid_end) {
+    __ CompareImmediate(R2, cid_start - bias);
+    __ b(next_label, NE);
+  } else {
+    __ AddImmediate(R2, R2, bias - cid_start);
+    bias = cid_start;
+    __ CompareImmediate(R2, cid_end - cid_start);
+    __ b(next_label, HI);  // Unsigned higher.
   }
+  return bias;
 }
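
The hunks above only touch the ARM64 backend: the monolithic EmitTestAndCall body is split into four small architecture-specific helpers (EmitTestAndCallLoadReceiver, EmitTestAndCallSmiBranch, EmitTestAndCallLoadCid, EmitTestAndCallCheckCid), and EmitMegamorphicInstanceCall now takes the target name and arguments descriptor directly instead of deriving them from an ICData. For illustration only, the sketch below re-expresses the removed ARM64 control flow in terms of those helpers, the way an architecture-independent caller could drive them; its placement (e.g. flow_graph_compiler.cc), its exact signature, and its continued use of the ARM64-flavored __-macros (__ b, __ Drop, __ Bind) are assumptions, not part of this patch.

// Illustrative sketch only -- not part of this patch. It restates the removed
// ARM64 EmitTestAndCall body on top of the new helpers; a genuinely shared
// version would still have to abstract the remaining __-macros per backend.
void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
                                        intptr_t argument_count,
                                        const Array& argument_names,
                                        Label* failed,
                                        Label* match_found,
                                        intptr_t deopt_id,
                                        TokenPosition token_index,
                                        LocationSummary* locs,
                                        bool complete,
                                        intptr_t total_ic_calls) {
  ASSERT(is_optimizing());
  const Array& arguments_descriptor = Array::ZoneHandle(
      zone(), ArgumentsDescriptor::New(argument_count, argument_names));
  // Architecture-specific: receiver and arguments descriptor into their
  // fixed registers (R0/R4 on ARM64).
  EmitTestAndCallLoadReceiver(argument_count, arguments_descriptor);

  const intptr_t num_checks = ic_data.NumberOfChecks();
  ASSERT(!ic_data.IsNull() && (num_checks > 0));

  Label after_smi_test;
  if (ic_data.GetReceiverClassIdAt(0) == kSmiCid) {
    // Smi is the first receiver class: branch past its target if the
    // receiver is not a Smi, otherwise call the Smi target inline.
    EmitTestAndCallSmiBranch(num_checks == 1 ? failed : &after_smi_test,
                             /* if_smi = */ false);
    const Function& function =
        Function::ZoneHandle(zone(), ic_data.GetTargetAt(0));
    GenerateStaticDartCall(deopt_id, token_index,
                           *StubCode::CallStaticFunction_entry(),
                           RawPcDescriptors::kOther, locs, function);
    __ Drop(argument_count);
    if (num_checks > 1) __ b(match_found);
  } else if (!complete) {
    // Smi is not a valid receiver class here, so a Smi receiver fails.
    EmitTestAndCallSmiBranch(failed, /* if_smi = */ true);
  }
  __ Bind(&after_smi_test);

  GrowableArray<CidRangeTarget> sorted(num_checks);
  SortICDataByCount(ic_data, &sorted, /* drop_smi = */ true);
  if (sorted.length() == 0) return;  // Only the Smi check was needed.

  // Architecture-specific: class id of the (non-Smi) receiver into a
  // scratch register (R2 on ARM64).
  EmitTestAndCallLoadCid();
  // EmitTestAndCallCheckCid re-biases the cid register for range checks and
  // returns the bias it left in place, so the caller can start from zero.
  int bias = 0;
  for (intptr_t i = 0; i < sorted.length(); i++) {
    const bool is_last_check = (i == sorted.length() - 1);
    if (!is_last_check && !complete &&
        sorted[i].count < (total_ic_calls >> 5)) {
      // Remaining cases are hit too rarely for inline class-id checks; fall
      // back to a megamorphic lookup via the new call signature.
      const String& name = String::Handle(zone(), ic_data.target_name());
      EmitMegamorphicInstanceCall(name, arguments_descriptor, argument_count,
                                  deopt_id, token_index, locs,
                                  CatchClauseNode::kInvalidTryIndex,
                                  argument_count);
      break;
    }
    Label next_test;
    if (!complete || !is_last_check) {
      // Architecture-specific cid (range) comparison; branches on mismatch.
      bias = EmitTestAndCallCheckCid(is_last_check ? failed : &next_test,
                                     sorted[i], bias);
    }
    // Patched static call so outgoing edges to other code are recorded.
    GenerateStaticDartCall(deopt_id, token_index,
                           *StubCode::CallStaticFunction_entry(),
                           RawPcDescriptors::kOther, locs, *sorted[i].target);
    __ Drop(argument_count);
    if (!is_last_check) __ b(match_found);
    __ Bind(&next_test);
  }
}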