Chromium Code Reviews

Unified Diff: runtime/vm/flow_graph_compiler_arm64.cc

Issue 1270803003: VM: More abstract interface for generating stub calls. (Closed)
Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 5 years, 4 months ago
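
Reviewer's note: this patch swaps the raw ExternalLabel-based helpers for a StubEntry-based interface, so every stub call in this file goes through one abstraction instead of taking the address of a per-stub label. The sketch below is a minimal, self-contained reconstruction of that pattern, assuming StubEntry simply wraps a stub's entry-point label; the Sketch* types, addresses, and printf output are illustrative stand-ins, not the real VM classes.

// Minimal sketch of the old vs. new stub-call pattern (illustrative types
// only; the real ExternalLabel, StubEntry, and Assembler are richer).
#include <cstdint>
#include <cstdio>

typedef uintptr_t uword;

// Stand-in for the VM's ExternalLabel: a bare entry-point address.
class SketchExternalLabel {
 public:
  explicit SketchExternalLabel(uword address) : address_(address) {}
  uword address() const { return address_; }
 private:
  uword address_;
};

// Assumed shape of StubEntry: one type that owns the stub's label, so
// helpers no longer traffic in raw label pointers.
class SketchStubEntry {
 public:
  explicit SketchStubEntry(uword entry_point) : label_(entry_point) {}
  const SketchExternalLabel& label() const { return label_; }
 private:
  SketchExternalLabel label_;
};

class SketchAssembler {
 public:
  // Old-style helper: callers pass the address of a per-stub label,
  // e.g. __ BranchLink(&StubCode::DeoptimizeLabel());
  void BranchLink(const SketchExternalLabel* label) {
    std::printf("bl 0x%llx  (via ExternalLabel*)\n",
                static_cast<unsigned long long>(label->address()));
  }
  // New-style helper: callers pass a StubEntry by reference,
  // e.g. __ BranchLink(*StubCode::Deoptimize_entry());
  void BranchLink(const SketchStubEntry& stub_entry) {
    std::printf("bl 0x%llx  (via StubEntry&)\n",
                static_cast<unsigned long long>(stub_entry.label().address()));
  }
};

int main() {
  SketchAssembler assembler;
  SketchExternalLabel deoptimize_label(0x1000);  // old world
  SketchStubEntry deoptimize_entry(0x1000);      // new world
  assembler.BranchLink(&deoptimize_label);
  assembler.BranchLink(deoptimize_entry);
  return 0;
}
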
Index: runtime/vm/flow_graph_compiler_arm64.cc
diff --git a/runtime/vm/flow_graph_compiler_arm64.cc b/runtime/vm/flow_graph_compiler_arm64.cc
index 39c1206b421843840af1c94876f01e527372047d..a473953e163fe122d55d04864c1072b5cb5ccf8b 100644
--- a/runtime/vm/flow_graph_compiler_arm64.cc
+++ b/runtime/vm/flow_graph_compiler_arm64.cc
@@ -189,7 +189,7 @@ void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler,
ASSERT(deopt_env() != NULL);
- __ BranchLink(&StubCode::DeoptimizeLabel());
+ __ BranchLink(*StubCode::Deoptimize_entry());
set_pc_offset(assem->CodeSize());
#undef __
}
@@ -229,14 +229,14 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub(
if (test_kind == kTestTypeOneArg) {
ASSERT(type_arguments_reg == kNoRegister);
__ LoadObject(R1, Object::null_object());
- __ BranchLink(&StubCode::Subtype1TestCacheLabel());
+ __ BranchLink(*StubCode::Subtype1TestCache_entry());
} else if (test_kind == kTestTypeTwoArgs) {
ASSERT(type_arguments_reg == kNoRegister);
__ LoadObject(R1, Object::null_object());
- __ BranchLink(&StubCode::Subtype2TestCacheLabel());
+ __ BranchLink(*StubCode::Subtype2TestCache_entry());
} else if (test_kind == kTestTypeThreeArgs) {
ASSERT(type_arguments_reg == R1);
- __ BranchLink(&StubCode::Subtype3TestCacheLabel());
+ __ BranchLink(*StubCode::Subtype3TestCache_entry());
} else {
UNREACHABLE();
}
@@ -922,7 +922,7 @@ void FlowGraphCompiler::CopyParameters() {
__ LeaveDartFrame(); // The arguments are still on the stack.
// Do not use caller's pool ptr in branch.
ASSERT(!assembler()->constant_pool_allowed());
- __ BranchPatchable(&StubCode::CallClosureNoSuchMethodLabel());
+ __ BranchPatchable(*StubCode::CallClosureNoSuchMethod_entry());
__ set_constant_pool_allowed(true);
// The noSuchMethod call may return to the caller, but not here.
} else if (check_correct_named_args) {
@@ -1008,7 +1008,7 @@ void FlowGraphCompiler::EmitFrameEntry() {
ASSERT(function_reg == R6);
Label dont_optimize;
__ b(&dont_optimize, LT);
- __ Branch(&StubCode::OptimizeFunctionLabel());
+ __ Branch(*StubCode::OptimizeFunction_entry());
__ Bind(&dont_optimize);
} else if (!flow_graph().IsCompiledForOsr()) {
entry_patch_pc_offset_ = assembler()->CodeSize();
@@ -1077,7 +1077,7 @@ void FlowGraphCompiler::CompileGraph() {
__ LeaveDartFrame(); // The arguments are still on the stack.
// Do not use caller's pool ptr in branch.
ASSERT(!assembler()->constant_pool_allowed());
- __ BranchPatchable(&StubCode::CallClosureNoSuchMethodLabel());
+ __ BranchPatchable(*StubCode::CallClosureNoSuchMethod_entry());
__ set_constant_pool_allowed(true);
// The noSuchMethod call may return to the caller, but not here.
} else {
@@ -1135,20 +1135,20 @@ void FlowGraphCompiler::CompileGraph() {
// Emit function patching code. This will be swapped with the first 3
// instructions at entry point.
patch_code_pc_offset_ = assembler()->CodeSize();
- __ BranchPatchable(&StubCode::FixCallersTargetLabel());
+ __ BranchPatchable(*StubCode::FixCallersTarget_entry());
if (is_optimizing()) {
lazy_deopt_pc_offset_ = assembler()->CodeSize();
- __ BranchPatchable(&StubCode::DeoptimizeLazyLabel());
+ __ BranchPatchable(*StubCode::DeoptimizeLazy_entry());
}
}
void FlowGraphCompiler::GenerateCall(intptr_t token_pos,
- const ExternalLabel* label,
+ const StubEntry& stub_entry,
RawPcDescriptors::Kind kind,
LocationSummary* locs) {
- __ BranchLinkPatchable(label);
+ __ BranchLinkPatchable(stub_entry);
AddCurrentDescriptor(kind, Isolate::kNoDeoptId, token_pos);
RecordSafepoint(locs);
}
@@ -1156,10 +1156,10 @@ void FlowGraphCompiler::GenerateCall(intptr_t token_pos,
void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
intptr_t token_pos,
- const ExternalLabel* label,
+ const StubEntry& stub_entry,
RawPcDescriptors::Kind kind,
LocationSummary* locs) {
- __ BranchLinkPatchable(label);
+ __ BranchLinkPatchable(stub_entry);
AddCurrentDescriptor(kind, deopt_id, token_pos);
RecordSafepoint(locs);
// Marks either the continuation point in unoptimized code or the
@@ -1216,7 +1216,7 @@ void FlowGraphCompiler::EmitEdgeCounter() {
void FlowGraphCompiler::EmitOptimizedInstanceCall(
- ExternalLabel* target_label,
+ const StubEntry& stub_entry,
const ICData& ic_data,
intptr_t argument_count,
intptr_t deopt_id,
@@ -1234,14 +1234,14 @@ void FlowGraphCompiler::EmitOptimizedInstanceCall(
__ LoadUniqueObject(R5, ic_data);
GenerateDartCall(deopt_id,
token_pos,
- target_label,
+ stub_entry,
RawPcDescriptors::kIcCall,
locs);
__ Drop(argument_count);
}
-void FlowGraphCompiler::EmitInstanceCall(ExternalLabel* target_label,
+void FlowGraphCompiler::EmitInstanceCall(const StubEntry& stub_entry,
const ICData& ic_data,
intptr_t argument_count,
intptr_t deopt_id,
@@ -1251,7 +1251,7 @@ void FlowGraphCompiler::EmitInstanceCall(ExternalLabel* target_label,
__ LoadUniqueObject(R5, ic_data);
GenerateDartCall(deopt_id,
token_pos,
- target_label,
+ stub_entry,
RawPcDescriptors::kIcCall,
locs);
__ Drop(argument_count);
@@ -1278,7 +1278,7 @@ void FlowGraphCompiler::EmitMegamorphicInstanceCall(
__ LoadObject(cacheR, cache);
if (FLAG_use_megamorphic_stub) {
- __ BranchLink(&StubCode::MegamorphicLookupLabel());
+ __ BranchLink(*StubCode::MegamorphicLookup_entry());
} else {
StubCode::EmitMegamorphicLookup(assembler(), receiverR, cacheR, targetR);
}
@@ -1306,13 +1306,12 @@ void FlowGraphCompiler::EmitUnoptimizedStaticCall(
intptr_t token_pos,
LocationSummary* locs,
const ICData& ic_data) {
- const uword label_address =
- StubCode::UnoptimizedStaticCallEntryPoint(ic_data.NumArgsTested());
- ExternalLabel target_label(label_address);
+ const StubEntry* stub_entry =
+ StubCode::UnoptimizedStaticCallEntry(ic_data.NumArgsTested());
__ LoadObject(R5, ic_data);
GenerateDartCall(deopt_id,
token_pos,
- &target_label,
+ *stub_entry,
RawPcDescriptors::kUnoptStaticCall,
locs);
__ Drop(argument_count);
@@ -1331,7 +1330,7 @@ void FlowGraphCompiler::EmitOptimizedStaticCall(
// we can record the outgoing edges to other code.
GenerateDartCall(deopt_id,
token_pos,
- &StubCode::CallStaticFunctionLabel(),
+ *StubCode::CallStaticFunction_entry(),
RawPcDescriptors::kOther,
locs);
AddStaticCallTarget(function);
@@ -1350,10 +1349,10 @@ Condition FlowGraphCompiler::EmitEqualityRegConstCompare(
__ PushObject(obj);
if (is_optimizing()) {
__ BranchLinkPatchable(
- &StubCode::OptimizedIdenticalWithNumberCheckLabel());
+ *StubCode::OptimizedIdenticalWithNumberCheck_entry());
} else {
__ BranchLinkPatchable(
- &StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
+ *StubCode::UnoptimizedIdenticalWithNumberCheck_entry());
}
if (token_pos != Scanner::kNoSourcePos) {
AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
@@ -1379,10 +1378,10 @@ Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
__ Push(right);
if (is_optimizing()) {
__ BranchLinkPatchable(
- &StubCode::OptimizedIdenticalWithNumberCheckLabel());
+ *StubCode::OptimizedIdenticalWithNumberCheck_entry());
} else {
__ BranchLinkPatchable(
- &StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
+ *StubCode::UnoptimizedIdenticalWithNumberCheck_entry());
}
if (token_pos != Scanner::kNoSourcePos) {
AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
@@ -1508,7 +1507,7 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
// that we can record the outgoing edges to other code.
GenerateDartCall(deopt_id,
token_index,
- &StubCode::CallStaticFunctionLabel(),
+ *StubCode::CallStaticFunction_entry(),
RawPcDescriptors::kOther,
locs);
const Function& function = Function::Handle(zone(), ic_data.GetTargetAt(0));
@@ -1549,7 +1548,7 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
// that we can record the outgoing edges to other code.
GenerateDartCall(deopt_id,
token_index,
- &StubCode::CallStaticFunctionLabel(),
+ *StubCode::CallStaticFunction_entry(),
RawPcDescriptors::kOther,
locs);
const Function& function = *sorted[i].target;
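
For quick reference, the two fully visible helper signatures after the patch, reconstructed from the hunks above. EmitOptimizedInstanceCall and EmitInstanceCall likewise now take a leading const StubEntry& instead of an ExternalLabel*, but their full parameter lists are cut off by the hunks, so they are not repeated here.

// A reviewer's reconstruction from the hunks above, not a copy of the header.
void GenerateCall(intptr_t token_pos,
                  const StubEntry& stub_entry,
                  RawPcDescriptors::Kind kind,
                  LocationSummary* locs);

void GenerateDartCall(intptr_t deopt_id,
                      intptr_t token_pos,
                      const StubEntry& stub_entry,
                      RawPcDescriptors::Kind kind,
                      LocationSummary* locs);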