Chromium Code Reviews

Unified Diff: runtime/vm/flow_graph_compiler_arm.cc

Issue 1247783002: Make array allocation stub shared between isolates. (Closed)
Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 5 years, 5 months ago
Index: runtime/vm/flow_graph_compiler_arm.cc
diff --git a/runtime/vm/flow_graph_compiler_arm.cc b/runtime/vm/flow_graph_compiler_arm.cc
index d169b8de8902ff98dd33089705c943126773bd18..478daf0cd46679d2b2d470fc9b958fa7bc7959aa 100644
--- a/runtime/vm/flow_graph_compiler_arm.cc
+++ b/runtime/vm/flow_graph_compiler_arm.cc
@@ -194,12 +194,11 @@ void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler,
ASSERT(deopt_env() != NULL);
- StubCode* stub_code = compiler->isolate()->stub_code();
// LR may be live. It will be clobbered by BranchLink, so cache it in IP.
// It will be restored at the top of the deoptimization stub, specifically in
// GenerateDeoptimizationSequence in stub_code_arm.cc.
__ mov(IP, Operand(LR));
- __ BranchLink(&stub_code->DeoptimizeLabel());
+ __ BranchLink(&StubCode::DeoptimizeLabel());
koda 2015/07/23 00:58:01 True already before your change, but even more obv…
Florian Schneider 2015/07/23 10:43:22 I would also prefer a const-reference. I'll see if…
set_pc_offset(assem->CodeSize());
#undef __
}
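(Annotation, not part of the patch.) For the const-reference point raised in the inline comments above, a minimal sketch of the two accessor shapes being compared; the class and member names here are hypothetical and simplified, not the real StubCode interface:

    #include <cstdint>

    // Illustrative stand-in for the VM's ExternalLabel type.
    struct ExternalLabel { uintptr_t address; };

    class StubCodeSketch {
     public:
      // Mutable-reference accessor: a call site such as
      //   __ BranchLink(&StubCodeSketch::MutableLabel())
      // works, but callers can also modify the label through the reference.
      static ExternalLabel& MutableLabel();

      // Const-reference accessor, as preferred in the review: same call-site
      // syntax, but the returned label is read-only to callers.
      static const ExternalLabel& ConstLabel();
    };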
@@ -236,19 +235,18 @@ RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub(
ASSERT(temp_reg == kNoRegister); // Unused on ARM.
const SubtypeTestCache& type_test_cache =
SubtypeTestCache::ZoneHandle(SubtypeTestCache::New());
- StubCode* stub_code = isolate()->stub_code();
__ LoadUniqueObject(R2, type_test_cache);
if (test_kind == kTestTypeOneArg) {
ASSERT(type_arguments_reg == kNoRegister);
__ LoadImmediate(R1, reinterpret_cast<intptr_t>(Object::null()));
- __ BranchLink(&stub_code->Subtype1TestCacheLabel());
+ __ BranchLink(&StubCode::Subtype1TestCacheLabel());
} else if (test_kind == kTestTypeTwoArgs) {
ASSERT(type_arguments_reg == kNoRegister);
__ LoadImmediate(R1, reinterpret_cast<intptr_t>(Object::null()));
- __ BranchLink(&stub_code->Subtype2TestCacheLabel());
+ __ BranchLink(&StubCode::Subtype2TestCacheLabel());
} else if (test_kind == kTestTypeThreeArgs) {
ASSERT(type_arguments_reg == R1);
- __ BranchLink(&stub_code->Subtype3TestCacheLabel());
+ __ BranchLink(&StubCode::Subtype3TestCacheLabel());
} else {
UNREACHABLE();
}
@@ -923,7 +921,7 @@ void FlowGraphCompiler::CopyParameters() {
__ Bind(&wrong_num_arguments);
if (function.IsClosureFunction()) {
__ LeaveDartFrame(); // The arguments are still on the stack.
- __ Branch(&isolate()->stub_code()->CallClosureNoSuchMethodLabel());
+ __ Branch(&StubCode::CallClosureNoSuchMethodLabel());
// The noSuchMethod call may return to the caller, but not here.
} else if (check_correct_named_args) {
__ Stop("Wrong arguments");
@@ -983,7 +981,6 @@ void FlowGraphCompiler::EmitFrameEntry() {
function.IsOptimizable() &&
(!is_optimizing() || may_reoptimize())) {
const Register function_reg = R6;
- StubCode* stub_code = isolate()->stub_code();
// The pool pointer is not setup before entering the Dart frame.
// Preserve PP of caller.
@@ -1009,7 +1006,7 @@ void FlowGraphCompiler::EmitFrameEntry() {
}
__ CompareImmediate(R7, GetOptimizationThreshold());
ASSERT(function_reg == R6);
- __ Branch(&stub_code->OptimizeFunctionLabel(), GE);
+ __ Branch(&StubCode::OptimizeFunctionLabel(), GE);
} else if (!flow_graph().IsCompiledForOsr()) {
entry_patch_pc_offset_ = assembler()->CodeSize();
}
@@ -1046,7 +1043,6 @@ void FlowGraphCompiler::CompileGraph() {
const int num_fixed_params = function.num_fixed_parameters();
const int num_copied_params = parsed_function().num_copied_params();
const int num_locals = parsed_function().num_stack_locals();
- StubCode* stub_code = isolate()->stub_code();
// We check the number of passed arguments when we have to copy them due to
// the presence of optional parameters.
@@ -1074,7 +1070,7 @@ void FlowGraphCompiler::CompileGraph() {
__ Bind(&wrong_num_arguments);
if (function.IsClosureFunction()) {
__ LeaveDartFrame(); // The arguments are still on the stack.
- __ Branch(&isolate()->stub_code()->CallClosureNoSuchMethodLabel());
+ __ Branch(&StubCode::CallClosureNoSuchMethodLabel());
// The noSuchMethod call may return to the caller, but not here.
} else {
__ Stop("Wrong number of arguments");
@@ -1129,11 +1125,11 @@ void FlowGraphCompiler::CompileGraph() {
// Emit function patching code. This will be swapped with the first 3
// instructions at entry point.
patch_code_pc_offset_ = assembler()->CodeSize();
- __ BranchPatchable(&stub_code->FixCallersTargetLabel());
+ __ BranchPatchable(&StubCode::FixCallersTargetLabel());
if (is_optimizing()) {
lazy_deopt_pc_offset_ = assembler()->CodeSize();
- __ Branch(&stub_code->DeoptimizeLazyLabel());
+ __ Branch(&StubCode::DeoptimizeLazyLabel());
}
}
@@ -1295,8 +1291,7 @@ void FlowGraphCompiler::EmitMegamorphicInstanceCall(
__ LoadObject(cacheR, cache);
if (FLAG_use_megamorphic_stub) {
- StubCode* stub_code = isolate()->stub_code();
- __ BranchLink(&stub_code->MegamorphicLookupLabel());
+ __ BranchLink(&StubCode::MegamorphicLookupLabel());
} else {
StubCode::EmitMegamorphicLookup(assembler(), receiverR, cacheR, targetR);
}
@@ -1325,9 +1320,8 @@ void FlowGraphCompiler::EmitUnoptimizedStaticCall(
intptr_t token_pos,
LocationSummary* locs,
const ICData& ic_data) {
- StubCode* stub_code = isolate()->stub_code();
const uword label_address =
- stub_code->UnoptimizedStaticCallEntryPoint(ic_data.NumArgsTested());
+ StubCode::UnoptimizedStaticCallEntryPoint(ic_data.NumArgsTested());
ExternalLabel target_label(label_address);
__ LoadObject(R5, ic_data);
GenerateDartCall(deopt_id,
@@ -1346,13 +1340,12 @@ void FlowGraphCompiler::EmitOptimizedStaticCall(
intptr_t deopt_id,
intptr_t token_pos,
LocationSummary* locs) {
- StubCode* stub_code = isolate()->stub_code();
__ LoadObject(R4, arguments_descriptor);
// Do not use the code from the function, but let the code be patched so that
// we can record the outgoing edges to other code.
GenerateDartCall(deopt_id,
token_pos,
- &stub_code->CallStaticFunctionLabel(),
+ &StubCode::CallStaticFunctionLabel(),
RawPcDescriptors::kOther,
locs);
AddStaticCallTarget(function);
@@ -1366,16 +1359,15 @@ Condition FlowGraphCompiler::EmitEqualityRegConstCompare(
bool needs_number_check,
intptr_t token_pos) {
if (needs_number_check) {
- StubCode* stub_code = isolate()->stub_code();
ASSERT(!obj.IsMint() && !obj.IsDouble() && !obj.IsBigint());
__ Push(reg);
__ PushObject(obj);
if (is_optimizing()) {
__ BranchLinkPatchable(
- &stub_code->OptimizedIdenticalWithNumberCheckLabel());
+ &StubCode::OptimizedIdenticalWithNumberCheckLabel());
} else {
__ BranchLinkPatchable(
- &stub_code->UnoptimizedIdenticalWithNumberCheckLabel());
+ &StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
}
if (token_pos != Scanner::kNoSourcePos) {
AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
@@ -1397,15 +1389,14 @@ Condition FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
bool needs_number_check,
intptr_t token_pos) {
if (needs_number_check) {
- StubCode* stub_code = isolate()->stub_code();
__ Push(left);
__ Push(right);
if (is_optimizing()) {
__ BranchLinkPatchable(
- &stub_code->OptimizedIdenticalWithNumberCheckLabel());
+ &StubCode::OptimizedIdenticalWithNumberCheckLabel());
} else {
__ BranchLinkPatchable(
- &stub_code->UnoptimizedIdenticalWithNumberCheckLabel());
+ &StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
}
if (token_pos != Scanner::kNoSourcePos) {
AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
@@ -1524,7 +1515,6 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
const Array& arguments_descriptor =
Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
argument_names));
- StubCode* stub_code = isolate()->stub_code();
// Load receiver into R0.
__ LoadFromOffset(kWord, R0, SP, (argument_count - 1) * kWordSize);
@@ -1548,7 +1538,7 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
// that we can record the outgoing edges to other code.
GenerateDartCall(deopt_id,
token_index,
- &stub_code->CallStaticFunctionLabel(),
+ &StubCode::CallStaticFunctionLabel(),
RawPcDescriptors::kOther,
locs);
const Function& function = Function::Handle(ic_data.GetTargetAt(0));
@@ -1589,7 +1579,7 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
// that we can record the outgoing edges to other code.
GenerateDartCall(deopt_id,
token_index,
- &stub_code->CallStaticFunctionLabel(),
+ &StubCode::CallStaticFunctionLabel(),
RawPcDescriptors::kOther,
locs);
const Function& function = *sorted[i].target;
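(Annotation, not part of the patch.) Reduced to its essentials, the change applied throughout this file replaces the per-isolate stub_code accessor with static StubCode accessors. A minimal sketch of the before/after shape, using simplified hypothetical types rather than the real VM classes:

    #include <cstdint>

    // Illustrative stand-in for the VM's ExternalLabel type.
    struct ExternalLabel { uintptr_t address; };

    // Old shape: labels reached through a per-isolate instance, as in
    //   StubCode* stub_code = isolate()->stub_code();
    //   __ BranchLink(&stub_code->DeoptimizeLabel());
    class PerIsolateStubCode {
     public:
      const ExternalLabel& DeoptimizeLabel() const { return deoptimize_; }
     private:
      ExternalLabel deoptimize_;
    };

    // New shape: static accessors, so the generated stubs and their labels can
    // be shared between isolates, as in
    //   __ BranchLink(&StubCode::DeoptimizeLabel());
    class SharedStubCode {
     public:
      static const ExternalLabel& DeoptimizeLabel() { return deoptimize_; }
     private:
      static ExternalLabel deoptimize_;
    };
    ExternalLabel SharedStubCode::deoptimize_ = {0};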