| Index: runtime/vm/flow_graph_compiler_mips.cc
|
| ===================================================================
|
| --- runtime/vm/flow_graph_compiler_mips.cc (revision 20729)
|
| +++ runtime/vm/flow_graph_compiler_mips.cc (working copy)
|
| @@ -7,10 +7,24 @@
|
|
|
| #include "vm/flow_graph_compiler.h"
|
|
|
| -#include "vm/longjump.h"
|
| +#include "lib/error.h"
|
| +#include "vm/ast_printer.h"
|
| +#include "vm/dart_entry.h"
|
| +#include "vm/il_printer.h"
|
| +#include "vm/locations.h"
|
| +#include "vm/object_store.h"
|
| +#include "vm/parser.h"
|
| +#include "vm/stub_code.h"
|
| +#include "vm/symbols.h"
|
|
|
| namespace dart {
|
|
|
| +DECLARE_FLAG(int, optimization_counter_threshold);
|
| +DECLARE_FLAG(bool, print_ast);
|
| +DECLARE_FLAG(bool, print_scopes);
|
| +DECLARE_FLAG(bool, enable_type_checks);
|
| +
|
| +
|
| FlowGraphCompiler::~FlowGraphCompiler() {
|
| // BlockInfos are zone-allocated, so their destructors are not called.
|
| // Verify the labels explicitly here.
|
| @@ -31,6 +45,9 @@
|
| }
|
|
|
|
|
| +#define __ assembler()->
|
| +
|
| +
|
| void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
|
| Label* is_true,
|
| Label* is_false) {
|
| @@ -128,12 +145,24 @@
|
|
|
|
|
| void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) {
|
| - UNIMPLEMENTED();
|
| + if (!is_optimizing()) {
|
| + if (FLAG_enable_type_checks && instr->IsAssertAssignable()) {
|
| + AssertAssignableInstr* assert = instr->AsAssertAssignable();
|
| + AddCurrentDescriptor(PcDescriptors::kDeoptBefore,
|
| + assert->deopt_id(),
|
| + assert->token_pos());
|
| + }
|
| + AllocateRegistersLocally(instr);
|
| + }
|
| }
|
|
|
|
|
| void FlowGraphCompiler::EmitInstructionEpilogue(Instruction* instr) {
|
| - UNIMPLEMENTED();
|
| + if (is_optimizing()) return;
|
| + Definition* defn = instr->AsDefinition();
|
| + if ((defn != NULL) && defn->is_used()) {
|
| + __ Push(defn->locs()->out().reg());
|
| + }
|
| }
|
|
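In unoptimized code the prologue/epilogue pair above implements a simple
stack-machine discipline: registers are allocated locally per instruction, and
every definition whose value is consumed later leaves its result on the
expression stack. A minimal sketch of that push-if-used rule, using
illustrative stand-in types rather than the VM's real classes:

    // Hypothetical model; Definition and ExpressionStack are stand-ins.
    #include <cstdint>
    #include <vector>

    struct Definition {
      bool is_used;      // does a later instruction consume the value?
      intptr_t out_reg;  // register holding the result
    };

    using ExpressionStack = std::vector<intptr_t>;

    // Mirrors: if (defn->is_used()) __ Push(defn->locs()->out().reg());
    void EmitInstructionEpilogueModel(const Definition& defn,
                                      ExpressionStack* stack) {
      if (defn.is_used) stack->push_back(defn.out_reg);
    }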
|
|
|
| @@ -153,12 +182,221 @@
|
|
|
|
|
| void FlowGraphCompiler::EmitFrameEntry() {
|
| - UNIMPLEMENTED();
|
| + const Function& function = parsed_function().function();
|
| + if (CanOptimizeFunction() && function.is_optimizable()) {
|
| + const bool can_optimize = !is_optimizing() || may_reoptimize();
|
| + const Register function_reg = T0;
|
| + if (can_optimize) {
|
| + Label next;
|
| + // The pool pointer is not set up before entering the Dart frame.
|
| +
|
| + __ mov(TMP, RA); // Save RA.
|
| + __ bal(&next); // Branch and link to next instruction to get PC in RA.
|
| + __ delay_slot()->mov(T2, RA); // Save PC of the following mov.
|
| +
|
| + // Calculate offset of pool pointer from the PC.
|
| + const intptr_t object_pool_pc_dist =
|
| + Instructions::HeaderSize() - Instructions::object_pool_offset() +
|
| + assembler()->CodeSize();
|
| +
|
| + __ Bind(&next);
|
| + __ mov(RA, TMP); // Restore RA.
|
| +
|
| + // Preserve PP of caller.
|
| + __ mov(T1, PP);
|
| +
|
| + // Temporarily set up the pool pointer for this Dart function.
|
| + __ lw(PP, Address(T2, -object_pool_pc_dist));
|
| +
|
| + // Load function object from object pool.
|
| + __ LoadObject(function_reg, function); // Uses PP.
|
| +
|
| + // Restore PP of caller.
|
| + __ mov(PP, T1);
|
| + }
|
| + // The patch point is after the (possibly inlined) function object load.
|
| + AddCurrentDescriptor(PcDescriptors::kEntryPatch,
|
| + Isolate::kNoDeoptId,
|
| + 0); // No token position.
|
| + if (can_optimize) {
|
| + // Reoptimization of an optimized function is triggered by counting in
|
| + // IC stubs, not at the function's entry.
|
| + if (!is_optimizing()) {
|
| + __ lw(T1, FieldAddress(function_reg,
|
| + Function::usage_counter_offset()));
|
| + __ addiu(T1, T1, Immediate(1));
|
| + __ sw(T1, FieldAddress(function_reg,
|
| + Function::usage_counter_offset()));
|
| + } else {
|
| + __ lw(T1, FieldAddress(function_reg,
|
| + Function::usage_counter_offset()));
|
| + }
|
| +
|
| + // Skip the optimization-stub call if T1 is below the threshold.
|
| + Label dont_branch;
|
| + __ LoadImmediate(T2, FLAG_optimization_counter_threshold);
|
| + __ sltu(T2, T1, T2);
|
| + __ bgtz(T2, &dont_branch);
|
| +
|
| + ASSERT(function_reg == T0);
|
| + __ Branch(&StubCode::OptimizeFunctionLabel());
|
| +
|
| + __ Bind(&dont_branch);
|
| + }
|
| + } else {
|
| + AddCurrentDescriptor(PcDescriptors::kEntryPatch,
|
| + Isolate::kNoDeoptId,
|
| + 0); // No token position.
|
| + }
|
| + __ Comment("Enter frame");
|
| + __ EnterDartFrame(StackSize() * kWordSize);
|
| }
|
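The bal to the next instruction above is a standard MIPS idiom for
materializing the current PC: after the branch-and-link, RA (copied to T2 in
the delay slot) holds the address of the instruction bound at `next`, which
lies HeaderSize() + CodeSize() bytes past the start of the enclosing
Instructions object, while the object pool field lies object_pool_offset()
bytes from that same start. A standalone check of the displacement algebra,
with made-up constants (the offsets below are hypothetical, not the VM's
actual values):

    // Hypothetical offsets; only the algebra is the point.
    #include <cassert>
    #include <cstdint>

    int main() {
      const intptr_t kHeaderSize = 32;              // Instructions::HeaderSize()
      const intptr_t kObjectPoolOffset = 8;         // object_pool_offset()
      const intptr_t instructions_start = 0x10000;  // made-up object address
      const intptr_t code_size = 12;                // CodeSize() at `next`

      const intptr_t pc = instructions_start + kHeaderSize + code_size;  // T2
      const intptr_t object_pool_pc_dist =
          kHeaderSize - kObjectPoolOffset + code_size;
      // lw(PP, Address(T2, -object_pool_pc_dist)) reads the pool field:
      assert(pc - object_pool_pc_dist ==
             instructions_start + kObjectPoolOffset);
      return 0;
    }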
|
|
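The usage-counter check above leans on two MIPS details: sltu writes 1 or 0
depending on an unsigned compare, and bgtz then takes the branch only when
that flag is nonzero, so the OptimizeFunction stub is reached exactly when the
counter has hit the threshold. A small model of those semantics (sketch only,
not VM code):

    // Models: sltu T2, T1, threshold; bgtz T2, &dont_branch.
    #include <cassert>
    #include <cstdint>

    bool SkipsOptimizeStub(uintptr_t counter, uintptr_t threshold) {
      const intptr_t t2 = (counter < threshold) ? 1 : 0;  // sltu
      return t2 > 0;                                      // bgtz taken
    }

    int main() {
      assert(SkipsOptimizeStub(10, 30));   // below threshold: skip the stub
      assert(!SkipsOptimizeStub(30, 30));  // at threshold: call the stub
      return 0;
    }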
| +// Input parameters:
|
| +// RA: return address.
|
| +// SP: address of last argument.
|
| +// FP: caller's frame pointer.
|
| +// PP: caller's pool pointer.
|
| +// S5: ic-data.
|
| +// S4: arguments descriptor array.
|
| +void FlowGraphCompiler::CompileGraph() {
|
| + InitCompiler();
|
| + if (TryIntrinsify()) {
|
| + // Although this intrinsified code will never be patched, it must satisfy
|
| + // CodePatcher::CodeIsPatchable, which verifies that this code has a minimum
|
| + // code size.
|
| + __ break_(0);
|
| + __ Branch(&StubCode::FixCallersTargetLabel());
|
| + return;
|
| + }
|
|
|
| -void FlowGraphCompiler::CompileGraph() {
|
| - UNIMPLEMENTED();
|
| + EmitFrameEntry();
|
| +
|
| + const Function& function = parsed_function().function();
|
| +
|
| + const int num_fixed_params = function.num_fixed_parameters();
|
| + const int num_copied_params = parsed_function().num_copied_params();
|
| + const int num_locals = parsed_function().num_stack_locals();
|
| +
|
| + // We check the number of passed arguments when we have to copy them due to
|
| + // the presence of optional parameters.
|
| + // No such checking code is generated if only fixed parameters are declared,
|
| + // unless we are in debug mode or unless we are compiling a closure.
|
| + LocalVariable* saved_args_desc_var =
|
| + parsed_function().GetSavedArgumentsDescriptorVar();
|
| + if (num_copied_params == 0) {
|
| +#ifdef DEBUG
|
| + ASSERT(!parsed_function().function().HasOptionalParameters());
|
| + const bool check_arguments = true;
|
| +#else
|
| + const bool check_arguments = function.IsClosureFunction();
|
| +#endif
|
| + if (check_arguments) {
|
| + __ Comment("Check argument count");
|
| + // Check that exactly num_fixed arguments are passed in.
|
| + Label correct_num_arguments, wrong_num_arguments;
|
| + __ lw(T0, FieldAddress(S4, ArgumentsDescriptor::count_offset()));
|
| + __ LoadImmediate(T1, Smi::RawValue(num_fixed_params));
|
| + __ bne(T0, T1, &wrong_num_arguments);
|
| +
|
| + __ lw(T1, FieldAddress(S4,
|
| + ArgumentsDescriptor::positional_count_offset()));
|
| + __ beq(T0, T1, &correct_num_arguments);
|
| + __ Bind(&wrong_num_arguments);
|
| + if (function.IsClosureFunction()) {
|
| + if (StackSize() != 0) {
|
| + // We need to unwind the space we reserved for locals and copied
|
| + // parameters. The NoSuchMethodFunction stub does not expect to see
|
| + // that area on the stack.
|
| + __ addiu(SP, SP, Immediate(StackSize() * kWordSize));
|
| + }
|
| + // The call below has an empty stackmap because we have just
|
| + // dropped the spill slots.
|
| + BitmapBuilder* empty_stack_bitmap = new BitmapBuilder();
|
| +
|
| + // Invoke noSuchMethod function passing "call" as the function name.
|
| + const int kNumArgsChecked = 1;
|
| + const ICData& ic_data = ICData::ZoneHandle(
|
| + ICData::New(function, Symbols::Call(),
|
| + Isolate::kNoDeoptId, kNumArgsChecked));
|
| + __ LoadObject(S5, ic_data);
|
| + // FP - 4 : saved PP, object pool pointer of caller.
|
| + // FP + 0 : previous frame pointer.
|
| + // FP + 4 : return address.
|
| + // FP + 8 : PC marker, for easy identification of RawInstruction obj.
|
| + // FP + 12: last argument (arg n-1).
|
| + // SP + 0 : saved PP.
|
| + // SP + 16 + 4*(n-1) : first argument (arg 0).
|
| + // S5 : ic-data.
|
| + // S4 : arguments descriptor array.
|
| + __ BranchLink(&StubCode::CallNoSuchMethodFunctionLabel());
|
| + if (is_optimizing()) {
|
| + stackmap_table_builder_->AddEntry(assembler()->CodeSize(),
|
| + empty_stack_bitmap,
|
| + 0); // No registers.
|
| + }
|
| + // The noSuchMethod call may return.
|
| + __ LeaveDartFrame();
|
| + __ Ret();
|
| + } else {
|
| + __ Stop("Wrong number of arguments");
|
| + }
|
| + __ Bind(&correct_num_arguments);
|
| + }
|
| + // The arguments descriptor is never saved in the absence of optional
|
| + // parameters, since any argument definition test would always yield true.
|
| + ASSERT(saved_args_desc_var == NULL);
|
| + } else {
|
| + if (saved_args_desc_var != NULL) {
|
| + __ Comment("Save arguments descriptor");
|
| + const Register kArgumentsDescriptorReg = S4;
|
| + // The saved_args_desc_var is allocated one slot before the first local.
|
| + const intptr_t slot = parsed_function().first_stack_local_index() + 1;
|
| + // If the saved_args_desc_var is captured, it is first moved to the stack
|
| + // and later to the context, once the context is allocated.
|
| + ASSERT(saved_args_desc_var->is_captured() ||
|
| + (saved_args_desc_var->index() == slot));
|
| + __ sw(kArgumentsDescriptorReg, Address(FP, slot * kWordSize));
|
| + }
|
| + CopyParameters();
|
| + }
|
| +
|
| + // In unoptimized code, initialize (non-argument) stack-allocated slots to
|
| + // null. This does not cover the saved_args_desc_var slot.
|
| + if (!is_optimizing() && (num_locals > 0)) {
|
| + __ Comment("Initialize spill slots");
|
| + const intptr_t slot_base = parsed_function().first_stack_local_index();
|
| + __ LoadImmediate(T0, reinterpret_cast<intptr_t>(Object::null()));
|
| + for (intptr_t i = 0; i < num_locals; ++i) {
|
| + // Subtract index i (locals lie at lower addresses than FP).
|
| + __ sw(T0, Address(FP, (slot_base - i) * kWordSize));
|
| + }
|
| + }
|
| +
|
| + if (FLAG_print_scopes) {
|
| + // Print the function scope (again) after generating the prologue in order
|
| + // to see annotations such as allocation indices of locals.
|
| + if (FLAG_print_ast) {
|
| + // Second printing.
|
| + OS::Print("Annotated ");
|
| + }
|
| + AstPrinter::PrintFunctionScope(parsed_function());
|
| + }
|
| +
|
| + VisitBlocks();
|
| +
|
| + __ break_(0);
|
| + GenerateDeferredCode();
|
| + // Emit function patching code. This will be swapped with the code at the
|
| + // entry point.
|
| + AddCurrentDescriptor(PcDescriptors::kPatchCode,
|
| + Isolate::kNoDeoptId,
|
| + 0); // No token position.
|
| + __ Branch(&StubCode::FixCallersTargetLabel());
|
| + AddCurrentDescriptor(PcDescriptors::kLazyDeoptJump,
|
| + Isolate::kNoDeoptId,
|
| + 0); // No token position.
|
| + __ Branch(&StubCode::DeoptimizeLazyLabel());
|
| }
|
|
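The argument-count check in CompileGraph above compares the descriptor's count
field against Smi::RawValue(num_fixed_params) rather than the raw integer,
because the ArgumentsDescriptor stores counts as tagged Smis: on a 32-bit
target a Smi is the value shifted left by one with a zero tag bit. A sketch of
that tagging assumption (illustrative, not VM code):

    // Smi tagging as assumed by the immediate in the count check.
    #include <cassert>
    #include <cstdint>

    const intptr_t kSmiTagSize = 1;  // low bit 0 marks a Smi

    intptr_t SmiRawValue(intptr_t value) { return value << kSmiTagSize; }

    int main() {
      const intptr_t num_fixed_params = 3;
      const intptr_t descriptor_count = SmiRawValue(3);  // what T0 holds
      // The emitted compare: bne(T0, T1, &wrong_num_arguments).
      assert(descriptor_count == SmiRawValue(num_fixed_params));
      assert(descriptor_count != num_fixed_params);  // raw compare would fail
      return 0;
    }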
|
|
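The spill-slot initialization loop near the end of CompileGraph addresses
locals FP-relative at (slot_base - i) words, so successive locals move to
lower addresses. A sketch of that addressing, assuming a hypothetical
first_stack_local_index() of -2 (the real index depends on the frame layout):

    // FP-relative spill-slot addressing; constants are illustrative.
    #include <cassert>
    #include <cstdint>

    const intptr_t kWordSize = 4;  // 32-bit MIPS

    intptr_t SpillSlotAddress(intptr_t fp, intptr_t slot_base, intptr_t i) {
      return fp + (slot_base - i) * kWordSize;
    }

    int main() {
      const intptr_t fp = 0x7fff0000;
      assert(SpillSlotAddress(fp, -2, 0) == fp - 8);   // first local
      assert(SpillSlotAddress(fp, -2, 1) == fp - 12);  // next, lower address
      return 0;
    }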
|
| @@ -183,7 +421,7 @@
|
| intptr_t deopt_id,
|
| const RuntimeEntry& entry,
|
| LocationSummary* locs) {
|
| - UNIMPLEMENTED();
|
| + __ Unimplemented("call runtime");
|
| }
|
|
|
|
|
| @@ -252,12 +490,51 @@
|
|
|
|
|
| void FlowGraphCompiler::SaveLiveRegisters(LocationSummary* locs) {
|
| - UNIMPLEMENTED();
|
| + // TODO(vegorov): consider saving only caller-saved (volatile) registers.
|
| + const intptr_t fpu_registers = locs->live_registers()->fpu_registers();
|
| + if (fpu_registers > 0) {
|
| + UNIMPLEMENTED();
|
| + }
|
| +
|
| + // Store general-purpose registers with the lowest register number at the
|
| + // lowest address.
|
| + const intptr_t cpu_registers = locs->live_registers()->cpu_registers();
|
| + ASSERT((cpu_registers & ~kAllCpuRegistersList) == 0);
|
| + const int register_count = Utils::CountOneBits(cpu_registers);
|
| + int registers_pushed = 0;
|
| +
|
| + __ addiu(SP, SP, Immediate(-register_count * kWordSize));
|
| + for (int i = 0; i < kNumberOfCpuRegisters; i++) {
|
| + Register r = static_cast<Register>(i);
|
| + if (locs->live_registers()->ContainsRegister(r)) {
|
| + __ sw(r, Address(SP, registers_pushed * kWordSize));
|
| + registers_pushed++;
|
| + }
|
| + }
|
| }
|
|
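SaveLiveRegisters sizes the spill area with a population count of the
live-register mask, then stores in ascending register-number order, so the
lowest-numbered live register lands at the lowest address. A sketch of that
layout (CountOneBits is modeled here with a compiler builtin; the harness is
hypothetical, not VM code):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    int CountOneBits(uint32_t mask) { return __builtin_popcount(mask); }

    // Returns slots[i] = register number stored at SP + i * kWordSize.
    std::vector<int> SaveLayout(uint32_t live_mask, int num_registers) {
      std::vector<int> slots;
      for (int r = 0; r < num_registers; ++r) {
        if (live_mask & (1u << r)) slots.push_back(r);  // ascending order
      }
      // SP was dropped by exactly this many words before the stores.
      assert(static_cast<int>(slots.size()) == CountOneBits(live_mask));
      return slots;
    }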
|
|
|
| void FlowGraphCompiler::RestoreLiveRegisters(LocationSummary* locs) {
|
| - UNIMPLEMENTED();
|
| + // General-purpose registers have the lowest register number at the
|
| + // lowest address.
|
| + const intptr_t cpu_registers = locs->live_registers()->cpu_registers();
|
| + ASSERT((cpu_registers & ~kAllCpuRegistersList) == 0);
|
| + const int register_count = Utils::CountOneBits(cpu_registers);
|
| + int registers_popped = 0;
|
| +
|
| + for (int i = 0; i < kNumberOfCpuRegisters; i++) {
|
| + Register r = static_cast<Register>(i);
|
| + if (locs->live_registers()->ContainsRegister(r)) {
|
| + __ lw(r, Address(SP, registers_popped * kWordSize));
|
| + registers_popped++;
|
| + }
|
| + }
|
| + __ addiu(SP, SP, Immediate(register_count * kWordSize));
|
| +
|
| + const intptr_t fpu_registers = locs->live_registers()->fpu_registers();
|
| + if (fpu_registers > 0) {
|
| + UNIMPLEMENTED();
|
| + }
|
| }
|
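RestoreLiveRegisters walks the same mask in the same ascending order from the
same base address, so the load indices line up with the store indices without
any per-register bookkeeping. A tiny round-trip harness checking that symmetry
(hypothetical values, not VM code):

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint32_t mask = (1u << 2) | (1u << 5) | (1u << 9);
      intptr_t stack[3];  // popcount(mask) words, lowest register first
      intptr_t saved[32], restored[32] = {0};
      for (int r = 0; r < 32; ++r) saved[r] = 100 + r;

      int idx = 0;  // "save": ascending register number, ascending address
      for (int r = 0; r < 32; ++r)
        if (mask & (1u << r)) stack[idx++] = saved[r];

      idx = 0;      // "restore": identical iteration order
      for (int r = 0; r < 32; ++r)
        if (mask & (1u << r)) restored[r] = stack[idx++];

      for (int r = 0; r < 32; ++r)
        if (mask & (1u << r)) assert(restored[r] == saved[r]);
      return 0;
    }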
|
|
|
|
|
|