| Index: runtime/vm/flow_graph_compiler_arm64.cc
|
| ===================================================================
|
| --- runtime/vm/flow_graph_compiler_arm64.cc (revision 0)
|
| +++ runtime/vm/flow_graph_compiler_arm64.cc (revision 0)
|
| @@ -0,0 +1,429 @@
|
| +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
|
| +// for details. All rights reserved. Use of this source code is governed by a
|
| +// BSD-style license that can be found in the LICENSE file.
|
| +
|
| +#include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64.
|
| +#if defined(TARGET_ARCH_ARM64)
|
| +
|
| +#include "vm/flow_graph_compiler.h"
|
| +
|
| +#include "vm/ast_printer.h"
|
| +#include "vm/compiler.h"
|
| +#include "vm/cpu.h"
|
| +#include "vm/dart_entry.h"
|
| +#include "vm/deopt_instructions.h"
|
| +#include "vm/il_printer.h"
|
| +#include "vm/locations.h"
|
| +#include "vm/object_store.h"
|
| +#include "vm/parser.h"
|
| +#include "vm/stack_frame.h"
|
| +#include "vm/stub_code.h"
|
| +#include "vm/symbols.h"
|
| +
|
| +namespace dart {
|
| +
|
| +FlowGraphCompiler::~FlowGraphCompiler() {
|
| + // BlockInfos are zone-allocated, so their destructors are not called.
|
| + // Verify the labels explicitly here.
|
| + for (int i = 0; i < block_info_.length(); ++i) {
|
| + ASSERT(!block_info_[i]->jump_label()->IsLinked());
|
| + }
|
| +}
|
| +
|
| +
|
| +bool FlowGraphCompiler::SupportsUnboxedMints() {
|
| + return false;
|
| +}
|
| +
|
| +
|
| +bool FlowGraphCompiler::SupportsUnboxedSimd128() {
|
| + return false;
|
| +}
|
| +
|
| +
|
| +bool FlowGraphCompiler::SupportsSinCos() {
|
| + return false;
|
| +}
|
| +
|
| +
|
| +RawDeoptInfo* CompilerDeoptInfo::CreateDeoptInfo(FlowGraphCompiler* compiler,
|
| + DeoptInfoBuilder* builder,
|
| + const Array& deopt_table) {
|
| + UNIMPLEMENTED();
|
| + return NULL;
|
| +}
|
| +
|
| +
|
| +void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler,
|
| + intptr_t stub_ix) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +#define __ assembler()->
|
| +
|
| +
|
| +// Fall through if bool_register contains null.
|
| +void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
|
| + Label* is_true,
|
| + Label* is_false) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub(
|
| + TypeTestStubKind test_kind,
|
| + Register instance_reg,
|
| + Register type_arguments_reg,
|
| + Register temp_reg,
|
| + Label* is_instance_lbl,
|
| + Label* is_not_instance_lbl) {
|
| + UNIMPLEMENTED();
|
| + return NULL;
|
| +}
|
| +
|
| +
|
| +RawSubtypeTestCache*
|
| +FlowGraphCompiler::GenerateInstantiatedTypeWithArgumentsTest(
|
| + intptr_t token_pos,
|
| + const AbstractType& type,
|
| + Label* is_instance_lbl,
|
| + Label* is_not_instance_lbl) {
|
| + UNIMPLEMENTED();
|
| + return NULL;
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::CheckClassIds(Register class_id_reg,
|
| + const GrowableArray<intptr_t>& class_ids,
|
| + Label* is_equal_lbl,
|
| + Label* is_not_equal_lbl) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest(
|
| + intptr_t token_pos,
|
| + const AbstractType& type,
|
| + Label* is_instance_lbl,
|
| + Label* is_not_instance_lbl) {
|
| + UNIMPLEMENTED();
|
| + return false;
|
| +}
|
| +
|
| +
|
| +RawSubtypeTestCache* FlowGraphCompiler::GenerateSubtype1TestCacheLookup(
|
| + intptr_t token_pos,
|
| + const Class& type_class,
|
| + Label* is_instance_lbl,
|
| + Label* is_not_instance_lbl) {
|
| + UNIMPLEMENTED();
|
| + return NULL;
|
| +}
|
| +
|
| +
|
| +RawSubtypeTestCache* FlowGraphCompiler::GenerateUninstantiatedTypeTest(
|
| + intptr_t token_pos,
|
| + const AbstractType& type,
|
| + Label* is_instance_lbl,
|
| + Label* is_not_instance_lbl) {
|
| + UNIMPLEMENTED();
|
| + return NULL;
|
| +}
|
| +
|
| +
|
| +RawSubtypeTestCache* FlowGraphCompiler::GenerateInlineInstanceof(
|
| + intptr_t token_pos,
|
| + const AbstractType& type,
|
| + Label* is_instance_lbl,
|
| + Label* is_not_instance_lbl) {
|
| + UNIMPLEMENTED();
|
| + return NULL;
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::GenerateInstanceOf(intptr_t token_pos,
|
| + intptr_t deopt_id,
|
| + const AbstractType& type,
|
| + bool negate_result,
|
| + LocationSummary* locs) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::GenerateAssertAssignable(intptr_t token_pos,
|
| + intptr_t deopt_id,
|
| + const AbstractType& dst_type,
|
| + const String& dst_name,
|
| + LocationSummary* locs) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::EmitInstructionEpilogue(Instruction* instr) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::CopyParameters() {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::EmitFrameEntry() {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::CompileGraph() {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::GenerateCall(intptr_t token_pos,
|
| + const ExternalLabel* label,
|
| + PcDescriptors::Kind kind,
|
| + LocationSummary* locs) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
|
| + intptr_t token_pos,
|
| + const ExternalLabel* label,
|
| + PcDescriptors::Kind kind,
|
| + LocationSummary* locs) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::GenerateRuntimeCall(intptr_t token_pos,
|
| + intptr_t deopt_id,
|
| + const RuntimeEntry& entry,
|
| + intptr_t argument_count,
|
| + LocationSummary* locs) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::EmitEdgeCounter() {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::EmitOptimizedInstanceCall(
|
| + ExternalLabel* target_label,
|
| + const ICData& ic_data,
|
| + intptr_t argument_count,
|
| + intptr_t deopt_id,
|
| + intptr_t token_pos,
|
| + LocationSummary* locs) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::EmitInstanceCall(ExternalLabel* target_label,
|
| + const ICData& ic_data,
|
| + intptr_t argument_count,
|
| + intptr_t deopt_id,
|
| + intptr_t token_pos,
|
| + LocationSummary* locs) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::EmitMegamorphicInstanceCall(
|
| + const ICData& ic_data,
|
| + intptr_t argument_count,
|
| + intptr_t deopt_id,
|
| + intptr_t token_pos,
|
| + LocationSummary* locs) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::EmitUnoptimizedStaticCall(
|
| + const Function& target_function,
|
| + const Array& arguments_descriptor,
|
| + intptr_t argument_count,
|
| + intptr_t deopt_id,
|
| + intptr_t token_pos,
|
| + LocationSummary* locs) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::EmitOptimizedStaticCall(
|
| + const Function& function,
|
| + const Array& arguments_descriptor,
|
| + intptr_t argument_count,
|
| + intptr_t deopt_id,
|
| + intptr_t token_pos,
|
| + LocationSummary* locs) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::EmitEqualityRegConstCompare(Register reg,
|
| + const Object& obj,
|
| + bool needs_number_check,
|
| + intptr_t token_pos) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::EmitEqualityRegRegCompare(Register left,
|
| + Register right,
|
| + bool needs_number_check,
|
| + intptr_t token_pos) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +// This function must be in sync with FlowGraphCompiler::RecordSafepoint and
|
| +// FlowGraphCompiler::SlowPathEnvironmentFor.
|
| +void FlowGraphCompiler::SaveLiveRegisters(LocationSummary* locs) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::RestoreLiveRegisters(LocationSummary* locs) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
|
| + Register class_id_reg,
|
| + intptr_t argument_count,
|
| + const Array& argument_names,
|
| + Label* deopt,
|
| + intptr_t deopt_id,
|
| + intptr_t token_index,
|
| + LocationSummary* locs) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +// Do not implement or use this function.
|
| +FieldAddress FlowGraphCompiler::ElementAddressForIntIndex(intptr_t cid,
|
| + intptr_t index_scale,
|
| + Register array,
|
| + intptr_t index) {
|
| + UNREACHABLE();
|
| + return FieldAddress(array, index);
|
| +}
|
| +
|
| +
|
| +// Do not implement or use this function.
|
| +FieldAddress FlowGraphCompiler::ElementAddressForRegIndex(intptr_t cid,
|
| + intptr_t index_scale,
|
| + Register array,
|
| + Register index) {
|
| + UNREACHABLE(); // No register indexed with offset addressing mode on ARM64.
|
| + return FieldAddress(array, index);
|
| +}
|
| +
|
| +
|
| +Address FlowGraphCompiler::ExternalElementAddressForIntIndex(
|
| + intptr_t index_scale,
|
| + Register array,
|
| + intptr_t index) {
|
| + UNREACHABLE();
|
| + return FieldAddress(array, index);
|
| +}
|
| +
|
| +
|
| +Address FlowGraphCompiler::ExternalElementAddressForRegIndex(
|
| + intptr_t index_scale,
|
| + Register array,
|
| + Register index) {
|
| + UNREACHABLE();
|
| + return FieldAddress(array, index);
|
| +}
|
| +
|
| +
|
| +#undef __
|
| +#define __ compiler_->assembler()->
|
| +
|
| +
|
| +void ParallelMoveResolver::EmitMove(int index) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void ParallelMoveResolver::EmitSwap(int index) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void ParallelMoveResolver::MoveMemoryToMemory(const Address& dst,
|
| + const Address& src) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void ParallelMoveResolver::StoreObject(const Address& dst, const Object& obj) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +// Do not call or implement this function. Instead, use the form below that
|
| +// uses an offset from the frame pointer instead of an Address.
|
| +void ParallelMoveResolver::Exchange(Register reg, const Address& mem) {
|
| + UNREACHABLE();
|
| +}
|
| +
|
| +
|
| +// Do not call or implement this function. Instead, use the form below that
|
| +// uses offsets from the frame pointer instead of Addresses.
|
| +void ParallelMoveResolver::Exchange(const Address& mem1, const Address& mem2) {
|
| + UNREACHABLE();
|
| +}
|
| +
|
| +
|
| +void ParallelMoveResolver::Exchange(Register reg, intptr_t stack_offset) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void ParallelMoveResolver::Exchange(intptr_t stack_offset1,
|
| + intptr_t stack_offset2) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void ParallelMoveResolver::SpillScratch(Register reg) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void ParallelMoveResolver::RestoreScratch(Register reg) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void ParallelMoveResolver::SpillFpuScratch(FpuRegister reg) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +void ParallelMoveResolver::RestoreFpuScratch(FpuRegister reg) {
|
| + UNIMPLEMENTED();
|
| +}
|
| +
|
| +
|
| +#undef __
|
| +
|
| +} // namespace dart
|
| +
|
| +#endif // defined TARGET_ARCH_ARM64
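The destructor's comment is the whole reason ~FlowGraphCompiler() appears in this otherwise empty skeleton: zone-allocated objects such as BlockInfo are reclaimed wholesale when their zone dies, so their destructors never run, and the "jump label is not linked" invariant has to be asserted by hand. A minimal self-contained illustration of that allocation pattern, using hypothetical Zone/Label/BlockInfo stand-ins rather than the VM's own classes:

#include <cassert>
#include <cstddef>
#include <new>
#include <vector>

// Hypothetical bump-style zone: storage is released in one shot when the zone
// dies, and the destructors of objects placed in it are never invoked.
class Zone {
 public:
  ~Zone() {
    for (char* chunk : chunks_) delete[] chunk;
  }
  void* Allocate(size_t size) {
    chunks_.push_back(new char[size]);
    return chunks_.back();
  }

 private:
  std::vector<char*> chunks_;
};

struct Label {
  bool IsLinked() const { return linked; }
  bool linked = false;
};

struct BlockInfo {
  // Never runs for zone-allocated instances, so it cannot enforce invariants.
  ~BlockInfo() { assert(!label.IsLinked()); }
  Label label;
};

int main() {
  Zone zone;
  BlockInfo* info = new (zone.Allocate(sizeof(BlockInfo))) BlockInfo();
  // ~BlockInfo() will never be called for `info`; the invariant has to be
  // checked explicitly, which is what ~FlowGraphCompiler() does per block.
  assert(!info->label.IsLinked());
  return 0;
}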
|
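The one-line comment on GenerateBoolToJump states its whole contract: fall through when the register holds null, otherwise branch to is_true for the true Bool and to is_false for anything else. A sketch of that control flow, with the caveat that the assembler helpers (CompareObject, conditional b, Bind) and the PP pool-pointer operand are assumptions modeled on the other Dart backends, not the code that eventually landed for ARM64:

// Sketch only; helper names and signatures are assumed, not part of this patch.
void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
                                           Label* is_true,
                                           Label* is_false) {
  Label fall_through;
  __ CompareObject(bool_register, Object::null_object(), PP);
  __ b(&fall_through, EQ);   // Null: fall through.
  __ CompareObject(bool_register, Bool::True(), PP);
  __ b(is_true, EQ);         // The true Bool: jump to is_true.
  __ b(is_false);            // Any other value: jump to is_false.
  __ Bind(&fall_through);
}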
|
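The comment requiring SaveLiveRegisters to stay in sync with RecordSafepoint and SlowPathEnvironmentFor is about ordering: the sequence in which live registers are pushed is also the sequence used to assign them safepoint stack slots, so the GC's stack maps and the deoptimizer read the right values. A standalone illustration of the idea (hypothetical helper, not VM code) is to derive that order in exactly one place:

#include <cstdint>
#include <vector>

// Hypothetical: both the register-saving code and the safepoint/stack-map
// recording code would call this, so the push order and the slot numbering
// can never drift apart.
static std::vector<int> LiveCpuRegisterOrder(uint32_t live_mask) {
  std::vector<int> order;
  for (int reg = 31; reg >= 0; --reg) {  // One fixed iteration direction.
    if ((live_mask >> reg) & 1u) order.push_back(reg);
  }
  return order;
}
// SaveLiveRegisters would push LiveCpuRegisterOrder(mask) front to back, and
// RecordSafepoint would record slot i as holding LiveCpuRegisterOrder(mask)[i].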
|
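The two UNREACHABLE() Exchange overloads exist only to block the Address-based API; the patch keeps the forms that take frame-pointer offsets because a swap through memory needs a scratch register, and an FP-relative offset lets the resolver rebuild the operand after it has chosen (and possibly spilled) that scratch. A hedged sketch of the register/stack-slot case, assuming a ScratchRegisterScope and LoadFromOffset/StoreToOffset helpers like those used by the other backends:

// Sketch only; the scratch-scope and load/store helpers are assumptions.
void ParallelMoveResolver::Exchange(Register reg, intptr_t stack_offset) {
  ScratchRegisterScope tmp(this, reg);             // Pick a scratch != reg.
  __ mov(tmp.reg(), reg);                          // scratch <- reg
  __ LoadFromOffset(reg, FP, stack_offset);        // reg <- [FP + offset]
  __ StoreToOffset(tmp.reg(), FP, stack_offset);   // [FP + offset] <- old reg
}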