OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. |
6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
7 | 7 |
8 #include "vm/flow_graph_compiler.h" | 8 #include "vm/flow_graph_compiler.h" |
9 | 9 |
10 #include "vm/ast_printer.h" | 10 #include "vm/ast_printer.h" |
(...skipping 209 matching lines...)
220 Label* is_instance_lbl, | 220 Label* is_instance_lbl, |
221 Label* is_not_instance_lbl) { | 221 Label* is_not_instance_lbl) { |
222 __ Comment("CallSubtypeTestStub"); | 222 __ Comment("CallSubtypeTestStub"); |
223 ASSERT(instance_reg == A0); | 223 ASSERT(instance_reg == A0); |
224 ASSERT(temp_reg == kNoRegister); // Unused on MIPS. | 224 ASSERT(temp_reg == kNoRegister); // Unused on MIPS. |
225 const SubtypeTestCache& type_test_cache = | 225 const SubtypeTestCache& type_test_cache = |
226 SubtypeTestCache::ZoneHandle(zone(), SubtypeTestCache::New()); | 226 SubtypeTestCache::ZoneHandle(zone(), SubtypeTestCache::New()); |
227 __ LoadUniqueObject(A2, type_test_cache); | 227 __ LoadUniqueObject(A2, type_test_cache); |
228 if (test_kind == kTestTypeOneArg) { | 228 if (test_kind == kTestTypeOneArg) { |
229 ASSERT(type_arguments_reg == kNoRegister); | 229 ASSERT(type_arguments_reg == kNoRegister); |
230 __ LoadImmediate(A1, reinterpret_cast<int32_t>(Object::null())); | 230 __ LoadObject(A1, Object::null_object()); |
231 __ BranchLink(*StubCode::Subtype1TestCache_entry()); | 231 __ BranchLink(*StubCode::Subtype1TestCache_entry()); |
232 } else if (test_kind == kTestTypeTwoArgs) { | 232 } else if (test_kind == kTestTypeTwoArgs) { |
233 ASSERT(type_arguments_reg == kNoRegister); | 233 ASSERT(type_arguments_reg == kNoRegister); |
234 __ LoadImmediate(A1, reinterpret_cast<int32_t>(Object::null())); | 234 __ LoadObject(A1, Object::null_object()); |
235 __ BranchLink(*StubCode::Subtype2TestCache_entry()); | 235 __ BranchLink(*StubCode::Subtype2TestCache_entry()); |
236 } else if (test_kind == kTestTypeThreeArgs) { | 236 } else if (test_kind == kTestTypeThreeArgs) { |
237 ASSERT(type_arguments_reg == A1); | 237 ASSERT(type_arguments_reg == A1); |
238 __ BranchLink(*StubCode::Subtype3TestCache_entry()); | 238 __ BranchLink(*StubCode::Subtype3TestCache_entry()); |
239 } else { | 239 } else { |
240 UNREACHABLE(); | 240 UNREACHABLE(); |
241 } | 241 } |
242 // Result is in V0: null -> not found, otherwise Bool::True or Bool::False. | 242 // Result is in V0: null -> not found, otherwise Bool::True or Bool::False. |
243 GenerateBoolToJump(V0, is_instance_lbl, is_not_instance_lbl); | 243 GenerateBoolToJump(V0, is_instance_lbl, is_not_instance_lbl); |
244 return type_test_cache.raw(); | 244 return type_test_cache.raw(); |
(...skipping 193 matching lines...)
438 Label* is_not_instance_lbl) { | 438 Label* is_not_instance_lbl) { |
439 __ Comment("UninstantiatedTypeTest"); | 439 __ Comment("UninstantiatedTypeTest"); |
440 ASSERT(!type.IsInstantiated()); | 440 ASSERT(!type.IsInstantiated()); |
441 // Skip check if destination is a dynamic type. | 441 // Skip check if destination is a dynamic type. |
442 if (type.IsTypeParameter()) { | 442 if (type.IsTypeParameter()) { |
443 const TypeParameter& type_param = TypeParameter::Cast(type); | 443 const TypeParameter& type_param = TypeParameter::Cast(type); |
444 // Load instantiator (or null) and instantiator type arguments on stack. | 444 // Load instantiator (or null) and instantiator type arguments on stack. |
445 __ lw(A1, Address(SP, 0)); // Get instantiator type arguments. | 445 __ lw(A1, Address(SP, 0)); // Get instantiator type arguments. |
446 // A1: instantiator type arguments. | 446 // A1: instantiator type arguments. |
447 // Check if type arguments are null, i.e. equivalent to vector of dynamic. | 447 // Check if type arguments are null, i.e. equivalent to vector of dynamic. |
448 __ LoadImmediate(T7, reinterpret_cast<int32_t>(Object::null())); | 448 __ LoadObject(T7, Object::null_object()); |
449 __ beq(A1, T7, is_instance_lbl); | 449 __ beq(A1, T7, is_instance_lbl); |
450 __ lw(T2, | 450 __ lw(T2, |
451 FieldAddress(A1, TypeArguments::type_at_offset(type_param.index()))); | 451 FieldAddress(A1, TypeArguments::type_at_offset(type_param.index()))); |
452 // T2: concrete type of type. | 452 // T2: concrete type of type. |
453 // Check if type argument is dynamic. | 453 // Check if type argument is dynamic. |
454 __ BranchEqual(T2, | 454 __ BranchEqual(T2, |
455 Type::ZoneHandle(zone(), Type::DynamicType()), is_instance_lbl); | 455 Type::ZoneHandle(zone(), Type::DynamicType()), is_instance_lbl); |
456 __ BranchEqual(T2, | 456 __ BranchEqual(T2, |
457 Type::ZoneHandle(zone(), Type::ObjectType()), is_instance_lbl); | 457 Type::ZoneHandle(zone(), Type::ObjectType()), is_instance_lbl); |
458 | 458 |
(...skipping 488 matching lines...)
947 __ sll(T2, T2, 1); // T2 is a Smi. | 947 __ sll(T2, T2, 1); // T2 is a Smi. |
948 | 948 |
949 __ Comment("Null arguments loop"); | 949 __ Comment("Null arguments loop"); |
950 Label null_args_loop, null_args_loop_exit; | 950 Label null_args_loop, null_args_loop_exit; |
951 __ blez(T2, &null_args_loop_exit); | 951 __ blez(T2, &null_args_loop_exit); |
952 __ delay_slot()->addiu(T1, FP, | 952 __ delay_slot()->addiu(T1, FP, |
953 Immediate((kParamEndSlotFromFp + 1) * kWordSize)); | 953 Immediate((kParamEndSlotFromFp + 1) * kWordSize)); |
954 __ Bind(&null_args_loop); | 954 __ Bind(&null_args_loop); |
955 __ addiu(T2, T2, Immediate(-kWordSize)); | 955 __ addiu(T2, T2, Immediate(-kWordSize)); |
956 __ addu(T3, T1, T2); | 956 __ addu(T3, T1, T2); |
957 __ LoadImmediate(T5, reinterpret_cast<int32_t>(Object::null())); | 957 __ LoadObject(T5, Object::null_object()); |
958 __ bgtz(T2, &null_args_loop); | 958 __ bgtz(T2, &null_args_loop); |
959 __ delay_slot()->sw(T5, Address(T3)); | 959 __ delay_slot()->sw(T5, Address(T3)); |
960 __ Bind(&null_args_loop_exit); | 960 __ Bind(&null_args_loop_exit); |
961 } | 961 } |
962 | 962 |
963 | 963 |
964 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { | 964 void FlowGraphCompiler::GenerateInlinedGetter(intptr_t offset) { |
965 // RA: return address. | 965 // RA: return address. |
966 // SP: receiver. | 966 // SP: receiver. |
967 // Sequence node has one return node, its input is load field node. | 967 // Sequence node has one return node, its input is load field node. |
968 __ Comment("Inlined Getter"); | 968 __ Comment("Inlined Getter"); |
969 __ lw(V0, Address(SP, 0 * kWordSize)); | 969 __ lw(V0, Address(SP, 0 * kWordSize)); |
970 __ lw(V0, Address(V0, offset - kHeapObjectTag)); | 970 __ lw(V0, Address(V0, offset - kHeapObjectTag)); |
971 __ Ret(); | 971 __ Ret(); |
972 } | 972 } |
973 | 973 |
974 | 974 |
975 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { | 975 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { |
976 // RA: return address. | 976 // RA: return address. |
977 // SP+1: receiver. | 977 // SP+1: receiver. |
978 // SP+0: value. | 978 // SP+0: value. |
979 // Sequence node has one store node and one return NULL node. | 979 // Sequence node has one store node and one return NULL node. |
980 __ Comment("Inlined Setter"); | 980 __ Comment("Inlined Setter"); |
981 __ lw(T0, Address(SP, 1 * kWordSize)); // Receiver. | 981 __ lw(T0, Address(SP, 1 * kWordSize)); // Receiver. |
982 __ lw(T1, Address(SP, 0 * kWordSize)); // Value. | 982 __ lw(T1, Address(SP, 0 * kWordSize)); // Value. |
983 __ StoreIntoObjectOffset(T0, offset, T1); | 983 __ StoreIntoObjectOffset(T0, offset, T1); |
984 __ LoadImmediate(V0, reinterpret_cast<int32_t>(Object::null())); | 984 __ LoadObject(V0, Object::null_object()); |
985 __ Ret(); | 985 __ Ret(); |
986 } | 986 } |
987 | 987 |
988 | 988 |
989 void FlowGraphCompiler::EmitFrameEntry() { | 989 void FlowGraphCompiler::EmitFrameEntry() { |
990 const Function& function = parsed_function().function(); | 990 const Function& function = parsed_function().function(); |
991 if (CanOptimizeFunction() && | 991 if (CanOptimizeFunction() && |
992 function.IsOptimizable() && | 992 function.IsOptimizable() && |
993 (!is_optimizing() || may_reoptimize())) { | 993 (!is_optimizing() || may_reoptimize())) { |
994 const Register function_reg = T0; | 994 const Register function_reg = T0; |
(...skipping 119 matching lines...)
1114 | 1114 |
1115 // In unoptimized code, initialize (non-argument) stack allocated slots to | 1115 // In unoptimized code, initialize (non-argument) stack allocated slots to |
1116 // null. | 1116 // null. |
1117 if (!is_optimizing()) { | 1117 if (!is_optimizing()) { |
1118 ASSERT(num_locals > 0); // There is always at least context_var. | 1118 ASSERT(num_locals > 0); // There is always at least context_var. |
1119 __ Comment("Initialize spill slots"); | 1119 __ Comment("Initialize spill slots"); |
1120 const intptr_t slot_base = parsed_function().first_stack_local_index(); | 1120 const intptr_t slot_base = parsed_function().first_stack_local_index(); |
1121 const intptr_t context_index = | 1121 const intptr_t context_index = |
1122 parsed_function().current_context_var()->index(); | 1122 parsed_function().current_context_var()->index(); |
1123 if (num_locals > 1) { | 1123 if (num_locals > 1) { |
1124 __ LoadImmediate(V0, reinterpret_cast<int32_t>(Object::null())); | 1124 __ LoadObject(V0, Object::null_object()); |
1125 } | 1125 } |
1126 for (intptr_t i = 0; i < num_locals; ++i) { | 1126 for (intptr_t i = 0; i < num_locals; ++i) { |
1127 // Subtract index i (locals lie at lower addresses than FP). | 1127 // Subtract index i (locals lie at lower addresses than FP). |
1128 if (((slot_base - i) == context_index)) { | 1128 if (((slot_base - i) == context_index)) { |
1129 if (function.IsClosureFunction()) { | 1129 if (function.IsClosureFunction()) { |
1130 __ sw(CTX, Address(FP, (slot_base - i) * kWordSize)); | 1130 __ sw(CTX, Address(FP, (slot_base - i) * kWordSize)); |
1131 } else { | 1131 } else { |
1132 const Context& empty_context = Context::ZoneHandle( | 1132 const Context& empty_context = Context::ZoneHandle( |
1133 zone(), isolate()->object_store()->empty_context()); | 1133 zone(), isolate()->object_store()->empty_context()); |
1134 __ LoadObject(V1, empty_context); | 1134 __ LoadObject(V1, empty_context); |
(...skipping 724 matching lines...)
1859 __ AddImmediate(SP, kDoubleSize); | 1859 __ AddImmediate(SP, kDoubleSize); |
1860 } | 1860 } |
1861 | 1861 |
1862 | 1862 |
1863 #undef __ | 1863 #undef __ |
1864 | 1864 |
1865 | 1865 |
1866 } // namespace dart | 1866 } // namespace dart |
1867 | 1867 |
1868 #endif // defined TARGET_ARCH_MIPS | 1868 #endif // defined TARGET_ARCH_MIPS |