Chromium Code Reviews

Side by Side Diff: runtime/vm/flow_graph_compiler_arm.cc

Issue 1332923005: Remove remaining uses of null's absolute address from non-IA32. (Closed)
Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 5 years, 3 months ago
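
The substitution this patch applies throughout the ARM compiler: sites that compared against or loaded null by baking its raw heap address into the code as an immediate now use the assembler's object helpers with Object::null_object(). A condensed before/after sketch, pieced together from the hunks below (the __ macro expands to assembler()->, as defined in the file; the fragment is illustrative only and will not compile outside the VM tree):

    // Before: null's absolute address is embedded as a raw immediate.
    __ CompareImmediate(bool_register,
                        reinterpret_cast<intptr_t>(Object::null()));
    __ LoadImmediate(R1, reinterpret_cast<intptr_t>(Object::null()));

    // After: compare/load via the null object handle, so no absolute
    // address appears in the generated code.
    __ CompareObject(bool_register, Object::null_object());
    __ LoadObject(R1, Object::null_object());

The same substitution recurs in each hunk below: the bool-to-jump helper, the subtype-test stub calls, the inlined type tests, the argument-descriptor checks, the inlined setter, and spill-slot initialization in the frame entry.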
OLD | NEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM.
6 #if defined(TARGET_ARCH_ARM) 6 #if defined(TARGET_ARCH_ARM)
7 7
8 #include "vm/flow_graph_compiler.h" 8 #include "vm/flow_graph_compiler.h"
9 9
10 #include "vm/ast_printer.h" 10 #include "vm/ast_printer.h"
(...skipping 192 matching lines...)
203 203
204 204
205 #define __ assembler()-> 205 #define __ assembler()->
206 206
207 207
208 // Fall through if bool_register contains null. 208 // Fall through if bool_register contains null.
209 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register, 209 void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
210 Label* is_true, 210 Label* is_true,
211 Label* is_false) { 211 Label* is_false) {
212 Label fall_through; 212 Label fall_through;
213 __ CompareImmediate(bool_register, 213 __ CompareObject(bool_register, Object::null_object());
214 reinterpret_cast<intptr_t>(Object::null()));
215 __ b(&fall_through, EQ); 214 __ b(&fall_through, EQ);
216 __ CompareObject(bool_register, Bool::True()); 215 __ CompareObject(bool_register, Bool::True());
217 __ b(is_true, EQ); 216 __ b(is_true, EQ);
218 __ b(is_false); 217 __ b(is_false);
219 __ Bind(&fall_through); 218 __ Bind(&fall_through);
220 } 219 }
221 220
222 221
223 // R0: instance (must be preserved). 222 // R0: instance (must be preserved).
224 // R1: instantiator type arguments (if used). 223 // R1: instantiator type arguments (if used).
225 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub( 224 RawSubtypeTestCache* FlowGraphCompiler::GenerateCallSubtypeTestStub(
226 TypeTestStubKind test_kind, 225 TypeTestStubKind test_kind,
227 Register instance_reg, 226 Register instance_reg,
228 Register type_arguments_reg, 227 Register type_arguments_reg,
229 Register temp_reg, 228 Register temp_reg,
230 Label* is_instance_lbl, 229 Label* is_instance_lbl,
231 Label* is_not_instance_lbl) { 230 Label* is_not_instance_lbl) {
232 ASSERT(instance_reg == R0); 231 ASSERT(instance_reg == R0);
233 ASSERT(temp_reg == kNoRegister); // Unused on ARM. 232 ASSERT(temp_reg == kNoRegister); // Unused on ARM.
234 const SubtypeTestCache& type_test_cache = 233 const SubtypeTestCache& type_test_cache =
235 SubtypeTestCache::ZoneHandle(zone(), SubtypeTestCache::New()); 234 SubtypeTestCache::ZoneHandle(zone(), SubtypeTestCache::New());
236 __ LoadUniqueObject(R2, type_test_cache); 235 __ LoadUniqueObject(R2, type_test_cache);
237 if (test_kind == kTestTypeOneArg) { 236 if (test_kind == kTestTypeOneArg) {
238 ASSERT(type_arguments_reg == kNoRegister); 237 ASSERT(type_arguments_reg == kNoRegister);
239 __ LoadImmediate(R1, reinterpret_cast<intptr_t>(Object::null())); 238 __ LoadObject(R1, Object::null_object());
240 __ BranchLink(*StubCode::Subtype1TestCache_entry()); 239 __ BranchLink(*StubCode::Subtype1TestCache_entry());
241 } else if (test_kind == kTestTypeTwoArgs) { 240 } else if (test_kind == kTestTypeTwoArgs) {
242 ASSERT(type_arguments_reg == kNoRegister); 241 ASSERT(type_arguments_reg == kNoRegister);
243 __ LoadImmediate(R1, reinterpret_cast<intptr_t>(Object::null())); 242 __ LoadObject(R1, Object::null_object());
244 __ BranchLink(*StubCode::Subtype2TestCache_entry()); 243 __ BranchLink(*StubCode::Subtype2TestCache_entry());
245 } else if (test_kind == kTestTypeThreeArgs) { 244 } else if (test_kind == kTestTypeThreeArgs) {
246 ASSERT(type_arguments_reg == R1); 245 ASSERT(type_arguments_reg == R1);
247 __ BranchLink(*StubCode::Subtype3TestCache_entry()); 246 __ BranchLink(*StubCode::Subtype3TestCache_entry());
248 } else { 247 } else {
249 UNREACHABLE(); 248 UNREACHABLE();
250 } 249 }
251 // Result is in R1: null -> not found, otherwise Bool::True or Bool::False. 250 // Result is in R1: null -> not found, otherwise Bool::True or Bool::False.
252 GenerateBoolToJump(R1, is_instance_lbl, is_not_instance_lbl); 251 GenerateBoolToJump(R1, is_instance_lbl, is_not_instance_lbl);
253 return type_test_cache.raw(); 252 return type_test_cache.raw();
(...skipping 129 matching lines...)
383 if (type.IsBoolType()) { 382 if (type.IsBoolType()) {
384 __ CompareImmediate(kClassIdReg, kBoolCid); 383 __ CompareImmediate(kClassIdReg, kBoolCid);
385 __ b(is_instance_lbl, EQ); 384 __ b(is_instance_lbl, EQ);
386 __ b(is_not_instance_lbl); 385 __ b(is_not_instance_lbl);
387 return false; 386 return false;
388 } 387 }
389 if (type.IsFunctionType()) { 388 if (type.IsFunctionType()) {
390 // Check if instance is a closure. 389 // Check if instance is a closure.
391 __ LoadClassById(R3, kClassIdReg); 390 __ LoadClassById(R3, kClassIdReg);
392 __ ldr(R3, FieldAddress(R3, Class::signature_function_offset())); 391 __ ldr(R3, FieldAddress(R3, Class::signature_function_offset()));
393 __ CompareImmediate(R3, reinterpret_cast<int32_t>(Object::null())); 392 __ CompareObject(R3, Object::null_object());
394 __ b(is_instance_lbl, NE); 393 __ b(is_instance_lbl, NE);
395 } 394 }
396 // Custom checking for numbers (Smi, Mint, Bigint and Double). 395 // Custom checking for numbers (Smi, Mint, Bigint and Double).
397 // Note that instance is not Smi (checked above). 396 // Note that instance is not Smi (checked above).
398 if (type.IsSubtypeOf(Type::Handle(zone(), Type::Number()), NULL)) { 397 if (type.IsSubtypeOf(Type::Handle(zone(), Type::Number()), NULL)) {
399 GenerateNumberTypeCheck( 398 GenerateNumberTypeCheck(
400 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl); 399 kClassIdReg, type, is_instance_lbl, is_not_instance_lbl);
401 return false; 400 return false;
402 } 401 }
403 if (type.IsStringType()) { 402 if (type.IsStringType()) {
(...skipping 47 matching lines...)
451 Label* is_not_instance_lbl) { 450 Label* is_not_instance_lbl) {
452 __ Comment("UninstantiatedTypeTest"); 451 __ Comment("UninstantiatedTypeTest");
453 ASSERT(!type.IsInstantiated()); 452 ASSERT(!type.IsInstantiated());
454 // Skip check if destination is a dynamic type. 453 // Skip check if destination is a dynamic type.
455 if (type.IsTypeParameter()) { 454 if (type.IsTypeParameter()) {
456 const TypeParameter& type_param = TypeParameter::Cast(type); 455 const TypeParameter& type_param = TypeParameter::Cast(type);
457 // Load instantiator (or null) and instantiator type arguments on stack. 456 // Load instantiator (or null) and instantiator type arguments on stack.
458 __ ldr(R1, Address(SP, 0)); // Get instantiator type arguments. 457 __ ldr(R1, Address(SP, 0)); // Get instantiator type arguments.
459 // R1: instantiator type arguments. 458 // R1: instantiator type arguments.
460 // Check if type arguments are null, i.e. equivalent to vector of dynamic. 459 // Check if type arguments are null, i.e. equivalent to vector of dynamic.
461 __ CompareImmediate(R1, reinterpret_cast<intptr_t>(Object::null())); 460 __ CompareObject(R1, Object::null_object());
462 __ b(is_instance_lbl, EQ); 461 __ b(is_instance_lbl, EQ);
463 __ ldr(R2, 462 __ ldr(R2,
464 FieldAddress(R1, TypeArguments::type_at_offset(type_param.index()))); 463 FieldAddress(R1, TypeArguments::type_at_offset(type_param.index())));
465 // R2: concrete type of type. 464 // R2: concrete type of type.
466 // Check if type argument is dynamic. 465 // Check if type argument is dynamic.
467 __ CompareObject(R2, Type::ZoneHandle(zone(), Type::DynamicType())); 466 __ CompareObject(R2, Type::ZoneHandle(zone(), Type::DynamicType()));
468 __ b(is_instance_lbl, EQ); 467 __ b(is_instance_lbl, EQ);
469 __ CompareObject(R2, Type::ZoneHandle(zone(), Type::ObjectType())); 468 __ CompareObject(R2, Type::ZoneHandle(zone(), Type::ObjectType()));
470 __ b(is_instance_lbl, EQ); 469 __ b(is_instance_lbl, EQ);
471 470
(...skipping 124 matching lines...)
596 // If type is instantiated and non-parameterized, we can inline code 595 // If type is instantiated and non-parameterized, we can inline code
597 // checking whether the tested instance is a Smi. 596 // checking whether the tested instance is a Smi.
598 if (type.IsInstantiated()) { 597 if (type.IsInstantiated()) {
599 // A null object is only an instance of Object and dynamic, which has 598 // A null object is only an instance of Object and dynamic, which has
600 // already been checked above (if the type is instantiated). So we can 599 // already been checked above (if the type is instantiated). So we can
601 // return false here if the instance is null (and if the type is 600 // return false here if the instance is null (and if the type is
602 // instantiated). 601 // instantiated).
603 // We can only inline this null check if the type is instantiated at compile 602 // We can only inline this null check if the type is instantiated at compile
604 // time, since an uninstantiated type at compile time could be Object or 603 // time, since an uninstantiated type at compile time could be Object or
605 // dynamic at run time. 604 // dynamic at run time.
606 __ CompareImmediate(R0, reinterpret_cast<int32_t>(Object::null())); 605 __ CompareObject(R0, Object::null_object());
607 __ b(type.IsNullType() ? &is_instance : &is_not_instance, EQ); 606 __ b(type.IsNullType() ? &is_instance : &is_not_instance, EQ);
608 } 607 }
609 608
610 // Generate inline instanceof test. 609 // Generate inline instanceof test.
611 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); 610 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone());
612 test_cache = GenerateInlineInstanceof(token_pos, type, 611 test_cache = GenerateInlineInstanceof(token_pos, type,
613 &is_instance, &is_not_instance); 612 &is_instance, &is_not_instance);
614 613
615 // test_cache is null if there is no fall-through. 614 // test_cache is null if there is no fall-through.
616 Label done; 615 Label done;
(...skipping 55 matching lines...)
672 ASSERT(token_pos >= 0); 671 ASSERT(token_pos >= 0);
673 ASSERT(!dst_type.IsNull()); 672 ASSERT(!dst_type.IsNull());
674 ASSERT(dst_type.IsFinalized()); 673 ASSERT(dst_type.IsFinalized());
675 // Assignable check is skipped in FlowGraphBuilder, not here. 674 // Assignable check is skipped in FlowGraphBuilder, not here.
676 ASSERT(dst_type.IsMalformedOrMalbounded() || 675 ASSERT(dst_type.IsMalformedOrMalbounded() ||
677 (!dst_type.IsDynamicType() && !dst_type.IsObjectType())); 676 (!dst_type.IsDynamicType() && !dst_type.IsObjectType()));
678 // Preserve instantiator (R2) and its type arguments (R1). 677 // Preserve instantiator (R2) and its type arguments (R1).
679 __ PushList((1 << R1) | (1 << R2)); 678 __ PushList((1 << R1) | (1 << R2));
680 // A null object is always assignable and is returned as result. 679 // A null object is always assignable and is returned as result.
681 Label is_assignable, runtime_call; 680 Label is_assignable, runtime_call;
682 __ CompareImmediate(R0, reinterpret_cast<int32_t>(Object::null())); 681 __ CompareObject(R0, Object::null_object());
683 __ b(&is_assignable, EQ); 682 __ b(&is_assignable, EQ);
684 683
685 // Generate throw new TypeError() if the type is malformed or malbounded. 684 // Generate throw new TypeError() if the type is malformed or malbounded.
686 if (dst_type.IsMalformedOrMalbounded()) { 685 if (dst_type.IsMalformedOrMalbounded()) {
687 __ PushObject(Object::null_object()); // Make room for the result. 686 __ PushObject(Object::null_object()); // Make room for the result.
688 __ Push(R0); // Push the source object. 687 __ Push(R0); // Push the source object.
689 __ PushObject(dst_name); // Push the name of the destination. 688 __ PushObject(dst_name); // Push the name of the destination.
690 __ PushObject(dst_type); // Push the type of the destination. 689 __ PushObject(dst_type); // Push the type of the destination.
691 GenerateRuntimeCall(token_pos, 690 GenerateRuntimeCall(token_pos,
692 deopt_id, 691 deopt_id,
(...skipping 177 matching lines...)
870 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos; 869 const intptr_t computed_param_pos = kFirstLocalSlotFromFp - param_pos;
871 const Address param_addr(FP, computed_param_pos * kWordSize); 870 const Address param_addr(FP, computed_param_pos * kWordSize);
872 __ str(R5, param_addr); 871 __ str(R5, param_addr);
873 } 872 }
874 delete[] opt_param; 873 delete[] opt_param;
875 delete[] opt_param_position; 874 delete[] opt_param_position;
876 if (check_correct_named_args) { 875 if (check_correct_named_args) {
877 // Check that R6 now points to the null terminator in the arguments 876 // Check that R6 now points to the null terminator in the arguments
878 // descriptor. 877 // descriptor.
879 __ ldr(R5, Address(R6, 0)); 878 __ ldr(R5, Address(R6, 0));
880 __ CompareImmediate(R5, reinterpret_cast<int32_t>(Object::null())); 879 __ CompareObject(R5, Object::null_object());
881 __ b(&all_arguments_processed, EQ); 880 __ b(&all_arguments_processed, EQ);
882 } 881 }
883 } else { 882 } else {
884 ASSERT(num_opt_pos_params > 0); 883 ASSERT(num_opt_pos_params > 0);
885 __ ldr(R9, 884 __ ldr(R9,
886 FieldAddress(R4, ArgumentsDescriptor::positional_count_offset())); 885 FieldAddress(R4, ArgumentsDescriptor::positional_count_offset()));
887 __ SmiUntag(R9); 886 __ SmiUntag(R9);
888 for (int i = 0; i < num_opt_pos_params; i++) { 887 for (int i = 0; i < num_opt_pos_params; i++) {
889 Label next_parameter; 888 Label next_parameter;
890 // Handle this optional positional parameter only if k or fewer positional 889 // Handle this optional positional parameter only if k or fewer positional
(...skipping 41 matching lines...)
932 // checked, otherwise noSuchMethod would not see their original values. 931 // checked, otherwise noSuchMethod would not see their original values.
933 // This step can be skipped in case we decide that formal parameters are 932 // This step can be skipped in case we decide that formal parameters are
934 // implicitly final, since garbage collecting the unmodified value is not 933 // implicitly final, since garbage collecting the unmodified value is not
935 // an issue anymore. 934 // an issue anymore.
936 935
937 // R4 : arguments descriptor array. 936 // R4 : arguments descriptor array.
938 __ ldr(R9, FieldAddress(R4, ArgumentsDescriptor::count_offset())); 937 __ ldr(R9, FieldAddress(R4, ArgumentsDescriptor::count_offset()));
939 __ SmiUntag(R9); 938 __ SmiUntag(R9);
940 __ add(R7, FP, Operand((kParamEndSlotFromFp + 1) * kWordSize)); 939 __ add(R7, FP, Operand((kParamEndSlotFromFp + 1) * kWordSize));
941 const Address original_argument_addr(R7, R9, LSL, 2); 940 const Address original_argument_addr(R7, R9, LSL, 2);
942 __ LoadImmediate(IP, reinterpret_cast<intptr_t>(Object::null())); 941 __ LoadObject(IP, Object::null_object());
943 Label null_args_loop, null_args_loop_condition; 942 Label null_args_loop, null_args_loop_condition;
944 __ b(&null_args_loop_condition); 943 __ b(&null_args_loop_condition);
945 __ Bind(&null_args_loop); 944 __ Bind(&null_args_loop);
946 __ str(IP, original_argument_addr); 945 __ str(IP, original_argument_addr);
947 __ Bind(&null_args_loop_condition); 946 __ Bind(&null_args_loop_condition);
948 __ subs(R9, R9, Operand(1)); 947 __ subs(R9, R9, Operand(1));
949 __ b(&null_args_loop, PL); 948 __ b(&null_args_loop, PL);
950 } 949 }
951 950
952 951
(...skipping 10 matching lines...)
963 962
964 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) { 963 void FlowGraphCompiler::GenerateInlinedSetter(intptr_t offset) {
965 // LR: return address. 964 // LR: return address.
966 // SP+1: receiver. 965 // SP+1: receiver.
967 // SP+0: value. 966 // SP+0: value.
968 // Sequence node has one store node and one return NULL node. 967 // Sequence node has one store node and one return NULL node.
969 __ Comment("Inlined Setter"); 968 __ Comment("Inlined Setter");
970 __ ldr(R0, Address(SP, 1 * kWordSize)); // Receiver. 969 __ ldr(R0, Address(SP, 1 * kWordSize)); // Receiver.
971 __ ldr(R1, Address(SP, 0 * kWordSize)); // Value. 970 __ ldr(R1, Address(SP, 0 * kWordSize)); // Value.
972 __ StoreIntoObjectOffset(R0, offset, R1); 971 __ StoreIntoObjectOffset(R0, offset, R1);
973 __ LoadImmediate(R0, reinterpret_cast<intptr_t>(Object::null())); 972 __ LoadObject(R0, Object::null_object());
974 __ Ret(); 973 __ Ret();
975 } 974 }
976 975
977 976
978 void FlowGraphCompiler::EmitFrameEntry() { 977 void FlowGraphCompiler::EmitFrameEntry() {
979 const Function& function = parsed_function().function(); 978 const Function& function = parsed_function().function();
980 if (CanOptimizeFunction() && 979 if (CanOptimizeFunction() &&
981 function.IsOptimizable() && 980 function.IsOptimizable() &&
982 (!is_optimizing() || may_reoptimize())) { 981 (!is_optimizing() || may_reoptimize())) {
983 const Register function_reg = R6; 982 const Register function_reg = R6;
(...skipping 113 matching lines...)
1097 1096
1098 // In unoptimized code, initialize (non-argument) stack allocated slots to 1097 // In unoptimized code, initialize (non-argument) stack allocated slots to
1099 // null. 1098 // null.
1100 if (!is_optimizing()) { 1099 if (!is_optimizing()) {
1101 ASSERT(num_locals > 0); // There is always at least context_var. 1100 ASSERT(num_locals > 0); // There is always at least context_var.
1102 __ Comment("Initialize spill slots"); 1101 __ Comment("Initialize spill slots");
1103 const intptr_t slot_base = parsed_function().first_stack_local_index(); 1102 const intptr_t slot_base = parsed_function().first_stack_local_index();
1104 const intptr_t context_index = 1103 const intptr_t context_index =
1105 parsed_function().current_context_var()->index(); 1104 parsed_function().current_context_var()->index();
1106 if (num_locals > 1) { 1105 if (num_locals > 1) {
1107 __ LoadImmediate(R0, reinterpret_cast<intptr_t>(Object::null())); 1106 __ LoadObject(R0, Object::null_object());
1108 } 1107 }
1109 for (intptr_t i = 0; i < num_locals; ++i) { 1108 for (intptr_t i = 0; i < num_locals; ++i) {
1110 // Subtract index i (locals lie at lower addresses than FP). 1109 // Subtract index i (locals lie at lower addresses than FP).
1111 if (((slot_base - i) == context_index)) { 1110 if (((slot_base - i) == context_index)) {
1112 if (function.IsClosureFunction()) { 1111 if (function.IsClosureFunction()) {
1113 __ StoreToOffset(kWord, CTX, FP, (slot_base - i) * kWordSize); 1112 __ StoreToOffset(kWord, CTX, FP, (slot_base - i) * kWordSize);
1114 } else { 1113 } else {
1115 const Context& empty_context = Context::ZoneHandle( 1114 const Context& empty_context = Context::ZoneHandle(
1116 zone(), isolate()->object_store()->empty_context()); 1115 zone(), isolate()->object_store()->empty_context());
1117 __ LoadObject(R1, empty_context); 1116 __ LoadObject(R1, empty_context);
(...skipping 779 matching lines...)
1897 DRegister dreg = EvenDRegisterOf(reg); 1896 DRegister dreg = EvenDRegisterOf(reg);
1898 __ vldrd(dreg, Address(SP, kDoubleSize, Address::PostIndex)); 1897 __ vldrd(dreg, Address(SP, kDoubleSize, Address::PostIndex));
1899 } 1898 }
1900 1899
1901 1900
1902 #undef __ 1901 #undef __
1903 1902
1904 } // namespace dart 1903 } // namespace dart
1905 1904
1906 #endif // defined TARGET_ARCH_ARM 1905 #endif // defined TARGET_ARCH_ARM
