| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. |
| 6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
| 7 | 7 |
| 8 #include "vm/flow_graph_compiler.h" | 8 #include "vm/flow_graph_compiler.h" |
| 9 | 9 |
| 10 #include "vm/ast_printer.h" | 10 #include "vm/ast_printer.h" |
| (...skipping 212 matching lines...) |
| 223 __ Comment("InstantiatedTypeWithArgumentsTest"); | 223 __ Comment("InstantiatedTypeWithArgumentsTest"); |
| 224 ASSERT(type.IsInstantiated()); | 224 ASSERT(type.IsInstantiated()); |
| 225 const Class& type_class = Class::ZoneHandle(type.type_class()); | 225 const Class& type_class = Class::ZoneHandle(type.type_class()); |
| 226 ASSERT((type_class.NumTypeArguments() > 0) || type_class.IsSignatureClass()); | 226 ASSERT((type_class.NumTypeArguments() > 0) || type_class.IsSignatureClass()); |
| 227 const Register kInstanceReg = A0; | 227 const Register kInstanceReg = A0; |
| 228 Error& malformed_error = Error::Handle(); | 228 Error& malformed_error = Error::Handle(); |
| 229 const Type& int_type = Type::Handle(Type::IntType()); | 229 const Type& int_type = Type::Handle(Type::IntType()); |
| 230 const bool smi_is_ok = int_type.IsSubtypeOf(type, &malformed_error); | 230 const bool smi_is_ok = int_type.IsSubtypeOf(type, &malformed_error); |
| 231 // Malformed type should have been handled at graph construction time. | 231 // Malformed type should have been handled at graph construction time. |
| 232 ASSERT(smi_is_ok || malformed_error.IsNull()); | 232 ASSERT(smi_is_ok || malformed_error.IsNull()); |
| 233 __ andi(CMPRES, kInstanceReg, Immediate(kSmiTagMask)); | 233 __ andi(CMPRES1, kInstanceReg, Immediate(kSmiTagMask)); |
| 234 if (smi_is_ok) { | 234 if (smi_is_ok) { |
| 235 __ beq(CMPRES, ZR, is_instance_lbl); | 235 __ beq(CMPRES1, ZR, is_instance_lbl); |
| 236 } else { | 236 } else { |
| 237 __ beq(CMPRES, ZR, is_not_instance_lbl); | 237 __ beq(CMPRES1, ZR, is_not_instance_lbl); |
| 238 } | 238 } |
| 239 const AbstractTypeArguments& type_arguments = | 239 const AbstractTypeArguments& type_arguments = |
| 240 AbstractTypeArguments::ZoneHandle(type.arguments()); | 240 AbstractTypeArguments::ZoneHandle(type.arguments()); |
| 241 const bool is_raw_type = type_arguments.IsNull() || | 241 const bool is_raw_type = type_arguments.IsNull() || |
| 242 type_arguments.IsRaw(type_arguments.Length()); | 242 type_arguments.IsRaw(type_arguments.Length()); |
| 243 // Signature class is an instantiated parameterized type. | 243 // Signature class is an instantiated parameterized type. |
| 244 if (!type_class.IsSignatureClass()) { | 244 if (!type_class.IsSignatureClass()) { |
| 245 if (is_raw_type) { | 245 if (is_raw_type) { |
| 246 const Register kClassIdReg = T0; | 246 const Register kClassIdReg = T0; |
| 247 // dynamic type argument, check only classes. | 247 // dynamic type argument, check only classes. |
| (...skipping 173 matching lines...) |
| 421 FieldAddress(A1, TypeArguments::type_at_offset(type_param.index()))); | 421 FieldAddress(A1, TypeArguments::type_at_offset(type_param.index()))); |
| 422 // R2: concrete type of type. | 422 // R2: concrete type of type. |
| 423 // Check if type argument is dynamic. | 423 // Check if type argument is dynamic. |
| 424 __ BranchEqual(T2, Type::ZoneHandle(Type::DynamicType()), is_instance_lbl); | 424 __ BranchEqual(T2, Type::ZoneHandle(Type::DynamicType()), is_instance_lbl); |
| 425 __ beq(T2, T7, is_instance_lbl); | 425 __ beq(T2, T7, is_instance_lbl); |
| 426 const Type& object_type = Type::ZoneHandle(Type::ObjectType()); | 426 const Type& object_type = Type::ZoneHandle(Type::ObjectType()); |
| 427 __ BranchEqual(T2, object_type, is_instance_lbl); | 427 __ BranchEqual(T2, object_type, is_instance_lbl); |
| 428 | 428 |
| 429 // For Smi check quickly against int and num interfaces. | 429 // For Smi check quickly against int and num interfaces. |
| 430 Label not_smi; | 430 Label not_smi; |
| 431 __ andi(CMPRES, A0, Immediate(kSmiTagMask)); | 431 __ andi(CMPRES1, A0, Immediate(kSmiTagMask)); |
| 432 __ bne(CMPRES, ZR, &not_smi); // Value is Smi? | 432 __ bne(CMPRES1, ZR, &not_smi); // Value is Smi? |
| 433 __ BranchEqual(T2, Type::ZoneHandle(Type::IntType()), is_instance_lbl); | 433 __ BranchEqual(T2, Type::ZoneHandle(Type::IntType()), is_instance_lbl); |
| 434 __ BranchEqual(T2, Type::ZoneHandle(Type::Number()), is_instance_lbl); | 434 __ BranchEqual(T2, Type::ZoneHandle(Type::Number()), is_instance_lbl); |
| 435 | 435 |
| 436 // Smi must be handled in runtime. | 436 // Smi must be handled in runtime. |
| 437 __ b(&fall_through); | 437 __ b(&fall_through); |
| 438 | 438 |
| 439 __ Bind(&not_smi); | 439 __ Bind(&not_smi); |
| 440 // T1: instantiator type arguments. | 440 // T1: instantiator type arguments. |
| 441 // A0: instance. | 441 // A0: instance. |
| 442 const Register kInstanceReg = A0; | 442 const Register kInstanceReg = A0; |
| 443 const Register kTypeArgumentsReg = A1; | 443 const Register kTypeArgumentsReg = A1; |
| 444 const Register kTempReg = kNoRegister; | 444 const Register kTempReg = kNoRegister; |
| 445 const SubtypeTestCache& type_test_cache = | 445 const SubtypeTestCache& type_test_cache = |
| 446 SubtypeTestCache::ZoneHandle( | 446 SubtypeTestCache::ZoneHandle( |
| 447 GenerateCallSubtypeTestStub(kTestTypeThreeArgs, | 447 GenerateCallSubtypeTestStub(kTestTypeThreeArgs, |
| 448 kInstanceReg, | 448 kInstanceReg, |
| 449 kTypeArgumentsReg, | 449 kTypeArgumentsReg, |
| 450 kTempReg, | 450 kTempReg, |
| 451 is_instance_lbl, | 451 is_instance_lbl, |
| 452 is_not_instance_lbl)); | 452 is_not_instance_lbl)); |
| 453 __ Bind(&fall_through); | 453 __ Bind(&fall_through); |
| 454 return type_test_cache.raw(); | 454 return type_test_cache.raw(); |
| 455 } | 455 } |
| 456 if (type.IsType()) { | 456 if (type.IsType()) { |
| 457 const Register kInstanceReg = A0; | 457 const Register kInstanceReg = A0; |
| 458 const Register kTypeArgumentsReg = A1; | 458 const Register kTypeArgumentsReg = A1; |
| 459 __ andi(CMPRES, kInstanceReg, Immediate(kSmiTagMask)); | 459 __ andi(CMPRES1, kInstanceReg, Immediate(kSmiTagMask)); |
| 460 __ beq(CMPRES, ZR, is_not_instance_lbl); // Is instance Smi? | 460 __ beq(CMPRES1, ZR, is_not_instance_lbl); // Is instance Smi? |
| 461 __ lw(kTypeArgumentsReg, Address(SP, 0)); // Instantiator type args. | 461 __ lw(kTypeArgumentsReg, Address(SP, 0)); // Instantiator type args. |
| 462 // Uninstantiated type class is known at compile time, but the type | 462 // Uninstantiated type class is known at compile time, but the type |
| 463 // arguments are determined at runtime by the instantiator. | 463 // arguments are determined at runtime by the instantiator. |
| 464 const Register kTempReg = kNoRegister; | 464 const Register kTempReg = kNoRegister; |
| 465 return GenerateCallSubtypeTestStub(kTestTypeThreeArgs, | 465 return GenerateCallSubtypeTestStub(kTestTypeThreeArgs, |
| 466 kInstanceReg, | 466 kInstanceReg, |
| 467 kTypeArgumentsReg, | 467 kTypeArgumentsReg, |
| 468 kTempReg, | 468 kTempReg, |
| 469 is_instance_lbl, | 469 is_instance_lbl, |
| 470 is_not_instance_lbl); | 470 is_not_instance_lbl); |
| (...skipping 19 matching lines...) |
| 490 __ TraceSimMsg("InlineInstanceof"); | 490 __ TraceSimMsg("InlineInstanceof"); |
| 491 __ Comment("InlineInstanceof"); | 491 __ Comment("InlineInstanceof"); |
| 492 if (type.IsVoidType()) { | 492 if (type.IsVoidType()) { |
| 493 // A non-null value is returned from a void function, which will result in a | 493 // A non-null value is returned from a void function, which will result in a |
| 494 // type error. A null value is handled prior to executing this inline code. | 494 // type error. A null value is handled prior to executing this inline code. |
| 495 return SubtypeTestCache::null(); | 495 return SubtypeTestCache::null(); |
| 496 } | 496 } |
| 497 if (TypeCheckAsClassEquality(type)) { | 497 if (TypeCheckAsClassEquality(type)) { |
| 498 const intptr_t type_cid = Class::Handle(type.type_class()).id(); | 498 const intptr_t type_cid = Class::Handle(type.type_class()).id(); |
| 499 const Register kInstanceReg = A0; | 499 const Register kInstanceReg = A0; |
| 500 __ andi(CMPRES, kInstanceReg, Immediate(kSmiTagMask)); | 500 __ andi(CMPRES1, kInstanceReg, Immediate(kSmiTagMask)); |
| 501 if (type_cid == kSmiCid) { | 501 if (type_cid == kSmiCid) { |
| 502 __ beq(CMPRES, ZR, is_instance_lbl); | 502 __ beq(CMPRES1, ZR, is_instance_lbl); |
| 503 } else { | 503 } else { |
| 504 __ beq(CMPRES, ZR, is_not_instance_lbl); | 504 __ beq(CMPRES1, ZR, is_not_instance_lbl); |
| 505 __ LoadClassId(T0, kInstanceReg); | 505 __ LoadClassId(T0, kInstanceReg); |
| 506 __ BranchEqual(T0, type_cid, is_instance_lbl); | 506 __ BranchEqual(T0, type_cid, is_instance_lbl); |
| 507 } | 507 } |
| 508 __ b(is_not_instance_lbl); | 508 __ b(is_not_instance_lbl); |
| 509 return SubtypeTestCache::null(); | 509 return SubtypeTestCache::null(); |
| 510 } | 510 } |
| 511 if (type.IsInstantiated()) { | 511 if (type.IsInstantiated()) { |
| 512 const Class& type_class = Class::ZoneHandle(type.type_class()); | 512 const Class& type_class = Class::ZoneHandle(type.type_class()); |
| 513 // A class equality check is only applicable with a dst type of a | 513 // A class equality check is only applicable with a dst type of a |
| 514 // non-parameterized class, non-signature class, or with a raw dst type of | 514 // non-parameterized class, non-signature class, or with a raw dst type of |
| (...skipping 150 matching lines...) |
| 665 | 665 |
| 666 if (!FLAG_eliminate_type_checks || dst_type.IsMalformed()) { | 666 if (!FLAG_eliminate_type_checks || dst_type.IsMalformed()) { |
| 667 // If type checks are not eliminated during the graph building then | 667 // If type checks are not eliminated during the graph building then |
| 668 // a transition sentinel can be seen here. | 668 // a transition sentinel can be seen here. |
| 669 __ BranchEqual(A0, Object::transition_sentinel(), &is_assignable); | 669 __ BranchEqual(A0, Object::transition_sentinel(), &is_assignable); |
| 670 } | 670 } |
| 671 | 671 |
| 672 // Generate throw new TypeError() if the type is malformed or malbounded. | 672 // Generate throw new TypeError() if the type is malformed or malbounded. |
| 673 if (dst_type.IsMalformed() || dst_type.IsMalbounded()) { | 673 if (dst_type.IsMalformed() || dst_type.IsMalbounded()) { |
| 674 __ addiu(SP, SP, Immediate(-4 * kWordSize)); | 674 __ addiu(SP, SP, Immediate(-4 * kWordSize)); |
| 675 __ LoadObject(TMP1, Object::ZoneHandle()); | 675 __ LoadObject(TMP, Object::ZoneHandle()); |
| 676 __ sw(TMP1, Address(SP, 3 * kWordSize)); // Make room for the result. | 676 __ sw(TMP, Address(SP, 3 * kWordSize)); // Make room for the result. |
| 677 __ sw(A0, Address(SP, 2 * kWordSize)); // Push the source object. | 677 __ sw(A0, Address(SP, 2 * kWordSize)); // Push the source object. |
| 678 __ LoadObject(TMP1, dst_name); | 678 __ LoadObject(TMP, dst_name); |
| 679 __ sw(TMP1, Address(SP, 1 * kWordSize)); // Push the destination name. | 679 __ sw(TMP, Address(SP, 1 * kWordSize)); // Push the destination name. |
| 680 __ LoadObject(TMP1, dst_type); | 680 __ LoadObject(TMP, dst_type); |
| 681 __ sw(TMP1, Address(SP, 0 * kWordSize)); // Push the destination type. | 681 __ sw(TMP, Address(SP, 0 * kWordSize)); // Push the destination type. |
| 682 | 682 |
| 683 GenerateRuntimeCall(token_pos, | 683 GenerateRuntimeCall(token_pos, |
| 684 deopt_id, | 684 deopt_id, |
| 685 kBadTypeErrorRuntimeEntry, | 685 kBadTypeErrorRuntimeEntry, |
| 686 3, | 686 3, |
| 687 locs); | 687 locs); |
| 688 // We should never return here. | 688 // We should never return here. |
| 689 __ break_(0); | 689 __ break_(0); |
| 690 | 690 |
| 691 __ Bind(&is_assignable); // For a null object. | 691 __ Bind(&is_assignable); // For a null object. |
| 692 // Restore instantiator and its type arguments. | 692 // Restore instantiator and its type arguments. |
| 693 __ lw(A1, Address(SP, 0 * kWordSize)); | 693 __ lw(A1, Address(SP, 0 * kWordSize)); |
| 694 __ lw(A2, Address(SP, 1 * kWordSize)); | 694 __ lw(A2, Address(SP, 1 * kWordSize)); |
| 695 __ addiu(SP, SP, Immediate(2 * kWordSize)); | 695 __ addiu(SP, SP, Immediate(2 * kWordSize)); |
| 696 return; | 696 return; |
| 697 } | 697 } |
| 698 | 698 |
| 699 // Generate inline type check, linking to runtime call if not assignable. | 699 // Generate inline type check, linking to runtime call if not assignable. |
| 700 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(); | 700 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(); |
| 701 test_cache = GenerateInlineInstanceof(token_pos, dst_type, | 701 test_cache = GenerateInlineInstanceof(token_pos, dst_type, |
| 702 &is_assignable, &runtime_call); | 702 &is_assignable, &runtime_call); |
| 703 | 703 |
| 704 __ Bind(&runtime_call); | 704 __ Bind(&runtime_call); |
| 705 // Load instantiator (A2) and its type arguments (A1). | 705 // Load instantiator (A2) and its type arguments (A1). |
| 706 __ lw(A1, Address(SP, 0 * kWordSize)); | 706 __ lw(A1, Address(SP, 0 * kWordSize)); |
| 707 __ lw(A2, Address(SP, 1 * kWordSize)); | 707 __ lw(A2, Address(SP, 1 * kWordSize)); |
| 708 | 708 |
| 709 __ addiu(SP, SP, Immediate(-7 * kWordSize)); | 709 __ addiu(SP, SP, Immediate(-7 * kWordSize)); |
| 710 __ LoadObject(TMP1, Object::ZoneHandle()); | 710 __ LoadObject(TMP, Object::ZoneHandle()); |
| 711 __ sw(TMP1, Address(SP, 6 * kWordSize)); // Make room for the result. | 711 __ sw(TMP, Address(SP, 6 * kWordSize)); // Make room for the result. |
| 712 __ sw(A0, Address(SP, 5 * kWordSize)); // Push the source object. | 712 __ sw(A0, Address(SP, 5 * kWordSize)); // Push the source object. |
| 713 __ LoadObject(TMP1, dst_type); | 713 __ LoadObject(TMP, dst_type); |
| 714 __ sw(TMP1, Address(SP, 4 * kWordSize)); // Push the type of the destination. | 714 __ sw(TMP, Address(SP, 4 * kWordSize)); // Push the type of the destination. |
| 715 __ sw(A2, Address(SP, 3 * kWordSize)); // Push instantiator. | 715 __ sw(A2, Address(SP, 3 * kWordSize)); // Push instantiator. |
| 716 __ sw(A1, Address(SP, 2 * kWordSize)); // Push type arguments. | 716 __ sw(A1, Address(SP, 2 * kWordSize)); // Push type arguments. |
| 717 __ LoadObject(TMP1, dst_name); | 717 __ LoadObject(TMP, dst_name); |
| 718 __ sw(TMP1, Address(SP, 1 * kWordSize)); // Push the name of the destination. | 718 __ sw(TMP, Address(SP, 1 * kWordSize)); // Push the name of the destination. |
| 719 __ LoadObject(T0, test_cache); | 719 __ LoadObject(T0, test_cache); |
| 720 __ sw(T0, Address(SP, 0 * kWordSize)); | 720 __ sw(T0, Address(SP, 0 * kWordSize)); |
| 721 | 721 |
| 722 GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); | 722 GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); |
| 723 // Pop the parameters supplied to the runtime entry. The result of the | 723 // Pop the parameters supplied to the runtime entry. The result of the |
| 724 // type check runtime call is the checked value. | 724 // type check runtime call is the checked value. |
| 725 __ lw(A0, Address(SP, 6 * kWordSize)); | 725 __ lw(A0, Address(SP, 6 * kWordSize)); |
| 726 __ addiu(SP, SP, Immediate(7 * kWordSize)); | 726 __ addiu(SP, SP, Immediate(7 * kWordSize)); |
| 727 | 727 |
| 728 __ Bind(&is_assignable); | 728 __ Bind(&is_assignable); |
| (...skipping 617 matching lines...) |
| 1346 MegamorphicCacheTable* table = Isolate::Current()->megamorphic_cache_table(); | 1346 MegamorphicCacheTable* table = Isolate::Current()->megamorphic_cache_table(); |
| 1347 const String& name = String::Handle(ic_data.target_name()); | 1347 const String& name = String::Handle(ic_data.target_name()); |
| 1348 const Array& arguments_descriptor = | 1348 const Array& arguments_descriptor = |
| 1349 Array::ZoneHandle(ic_data.arguments_descriptor()); | 1349 Array::ZoneHandle(ic_data.arguments_descriptor()); |
| 1350 ASSERT(!arguments_descriptor.IsNull()); | 1350 ASSERT(!arguments_descriptor.IsNull()); |
| 1351 const MegamorphicCache& cache = | 1351 const MegamorphicCache& cache = |
| 1352 MegamorphicCache::ZoneHandle(table->Lookup(name, arguments_descriptor)); | 1352 MegamorphicCache::ZoneHandle(table->Lookup(name, arguments_descriptor)); |
| 1353 Label not_smi, load_cache; | 1353 Label not_smi, load_cache; |
| 1354 __ TraceSimMsg("MegamorphicInstanceCall"); | 1354 __ TraceSimMsg("MegamorphicInstanceCall"); |
| 1355 __ lw(T0, Address(SP, (argument_count - 1) * kWordSize)); | 1355 __ lw(T0, Address(SP, (argument_count - 1) * kWordSize)); |
| 1356 __ andi(CMPRES, T0, Immediate(kSmiTagMask)); | 1356 __ andi(CMPRES1, T0, Immediate(kSmiTagMask)); |
| 1357 __ bne(CMPRES, ZR, &not_smi); | 1357 __ bne(CMPRES1, ZR, &not_smi); |
| 1358 __ LoadImmediate(T0, Smi::RawValue(kSmiCid)); | 1358 __ LoadImmediate(T0, Smi::RawValue(kSmiCid)); |
| 1359 __ b(&load_cache); | 1359 __ b(&load_cache); |
| 1360 | 1360 |
| 1361 __ Bind(&not_smi); | 1361 __ Bind(&not_smi); |
| 1362 __ LoadClassId(T0, T0); | 1362 __ LoadClassId(T0, T0); |
| 1363 __ SmiTag(T0); | 1363 __ SmiTag(T0); |
| 1364 | 1364 |
| 1365 // T0: class ID of the receiver (smi). | 1365 // T0: class ID of the receiver (smi). |
| 1366 __ Bind(&load_cache); | 1366 __ Bind(&load_cache); |
| 1367 __ LoadObject(T1, cache); | 1367 __ LoadObject(T1, cache); |
| 1368 __ lw(T2, FieldAddress(T1, MegamorphicCache::buckets_offset())); | 1368 __ lw(T2, FieldAddress(T1, MegamorphicCache::buckets_offset())); |
| 1369 __ lw(T1, FieldAddress(T1, MegamorphicCache::mask_offset())); | 1369 __ lw(T1, FieldAddress(T1, MegamorphicCache::mask_offset())); |
| 1370 // T2: cache buckets array. | 1370 // T2: cache buckets array. |
| 1371 // T1: mask. | 1371 // T1: mask. |
| 1372 __ mov(T3, T0); | 1372 __ mov(T3, T0); |
| 1373 | 1373 |
| 1374 Label loop, update, call_target_function; | 1374 Label loop, update, call_target_function; |
| 1375 __ b(&loop); | 1375 __ b(&loop); |
| 1376 | 1376 |
| 1377 __ Bind(&update); | 1377 __ Bind(&update); |
| 1378 __ addiu(T3, T3, Immediate(Smi::RawValue(1))); | 1378 __ addiu(T3, T3, Immediate(Smi::RawValue(1))); |
| 1379 __ Bind(&loop); | 1379 __ Bind(&loop); |
| 1380 __ and_(T3, T3, T1); | 1380 __ and_(T3, T3, T1); |
| 1381 const intptr_t base = Array::data_offset(); | 1381 const intptr_t base = Array::data_offset(); |
| 1382 // T3 is smi tagged, but table entries are two words, so LSL 2. | 1382 // T3 is smi tagged, but table entries are two words, so LSL 2. |
| 1383 __ sll(TMP1, T3, 2); | 1383 __ sll(TMP, T3, 2); |
| 1384 __ addu(TMP1, T2, TMP1); | 1384 __ addu(TMP, T2, TMP); |
| 1385 __ lw(T4, FieldAddress(TMP, base)); | 1385 __ lw(T4, FieldAddress(TMP, base)); |
| 1386 | 1386 |
| 1387 ASSERT(kIllegalCid == 0); | 1387 ASSERT(kIllegalCid == 0); |
| 1388 __ beq(T4, ZR, &call_target_function); | 1388 __ beq(T4, ZR, &call_target_function); |
| 1389 __ bne(T4, T0, &update); | 1389 __ bne(T4, T0, &update); |
| 1390 | 1390 |
| 1391 __ Bind(&call_target_function); | 1391 __ Bind(&call_target_function); |
| 1392 // Call the target found in the cache. For a class id match, this is a | 1392 // Call the target found in the cache. For a class id match, this is a |
| 1393 // proper target for the given name and arguments descriptor. If the | 1393 // proper target for the given name and arguments descriptor. If the |
| 1394 // illegal class id was found, the target is a cache miss handler that can | 1394 // illegal class id was found, the target is a cache miss handler that can |
| (...skipping 87 matching lines...) |
| 1482 | 1482 |
| 1483 void FlowGraphCompiler::EmitEqualityRegConstCompare(Register reg, | 1483 void FlowGraphCompiler::EmitEqualityRegConstCompare(Register reg, |
| 1484 const Object& obj, | 1484 const Object& obj, |
| 1485 bool needs_number_check, | 1485 bool needs_number_check, |
| 1486 intptr_t token_pos) { | 1486 intptr_t token_pos) { |
| 1487 __ TraceSimMsg("EqualityRegConstCompare"); | 1487 __ TraceSimMsg("EqualityRegConstCompare"); |
| 1488 if (needs_number_check && | 1488 if (needs_number_check && |
| 1489 (obj.IsMint() || obj.IsDouble() || obj.IsBigint())) { | 1489 (obj.IsMint() || obj.IsDouble() || obj.IsBigint())) { |
| 1490 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 1490 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
| 1491 __ sw(reg, Address(SP, 1 * kWordSize)); | 1491 __ sw(reg, Address(SP, 1 * kWordSize)); |
| 1492 __ LoadObject(TMP1, obj); | 1492 __ LoadObject(TMP, obj); |
| 1493 __ sw(TMP1, Address(SP, 0 * kWordSize)); | 1493 __ sw(TMP, Address(SP, 0 * kWordSize)); |
| 1494 if (is_optimizing()) { | 1494 if (is_optimizing()) { |
| 1495 __ BranchLinkPatchable( | 1495 __ BranchLinkPatchable( |
| 1496 &StubCode::OptimizedIdenticalWithNumberCheckLabel()); | 1496 &StubCode::OptimizedIdenticalWithNumberCheckLabel()); |
| 1497 } else { | 1497 } else { |
| 1498 __ BranchLinkPatchable( | 1498 __ BranchLinkPatchable( |
| 1499 &StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); | 1499 &StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); |
| 1500 } | 1500 } |
| 1501 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, | 1501 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, |
| 1502 Isolate::kNoDeoptId, | 1502 Isolate::kNoDeoptId, |
| 1503 token_pos); | 1503 token_pos); |
| (...skipping 20 matching lines...) |
| 1524 __ BranchLinkPatchable( | 1524 __ BranchLinkPatchable( |
| 1525 &StubCode::OptimizedIdenticalWithNumberCheckLabel()); | 1525 &StubCode::OptimizedIdenticalWithNumberCheckLabel()); |
| 1526 } else { | 1526 } else { |
| 1527 __ BranchLinkPatchable( | 1527 __ BranchLinkPatchable( |
| 1528 &StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); | 1528 &StubCode::UnoptimizedIdenticalWithNumberCheckLabel()); |
| 1529 } | 1529 } |
| 1530 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, | 1530 AddCurrentDescriptor(PcDescriptors::kRuntimeCall, |
| 1531 Isolate::kNoDeoptId, | 1531 Isolate::kNoDeoptId, |
| 1532 token_pos); | 1532 token_pos); |
| 1533 __ TraceSimMsg("EqualityRegRegCompare return"); | 1533 __ TraceSimMsg("EqualityRegRegCompare return"); |
| 1534 // Stub returns result in CMPRES. If it is 0, then left and right are equal. | 1534 // Stub returns result in CMPRES1. If it is 0, then left and right are |
| | 1535 // equal. |
| 1535 __ lw(right, Address(SP, 0 * kWordSize)); | 1536 __ lw(right, Address(SP, 0 * kWordSize)); |
| 1536 __ lw(left, Address(SP, 1 * kWordSize)); | 1537 __ lw(left, Address(SP, 1 * kWordSize)); |
| 1537 __ addiu(SP, SP, Immediate(2 * kWordSize)); | 1538 __ addiu(SP, SP, Immediate(2 * kWordSize)); |
| 1538 } else { | 1539 } else { |
| 1539 __ slt(CMPRES1, left, right); | 1540 __ slt(CMPRES1, left, right); |
| 1540 __ slt(CMPRES2, right, left); | 1541 __ slt(CMPRES2, right, left); |
| 1541 } | 1542 } |
| 1542 } | 1543 } |
| 1543 | 1544 |
| 1544 | 1545 |
| (...skipping 148 matching lines...) |
| 1693 assembler()->LoadImmediate(TMP, 1); | 1694 assembler()->LoadImmediate(TMP, 1); |
| 1694 if (true_condition == NE) { | 1695 if (true_condition == NE) { |
| 1695 assembler()->movf(CMPRES1, ZR); | 1696 assembler()->movf(CMPRES1, ZR); |
| 1696 assembler()->movt(CMPRES1, TMP); | 1697 assembler()->movt(CMPRES1, TMP); |
| 1697 } else { | 1698 } else { |
| 1698 assembler()->movf(CMPRES1, TMP); | 1699 assembler()->movf(CMPRES1, TMP); |
| 1699 assembler()->movt(CMPRES1, ZR); | 1700 assembler()->movt(CMPRES1, ZR); |
| 1700 } | 1701 } |
| 1701 assembler()->mov(CMPRES2, ZR); | 1702 assembler()->mov(CMPRES2, ZR); |
| 1702 | 1703 |
| 1703 // EmitBranchOnCondition expects ordering to be described by CMPRES, CMPRES2. | 1704 // EmitBranchOnCondition expects ordering to be described by CMPRES1, CMPRES2. |
| 1704 branch->EmitBranchOnCondition(this, EQ); | 1705 branch->EmitBranchOnCondition(this, EQ); |
| 1705 } | 1706 } |
| 1706 | 1707 |
| 1707 | 1708 |
| 1708 void FlowGraphCompiler::EmitDoubleCompareBool(Condition true_condition, | 1709 void FlowGraphCompiler::EmitDoubleCompareBool(Condition true_condition, |
| 1709 FpuRegister left, | 1710 FpuRegister left, |
| 1710 FpuRegister right, | 1711 FpuRegister right, |
| 1711 Register result) { | 1712 Register result) { |
| 1712 Label done; | 1713 Label done; |
| 1713 __ Comment("DoubleCompareBool"); | 1714 __ Comment("DoubleCompareBool"); |
| (...skipping 209 matching lines...) |
| 1923 } else if (other_move.Blocks(destination)) { | 1924 } else if (other_move.Blocks(destination)) { |
| 1924 moves_[i]->set_src(source); | 1925 moves_[i]->set_src(source); |
| 1925 } | 1926 } |
| 1926 } | 1927 } |
| 1927 } | 1928 } |
| 1928 | 1929 |
| 1929 | 1930 |
| 1930 void ParallelMoveResolver::MoveMemoryToMemory(const Address& dst, | 1931 void ParallelMoveResolver::MoveMemoryToMemory(const Address& dst, |
| 1931 const Address& src) { | 1932 const Address& src) { |
| 1932 __ TraceSimMsg("ParallelMoveResolver::MoveMemoryToMemory"); | 1933 __ TraceSimMsg("ParallelMoveResolver::MoveMemoryToMemory"); |
| 1933 __ lw(TMP1, src); | 1934 __ lw(TMP, src); |
| 1934 __ sw(TMP1, dst); | 1935 __ sw(TMP, dst); |
| 1935 } | 1936 } |
| 1936 | 1937 |
| 1937 | 1938 |
| 1938 void ParallelMoveResolver::StoreObject(const Address& dst, const Object& obj) { | 1939 void ParallelMoveResolver::StoreObject(const Address& dst, const Object& obj) { |
| 1939 __ TraceSimMsg("ParallelMoveResolver::StoreObject"); | 1940 __ TraceSimMsg("ParallelMoveResolver::StoreObject"); |
| 1940 __ LoadObject(TMP1, obj); | 1941 __ LoadObject(TMP, obj); |
| 1941 __ sw(TMP1, dst); | 1942 __ sw(TMP, dst); |
| 1942 } | 1943 } |
| 1943 | 1944 |
| 1944 | 1945 |
| 1945 // Do not call or implement this function. Instead, use the form below that | 1946 // Do not call or implement this function. Instead, use the form below that |
| 1946 // uses an offset from the frame pointer instead of an Address. | 1947 // uses an offset from the frame pointer instead of an Address. |
| 1947 void ParallelMoveResolver::Exchange(Register reg, const Address& mem) { | 1948 void ParallelMoveResolver::Exchange(Register reg, const Address& mem) { |
| 1948 UNREACHABLE(); | 1949 UNREACHABLE(); |
| 1949 } | 1950 } |
| 1950 | 1951 |
| 1951 | 1952 |
| (...skipping 46 matching lines...) |
| 1998 __ AddImmediate(SP, kDoubleSize); | 1999 __ AddImmediate(SP, kDoubleSize); |
| 1999 } | 2000 } |
| 2000 | 2001 |
| 2001 | 2002 |
| 2002 #undef __ | 2003 #undef __ |
| 2003 | 2004 |
| 2004 | 2005 |
| 2005 } // namespace dart | 2006 } // namespace dart |
| 2006 | 2007 |
| 2007 #endif // defined TARGET_ARCH_MIPS | 2008 #endif // defined TARGET_ARCH_MIPS |
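
For readers unfamiliar with the `andi(CMPRES1, reg, Immediate(kSmiTagMask))` / `beq(CMPRES1, ZR, ...)` pairs that recur throughout this CL, below is a minimal, self-contained C++ sketch of the Smi-tag test those two MIPS instructions implement. The constants mirror the VM's 32-bit tagging scheme as I understand it (Smi values keep tag bit 0 clear, heap-object pointers are tagged); the `IsSmi` helper and the sample values are purely illustrative and are not part of this change.

```cpp
#include <cstdint>
#include <cstdio>

// Assumed mirror of the VM's 32-bit tagging constants (illustration only):
// a Smi stores the integer shifted left by one with bit 0 clear, while
// heap-object pointers carry a set tag bit.
constexpr uintptr_t kSmiTagMask = 1;
constexpr uintptr_t kSmiTag = 0;

// Hypothetical helper: the C++ equivalent of the generated sequence
//   __ andi(CMPRES1, reg, Immediate(kSmiTagMask));
//   __ beq(CMPRES1, ZR, is_smi_label);
bool IsSmi(uintptr_t raw_value) {
  return (raw_value & kSmiTagMask) == kSmiTag;
}

int main() {
  uintptr_t smi_value = 42u << 1;       // Smi encoding of 42: tag bit 0 clear.
  uintptr_t heap_pointer = 0x1000 | 1;  // Tagged heap-object pointer.
  printf("smi_value is Smi: %d\n", IsSmi(smi_value));        // prints 1
  printf("heap_pointer is Smi: %d\n", IsSmi(heap_pointer));  // prints 0
  return 0;
}
```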