| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/intermediate_language.h" | 5 #include "vm/intermediate_language.h" |
| 6 | 6 |
| 7 #include "vm/bit_vector.h" | 7 #include "vm/bit_vector.h" |
| 8 #include "vm/dart_entry.h" | 8 #include "vm/dart_entry.h" |
| 9 #include "vm/flow_graph_allocator.h" | 9 #include "vm/flow_graph_allocator.h" |
| 10 #include "vm/flow_graph_builder.h" | 10 #include "vm/flow_graph_builder.h" |
| 11 #include "vm/flow_graph_compiler.h" | 11 #include "vm/flow_graph_compiler.h" |
| 12 #include "vm/flow_graph_optimizer.h" | 12 #include "vm/flow_graph_optimizer.h" |
| 13 #include "vm/locations.h" | 13 #include "vm/locations.h" |
| 14 #include "vm/object.h" | 14 #include "vm/object.h" |
| 15 #include "vm/object_store.h" | 15 #include "vm/object_store.h" |
| 16 #include "vm/os.h" | 16 #include "vm/os.h" |
| 17 #include "vm/scopes.h" | 17 #include "vm/scopes.h" |
| 18 #include "vm/stub_code.h" | 18 #include "vm/stub_code.h" |
| 19 #include "vm/symbols.h" | 19 #include "vm/symbols.h" |
| 20 | 20 |
| 21 namespace dart { | 21 namespace dart { |
| 22 | 22 |
| 23 DEFINE_FLAG(bool, new_identity_spec, true, | 23 DEFINE_FLAG(bool, new_identity_spec, true, |
| 24 "Use new identity check rules for numbers."); | 24 "Use new identity check rules for numbers."); |
| 25 DEFINE_FLAG(bool, propagate_ic_data, true, | 25 DEFINE_FLAG(bool, propagate_ic_data, true, |
| 26 "Propagate IC data from unoptimized to optimized IC calls."); | 26 "Propagate IC data from unoptimized to optimized IC calls."); |
| 27 DECLARE_FLAG(bool, enable_type_checks); | 27 DECLARE_FLAG(bool, enable_type_checks); |
| 28 DECLARE_FLAG(bool, eliminate_type_checks); |
| 28 DECLARE_FLAG(int, max_polymorphic_checks); | 29 DECLARE_FLAG(int, max_polymorphic_checks); |
| 29 DECLARE_FLAG(bool, trace_optimization); | 30 DECLARE_FLAG(bool, trace_optimization); |
| 30 | 31 |
| 31 Definition::Definition() | 32 Definition::Definition() |
| 32 : range_(NULL), | 33 : range_(NULL), |
| 34 type_(NULL), |
| 33 temp_index_(-1), | 35 temp_index_(-1), |
| 34 ssa_temp_index_(-1), | 36 ssa_temp_index_(-1), |
| 35 propagated_type_(AbstractType::Handle()), | |
| 36 propagated_cid_(kIllegalCid), | |
| 37 input_use_list_(NULL), | 37 input_use_list_(NULL), |
| 38 env_use_list_(NULL), | 38 env_use_list_(NULL), |
| 39 use_kind_(kValue), // Phis and parameters rely on this default. | 39 use_kind_(kValue), // Phis and parameters rely on this default. |
| 40 constant_value_(Object::ZoneHandle(ConstantPropagator::Unknown())) { | 40 constant_value_(Object::ZoneHandle(ConstantPropagator::Unknown())) { |
| 41 } | 41 } |
| 42 | 42 |
| 43 | 43 |
| 44 intptr_t Instruction::Hashcode() const { | 44 intptr_t Instruction::Hashcode() const { |
| 45 intptr_t result = tag(); | 45 intptr_t result = tag(); |
| 46 for (intptr_t i = 0; i < InputCount(); ++i) { | 46 for (intptr_t i = 0; i < InputCount(); ++i) { |
| (...skipping 92 matching lines...) |
| 139 ASSERT(other_op != NULL); | 139 ASSERT(other_op != NULL); |
| 140 return (op_kind() == other_op->op_kind()) && | 140 return (op_kind() == other_op->op_kind()) && |
| 141 (overflow_ == other_op->overflow_); | 141 (overflow_ == other_op->overflow_); |
| 142 } | 142 } |
| 143 | 143 |
| 144 | 144 |
| 145 bool LoadFieldInstr::AttributesEqual(Instruction* other) const { | 145 bool LoadFieldInstr::AttributesEqual(Instruction* other) const { |
| 146 LoadFieldInstr* other_load = other->AsLoadField(); | 146 LoadFieldInstr* other_load = other->AsLoadField(); |
| 147 ASSERT(other_load != NULL); | 147 ASSERT(other_load != NULL); |
| 148 ASSERT((offset_in_bytes() != other_load->offset_in_bytes()) || | 148 ASSERT((offset_in_bytes() != other_load->offset_in_bytes()) || |
| 149 ((immutable_ == other_load->immutable_) && | 149 ((immutable_ == other_load->immutable_))); |
| 150 ((ResultCid() == other_load->ResultCid()) || | |
| 151 (ResultCid() == kDynamicCid) || | |
| 152 (other_load->ResultCid() == kDynamicCid)))); | |
| 153 return offset_in_bytes() == other_load->offset_in_bytes(); | 150 return offset_in_bytes() == other_load->offset_in_bytes(); |
| 154 } | 151 } |
| 155 | 152 |
| 156 | 153 |
| 157 bool LoadStaticFieldInstr::AttributesEqual(Instruction* other) const { | 154 bool LoadStaticFieldInstr::AttributesEqual(Instruction* other) const { |
| 158 LoadStaticFieldInstr* other_load = other->AsLoadStaticField(); | 155 LoadStaticFieldInstr* other_load = other->AsLoadStaticField(); |
| 159 ASSERT(other_load != NULL); | 156 ASSERT(other_load != NULL); |
| 160 // Assert that the field is initialized. | 157 // Assert that the field is initialized. |
| 161 ASSERT(field().value() != Object::sentinel().raw()); | 158 ASSERT(field().value() != Object::sentinel().raw()); |
| 162 ASSERT(field().value() != Object::transition_sentinel().raw()); | 159 ASSERT(field().value() != Object::transition_sentinel().raw()); |
| (...skipping 237 matching lines...) |
| 400 ForwardInstructionIterator it(entry); | 397 ForwardInstructionIterator it(entry); |
| 401 current_iterator_ = &it; | 398 current_iterator_ = &it; |
| 402 for (; !it.Done(); it.Advance()) { | 399 for (; !it.Done(); it.Advance()) { |
| 403 it.Current()->Accept(this); | 400 it.Current()->Accept(this); |
| 404 } | 401 } |
| 405 current_iterator_ = NULL; | 402 current_iterator_ = NULL; |
| 406 } | 403 } |
| 407 } | 404 } |
| 408 | 405 |
| 409 | 406 |
| 410 // TODO(regis): Support a set of compile types for the given value. | 407 bool Value::NeedsStoreBuffer() { |
| 411 bool Value::CanComputeIsNull(bool* is_null) const { | 408 if (Type()->IsNull() || |
| 412 ASSERT(is_null != NULL); | 409 (Type()->ToNullableCid() == kSmiCid) || |
| 413 // For now, we can only return a meaningful result if the value is constant. | 410 (Type()->ToNullableCid() == kBoolCid)) { |
| 414 if (!BindsToConstant()) { | |
| 415 return false; | 411 return false; |
| 416 } | 412 } |
| 417 | 413 |
| 418 // Return true if the constant value is Object::null. | |
| 419 if (BindsToConstantNull()) { | |
| 420 *is_null = true; | |
| 421 return true; | |
| 422 } | |
| 423 | |
| 424 // Consider the compile type of the value to check for sentinels, which are | |
| 425 // also treated as null. | |
| 426 const AbstractType& compile_type = AbstractType::Handle(CompileType()); | |
| 427 ASSERT(!compile_type.IsMalformed()); | |
| 428 ASSERT(!compile_type.IsVoidType()); | |
| 429 | |
| 430 // There are only three instances that can be of type Null: | |
| 431 // Object::null(), Object::sentinel(), and Object::transition_sentinel(). | |
| 432 // The inline code and run time code performing the type check will only | |
| 433 // encounter the 2 sentinel values if type check elimination was disabled. | |
| 434 // Otherwise, the type check of a sentinel value will be eliminated here, | |
| 435 // because these sentinel values can only be encountered as constants, never | |
| 436 // as actual value of a heap object being type checked. | |
| 437 if (compile_type.IsNullType()) { | |
| 438 *is_null = true; | |
| 439 return true; | |
| 440 } | |
| 441 | |
| 442 return false; | |
| 443 } | |
| 444 | |
| 445 | |
| 446 // TODO(regis): Support a set of compile types for the given value. | |
| 447 bool Value::CanComputeIsInstanceOf(const AbstractType& type, | |
| 448 bool* is_instance) const { | |
| 449 ASSERT(is_instance != NULL); | |
| 450 // We cannot give an answer if the given type is malformed. | |
| 451 if (type.IsMalformed()) { | |
| 452 return false; | |
| 453 } | |
| 454 | |
| 455 // We should never test for an instance of null. | |
| 456 ASSERT(!type.IsNullType()); | |
| 457 | |
| 458 // Consider the compile type of the value. | |
| 459 const AbstractType& compile_type = AbstractType::Handle(CompileType()); | |
| 460 if (compile_type.IsMalformed()) { | |
| 461 return false; | |
| 462 } | |
| 463 | |
| 464 // If the compile type of the value is void, we are type checking the result | |
| 465 // of a void function, which was checked to be null at the return statement | |
| 466 // inside the function. | |
| 467 if (compile_type.IsVoidType()) { | |
| 468 ASSERT(FLAG_enable_type_checks); | |
| 469 *is_instance = true; | |
| 470 return true; | |
| 471 } | |
| 472 | |
| 473 // The Null type is only a subtype of Object and of dynamic. | |
| 474 // Functions that do not explicitly return a value, implicitly return null, | |
| 475 // except generative constructors, which return the object being constructed. | |
| 476 // It is therefore acceptable for void functions to return null. | |
| 477 if (compile_type.IsNullType()) { | |
| 478 *is_instance = | |
| 479 type.IsObjectType() || type.IsDynamicType() || type.IsVoidType(); | |
| 480 return true; | |
| 481 } | |
| 482 | |
| 483 // Until we support a set of compile types, we can only give answers for | |
| 484 // constant values. Indeed, a variable of the proper compile time type may | |
| 485 // still hold null at run time and therefore fail the test. | |
| 486 if (!BindsToConstant()) { | |
| 487 return false; | |
| 488 } | |
| 489 | |
| 490 // A non-null constant is not an instance of void. | |
| 491 if (type.IsVoidType()) { | |
| 492 *is_instance = false; | |
| 493 return true; | |
| 494 } | |
| 495 | |
| 496 // Since the value is a constant, its type is instantiated. | |
| 497 ASSERT(compile_type.IsInstantiated()); | |
| 498 | |
| 499 // The run time type of the value is guaranteed to be a subtype of the | |
| 500 // compile time type of the value. However, establishing here that the | |
| 501 // compile time type is a subtype of the given type does not guarantee that | |
| 502 // the run time type will also be a subtype of the given type, because the | |
| 503 // subtype relation is not transitive when an uninstantiated type is | |
| 504 // involved. | |
| 505 Error& malformed_error = Error::Handle(); | |
| 506 if (type.IsInstantiated()) { | |
| 507 // Perform the test on the compile-time type and provide the answer, unless | |
| 508 // the type test produced a malformed error (e.g. an upper bound error). | |
| 509 *is_instance = compile_type.IsSubtypeOf(type, &malformed_error); | |
| 510 } else { | |
| 511 // However, the 'more specific than' relation is transitive and used here. | |
| 512 // In other words, if the compile type of the value is more specific than | |
| 513 // the given type, the run time type of the value, which is guaranteed to be | |
| 514 // a subtype of the compile type, is also guaranteed to be a subtype of the | |
| 515 // given type. | |
| 516 *is_instance = compile_type.IsMoreSpecificThan(type, &malformed_error); | |
| 517 } | |
| 518 return malformed_error.IsNull(); | |
| 519 } | |
| 520 | |
| 521 | |
| 522 bool Value::NeedsStoreBuffer() const { | |
| 523 const intptr_t cid = ResultCid(); | |
| 524 if ((cid == kSmiCid) || (cid == kBoolCid) || (cid == kNullCid)) { | |
| 525 return false; | |
| 526 } | |
| 527 return !BindsToConstant(); | 414 return !BindsToConstant(); |
| 528 } | 415 } |
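Note (editorial, not part of this CL): the rewritten Value::NeedsStoreBuffer() skips the write barrier whenever the stored value is provably null, a Smi, a bool, or a compile-time constant, since none of those can be a freshly allocated new-space object. A minimal standalone sketch of that decision, assuming a tag scheme where immediates carry a clear low bit (the helpers below are illustrative, not the VM's API):

```cpp
#include <cstdint>

// Illustrative tagging helper -- assumed scheme, not the VM's definitions.
static bool IsImmediate(uintptr_t tagged_value) {
  return (tagged_value & 1) == 0;  // e.g. Smis encoded with a clear low bit
}

// Mirrors the shape of Value::NeedsStoreBuffer() above: immediates and
// compile-time constants can never introduce an old->new pointer, so storing
// them does not require remembering the storing object.
static bool StoreNeedsBarrier(uintptr_t value, bool binds_to_constant) {
  return !IsImmediate(value) && !binds_to_constant;
}
```

The real check works on inferred class ids (Type()->ToNullableCid()) rather than runtime tags, which is why null, Smi, and bool are listed explicitly in the new code.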
| 529 | 416 |
| 530 | 417 |
| 531 RawAbstractType* PhiInstr::CompileType() const { | |
| 532 ASSERT(!HasPropagatedType()); | |
| 533 // Since type propagation has not yet occured, we are reaching this phi via a | |
| 534 // back edge phi input. Return null as compile type so that this input is | |
| 535 // ignored in the first iteration of type propagation. | |
| 536 return AbstractType::null(); | |
| 537 } | |
| 538 | |
| 539 | |
| 540 RawAbstractType* PhiInstr::LeastSpecificInputType() const { | |
| 541 AbstractType& least_specific_type = AbstractType::Handle(); | |
| 542 AbstractType& input_type = AbstractType::Handle(); | |
| 543 for (intptr_t i = 0; i < InputCount(); i++) { | |
| 544 input_type = InputAt(i)->CompileType(); | |
| 545 if (input_type.IsNull()) { | |
| 546 // This input is on a back edge and we are in the first iteration of type | |
| 547 // propagation. Ignore it. | |
| 548 continue; | |
| 549 } | |
| 550 ASSERT(!input_type.IsNull()); | |
| 551 if (least_specific_type.IsNull() || | |
| 552 least_specific_type.IsMoreSpecificThan(input_type, NULL)) { | |
| 553 // Type input_type is less specific than the current least_specific_type. | |
| 554 least_specific_type = input_type.raw(); | |
| 555 } else if (input_type.IsMoreSpecificThan(least_specific_type, NULL)) { | |
| 556 // Type least_specific_type is less specific than input_type. No change. | |
| 557 } else { | |
| 558 // The types are unrelated. No need to continue. | |
| 559 least_specific_type = Type::ObjectType(); | |
| 560 break; | |
| 561 } | |
| 562 } | |
| 563 return least_specific_type.raw(); | |
| 564 } | |
| 565 | |
| 566 | |
| 567 RawAbstractType* ParameterInstr::CompileType() const { | |
| 568 ASSERT(!HasPropagatedType()); | |
| 569 // Note that returning the declared type of the formal parameter would be | |
| 570 // incorrect, because ParameterInstr is used as input to the type check | |
| 571 // verifying the run time type of the passed-in parameter and this check would | |
| 572 // always be wrongly eliminated. | |
| 573 return Type::DynamicType(); | |
| 574 } | |
| 575 | |
| 576 | |
| 577 RawAbstractType* PushArgumentInstr::CompileType() const { | |
| 578 return AbstractType::null(); | |
| 579 } | |
| 580 | |
| 581 | |
| 582 void JoinEntryInstr::AddPredecessor(BlockEntryInstr* predecessor) { | 418 void JoinEntryInstr::AddPredecessor(BlockEntryInstr* predecessor) { |
| 583 // Require the predecessors to be sorted by block_id to make managing | 419 // Require the predecessors to be sorted by block_id to make managing |
| 584 // their corresponding phi inputs simpler. | 420 // their corresponding phi inputs simpler. |
| 585 intptr_t pred_id = predecessor->block_id(); | 421 intptr_t pred_id = predecessor->block_id(); |
| 586 intptr_t index = 0; | 422 intptr_t index = 0; |
| 587 while ((index < predecessors_.length()) && | 423 while ((index < predecessors_.length()) && |
| 588 (predecessors_[index]->block_id() < pred_id)) { | 424 (predecessors_[index]->block_id() < pred_id)) { |
| 589 ++index; | 425 ++index; |
| 590 } | 426 } |
| 591 #if defined(DEBUG) | 427 #if defined(DEBUG) |
| (...skipping 128 matching lines...) |
| 720 | 556 |
| 721 previous()->LinkTo(other); | 557 previous()->LinkTo(other); |
| 722 other->LinkTo(next()); | 558 other->LinkTo(next()); |
| 723 | 559 |
| 724 set_previous(NULL); | 560 set_previous(NULL); |
| 725 set_next(NULL); | 561 set_next(NULL); |
| 726 } | 562 } |
| 727 } | 563 } |
| 728 | 564 |
| 729 | 565 |
| 730 bool Definition::SetPropagatedCid(intptr_t cid) { | |
| 731 if (cid == kIllegalCid) { | |
| 732 return false; | |
| 733 } | |
| 734 if (propagated_cid_ == kIllegalCid) { | |
| 735 // First setting, nothing has changed. | |
| 736 propagated_cid_ = cid; | |
| 737 return false; | |
| 738 } | |
| 739 bool has_changed = (propagated_cid_ != cid); | |
| 740 propagated_cid_ = cid; | |
| 741 return has_changed; | |
| 742 } | |
| 743 | |
| 744 | |
| 745 intptr_t Definition::GetPropagatedCid() { | |
| 746 if (has_propagated_cid()) return propagated_cid(); | |
| 747 intptr_t cid = ResultCid(); | |
| 748 ASSERT(cid != kIllegalCid); | |
| 749 SetPropagatedCid(cid); | |
| 750 return cid; | |
| 751 } | |
| 752 | |
| 753 | |
| 754 intptr_t PhiInstr::GetPropagatedCid() { | |
| 755 return propagated_cid(); | |
| 756 } | |
| 757 | |
| 758 | |
| 759 intptr_t ParameterInstr::GetPropagatedCid() { | |
| 760 return propagated_cid(); | |
| 761 } | |
| 762 | |
| 763 | |
| 764 intptr_t AssertAssignableInstr::GetPropagatedCid() { | |
| 765 return propagated_cid(); | |
| 766 } | |
| 767 | |
| 768 | |
| 769 // ==== Postorder graph traversal. | 566 // ==== Postorder graph traversal. |
| 770 static bool IsMarked(BlockEntryInstr* block, | 567 static bool IsMarked(BlockEntryInstr* block, |
| 771 GrowableArray<BlockEntryInstr*>* preorder) { | 568 GrowableArray<BlockEntryInstr*>* preorder) { |
| 772 // Detect that a block has been visited as part of the current | 569 // Detect that a block has been visited as part of the current |
| 773 // DiscoverBlocks (we can call DiscoverBlocks multiple times). The block | 570 // DiscoverBlocks (we can call DiscoverBlocks multiple times). The block |
| 774 // will be 'marked' by (1) having a preorder number in the range of the | 571 // will be 'marked' by (1) having a preorder number in the range of the |
| 775 // preorder array and (2) being in the preorder array at that index. | 572 // preorder array and (2) being in the preorder array at that index. |
| 776 intptr_t i = block->preorder_number(); | 573 intptr_t i = block->preorder_number(); |
| 777 return (i >= 0) && (i < preorder->length()) && ((*preorder)[i] == block); | 574 return (i >= 0) && (i < preorder->length()) && ((*preorder)[i] == block); |
| 778 } | 575 } |
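Note (editorial): the marking test above avoids a per-block visited bit by exploiting two facts -- a discovered block's preorder number indexes into the current preorder array, and that slot points back at the block -- so stale numbers left over from earlier DiscoverBlocks runs are harmless. A standalone sketch of the same idea (types and names are illustrative, not the VM's):

```cpp
#include <vector>

struct Block {
  int preorder_number = -1;  // stale or -1 until discovered in the current traversal
};

static bool IsDiscovered(const Block* block, const std::vector<Block*>& preorder) {
  const int i = block->preorder_number;
  return (i >= 0) &&
         (i < static_cast<int>(preorder.size())) &&
         (preorder[i] == block);
}

int main() {
  Block stale, fresh;
  std::vector<Block*> preorder;
  stale.preorder_number = 0;  // left over from a previous traversal
  const bool stale_reads_unvisited = !IsDiscovered(&stale, preorder);  // out of range
  preorder.push_back(&fresh);
  fresh.preorder_number = 0;  // discovered now: in range and self-indexed
  const bool fresh_reads_visited = IsDiscovered(&fresh, preorder);
  return (stale_reads_unvisited && fresh_reads_visited) ? 0 : 1;
}
```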
| (...skipping 241 matching lines...) |
| 1020 ASSERT(index == 0); | 817 ASSERT(index == 0); |
| 1021 return successor(); | 818 return successor(); |
| 1022 } | 819 } |
| 1023 | 820 |
| 1024 | 821 |
| 1025 void Instruction::Goto(JoinEntryInstr* entry) { | 822 void Instruction::Goto(JoinEntryInstr* entry) { |
| 1026 LinkTo(new GotoInstr(entry)); | 823 LinkTo(new GotoInstr(entry)); |
| 1027 } | 824 } |
| 1028 | 825 |
| 1029 | 826 |
| 1030 RawAbstractType* Value::CompileType() const { | |
| 1031 if (definition()->HasPropagatedType()) { | |
| 1032 return definition()->PropagatedType(); | |
| 1033 } | |
| 1034 // The compile type may be requested when building the flow graph, i.e. before | |
| 1035 // type propagation has occurred. To avoid repeatedly computing the compile | |
| 1036 // type of the definition, we store it as initial propagated type. | |
| 1037 AbstractType& type = AbstractType::Handle(definition()->CompileType()); | |
| 1038 definition()->SetPropagatedType(type); | |
| 1039 return type.raw(); | |
| 1040 } | |
| 1041 | |
| 1042 | |
| 1043 intptr_t Value::ResultCid() const { | |
| 1044 if (reaching_cid() == kIllegalCid) { | |
| 1045 return definition()->GetPropagatedCid(); | |
| 1046 } | |
| 1047 return reaching_cid(); | |
| 1048 } | |
| 1049 | |
| 1050 | |
| 1051 | |
| 1052 RawAbstractType* ConstantInstr::CompileType() const { | |
| 1053 if (value().IsNull()) { | |
| 1054 return Type::NullType(); | |
| 1055 } | |
| 1056 if (value().IsInstance()) { | |
| 1057 return Instance::Cast(value()).GetType(); | |
| 1058 } else { | |
| 1059 ASSERT(value().IsAbstractTypeArguments()); | |
| 1060 return AbstractType::null(); | |
| 1061 } | |
| 1062 } | |
| 1063 | |
| 1064 | |
| 1065 intptr_t ConstantInstr::ResultCid() const { | |
| 1066 if (value().IsNull()) { | |
| 1067 return kNullCid; | |
| 1068 } | |
| 1069 if (value().IsInstance()) { | |
| 1070 return Class::Handle(value().clazz()).id(); | |
| 1071 } else { | |
| 1072 ASSERT(value().IsAbstractTypeArguments()); | |
| 1073 return kDynamicCid; | |
| 1074 } | |
| 1075 } | |
| 1076 | |
| 1077 | |
| 1078 RawAbstractType* AssertAssignableInstr::CompileType() const { | |
| 1079 const AbstractType& value_compile_type = | |
| 1080 AbstractType::Handle(value()->CompileType()); | |
| 1081 if (!value_compile_type.IsNull() && | |
| 1082 value_compile_type.IsMoreSpecificThan(dst_type(), NULL)) { | |
| 1083 return value_compile_type.raw(); | |
| 1084 } | |
| 1085 return dst_type().raw(); | |
| 1086 } | |
| 1087 | |
| 1088 | |
| 1089 RawAbstractType* AssertBooleanInstr::CompileType() const { | |
| 1090 return Type::BoolType(); | |
| 1091 } | |
| 1092 | |
| 1093 | |
| 1094 RawAbstractType* ArgumentDefinitionTestInstr::CompileType() const { | |
| 1095 return Type::BoolType(); | |
| 1096 } | |
| 1097 | |
| 1098 | |
| 1099 RawAbstractType* CurrentContextInstr::CompileType() const { | |
| 1100 return AbstractType::null(); | |
| 1101 } | |
| 1102 | |
| 1103 | |
| 1104 RawAbstractType* StoreContextInstr::CompileType() const { | |
| 1105 return AbstractType::null(); | |
| 1106 } | |
| 1107 | |
| 1108 | |
| 1109 RawAbstractType* ClosureCallInstr::CompileType() const { | |
| 1110 // Because of function subtyping rules, the declared return type of a closure | |
| 1111 // call cannot be relied upon for compile type analysis. For example, a | |
| 1112 // function returning dynamic can be assigned to a closure variable declared | |
| 1113 // to return int and may actually return a double at run-time. | |
| 1114 return Type::DynamicType(); | |
| 1115 } | |
| 1116 | |
| 1117 | |
| 1118 RawAbstractType* InstanceCallInstr::CompileType() const { | |
| 1119 // TODO(regis): Return a more specific type than dynamic for recognized | |
| 1120 // combinations of receiver type and method name. | |
| 1121 return Type::DynamicType(); | |
| 1122 } | |
| 1123 | |
| 1124 | |
| 1125 RawAbstractType* PolymorphicInstanceCallInstr::CompileType() const { | |
| 1126 return Type::DynamicType(); | |
| 1127 } | |
| 1128 | |
| 1129 | |
| 1130 RawAbstractType* StaticCallInstr::CompileType() const { | |
| 1131 if (FLAG_enable_type_checks) { | |
| 1132 return function().result_type(); | |
| 1133 } | |
| 1134 return Type::DynamicType(); | |
| 1135 } | |
| 1136 | |
| 1137 | |
| 1138 RawAbstractType* LoadLocalInstr::CompileType() const { | |
| 1139 if (FLAG_enable_type_checks) { | |
| 1140 return local().type().raw(); | |
| 1141 } | |
| 1142 return Type::DynamicType(); | |
| 1143 } | |
| 1144 | |
| 1145 | |
| 1146 RawAbstractType* StoreLocalInstr::CompileType() const { | |
| 1147 return value()->CompileType(); | |
| 1148 } | |
| 1149 | |
| 1150 | |
| 1151 RawAbstractType* StrictCompareInstr::CompileType() const { | |
| 1152 return Type::BoolType(); | |
| 1153 } | |
| 1154 | |
| 1155 | |
| 1156 // Only known == targets return a Boolean. | |
| 1157 RawAbstractType* EqualityCompareInstr::CompileType() const { | |
| 1158 if ((receiver_class_id() == kSmiCid) || | |
| 1159 (receiver_class_id() == kDoubleCid) || | |
| 1160 (receiver_class_id() == kNumberCid)) { | |
| 1161 return Type::BoolType(); | |
| 1162 } | |
| 1163 return Type::DynamicType(); | |
| 1164 } | |
| 1165 | |
| 1166 | |
| 1167 intptr_t EqualityCompareInstr::ResultCid() const { | |
| 1168 if ((receiver_class_id() == kSmiCid) || | |
| 1169 (receiver_class_id() == kDoubleCid) || | |
| 1170 (receiver_class_id() == kNumberCid)) { | |
| 1171 // Known/library equalities that are guaranteed to return Boolean. | |
| 1172 return kBoolCid; | |
| 1173 } | |
| 1174 return kDynamicCid; | |
| 1175 } | |
| 1176 | |
| 1177 | |
| 1178 bool EqualityCompareInstr::IsPolymorphic() const { | 827 bool EqualityCompareInstr::IsPolymorphic() const { |
| 1179 return HasICData() && | 828 return HasICData() && |
| 1180 (ic_data()->NumberOfChecks() > 0) && | 829 (ic_data()->NumberOfChecks() > 0) && |
| 1181 (ic_data()->NumberOfChecks() <= FLAG_max_polymorphic_checks); | 830 (ic_data()->NumberOfChecks() <= FLAG_max_polymorphic_checks); |
| 1182 } | 831 } |
| 1183 | 832 |
| 1184 | 833 |
| 1185 RawAbstractType* RelationalOpInstr::CompileType() const { | |
| 1186 if ((operands_class_id() == kSmiCid) || | |
| 1187 (operands_class_id() == kDoubleCid) || | |
| 1188 (operands_class_id() == kNumberCid)) { | |
| 1189 // Known/library relational ops that are guaranteed to return Boolean. | |
| 1190 return Type::BoolType(); | |
| 1191 } | |
| 1192 return Type::DynamicType(); | |
| 1193 } | |
| 1194 | |
| 1195 | |
| 1196 intptr_t RelationalOpInstr::ResultCid() const { | |
| 1197 if ((operands_class_id() == kSmiCid) || | |
| 1198 (operands_class_id() == kDoubleCid) || | |
| 1199 (operands_class_id() == kNumberCid)) { | |
| 1200 // Known/library relational ops that are guaranteed to return Boolean. | |
| 1201 return kBoolCid; | |
| 1202 } | |
| 1203 return kDynamicCid; | |
| 1204 } | |
| 1205 | |
| 1206 | |
| 1207 RawAbstractType* NativeCallInstr::CompileType() const { | |
| 1208 // The result type of the native function is identical to the result type of | |
| 1209 // the enclosing native Dart function. However, we prefer to check the type | |
| 1210 // of the value returned from the native call. | |
| 1211 return Type::DynamicType(); | |
| 1212 } | |
| 1213 | |
| 1214 | |
| 1215 RawAbstractType* StringFromCharCodeInstr::CompileType() const { | |
| 1216 return Type::StringType(); | |
| 1217 } | |
| 1218 | |
| 1219 | |
| 1220 RawAbstractType* LoadIndexedInstr::CompileType() const { | |
| 1221 switch (class_id_) { | |
| 1222 case kArrayCid: | |
| 1223 case kImmutableArrayCid: | |
| 1224 return Type::DynamicType(); | |
| 1225 case kFloat32ArrayCid : | |
| 1226 case kFloat64ArrayCid : | |
| 1227 return Type::Double(); | |
| 1228 case kInt8ArrayCid: | |
| 1229 case kUint8ArrayCid: | |
| 1230 case kUint8ClampedArrayCid: | |
| 1231 case kExternalUint8ArrayCid: | |
| 1232 case kExternalUint8ClampedArrayCid: | |
| 1233 case kInt16ArrayCid: | |
| 1234 case kUint16ArrayCid: | |
| 1235 case kInt32ArrayCid: | |
| 1236 case kUint32ArrayCid: | |
| 1237 case kOneByteStringCid: | |
| 1238 case kTwoByteStringCid: | |
| 1239 return Type::IntType(); | |
| 1240 default: | |
| 1241 UNIMPLEMENTED(); | |
| 1242 return Type::IntType(); | |
| 1243 } | |
| 1244 } | |
| 1245 | |
| 1246 | |
| 1247 RawAbstractType* StoreIndexedInstr::CompileType() const { | |
| 1248 return AbstractType::null(); | |
| 1249 } | |
| 1250 | |
| 1251 | |
| 1252 RawAbstractType* StoreInstanceFieldInstr::CompileType() const { | |
| 1253 return value()->CompileType(); | |
| 1254 } | |
| 1255 | |
| 1256 | |
| 1257 RawAbstractType* LoadStaticFieldInstr::CompileType() const { | |
| 1258 if (FLAG_enable_type_checks) { | |
| 1259 return field().type(); | |
| 1260 } | |
| 1261 return Type::DynamicType(); | |
| 1262 } | |
| 1263 | |
| 1264 | |
| 1265 RawAbstractType* StoreStaticFieldInstr::CompileType() const { | |
| 1266 return value()->CompileType(); | |
| 1267 } | |
| 1268 | |
| 1269 | |
| 1270 RawAbstractType* BooleanNegateInstr::CompileType() const { | |
| 1271 return Type::BoolType(); | |
| 1272 } | |
| 1273 | |
| 1274 | |
| 1275 RawAbstractType* InstanceOfInstr::CompileType() const { | |
| 1276 return Type::BoolType(); | |
| 1277 } | |
| 1278 | |
| 1279 | |
| 1280 RawAbstractType* CreateArrayInstr::CompileType() const { | |
| 1281 return type().raw(); | |
| 1282 } | |
| 1283 | |
| 1284 | |
| 1285 RawAbstractType* CreateClosureInstr::CompileType() const { | |
| 1286 const Function& fun = function(); | |
| 1287 const Class& signature_class = Class::Handle(fun.signature_class()); | |
| 1288 return signature_class.SignatureType(); | |
| 1289 } | |
| 1290 | |
| 1291 | |
| 1292 RawAbstractType* AllocateObjectInstr::CompileType() const { | |
| 1293 // TODO(regis): Be more specific. | |
| 1294 return Type::DynamicType(); | |
| 1295 } | |
| 1296 | |
| 1297 | |
| 1298 RawAbstractType* AllocateObjectWithBoundsCheckInstr::CompileType() const { | |
| 1299 // TODO(regis): Be more specific. | |
| 1300 return Type::DynamicType(); | |
| 1301 } | |
| 1302 | |
| 1303 | |
| 1304 RawAbstractType* LoadFieldInstr::CompileType() const { | |
| 1305 // Type may be null if the field is a VM field, e.g. context parent. | |
| 1306 // Keep it as null for debug purposes and do not return dynamic in production | |
| 1307 // mode, since misuse of the type would remain undetected. | |
| 1308 if (type().IsNull()) { | |
| 1309 return AbstractType::null(); | |
| 1310 } | |
| 1311 if (FLAG_enable_type_checks) { | |
| 1312 return type().raw(); | |
| 1313 } | |
| 1314 return Type::DynamicType(); | |
| 1315 } | |
| 1316 | |
| 1317 | |
| 1318 RawAbstractType* StoreVMFieldInstr::CompileType() const { | |
| 1319 return value()->CompileType(); | |
| 1320 } | |
| 1321 | |
| 1322 | |
| 1323 RawAbstractType* InstantiateTypeArgumentsInstr::CompileType() const { | |
| 1324 return AbstractType::null(); | |
| 1325 } | |
| 1326 | |
| 1327 | |
| 1328 RawAbstractType* ExtractConstructorTypeArgumentsInstr::CompileType() const { | |
| 1329 return AbstractType::null(); | |
| 1330 } | |
| 1331 | |
| 1332 | |
| 1333 RawAbstractType* ExtractConstructorInstantiatorInstr::CompileType() const { | |
| 1334 return AbstractType::null(); | |
| 1335 } | |
| 1336 | |
| 1337 | |
| 1338 RawAbstractType* AllocateContextInstr::CompileType() const { | |
| 1339 return AbstractType::null(); | |
| 1340 } | |
| 1341 | |
| 1342 | |
| 1343 RawAbstractType* ChainContextInstr::CompileType() const { | |
| 1344 return AbstractType::null(); | |
| 1345 } | |
| 1346 | |
| 1347 | |
| 1348 RawAbstractType* CloneContextInstr::CompileType() const { | |
| 1349 return AbstractType::null(); | |
| 1350 } | |
| 1351 | |
| 1352 | |
| 1353 RawAbstractType* CatchEntryInstr::CompileType() const { | |
| 1354 return AbstractType::null(); | |
| 1355 } | |
| 1356 | |
| 1357 | |
| 1358 RawAbstractType* CheckStackOverflowInstr::CompileType() const { | |
| 1359 return AbstractType::null(); | |
| 1360 } | |
| 1361 | |
| 1362 | |
| 1363 RawAbstractType* BinarySmiOpInstr::CompileType() const { | |
| 1364 return Type::SmiType(); | |
| 1365 } | |
| 1366 | |
| 1367 | |
| 1368 intptr_t BinarySmiOpInstr::ResultCid() const { | |
| 1369 return kSmiCid; | |
| 1370 } | |
| 1371 | |
| 1372 | |
| 1373 bool BinarySmiOpInstr::CanDeoptimize() const { | 834 bool BinarySmiOpInstr::CanDeoptimize() const { |
| 1374 switch (op_kind()) { | 835 switch (op_kind()) { |
| 1375 case Token::kBIT_AND: | 836 case Token::kBIT_AND: |
| 1376 case Token::kBIT_OR: | 837 case Token::kBIT_OR: |
| 1377 case Token::kBIT_XOR: | 838 case Token::kBIT_XOR: |
| 1378 return false; | 839 return false; |
| 1379 case Token::kSHR: { | 840 case Token::kSHR: { |
| 1380 // Can't deopt if shift-count is known positive. | 841 // Can't deopt if shift-count is known positive. |
| 1381 Range* right_range = this->right()->definition()->range(); | 842 Range* right_range = this->right()->definition()->range(); |
| 1382 return (right_range == NULL) | 843 return (right_range == NULL) |
| 1383 || !right_range->IsWithin(0, RangeBoundary::kPlusInfinity); | 844 || !right_range->IsWithin(0, RangeBoundary::kPlusInfinity); |
| 1384 } | 845 } |
| 1385 default: | 846 default: |
| 1386 return overflow_; | 847 return overflow_; |
| 1387 } | 848 } |
| 1388 } | 849 } |
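Note (editorial): the kSHR case keeps its deoptimization point only because a negative shift count must throw at the Dart level; once range analysis proves the count lies in [0, +inf), nothing can fail. A sketch of that predicate with a simplified stand-in for the VM's Range/RangeBoundary types (the real API differs):

```cpp
// Simplified interval stand-in; the VM's Range/RangeBoundary API is richer.
struct SimpleRange {
  long min;
  long max;
};

// A deopt check is required unless the shift count is provably non-negative.
static bool ShrCanDeoptimize(const SimpleRange* shift_count_range) {
  if (shift_count_range == nullptr) return true;  // no range info: stay conservative
  return shift_count_range->min < 0;              // count may be negative -> keep the deopt
}
```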
| 1389 | 850 |
| 1390 | 851 |
| 1391 bool BinarySmiOpInstr::RightIsPowerOfTwoConstant() const { | 852 bool BinarySmiOpInstr::RightIsPowerOfTwoConstant() const { |
| 1392 if (!right()->definition()->IsConstant()) return false; | 853 if (!right()->definition()->IsConstant()) return false; |
| 1393 const Object& constant = right()->definition()->AsConstant()->value(); | 854 const Object& constant = right()->definition()->AsConstant()->value(); |
| 1394 if (!constant.IsSmi()) return false; | 855 if (!constant.IsSmi()) return false; |
| 1395 const intptr_t int_value = Smi::Cast(constant).Value(); | 856 const intptr_t int_value = Smi::Cast(constant).Value(); |
| 1396 if (int_value == 0) return false; | 857 if (int_value == 0) return false; |
| 1397 return Utils::IsPowerOfTwo(Utils::Abs(int_value)); | 858 return Utils::IsPowerOfTwo(Utils::Abs(int_value)); |
| 1398 } | 859 } |
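Note (editorial): this predicate lets the backend strength-reduce arithmetic by a power-of-two constant; Utils::IsPowerOfTwo presumably uses the usual single-set-bit test (an assumption -- the helper's body is not part of this diff). A self-contained sketch of that test:

```cpp
#include <cassert>
#include <cstdlib>

// A positive integer is a power of two exactly when clearing its lowest set
// bit leaves zero.
static bool IsPowerOfTwoSketch(long x) {
  return (x > 0) && ((x & (x - 1)) == 0);
}

int main() {
  assert(IsPowerOfTwoSketch(std::labs(-8L)));  // matches the Utils::Abs use above: |-8| = 8
  assert(!IsPowerOfTwoSketch(12));
  assert(!IsPowerOfTwoSketch(0));              // zero is rejected, as in the instruction above
  return 0;
}
```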
| 1399 | 860 |
| 1400 | 861 |
| 1401 RawAbstractType* BinaryMintOpInstr::CompileType() const { | |
| 1402 return Type::IntType(); | |
| 1403 } | |
| 1404 | |
| 1405 | |
| 1406 intptr_t BinaryMintOpInstr::ResultCid() const { | |
| 1407 return kDynamicCid; | |
| 1408 } | |
| 1409 | |
| 1410 | |
| 1411 RawAbstractType* ShiftMintOpInstr::CompileType() const { | |
| 1412 return Type::IntType(); | |
| 1413 } | |
| 1414 | |
| 1415 | |
| 1416 intptr_t ShiftMintOpInstr::ResultCid() const { | |
| 1417 return kDynamicCid; | |
| 1418 } | |
| 1419 | |
| 1420 | |
| 1421 RawAbstractType* UnaryMintOpInstr::CompileType() const { | |
| 1422 return Type::IntType(); | |
| 1423 } | |
| 1424 | |
| 1425 | |
| 1426 intptr_t UnaryMintOpInstr::ResultCid() const { | |
| 1427 return kDynamicCid; | |
| 1428 } | |
| 1429 | |
| 1430 | |
| 1431 RawAbstractType* BinaryDoubleOpInstr::CompileType() const { | |
| 1432 return Type::Double(); | |
| 1433 } | |
| 1434 | |
| 1435 | |
| 1436 intptr_t BinaryDoubleOpInstr::ResultCid() const { | |
| 1437 // The output is not an instance but when it is boxed it becomes double. | |
| 1438 return kDoubleCid; | |
| 1439 } | |
| 1440 | |
| 1441 | |
| 1442 static bool ToIntegerConstant(Value* value, intptr_t* result) { | 862 static bool ToIntegerConstant(Value* value, intptr_t* result) { |
| 1443 if (!value->BindsToConstant()) { | 863 if (!value->BindsToConstant()) { |
| 1444 if (value->definition()->IsUnboxDouble()) { | 864 if (value->definition()->IsUnboxDouble()) { |
| 1445 return ToIntegerConstant(value->definition()->AsUnboxDouble()->value(), | 865 return ToIntegerConstant(value->definition()->AsUnboxDouble()->value(), |
| 1446 result); | 866 result); |
| 1447 } | 867 } |
| 1448 | 868 |
| 1449 return false; | 869 return false; |
| 1450 } | 870 } |
| 1451 | 871 |
| (...skipping 140 matching lines...) |
| 1592 right(), | 1012 right(), |
| 1593 left()); | 1013 left()); |
| 1594 if (result != NULL) { | 1014 if (result != NULL) { |
| 1595 return result; | 1015 return result; |
| 1596 } | 1016 } |
| 1597 | 1017 |
| 1598 return this; | 1018 return this; |
| 1599 } | 1019 } |
| 1600 | 1020 |
| 1601 | 1021 |
| 1602 RawAbstractType* MathSqrtInstr::CompileType() const { | |
| 1603 return Type::Double(); | |
| 1604 } | |
| 1605 | |
| 1606 | |
| 1607 RawAbstractType* UnboxDoubleInstr::CompileType() const { | |
| 1608 return Type::null(); | |
| 1609 } | |
| 1610 | |
| 1611 | |
| 1612 intptr_t BoxDoubleInstr::ResultCid() const { | |
| 1613 return kDoubleCid; | |
| 1614 } | |
| 1615 | |
| 1616 | |
| 1617 RawAbstractType* BoxDoubleInstr::CompileType() const { | |
| 1618 return Type::Double(); | |
| 1619 } | |
| 1620 | |
| 1621 | |
| 1622 intptr_t BoxIntegerInstr::ResultCid() const { | |
| 1623 return kDynamicCid; | |
| 1624 } | |
| 1625 | |
| 1626 | |
| 1627 RawAbstractType* BoxIntegerInstr::CompileType() const { | |
| 1628 return Type::IntType(); | |
| 1629 } | |
| 1630 | |
| 1631 | |
| 1632 intptr_t UnboxIntegerInstr::ResultCid() const { | |
| 1633 return kDynamicCid; | |
| 1634 } | |
| 1635 | |
| 1636 | |
| 1637 RawAbstractType* UnboxIntegerInstr::CompileType() const { | |
| 1638 return Type::null(); | |
| 1639 } | |
| 1640 | |
| 1641 | |
| 1642 RawAbstractType* UnarySmiOpInstr::CompileType() const { | |
| 1643 return Type::SmiType(); | |
| 1644 } | |
| 1645 | |
| 1646 | |
| 1647 RawAbstractType* SmiToDoubleInstr::CompileType() const { | |
| 1648 return Type::Double(); | |
| 1649 } | |
| 1650 | |
| 1651 | |
| 1652 RawAbstractType* DoubleToIntegerInstr::CompileType() const { | |
| 1653 return Type::IntType(); | |
| 1654 } | |
| 1655 | |
| 1656 | |
| 1657 RawAbstractType* DoubleToSmiInstr::CompileType() const { | |
| 1658 return Type::SmiType(); | |
| 1659 } | |
| 1660 | |
| 1661 | |
| 1662 RawAbstractType* DoubleToDoubleInstr::CompileType() const { | |
| 1663 return Type::Double(); | |
| 1664 } | |
| 1665 | |
| 1666 | |
| 1667 RawAbstractType* InvokeMathCFunctionInstr::CompileType() const { | |
| 1668 return Type::Double(); | |
| 1669 } | |
| 1670 | |
| 1671 | |
| 1672 RawAbstractType* CheckClassInstr::CompileType() const { | |
| 1673 return AbstractType::null(); | |
| 1674 } | |
| 1675 | |
| 1676 | |
| 1677 RawAbstractType* CheckSmiInstr::CompileType() const { | |
| 1678 return AbstractType::null(); | |
| 1679 } | |
| 1680 | |
| 1681 | |
| 1682 RawAbstractType* CheckArrayBoundInstr::CompileType() const { | |
| 1683 return AbstractType::null(); | |
| 1684 } | |
| 1685 | |
| 1686 | |
| 1687 RawAbstractType* CheckEitherNonSmiInstr::CompileType() const { | |
| 1688 return AbstractType::null(); | |
| 1689 } | |
| 1690 | |
| 1691 | |
| 1692 // Optimizations that eliminate or simplify individual instructions. | 1022 // Optimizations that eliminate or simplify individual instructions. |
| 1693 Instruction* Instruction::Canonicalize(FlowGraphOptimizer* optimizer) { | 1023 Instruction* Instruction::Canonicalize(FlowGraphOptimizer* optimizer) { |
| 1694 return this; | 1024 return this; |
| 1695 } | 1025 } |
| 1696 | 1026 |
| 1697 | 1027 |
| 1698 Definition* Definition::Canonicalize(FlowGraphOptimizer* optimizer) { | 1028 Definition* Definition::Canonicalize(FlowGraphOptimizer* optimizer) { |
| 1699 return this; | 1029 return this; |
| 1700 } | 1030 } |
| 1701 | 1031 |
| (...skipping 43 matching lines...) |
| 1745 | 1075 |
| 1746 Definition* LoadFieldInstr::Canonicalize(FlowGraphOptimizer* optimizer) { | 1076 Definition* LoadFieldInstr::Canonicalize(FlowGraphOptimizer* optimizer) { |
| 1747 if (!IsImmutableLengthLoad()) return this; | 1077 if (!IsImmutableLengthLoad()) return this; |
| 1748 | 1078 |
| 1749 // For fixed length arrays if the array is the result of a known constructor | 1079 // For fixed length arrays if the array is the result of a known constructor |
| 1750 // call we can replace the length load with the length argument passed to | 1080 // call we can replace the length load with the length argument passed to |
| 1751 // the constructor. | 1081 // the constructor. |
| 1752 StaticCallInstr* call = value()->definition()->AsStaticCall(); | 1082 StaticCallInstr* call = value()->definition()->AsStaticCall(); |
| 1753 if (call != NULL && | 1083 if (call != NULL && |
| 1754 call->is_known_constructor() && | 1084 call->is_known_constructor() && |
| 1755 call->ResultCid() == kArrayCid) { | 1085 (call->Type()->ToCid() == kArrayCid)) { |
| 1756 return call->ArgumentAt(1)->value()->definition(); | 1086 return call->ArgumentAt(1)->value()->definition(); |
| 1757 } | 1087 } |
| 1758 return this; | 1088 return this; |
| 1759 } | 1089 } |
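Note (editorial): the rewrite above replaces an immutable length load with the length argument of the recognized array constructor call, so the constant becomes visible to later passes. A loose plain-C++ analogy of why that is sound for a fixed-length container that is never resized:

```cpp
#include <cassert>
#include <vector>

int main() {
  const int n = 10;
  const std::vector<int> a(n);              // "constructor call" carrying the length argument
  assert(static_cast<int>(a.size()) == n);  // the later length read can be folded to n
  return 0;
}
```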
| 1760 | 1090 |
| 1761 | 1091 |
| 1762 Definition* AssertBooleanInstr::Canonicalize(FlowGraphOptimizer* optimizer) { | 1092 Definition* AssertBooleanInstr::Canonicalize(FlowGraphOptimizer* optimizer) { |
| 1763 const intptr_t value_cid = value()->ResultCid(); | 1093 if (FLAG_eliminate_type_checks && (value()->Type()->ToCid() == kBoolCid)) { |
| 1764 return (value_cid == kBoolCid) ? value()->definition() : this; | 1094 return value()->definition(); |
| 1095 } |
| 1096 |
| 1097 return this; |
| 1765 } | 1098 } |
| 1766 | 1099 |
| 1767 | 1100 |
| 1768 Definition* AssertAssignableInstr::Canonicalize(FlowGraphOptimizer* optimizer) { | 1101 Definition* AssertAssignableInstr::Canonicalize(FlowGraphOptimizer* optimizer) { |
| 1769 // (1) Replace the assert with its input if the input has a known compatible | 1102 if (FLAG_eliminate_type_checks && |
| 1770 // class-id. The class-ids handled here are those that are known to be | 1103 value()->Type()->IsAssignableTo(dst_type())) { |
| 1771 // results of IL instructions. | 1104 return value()->definition(); |
| 1772 intptr_t cid = value()->ResultCid(); | |
| 1773 bool is_redundant = false; | |
| 1774 if (dst_type().IsIntType()) { | |
| 1775 is_redundant = (cid == kSmiCid) || (cid == kMintCid); | |
| 1776 } else if (dst_type().IsDoubleType()) { | |
| 1777 is_redundant = (cid == kDoubleCid); | |
| 1778 } else if (dst_type().IsBoolType()) { | |
| 1779 is_redundant = (cid == kBoolCid); | |
| 1780 } | |
| 1781 if (is_redundant) return value()->definition(); | |
| 1782 | |
| 1783 // (2) Replace the assert with its input if the input is the result of a | |
| 1784 // compatible assert itself. | |
| 1785 AssertAssignableInstr* check = value()->definition()->AsAssertAssignable(); | |
| 1786 if ((check != NULL) && check->dst_type().Equals(dst_type())) { | |
| 1787 // TODO(fschneider): Propagate type-assertions across phi-nodes. | |
| 1788 // TODO(fschneider): Eliminate more asserts with subtype relation. | |
| 1789 return check; | |
| 1790 } | 1105 } |
| 1791 | 1106 |
| 1792 // (3) For uninstantiated target types: If the instantiator type arguments | 1107 // (3) For uninstantiated target types: If the instantiator type arguments |
| 1793 // are constant, instantiate the target type here. | 1108 // are constant, instantiate the target type here. |
| 1794 if (dst_type().IsInstantiated()) return this; | 1109 if (dst_type().IsInstantiated()) return this; |
| 1795 | 1110 |
| 1796 ConstantInstr* constant_type_args = | 1111 ConstantInstr* constant_type_args = |
| 1797 instantiator_type_arguments()->definition()->AsConstant(); | 1112 instantiator_type_arguments()->definition()->AsConstant(); |
| 1798 if (constant_type_args != NULL && | 1113 if (constant_type_args != NULL && |
| 1799 !constant_type_args->value().IsNull() && | 1114 !constant_type_args->value().IsNull() && |
| (...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1852 | 1167 |
| 1853 | 1168 |
| 1854 Definition* StrictCompareInstr::Canonicalize(FlowGraphOptimizer* optimizer) { | 1169 Definition* StrictCompareInstr::Canonicalize(FlowGraphOptimizer* optimizer) { |
| 1855 if (!right()->BindsToConstant()) return this; | 1170 if (!right()->BindsToConstant()) return this; |
| 1856 const Object& right_constant = right()->BoundConstant(); | 1171 const Object& right_constant = right()->BoundConstant(); |
| 1857 Definition* left_defn = left()->definition(); | 1172 Definition* left_defn = left()->definition(); |
| 1858 // TODO(fschneider): Handle other cases: e === false and e !== true/false. | 1173 // TODO(fschneider): Handle other cases: e === false and e !== true/false. |
| 1859 // Handles e === true. | 1174 // Handles e === true. |
| 1860 if ((kind() == Token::kEQ_STRICT) && | 1175 if ((kind() == Token::kEQ_STRICT) && |
| 1861 (right_constant.raw() == Bool::True().raw()) && | 1176 (right_constant.raw() == Bool::True().raw()) && |
| 1862 (left()->ResultCid() == kBoolCid)) { | 1177 (left()->Type()->ToCid() == kBoolCid)) { |
| 1863 // Return left subexpression as the replacement for this instruction. | 1178 // Return left subexpression as the replacement for this instruction. |
| 1864 return left_defn; | 1179 return left_defn; |
| 1865 } | 1180 } |
| 1866 return this; | 1181 return this; |
| 1867 } | 1182 } |
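Note (editorial): when the left operand's type is already known to be bool, `x === true` and `x` are the same value, which is what justifies dropping the comparison node. A tiny standalone check of that identity:

```cpp
#include <cassert>

static bool StrictEqTrue(bool x) { return x == true; }  // what the comparison computes for a bool x

int main() {
  const bool values[] = {false, true};
  for (bool x : values) {
    assert(StrictEqTrue(x) == x);  // identical results, so the compare can be replaced by x
  }
  return 0;
}
```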
| 1868 | 1183 |
| 1869 | 1184 |
| 1870 Instruction* CheckClassInstr::Canonicalize(FlowGraphOptimizer* optimizer) { | 1185 Instruction* CheckClassInstr::Canonicalize(FlowGraphOptimizer* optimizer) { |
| 1871 const intptr_t value_cid = value()->ResultCid(); | 1186 // TODO(vegorov): Replace class checks with null checks when ToNullableCid |
| 1187 // matches. |
| 1188 |
| 1189 const intptr_t value_cid = value()->Type()->ToCid(); |
| 1872 if (value_cid == kDynamicCid) { | 1190 if (value_cid == kDynamicCid) { |
| 1873 return this; | 1191 return this; |
| 1874 } | 1192 } |
| 1875 | 1193 |
| 1876 const intptr_t num_checks = unary_checks().NumberOfChecks(); | 1194 const intptr_t num_checks = unary_checks().NumberOfChecks(); |
| 1877 | 1195 |
| 1878 for (intptr_t i = 0; i < num_checks; i++) { | 1196 for (intptr_t i = 0; i < num_checks; i++) { |
| 1879 if (value_cid == unary_checks().GetReceiverClassIdAt(i)) { | 1197 if (value_cid == unary_checks().GetReceiverClassIdAt(i)) { |
| 1880 // No checks needed. | 1198 // No checks needed. |
| 1881 return NULL; | 1199 return NULL; |
| 1882 } | 1200 } |
| 1883 } | 1201 } |
| 1884 | 1202 |
| 1885 return this; | 1203 return this; |
| 1886 } | 1204 } |
| 1887 | 1205 |
| 1888 | 1206 |
| 1889 Instruction* CheckSmiInstr::Canonicalize(FlowGraphOptimizer* optimizer) { | 1207 Instruction* CheckSmiInstr::Canonicalize(FlowGraphOptimizer* optimizer) { |
| 1890 return (value()->ResultCid() == kSmiCid) ? NULL : this; | 1208 return (value()->Type()->ToCid() == kSmiCid) ? NULL : this; |
| 1891 } | 1209 } |
| 1892 | 1210 |
| 1893 | 1211 |
| 1894 Instruction* CheckEitherNonSmiInstr::Canonicalize( | 1212 Instruction* CheckEitherNonSmiInstr::Canonicalize( |
| 1895 FlowGraphOptimizer* optimizer) { | 1213 FlowGraphOptimizer* optimizer) { |
| 1896 if ((left()->ResultCid() == kDoubleCid) || | 1214 if ((left()->Type()->ToCid() == kDoubleCid) || |
| 1897 (right()->ResultCid() == kDoubleCid)) { | 1215 (right()->Type()->ToCid() == kDoubleCid)) { |
| 1898 return NULL; // Remove from the graph. | 1216 return NULL; // Remove from the graph. |
| 1899 } | 1217 } |
| 1900 return this; | 1218 return this; |
| 1901 } | 1219 } |
| 1902 | 1220 |
| 1903 | 1221 |
| 1904 // Shared code generation methods (EmitNativeCode, MakeLocationSummary, and | 1222 // Shared code generation methods (EmitNativeCode, MakeLocationSummary, and |
| 1905 // PrepareEntry). Only assembly code that can be shared across all architectures | 1223 // PrepareEntry). Only assembly code that can be shared across all architectures |
| 1906 // can be used. Machine specific register allocation and code generation | 1224 // can be used. Machine specific register allocation and code generation |
| 1907 // is located in intermediate_language_<arch>.cc | 1225 // is located in intermediate_language_<arch>.cc |
| (...skipping 198 matching lines...) |
| 2106 token_pos(), | 1424 token_pos(), |
| 2107 function(), | 1425 function(), |
| 2108 ArgumentCount(), | 1426 ArgumentCount(), |
| 2109 argument_names(), | 1427 argument_names(), |
| 2110 locs()); | 1428 locs()); |
| 2111 __ Bind(&skip_call); | 1429 __ Bind(&skip_call); |
| 2112 } | 1430 } |
| 2113 | 1431 |
| 2114 | 1432 |
| 2115 void AssertAssignableInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1433 void AssertAssignableInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2116 if (!is_eliminated()) { | 1434 compiler->GenerateAssertAssignable(token_pos(), |
| 2117 compiler->GenerateAssertAssignable(token_pos(), | 1435 deopt_id(), |
| 2118 deopt_id(), | 1436 dst_type(), |
| 2119 dst_type(), | 1437 dst_name(), |
| 2120 dst_name(), | 1438 locs()); |
| 2121 locs()); | |
| 2122 } | |
| 2123 ASSERT(locs()->in(0).reg() == locs()->out().reg()); | 1439 ASSERT(locs()->in(0).reg() == locs()->out().reg()); |
| 2124 } | 1440 } |
| 2125 | 1441 |
| 2126 | 1442 |
| 2127 Environment* Environment::From(const GrowableArray<Definition*>& definitions, | 1443 Environment* Environment::From(const GrowableArray<Definition*>& definitions, |
| 2128 intptr_t fixed_parameter_count, | 1444 intptr_t fixed_parameter_count, |
| 2129 const Function& function) { | 1445 const Function& function) { |
| 2130 Environment* env = | 1446 Environment* env = |
| 2131 new Environment(definitions.length(), | 1447 new Environment(definitions.length(), |
| 2132 fixed_parameter_count, | 1448 fixed_parameter_count, |
| (...skipping 251 matching lines...) |
| 2384 } | 1700 } |
| 2385 | 1701 |
| 2386 const intptr_t max_a = a.UpperBound().Clamp().value(); | 1702 const intptr_t max_a = a.UpperBound().Clamp().value(); |
| 2387 const intptr_t max_b = b.UpperBound().Clamp().value(); | 1703 const intptr_t max_b = b.UpperBound().Clamp().value(); |
| 2388 | 1704 |
| 2389 return RangeBoundary::FromConstant(Utils::Maximum(max_a, max_b)); | 1705 return RangeBoundary::FromConstant(Utils::Maximum(max_a, max_b)); |
| 2390 } | 1706 } |
| 2391 | 1707 |
| 2392 | 1708 |
| 2393 void Definition::InferRange() { | 1709 void Definition::InferRange() { |
| 2394 ASSERT(GetPropagatedCid() == kSmiCid); // Has meaning only for smis. | 1710 ASSERT(Type()->ToCid() == kSmiCid); // Has meaning only for smis. |
| 2395 if (range_ == NULL) { | 1711 if (range_ == NULL) { |
| 2396 range_ = Range::Unknown(); | 1712 range_ = Range::Unknown(); |
| 2397 } | 1713 } |
| 2398 } | 1714 } |
| 2399 | 1715 |
| 2400 | 1716 |
| 2401 void ConstantInstr::InferRange() { | 1717 void ConstantInstr::InferRange() { |
| 2402 ASSERT(value_.IsSmi()); | 1718 ASSERT(value_.IsSmi()); |
| 2403 if (range_ == NULL) { | 1719 if (range_ == NULL) { |
| 2404 intptr_t value = Smi::Cast(value_).Value(); | 1720 intptr_t value = Smi::Cast(value_).Value(); |
| (...skipping 463 matching lines...) |
| 2868 default: | 2184 default: |
| 2869 UNREACHABLE(); | 2185 UNREACHABLE(); |
| 2870 } | 2186 } |
| 2871 return kPowRuntimeEntry; | 2187 return kPowRuntimeEntry; |
| 2872 } | 2188 } |
| 2873 | 2189 |
| 2874 | 2190 |
| 2875 #undef __ | 2191 #undef __ |
| 2876 | 2192 |
| 2877 } // namespace dart | 2193 } // namespace dart |