OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/object.h" | 5 #include "vm/object.h" |
6 | 6 |
7 #include "include/dart_api.h" | 7 #include "include/dart_api.h" |
8 #include "platform/assert.h" | 8 #include "platform/assert.h" |
9 #include "vm/assembler.h" | 9 #include "vm/assembler.h" |
10 #include "vm/cpu.h" | 10 #include "vm/cpu.h" |
(...skipping 7608 matching lines...) |
7619 const ExternalTypedData& stream = ExternalTypedData::Handle( | 7619 const ExternalTypedData& stream = ExternalTypedData::Handle( |
7620 ExternalTypedData::New(kExternalTypedDataUint8ArrayCid, | 7620 ExternalTypedData::New(kExternalTypedDataUint8ArrayCid, |
7621 data, len, Heap::kOld)); | 7621 data, len, Heap::kOld)); |
7622 stream.AddFinalizer(data, DataFinalizer); | 7622 stream.AddFinalizer(data, DataFinalizer); |
7623 const TokenStream& result = TokenStream::Handle(TokenStream::New()); | 7623 const TokenStream& result = TokenStream::Handle(TokenStream::New()); |
7624 result.SetStream(stream); | 7624 result.SetStream(stream); |
7625 return result.raw(); | 7625 return result.raw(); |
7626 } | 7626 } |
7627 | 7627 |
7628 | 7628 |
| 7629 // CompressedTokenMap maps String and LiteralToken keys to Smi values. |
| 7630 // It also supports lookup by Scanner::TokenDescriptor. |
| 7631 class CompressedTokenTraits { |
| 7632 public: |
| 7633 static bool IsMatch(const Scanner::TokenDescriptor& descriptor, |
| 7634 const Object& key) { |
| 7635 if (!key.IsLiteralToken()) { |
| 7636 return false; |
| 7637 } |
| 7638 const LiteralToken& token = LiteralToken::Cast(key); |
| 7639 return (token.literal() == descriptor.literal->raw()) && |
| 7640 (token.kind() == descriptor.kind); |
| 7641 } |
| 7642 |
| 7643 // Only for non-descriptor lookup and table expansion. |
| 7644 static bool IsMatch(const Object& a, const Object& b) { |
| 7645 return a.raw() == b.raw(); |
| 7646 } |
| 7647 |
| 7648 static uword Hash(const Scanner::TokenDescriptor& descriptor) { |
| 7649 return descriptor.literal->Hash(); |
| 7650 } |
| 7651 |
| 7652 static uword Hash(const Object& key) { |
| 7653 if (key.IsLiteralToken()) { |
| 7654 return String::HashRawSymbol(LiteralToken::Cast(key).literal()); |
| 7655 } else { |
| 7656 return String::Cast(key).Hash(); |
| 7657 } |
| 7658 } |
| 7659 }; |
| 7660 typedef UnorderedHashMap<CompressedTokenTraits> CompressedTokenMap; |
| 7661 |
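// Editor's sketch, not part of this CL: the traits class above lets one table be
// probed either with a stored LiteralToken key or with a lightweight
// Scanner::TokenDescriptor, because both hash to the literal's hash. A minimal
// standalone analogue of that dual-key lookup using only the standard library
// (the TokenKey/TokenDesc names, Kind enum, and reliance on C++20 heterogeneous
// unordered lookup are illustrative assumptions):
//
//   #include <cstddef>
//   #include <functional>
//   #include <iostream>
//   #include <string>
//   #include <string_view>
//   #include <unordered_map>
//
//   enum class Kind { kIdent, kInteger, kString };  // stand-in for Token::Kind
//
//   struct TokenKey {    // stored key, analogous to LiteralToken
//     Kind kind;
//     std::string literal;
//   };
//
//   struct TokenDesc {   // lookup-only key, analogous to Scanner::TokenDescriptor
//     Kind kind;
//     std::string_view literal;
//   };
//
//   struct TokenHash {
//     using is_transparent = void;
//     size_t operator()(const TokenKey& k) const {
//       return std::hash<std::string_view>{}(k.literal);
//     }
//     size_t operator()(const TokenDesc& d) const {
//       return std::hash<std::string_view>{}(d.literal);
//     }
//   };
//
//   struct TokenEq {
//     using is_transparent = void;
//     bool operator()(const TokenKey& a, const TokenKey& b) const {
//       return a.kind == b.kind && a.literal == b.literal;
//     }
//     bool operator()(const TokenDesc& d, const TokenKey& k) const {
//       return d.kind == k.kind && d.literal == k.literal;
//     }
//     bool operator()(const TokenKey& k, const TokenDesc& d) const {
//       return k.kind == d.kind && k.literal == d.literal;
//     }
//   };
//
//   int main() {
//     // Maps a deduplicated token to its index in the token objects array.
//     std::unordered_map<TokenKey, int, TokenHash, TokenEq> table;
//     table.emplace(TokenKey{Kind::kInteger, "42"}, 0);
//     // Probe with a descriptor; no TokenKey (and no string copy) is built.
//     auto it = table.find(TokenDesc{Kind::kInteger, "42"});
//     std::cout << (it == table.end() ? -1 : it->second) << "\n";  // prints 0
//   }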
| 7662 |
7629 // Helper class for creation of compressed token stream data. | 7663 // Helper class for creation of compressed token stream data. |
7630 class CompressedTokenStreamData : public ValueObject { | 7664 class CompressedTokenStreamData : public ValueObject { |
7631 public: | 7665 public: |
7632 static const intptr_t kInitialSize = 16 * KB; | 7666 static const intptr_t kInitialBufferSize = 16 * KB; |
7633 CompressedTokenStreamData() : | 7667 CompressedTokenStreamData() : |
7634 buffer_(NULL), | 7668 buffer_(NULL), |
7635 stream_(&buffer_, Reallocate, kInitialSize), | 7669 stream_(&buffer_, Reallocate, kInitialBufferSize), |
7636 token_objects_(GrowableObjectArray::Handle( | 7670 tokens_(Array::Handle( |
7637 GrowableObjectArray::New(kInitialTokenCount, Heap::kOld))), | 7671 HashTables::New<CompressedTokenMap>(kInitialTableSize))) { |
7638 token_obj_(Object::Handle()), | |
7639 literal_token_(LiteralToken::Handle()), | |
7640 literal_str_(String::Handle()) { | |
7641 token_objects_.Add(Object::null_string()); | |
7642 } | 7672 } |
7643 ~CompressedTokenStreamData() { | 7673 ~CompressedTokenStreamData() { |
| 7674 // Safe to discard the hash table now. |
| 7675 tokens_.Release(); |
7644 } | 7676 } |
7645 | 7677 |
7646 // Add an IDENT token into the stream and the token objects array. | 7678 // Add an IDENT token into the stream and the token hash map. |
7647 void AddIdentToken(const String* ident) { | 7679 void AddIdentToken(const String* ident) { |
7648 // If the IDENT token is already in the tokens object array use the | 7680 ASSERT(ident->IsSymbol()); |
7649 // same index instead of duplicating it. | 7681 intptr_t index = tokens_.NumOccupied(); |
7650 intptr_t index = FindIdentIndex(ident); | 7682 intptr_t entry; |
7651 if (index == -1) { | 7683 if (!tokens_.FindKeyOrDeletedOrUnused(*ident, &entry)) { |
7652 WriteIndex(token_objects_.Length()); | 7684 tokens_.InsertKey(entry, *ident); |
7653 ASSERT(ident != NULL); | 7685 tokens_.UpdatePayload(entry, 0, Smi::Handle(Smi::New(index))); |
7654 token_objects_.Add(*ident); | 7686 HashTables::EnsureLoadFactor(0.0, kMaxLoadFactor, tokens_); |
7655 } else { | 7687 } else { |
7656 WriteIndex(index); | 7688 index = Smi::Value(Smi::RawCast(tokens_.GetPayload(entry, 0))); |
7657 } | 7689 } |
| 7690 WriteIndex(index); |
7658 } | 7691 } |
7659 | 7692 |
7660 // Add a LITERAL token into the stream and the token objects array. | 7693 // Add a LITERAL token into the stream and the token hash map. |
7661 void AddLiteralToken(Token::Kind kind, const String* literal) { | 7694 void AddLiteralToken(const Scanner::TokenDescriptor& descriptor) { |
7662 // If the literal token is already in the tokens object array use the | 7695 ASSERT(descriptor.literal->IsSymbol()); |
7663 // same index instead of duplicating it. | 7696 intptr_t index = tokens_.NumOccupied(); |
7664 intptr_t index = FindLiteralIndex(kind, literal); | 7697 intptr_t entry; |
7665 if (index == -1) { | 7698 if (!tokens_.FindKeyOrDeletedOrUnused(descriptor, &entry)) { |
7666 WriteIndex(token_objects_.Length()); | 7699 LiteralToken& new_literal = LiteralToken::Handle( |
7667 ASSERT(literal != NULL); | 7700 LiteralToken::New(descriptor.kind, *descriptor.literal)); |
7668 literal_token_ = LiteralToken::New(kind, *literal); | 7701 tokens_.InsertKey(entry, new_literal); |
7669 token_objects_.Add(literal_token_); | 7702 tokens_.UpdatePayload(entry, 0, Smi::Handle(Smi::New(index))); |
| 7703 HashTables::EnsureLoadFactor(0.0, kMaxLoadFactor, tokens_); |
7670 } else { | 7704 } else { |
7671 WriteIndex(index); | 7705 index = Smi::Value(Smi::RawCast(tokens_.GetPayload(entry, 0))); |
7672 } | 7706 } |
| 7707 WriteIndex(index); |
7673 } | 7708 } |
7674 | 7709 |
7675 // Add a simple token into the stream. | 7710 // Add a simple token into the stream. |
7676 void AddSimpleToken(intptr_t kind) { | 7711 void AddSimpleToken(intptr_t kind) { |
7677 stream_.WriteUnsigned(kind); | 7712 stream_.WriteUnsigned(kind); |
7678 } | 7713 } |
7679 | 7714 |
7680 // Return the compressed token stream. | 7715 // Return the compressed token stream. |
7681 uint8_t* GetStream() const { return buffer_; } | 7716 uint8_t* GetStream() const { return buffer_; } |
7682 | 7717 |
7683 // Return the compressed token stream length. | 7718 // Return the compressed token stream length. |
7684 intptr_t Length() const { return stream_.bytes_written(); } | 7719 intptr_t Length() const { return stream_.bytes_written(); } |
7685 | 7720 |
7686 // Return the token objects array. | 7721 // Generate and return the token objects array. |
7687 const GrowableObjectArray& TokenObjects() const { | 7722 RawArray* MakeTokenObjectsArray() const { |
7688 return token_objects_; | 7723 Array& result = Array::Handle( |
| 7724 Array::New(tokens_.NumOccupied(), Heap::kOld)); |
| 7725 CompressedTokenMap::Iterator it(&tokens_); |
| 7726 Object& key = Object::Handle(); |
| 7727 while (it.MoveNext()) { |
| 7728 intptr_t entry = it.Current(); |
| 7729 key = tokens_.GetKey(entry); |
| 7730 result.SetAt(Smi::Value(Smi::RawCast(tokens_.GetPayload(entry, 0))), key); |
| 7731 } |
| 7732 return result.raw(); |
7689 } | 7733 } |
7690 | 7734 |
7691 private: | 7735 private: |
7692 intptr_t FindIdentIndex(const String* ident) { | |
7693 ASSERT(ident != NULL); | |
7694 ASSERT(ident->IsSymbol()); | |
7695 intptr_t hash_value = ident->Hash() % kTableSize; | |
7696 GrowableArray<intptr_t>& value = ident_table_[hash_value]; | |
7697 for (intptr_t i = 0; i < value.length(); i++) { | |
7698 intptr_t index = value[i]; | |
7699 if (token_objects_.At(index) == ident->raw()) { | |
7700 return index; | |
7701 } | |
7702 } | |
7703 value.Add(token_objects_.Length()); | |
7704 return -1; | |
7705 } | |
7706 | |
7707 intptr_t FindLiteralIndex(Token::Kind kind, const String* literal) { | |
7708 ASSERT(literal != NULL); | |
7709 ASSERT(literal->IsSymbol()); | |
7710 intptr_t hash_value = literal->Hash() % kTableSize; | |
7711 GrowableArray<intptr_t>& value = literal_table_[hash_value]; | |
7712 for (intptr_t i = 0; i < value.length(); i++) { | |
7713 intptr_t index = value[i]; | |
7714 token_obj_ = token_objects_.At(index); | |
7715 const LiteralToken& token = LiteralToken::Cast(token_obj_); | |
7716 if ((kind == token.kind()) && (token.literal() == literal->raw())) { | |
7717 return index; | |
7718 } | |
7719 } | |
7720 value.Add(token_objects_.Length()); | |
7721 return -1; | |
7722 } | |
7723 | |
7724 void WriteIndex(intptr_t value) { | 7736 void WriteIndex(intptr_t value) { |
7725 stream_.WriteUnsigned(value + Token::kNumTokens); | 7737 stream_.WriteUnsigned(value + Token::kNumTokens); |
7726 } | 7738 } |
7727 | 7739 |
7728 static uint8_t* Reallocate(uint8_t* ptr, | 7740 static uint8_t* Reallocate(uint8_t* ptr, |
7729 intptr_t old_size, | 7741 intptr_t old_size, |
7730 intptr_t new_size) { | 7742 intptr_t new_size) { |
7731 void* new_ptr = ::realloc(reinterpret_cast<void*>(ptr), new_size); | 7743 void* new_ptr = ::realloc(reinterpret_cast<void*>(ptr), new_size); |
7732 return reinterpret_cast<uint8_t*>(new_ptr); | 7744 return reinterpret_cast<uint8_t*>(new_ptr); |
7733 } | 7745 } |
7734 | 7746 |
7735 static const int kInitialTokenCount = 32; | 7747 static const intptr_t kInitialTableSize = 32; |
7736 static const intptr_t kTableSize = 1024; | 7748 static const double kMaxLoadFactor = 0.75; |
7737 | 7749 |
7738 uint8_t* buffer_; | 7750 uint8_t* buffer_; |
7739 WriteStream stream_; | 7751 WriteStream stream_; |
7740 GrowableArray<intptr_t> ident_table_[kTableSize]; | 7752 CompressedTokenMap tokens_; |
7741 GrowableArray<intptr_t> literal_table_[kTableSize]; | |
7742 const GrowableObjectArray& token_objects_; | |
7743 Object& token_obj_; | |
7744 LiteralToken& literal_token_; | |
7745 String& literal_str_; | |
7746 | 7753 |
7747 DISALLOW_COPY_AND_ASSIGN(CompressedTokenStreamData); | 7754 DISALLOW_COPY_AND_ASSIGN(CompressedTokenStreamData); |
7748 }; | 7755 }; |
7749 | 7756 |
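// Editor's sketch, not part of this CL: WriteIndex() stores references into the
// token objects array biased by Token::kNumTokens, so a reader of the stream can
// tell a simple token kind (value < kNumTokens) from an object-array index
// (value >= kNumTokens). A rough encode/decode illustration of that convention
// (the constant 128 and the helper names are hypothetical):
//
//   #include <cstdint>
//   #include <iostream>
//   #include <vector>
//
//   constexpr uint32_t kNumTokens = 128;  // stand-in for Token::kNumTokens
//
//   // Encode: simple token kinds are written as-is; references into the token
//   // objects array are shifted past the kind range, mirroring WriteIndex().
//   uint32_t EncodeKind(uint32_t kind) { return kind; }  // requires kind < kNumTokens
//   uint32_t EncodeObjectIndex(uint32_t index) { return index + kNumTokens; }
//
//   // Decode: anything >= kNumTokens is an index into the token objects array.
//   bool IsObjectIndex(uint32_t value) { return value >= kNumTokens; }
//   uint32_t DecodeObjectIndex(uint32_t value) { return value - kNumTokens; }
//
//   int main() {
//     std::vector<uint32_t> stream = {
//         EncodeKind(7), EncodeObjectIndex(0), EncodeObjectIndex(3)};
//     for (uint32_t v : stream) {
//       if (IsObjectIndex(v)) {
//         std::cout << "object index " << DecodeObjectIndex(v) << "\n";
//       } else {
//         std::cout << "token kind " << v << "\n";
//       }
//     }
//   }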
7750 | 7757 |
7751 RawTokenStream* TokenStream::New(const Scanner::GrowableTokenStream& tokens, | 7758 RawTokenStream* TokenStream::New(const Scanner::GrowableTokenStream& tokens, |
7752 const String& private_key) { | 7759 const String& private_key) { |
7753 // Copy the relevant data out of the scanner into a compressed stream of | 7760 // Copy the relevant data out of the scanner into a compressed stream of |
7754 // tokens. | 7761 // tokens. |
7755 CompressedTokenStreamData data; | 7762 CompressedTokenStreamData data; |
7756 intptr_t len = tokens.length(); | 7763 intptr_t len = tokens.length(); |
7757 for (intptr_t i = 0; i < len; i++) { | 7764 for (intptr_t i = 0; i < len; i++) { |
7758 Scanner::TokenDescriptor token = tokens[i]; | 7765 Scanner::TokenDescriptor token = tokens[i]; |
7759 if (token.kind == Token::kIDENT) { // Identifier token. | 7766 if (token.kind == Token::kIDENT) { // Identifier token. |
7760 if (FLAG_compiler_stats) { | 7767 if (FLAG_compiler_stats) { |
7761 CompilerStats::num_ident_tokens_total += 1; | 7768 CompilerStats::num_ident_tokens_total += 1; |
7762 } | 7769 } |
7763 data.AddIdentToken(token.literal); | 7770 data.AddIdentToken(token.literal); |
7764 } else if (Token::NeedsLiteralToken(token.kind)) { // Literal token. | 7771 } else if (Token::NeedsLiteralToken(token.kind)) { // Literal token. |
7765 if (FLAG_compiler_stats) { | 7772 if (FLAG_compiler_stats) { |
7766 CompilerStats::num_literal_tokens_total += 1; | 7773 CompilerStats::num_literal_tokens_total += 1; |
7767 } | 7774 } |
7768 data.AddLiteralToken(token.kind, token.literal); | 7775 data.AddLiteralToken(token); |
7769 } else { // Keyword, pseudo keyword etc. | 7776 } else { // Keyword, pseudo keyword etc. |
7770 ASSERT(token.kind < Token::kNumTokens); | 7777 ASSERT(token.kind < Token::kNumTokens); |
7771 data.AddSimpleToken(token.kind); | 7778 data.AddSimpleToken(token.kind); |
7772 } | 7779 } |
7773 } | 7780 } |
7774 if (FLAG_compiler_stats) { | 7781 if (FLAG_compiler_stats) { |
7775 CompilerStats::num_tokens_total += len; | 7782 CompilerStats::num_tokens_total += len; |
7776 } | 7783 } |
7777 data.AddSimpleToken(Token::kEOS); // End of stream. | 7784 data.AddSimpleToken(Token::kEOS); // End of stream. |
7778 | 7785 |
7779 // Create and setup the token stream object. | 7786 // Create and setup the token stream object. |
7780 const ExternalTypedData& stream = ExternalTypedData::Handle( | 7787 const ExternalTypedData& stream = ExternalTypedData::Handle( |
7781 ExternalTypedData::New(kExternalTypedDataUint8ArrayCid, | 7788 ExternalTypedData::New(kExternalTypedDataUint8ArrayCid, |
7782 data.GetStream(), data.Length(), Heap::kOld)); | 7789 data.GetStream(), data.Length(), Heap::kOld)); |
7783 stream.AddFinalizer(data.GetStream(), DataFinalizer); | 7790 stream.AddFinalizer(data.GetStream(), DataFinalizer); |
7784 const TokenStream& result = TokenStream::Handle(New()); | 7791 const TokenStream& result = TokenStream::Handle(New()); |
7785 result.SetPrivateKey(private_key); | 7792 result.SetPrivateKey(private_key); |
| 7793 const Array& token_objects = Array::Handle(data.MakeTokenObjectsArray()); |
7786 { | 7794 { |
7787 NoGCScope no_gc; | 7795 NoGCScope no_gc; |
7788 result.SetStream(stream); | 7796 result.SetStream(stream); |
7789 const Array& tokens = Array::Handle(Array::MakeArray(data.TokenObjects())); | 7797 result.SetTokenObjects(token_objects); |
7790 result.SetTokenObjects(tokens); | |
7791 } | 7798 } |
7792 return result.raw(); | 7799 return result.raw(); |
7793 } | 7800 } |
7794 | 7801 |
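// Editor's sketch, not part of this CL: the loop in TokenStream::New() combines
// the two encodings, deduplicating identifiers and literals into a token objects
// array (now via CompressedTokenMap) and emitting biased indices for them, while
// every other token is emitted as its raw kind. A compact standalone imitation of
// that flow, with made-up kind constants:
//
//   #include <cstdint>
//   #include <iostream>
//   #include <string>
//   #include <unordered_map>
//   #include <vector>
//
//   constexpr uint32_t kNumTokens = 128;  // bias, as in WriteIndex() above
//   constexpr uint32_t kEOS = 0;
//   constexpr uint32_t kIdent = 1;
//
//   struct Tok { uint32_t kind; std::string literal; };
//
//   int main() {
//     std::vector<Tok> input = {{kIdent, "x"}, {5, ""}, {kIdent, "x"}, {kEOS, ""}};
//     std::vector<std::string> objects;                  // dedup'd token objects
//     std::unordered_map<std::string, uint32_t> seen;    // literal -> objects index
//     std::vector<uint32_t> stream;                      // compressed stream
//
//     for (const Tok& t : input) {
//       if (t.kind == kIdent) {
//         auto [it, inserted] =
//             seen.emplace(t.literal, static_cast<uint32_t>(objects.size()));
//         if (inserted) objects.push_back(t.literal);
//         stream.push_back(it->second + kNumTokens);     // biased object index
//       } else {
//         stream.push_back(t.kind);                      // simple token kind
//       }
//     }
//     // Both uses of "x" share one entry: prints "stream length 4, objects 1".
//     std::cout << "stream length " << stream.size()
//               << ", objects " << objects.size() << "\n";
//   }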
7795 | 7802 |
7796 const char* TokenStream::ToCString() const { | 7803 const char* TokenStream::ToCString() const { |
7797 return "TokenStream"; | 7804 return "TokenStream"; |
7798 } | 7805 } |
7799 | 7806 |
7800 | 7807 |
(...skipping 11295 matching lines...) |
19096 return tag_label.ToCString(); | 19103 return tag_label.ToCString(); |
19097 } | 19104 } |
19098 | 19105 |
19099 | 19106 |
19100 void UserTag::PrintJSONImpl(JSONStream* stream, bool ref) const { | 19107 void UserTag::PrintJSONImpl(JSONStream* stream, bool ref) const { |
19101 Instance::PrintJSONImpl(stream, ref); | 19108 Instance::PrintJSONImpl(stream, ref); |
19102 } | 19109 } |
19103 | 19110 |
19104 | 19111 |
19105 } // namespace dart | 19112 } // namespace dart |