| OLD | NEW |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/object.h" | 5 #include "vm/object.h" |
| 6 | 6 |
| 7 #include "include/dart_api.h" | 7 #include "include/dart_api.h" |
| 8 #include "platform/assert.h" | 8 #include "platform/assert.h" |
| 9 #include "vm/assembler.h" | 9 #include "vm/assembler.h" |
| 10 #include "vm/cpu.h" | 10 #include "vm/cpu.h" |
| (...skipping 8438 matching lines...) |
| 8449 return String::HashRawSymbol(LiteralToken::Cast(key).literal()); | 8449 return String::HashRawSymbol(LiteralToken::Cast(key).literal()); |
| 8450 } else { | 8450 } else { |
| 8451 return String::Cast(key).Hash(); | 8451 return String::Cast(key).Hash(); |
| 8452 } | 8452 } |
| 8453 } | 8453 } |
| 8454 }; | 8454 }; |
| 8455 typedef UnorderedHashMap<CompressedTokenTraits> CompressedTokenMap; | 8455 typedef UnorderedHashMap<CompressedTokenTraits> CompressedTokenMap; |
| 8456 | 8456 |
| 8457 | 8457 |
| 8458 // Helper class for creation of compressed token stream data. | 8458 // Helper class for creation of compressed token stream data. |
| 8459 class CompressedTokenStreamData : public ValueObject { | 8459 class CompressedTokenStreamData : public Scanner::TokenCollector { |
| 8460 public: | 8460 public: |
| 8461 static const intptr_t kInitialBufferSize = 16 * KB; | 8461 static const intptr_t kInitialBufferSize = 16 * KB; |
| 8462 static const bool kPrintTokenObjects = false; | 8462 static const bool kPrintTokenObjects = false; |
| 8463 | 8463 |
| 8464 CompressedTokenStreamData(const GrowableObjectArray& ta, | 8464 CompressedTokenStreamData(const GrowableObjectArray& ta, |
| 8465 CompressedTokenMap* map) : | 8465 CompressedTokenMap* map) : |
| 8466 buffer_(NULL), | 8466 buffer_(NULL), |
| 8467 stream_(&buffer_, Reallocate, kInitialBufferSize), | 8467 stream_(&buffer_, Reallocate, kInitialBufferSize), |
| 8468 token_objects_(ta), | 8468 token_objects_(ta), |
| 8469 tokens_(map), | 8469 tokens_(map), |
| 8470 value_(Object::Handle()), | 8470 value_(Object::Handle()), |
| 8471 fresh_index_smi_(Smi::Handle()) { | 8471 fresh_index_smi_(Smi::Handle()), |
| | 8472 num_tokens_collected_(0) { |
| | 8473 } |
| | 8474 virtual ~CompressedTokenStreamData() { } |
| | 8475 |
| | 8476 virtual void AddToken(const Scanner::TokenDescriptor& token) { |
| | 8477 if (token.kind == Token::kIDENT) { // Identifier token. |
| | 8478 AddIdentToken(*token.literal); |
| | 8479 } else if (Token::NeedsLiteralToken(token.kind)) { // Literal token. |
| | 8480 AddLiteralToken(token); |
| | 8481 } else { // Keyword, pseudo keyword etc. |
| | 8482 ASSERT(token.kind < Token::kNumTokens); |
| | 8483 AddSimpleToken(token.kind); |
| | 8484 } |
| | 8485 num_tokens_collected_++; |
| 8472 } | 8486 } |
| 8473 | 8487 |
| | 8488 // Return the compressed token stream. |
| | 8489 uint8_t* GetStream() const { return buffer_; } |
| | 8490 |
| | 8491 // Return the compressed token stream length. |
| | 8492 intptr_t Length() const { return stream_.bytes_written(); } |
| | 8493 |
| | 8494 intptr_t NumTokens() const { return num_tokens_collected_; } |
| | 8495 |
| | 8496 private: |
| 8474 // Add an IDENT token into the stream and the token hash map. | 8497 // Add an IDENT token into the stream and the token hash map. |
| 8475 void AddIdentToken(const String& ident) { | 8498 void AddIdentToken(const String& ident) { |
| 8476 ASSERT(ident.IsSymbol()); | 8499 ASSERT(ident.IsSymbol()); |
| 8477 const intptr_t fresh_index = token_objects_.Length(); | 8500 const intptr_t fresh_index = token_objects_.Length(); |
| 8478 fresh_index_smi_ = Smi::New(fresh_index); | 8501 fresh_index_smi_ = Smi::New(fresh_index); |
| 8479 intptr_t index = Smi::Value(Smi::RawCast( | 8502 intptr_t index = Smi::Value(Smi::RawCast( |
| 8480 tokens_->InsertOrGetValue(ident, fresh_index_smi_))); | 8503 tokens_->InsertOrGetValue(ident, fresh_index_smi_))); |
| 8481 if (index == fresh_index) { | 8504 if (index == fresh_index) { |
| 8482 token_objects_.Add(ident); | 8505 token_objects_.Add(ident); |
| 8483 if (kPrintTokenObjects) { | 8506 if (kPrintTokenObjects) { |
| (...skipping 30 matching lines...) |
| 8514 } | 8537 } |
| 8515 } | 8538 } |
| 8516 WriteIndex(index); | 8539 WriteIndex(index); |
| 8517 } | 8540 } |
| 8518 | 8541 |
| 8519 // Add a simple token into the stream. | 8542 // Add a simple token into the stream. |
| 8520 void AddSimpleToken(intptr_t kind) { | 8543 void AddSimpleToken(intptr_t kind) { |
| 8521 stream_.WriteUnsigned(kind); | 8544 stream_.WriteUnsigned(kind); |
| 8522 } | 8545 } |
| 8523 | 8546 |
| 8524 // Return the compressed token stream. | |
| 8525 uint8_t* GetStream() const { return buffer_; } | |
| 8526 | |
| 8527 // Return the compressed token stream length. | |
| 8528 intptr_t Length() const { return stream_.bytes_written(); } | |
| 8529 | |
| 8530 private: | |
| 8531 void WriteIndex(intptr_t value) { | 8547 void WriteIndex(intptr_t value) { |
| 8532 stream_.WriteUnsigned(value + Token::kNumTokens); | 8548 stream_.WriteUnsigned(value + Token::kNumTokens); |
| 8533 } | 8549 } |
| 8534 | 8550 |
| 8535 static uint8_t* Reallocate(uint8_t* ptr, | 8551 static uint8_t* Reallocate(uint8_t* ptr, |
| 8536 intptr_t old_size, | 8552 intptr_t old_size, |
| 8537 intptr_t new_size) { | 8553 intptr_t new_size) { |
| 8538 void* new_ptr = ::realloc(reinterpret_cast<void*>(ptr), new_size); | 8554 void* new_ptr = ::realloc(reinterpret_cast<void*>(ptr), new_size); |
| 8539 return reinterpret_cast<uint8_t*>(new_ptr); | 8555 return reinterpret_cast<uint8_t*>(new_ptr); |
| 8540 } | 8556 } |
| 8541 | 8557 |
| 8542 uint8_t* buffer_; | 8558 uint8_t* buffer_; |
| 8543 WriteStream stream_; | 8559 WriteStream stream_; |
| 8544 const GrowableObjectArray& token_objects_; | 8560 const GrowableObjectArray& token_objects_; |
| 8545 CompressedTokenMap* tokens_; | 8561 CompressedTokenMap* tokens_; |
| 8546 Object& value_; | 8562 Object& value_; |
| 8547 Smi& fresh_index_smi_; | 8563 Smi& fresh_index_smi_; |
| | 8564 intptr_t num_tokens_collected_; |
| 8548 | 8565 |
| 8549 DISALLOW_COPY_AND_ASSIGN(CompressedTokenStreamData); | 8566 DISALLOW_COPY_AND_ASSIGN(CompressedTokenStreamData); |
| 8550 }; | 8567 }; |
| 8551 | 8568 |
| 8552 | 8569 |
| 8553 RawTokenStream* TokenStream::New(const Scanner::GrowableTokenStream& tokens, | 8570 RawTokenStream* TokenStream::New(const String& source, |
| 8554 const String& private_key, | 8571 const String& private_key, |
| 8555 bool use_shared_tokens) { | 8572 bool use_shared_tokens) { |
| 8556 Thread* thread = Thread::Current(); | 8573 Thread* thread = Thread::Current(); |
| 8557 Zone* zone = thread->zone(); | 8574 Zone* zone = thread->zone(); |
| 8558 // Copy the relevant data out of the scanner into a compressed stream of | |
| 8559 // tokens. | |
| 8560 | 8575 |
| 8561 GrowableObjectArray& token_objects = GrowableObjectArray::Handle(zone); | 8576 GrowableObjectArray& token_objects = GrowableObjectArray::Handle(zone); |
| 8562 Array& token_objects_map = Array::Handle(zone); | 8577 Array& token_objects_map = Array::Handle(zone); |
| 8563 if (use_shared_tokens) { | 8578 if (use_shared_tokens) { |
| 8564 // Use the shared token objects array in the object store. Allocate | 8579 // Use the shared token objects array in the object store. Allocate |
| 8565 // a new array if necessary. | 8580 // a new array if necessary. |
| 8566 ObjectStore* store = thread->isolate()->object_store(); | 8581 ObjectStore* store = thread->isolate()->object_store(); |
| 8567 if (store->token_objects() == GrowableObjectArray::null()) { | 8582 if (store->token_objects() == GrowableObjectArray::null()) { |
| 8568 OpenSharedTokenList(thread->isolate()); | 8583 OpenSharedTokenList(thread->isolate()); |
| 8569 } | 8584 } |
| 8570 token_objects = store->token_objects(); | 8585 token_objects = store->token_objects(); |
| 8571 token_objects_map = store->token_objects_map(); | 8586 token_objects_map = store->token_objects_map(); |
| 8572 } else { | 8587 } else { |
| 8573 // Use new, non-shared token array. | 8588 // Use new, non-shared token array. |
| 8574 const int kInitialPrivateCapacity = 256; | 8589 const int kInitialPrivateCapacity = 256; |
| 8575 token_objects = | 8590 token_objects = |
| 8576 GrowableObjectArray::New(kInitialPrivateCapacity, Heap::kOld); | 8591 GrowableObjectArray::New(kInitialPrivateCapacity, Heap::kOld); |
| 8577 token_objects_map = | 8592 token_objects_map = |
| 8578 HashTables::New<CompressedTokenMap>(kInitialPrivateCapacity, | 8593 HashTables::New<CompressedTokenMap>(kInitialPrivateCapacity, |
| 8579 Heap::kOld); | 8594 Heap::kOld); |
| 8580 } | 8595 } |
| 8581 CompressedTokenMap map(token_objects_map.raw()); | 8596 CompressedTokenMap map(token_objects_map.raw()); |
| 8582 CompressedTokenStreamData data(token_objects, &map); | 8597 CompressedTokenStreamData data(token_objects, &map); |
| 8583 | 8598 Scanner scanner(source, private_key); |
| 8584 intptr_t len = tokens.length(); | 8599 scanner.ScanAll(&data); |
| 8585 for (intptr_t i = 0; i < len; i++) { | 8600 INC_STAT(thread, num_tokens_scanned, data.NumTokens()); |
| 8586 Scanner::TokenDescriptor token = tokens[i]; | |
| 8587 if (token.kind == Token::kIDENT) { // Identifier token. | |
| 8588 data.AddIdentToken(*token.literal); | |
| 8589 } else if (Token::NeedsLiteralToken(token.kind)) { // Literal token. | |
| 8590 data.AddLiteralToken(token); | |
| 8591 } else { // Keyword, pseudo keyword etc. | |
| 8592 ASSERT(token.kind < Token::kNumTokens); | |
| 8593 data.AddSimpleToken(token.kind); | |
| 8594 } | |
| 8595 } | |
| 8596 data.AddSimpleToken(Token::kEOS); // End of stream. | |
| 8597 | 8601 |
| 8598 // Create and setup the token stream object. | 8602 // Create and setup the token stream object. |
| 8599 const ExternalTypedData& stream = ExternalTypedData::Handle( | 8603 const ExternalTypedData& stream = ExternalTypedData::Handle( |
| 8600 zone, | 8604 zone, |
| 8601 ExternalTypedData::New(kExternalTypedDataUint8ArrayCid, | 8605 ExternalTypedData::New(kExternalTypedDataUint8ArrayCid, |
| 8602 data.GetStream(), data.Length(), Heap::kOld)); | 8606 data.GetStream(), data.Length(), Heap::kOld)); |
| 8603 stream.AddFinalizer(data.GetStream(), DataFinalizer); | 8607 stream.AddFinalizer(data.GetStream(), DataFinalizer); |
| 8604 const TokenStream& result = TokenStream::Handle(zone, New()); | 8608 const TokenStream& result = TokenStream::Handle(zone, New()); |
| 8605 result.SetPrivateKey(private_key); | 8609 result.SetPrivateKey(private_key); |
| 8606 { | 8610 { |
| (...skipping 327 matching lines...) |
| 8934 Zone* zone = thread->zone(); | 8938 Zone* zone = thread->zone(); |
| 8935 const TokenStream& tkns = TokenStream::Handle(zone, tokens()); | 8939 const TokenStream& tkns = TokenStream::Handle(zone, tokens()); |
| 8936 if (!tkns.IsNull()) { | 8940 if (!tkns.IsNull()) { |
| 8937 // Already tokenized. | 8941 // Already tokenized. |
| 8938 return; | 8942 return; |
| 8939 } | 8943 } |
| 8940 // Get the source, scan and allocate the token stream. | 8944 // Get the source, scan and allocate the token stream. |
| 8941 VMTagScope tagScope(thread, VMTag::kCompileScannerTagId); | 8945 VMTagScope tagScope(thread, VMTag::kCompileScannerTagId); |
| 8942 CSTAT_TIMER_SCOPE(thread, scanner_timer); | 8946 CSTAT_TIMER_SCOPE(thread, scanner_timer); |
| 8943 const String& src = String::Handle(zone, Source()); | 8947 const String& src = String::Handle(zone, Source()); |
| 8944 Scanner scanner(src, private_key); | 8948 const TokenStream& ts = TokenStream::Handle(zone, |
| 8945 const Scanner::GrowableTokenStream& ts = scanner.GetStream(); | 8949 TokenStream::New(src, private_key, use_shared_tokens)); |
| 8946 INC_STAT(thread, num_tokens_scanned, ts.length()); | 8950 set_tokens(ts); |
| 8947 set_tokens(TokenStream::Handle(zone, | |
| 8948 TokenStream::New(ts, private_key, use_shared_tokens))); | |
| 8949 INC_STAT(thread, src_length, src.Length()); | 8951 INC_STAT(thread, src_length, src.Length()); |
| 8950 } | 8952 } |
| 8951 | 8953 |
| 8952 | 8954 |
| 8953 void Script::SetLocationOffset(intptr_t line_offset, | 8955 void Script::SetLocationOffset(intptr_t line_offset, |
| 8954 intptr_t col_offset) const { | 8956 intptr_t col_offset) const { |
| 8955 ASSERT(line_offset >= 0); | 8957 ASSERT(line_offset >= 0); |
| 8956 ASSERT(col_offset >= 0); | 8958 ASSERT(col_offset >= 0); |
| 8957 StoreNonPointer(&raw_ptr()->line_offset_, line_offset); | 8959 StoreNonPointer(&raw_ptr()->line_offset_, line_offset); |
| 8958 StoreNonPointer(&raw_ptr()->col_offset_, col_offset); | 8960 StoreNonPointer(&raw_ptr()->col_offset_, col_offset); |
| (...skipping 13496 matching lines...) |
| 22455 return UserTag::null(); | 22457 return UserTag::null(); |
| 22456 } | 22458 } |
| 22457 | 22459 |
| 22458 | 22460 |
| 22459 const char* UserTag::ToCString() const { | 22461 const char* UserTag::ToCString() const { |
| 22460 const String& tag_label = String::Handle(label()); | 22462 const String& tag_label = String::Handle(label()); |
| 22461 return tag_label.ToCString(); | 22463 return tag_label.ToCString(); |
| 22462 } | 22464 } |
| 22463 | 22465 |
| 22464 } // namespace dart | 22466 } // namespace dart |
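
The change above switches TokenStream::New from consuming a pre-built Scanner::GrowableTokenStream to driving the scanner through a Scanner::TokenCollector callback: CompressedTokenStreamData now overrides AddToken and receives tokens while Scanner::ScanAll runs. Below is a minimal, self-contained sketch of that collector pattern only; the types in it (TokenKind, TokenDescriptor, TokenCollector, Scanner, CountingCollector) are simplified stand-ins for illustration and are not the Dart VM's real declarations.

// Minimal sketch of the token-collector pattern; simplified stand-in types,
// not the Dart VM's actual Scanner API.
#include <cctype>
#include <cstdio>
#include <string>
#include <vector>

enum class TokenKind { kIdent, kNumber, kEOS };

struct TokenDescriptor {
  TokenKind kind;
  std::string literal;  // Meaningful only for identifier/number tokens.
};

// Counterpart of Scanner::TokenCollector: consumers override AddToken() and
// receive tokens one by one while the scanner runs, instead of iterating a
// fully materialized token list afterwards.
class TokenCollector {
 public:
  virtual ~TokenCollector() {}
  virtual void AddToken(const TokenDescriptor& token) = 0;
};

// Toy scanner: splits the source on spaces, classifies each chunk, and pushes
// it straight to the collector, finishing with an end-of-stream token.
class Scanner {
 public:
  explicit Scanner(const std::string& source) : source_(source) {}

  void ScanAll(TokenCollector* collector) {
    std::string current;
    for (char c : source_ + ' ') {
      if (c == ' ') {
        if (!current.empty()) {
          const bool is_number =
              std::isdigit(static_cast<unsigned char>(current[0])) != 0;
          collector->AddToken(
              {is_number ? TokenKind::kNumber : TokenKind::kIdent, current});
          current.clear();
        }
      } else {
        current.push_back(c);
      }
    }
    collector->AddToken({TokenKind::kEOS, ""});  // End-of-stream marker.
  }

 private:
  std::string source_;
};

// Rough analogue of CompressedTokenStreamData: records tokens as they arrive
// and keeps a running count, so no intermediate growable token array exists.
class CountingCollector : public TokenCollector {
 public:
  void AddToken(const TokenDescriptor& token) override {
    num_tokens_++;
    if (token.kind != TokenKind::kEOS) literals_.push_back(token.literal);
  }
  int num_tokens() const { return num_tokens_; }

 private:
  int num_tokens_ = 0;
  std::vector<std::string> literals_;
};

int main() {
  CountingCollector data;
  Scanner scanner("var x 42");
  scanner.ScanAll(&data);  // Tokens are streamed into the collector.
  std::printf("collected %d tokens\n", data.num_tokens());  // 4, incl. kEOS.
  return 0;
}

In the change itself, TokenStream::New plays the role of main() here: it constructs the Scanner over the script source and passes the CompressedTokenStreamData collector to ScanAll, which is why the explicit token loop and the trailing Token::kEOS write disappear from the OLD column.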