Chromium Code Reviews

Unified Diff: mojom/mojom_parser/lexer/token_stream.go

Issue 1387893002: New lexer for mojom written in go. (Closed) Base URL: https://github.com/domokit/mojo.git@master
Patch Set: Created 5 years, 2 months ago
Index: mojom/mojom_parser/lexer/token_stream.go
diff --git a/mojom/mojom_parser/lexer/token_stream.go b/mojom/mojom_parser/lexer/token_stream.go
new file mode 100644
index 0000000000000000000000000000000000000000..56394f554286406ad0cbc8fc42a98180a4a95c0e
--- /dev/null
+++ b/mojom/mojom_parser/lexer/token_stream.go
@@ -0,0 +1,75 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// TokenStream is the interface between the lexer and the parser. The lexer
+// creates a TokenStream which the parser consumes.
+
+package lexer
+
+type TokenStream interface {
+ // Returns the next Token in the stream without advancing the cursor,
+ // or returns the EOF token if the cursor is already past the end.
+ PeekNext() Token
+
+ // Advances the cursor in the stream, or does nothing if the cursor is
+ // already past the end of the stream.
+ ConsumeNext()
+}
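For illustration only, not part of this patch: a consumer such as the parser would typically loop on PeekNext and ConsumeNext until the EOF token appears. A minimal sketch, assuming it lives in package lexer alongside this file; the helper name collectTokens is hypothetical.

func collectTokens(ts TokenStream) []Token {
    var tokens []Token
    for {
        t := ts.PeekNext()
        // Once the stream is exhausted, PeekNext keeps returning the EOF token.
        if t.Kind == EOF {
            return tokens
        }
        tokens = append(tokens, t)
        ts.ConsumeNext()
    }
}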
+
+// An EOF token is returned by TokenStream to signal the end of the stream.
rudominer 2015/10/12 18:06:06 The EOF token?
azani 2015/10/13 00:23:46 Done.
+var eofToken = Token{Kind: EOF}
+
+// *TokenChan implements TokenStream.
+// This implementation uses an unbuffered channel to pass the tokens from the
+// lexer to the parser. One end of the channel is held by the lexer and the
+// other is in the TokenChan object that is passed to the parser.
+type TokenChan struct {
+ tokenChan chan Token
+ nextToken Token
+ // read is true if a token has been read out of the channel into nextToken.
+ read bool
+}
+
+// See TokenStream.
+func (s *TokenChan) PeekNext() (token Token) {
+ if !s.read {
+ s.read = true
+ s.ConsumeNext()
+ }
+
+ return s.nextToken
+}
+
+// See TokenStream.
+func (s *TokenChan) ConsumeNext() {
+ if t, open := <-s.tokenChan; open {
+ s.nextToken = t
+ } else {
+ s.nextToken = eofToken
+ }
+}
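For illustration only, not part of this patch: the send end of the channel is held by the lexer, which is not shown in this file. A minimal in-package sketch of how the two ends could be wired up, for instance in a test; the helper name newTokenChanForTest and its behavior are assumptions, not code from this CL.

func newTokenChanForTest(tokens []Token) *TokenChan {
    ch := make(chan Token)
    go func() {
        for _, t := range tokens {
            ch <- t // blocks until the consumer reads, since the channel is unbuffered
        }
        close(ch) // once closed, ConsumeNext starts reporting the EOF token
    }()
    return &TokenChan{tokenChan: ch}
}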
+
+// *TokenSlice implements TokenStream.
rudominer 2015/10/12 18:06:06 I'm not sure anything is going to use TokenSlice.
azani 2015/10/13 00:23:46 Done.
+// This implementation uses a slice in which the tokens to be streamed are
+// placed. This is most useful for testing when there isn't a lexer to produce
+// tokens.
+type TokenSlice []Token
+
+// See TokenStream.
+func (slice *TokenSlice) PeekNext() (token Token) {
+ if len(*slice) == 0 {
+ token = eofToken
+ return
+ }
+ token = (*slice)[0]
+ return
+}
+
+// See TokenStream.
+func (slice *TokenSlice) ConsumeNext() {
+ if len(*slice) == 0 {
+ return
+ }
+ *slice = (*slice)[1:]
+}
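For illustration only, not part of this patch (and, per the review thread above, TokenSlice may not survive later patch sets): a minimal in-package usage sketch. The token literal is a placeholder; EOF is the only Kind value visible in this file.

func exampleTokenSliceUsage() {
    slice := TokenSlice{{Kind: EOF}} // placeholder token; real tests would use real kinds
    var ts TokenStream = &slice      // *TokenSlice satisfies TokenStream
    _ = ts.PeekNext()                // returns the first token without consuming it
    ts.ConsumeNext()                 // drops the first token from the slice
}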
