OLD | NEW |
(Empty) | |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 // |
| 5 // TokenStream is the interface between the lexer and the parser. The lexer |
| 6 // creates a TokenStream which the parser consumes. |
| 7 |
| 8 package lexer |
| 9 |
| 10 type TokenStream interface { |
| 11 // Returns the next Token in the stream without advancing the cursor, |
| 12 // or returns the EOF token if the cursor is already past the end. |
| 13 PeekNext() Token |
| 14 |
| 15 // Advances the cursor in the stream or does nothing if the cursor is |
 | 16 	// already past the end of the stream.
| 17 ConsumeNext() |
| 18 } |
| 19 |
| 20 // The EOF token is returned by TokenStream to signal the end of the stream. |
| 21 var eofToken = Token{Kind: EOF} |
| 22 |
| 23 // *TokenChan implements TokenStream. |
| 24 // This implementation uses a non-buffered channel to pass the tokens from the |
| 25 // lexer to the parser. One end of the channel is held by the lexer and the |
| 26 // other is in the TokenChan object that is passed to the parser. |
| 27 type TokenChan struct { |
| 28 tokenChan chan Token |
| 29 nextToken Token |
 | 30 	// read is true if a token has been read out of the channel into nextToken.
| 31 read bool |
| 32 } |
| 33 |
| 34 // See TokenStream. |
| 35 func (s *TokenChan) PeekNext() (token Token) { |
| 36 if !s.read { |
| 37 s.read = true |
| 38 s.ConsumeNext() |
| 39 } |
| 40 |
| 41 return s.nextToken |
| 42 } |
| 43 |
| 44 // See TokenStream. |
| 45 func (s *TokenChan) ConsumeNext() { |
| 46 if t, open := <-s.tokenChan; open { |
| 47 s.nextToken = t |
| 48 } else { |
| 49 s.nextToken = eofToken |
| 50 } |
| 51 } |
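
For context, here is a rough sketch (not part of the change) of how the two ends of the channel described in the TokenChan comment might be wired up. The newTokenChan and consumeAll helpers are hypothetical, introduced only to illustrate the Peek/Consume protocol; the actual lexer wiring may differ.

package lexer

// newTokenChan is a hypothetical helper: it starts a goroutine that feeds
// tokens into an unbuffered channel and returns the parser-facing end.
func newTokenChan(tokens []Token) *TokenChan {
	ch := make(chan Token)
	go func() {
		for _, t := range tokens {
			ch <- t // blocks until the parser consumes the previous token
		}
		close(ch) // a closed channel makes ConsumeNext yield the EOF token
	}()
	return &TokenChan{tokenChan: ch}
}

// consumeAll is a hypothetical parser loop: peek to inspect the next token,
// then consume to advance, stopping once the EOF token is reached.
func consumeAll(s TokenStream) []Token {
	var out []Token
	for s.PeekNext().Kind != EOF {
		out = append(out, s.PeekNext())
		s.ConsumeNext()
	}
	return out
}

Because the channel is unbuffered, the lexer goroutine only ever runs one token ahead of the parser: each send blocks until the parser's next ConsumeNext (or the first PeekNext) receives it.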