// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// TokenStream is the interface between the lexer and the parser. The lexer
// creates a TokenStream which the parser consumes.

package lexer

type TokenStream interface {
        // Returns the next Token in the stream without advancing the cursor,
        // or returns the EOF token if the cursor is already past the end.
        PeekNext() Token

        // Advances the cursor in the stream or does nothing if the cursor is
        // already past the end of the stream.
        ConsumeNext()
}
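A rough sketch of how a consumer such as the parser might drive this interface may help. The drain helper below is illustrative only and not part of this CL; it assumes, as the rest of this file does, that Token has a Kind field and that EOF is one of the token kinds:

// Illustrative only: a consumer loop over a TokenStream. PeekNext is
// called twice per iteration to show that peeking never advances the
// cursor; ConsumeNext is what moves it.
func drain(ts TokenStream) []Token {
        var tokens []Token
        for ts.PeekNext().Kind != EOF {
                tokens = append(tokens, ts.PeekNext())
                ts.ConsumeNext()
        }
        return tokens
}
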
// The EOF token is returned by TokenStream to signal the end of the stream.

rudominer 2015/10/12 18:06:06: The EOF token?
azani 2015/10/13 00:23:46: Done.
var eofToken = Token{Kind: EOF}

// *TokenChan implements TokenStream.
// This implementation uses a non-buffered channel to pass the tokens from the
// lexer to the parser. One end of the channel is held by the lexer and the
// other is in the TokenChan object that is passed to the parser.
type TokenChan struct {
        tokenChan chan Token
        nextToken Token
        // read is true if a token has been read out of the channel into nextToken.
        read bool
}

// See TokenStream.
func (s *TokenChan) PeekNext() (token Token) {
        if !s.read {
                s.read = true
                s.ConsumeNext()
        }

        return s.nextToken
}

// See TokenStream.
func (s *TokenChan) ConsumeNext() {
        if t, open := <-s.tokenChan; open {
                s.nextToken = t
        } else {
                s.nextToken = eofToken
        }
}
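For context, here is a minimal sketch of the producing side. The real lexer entry point is not part of this file, so NewTokenChan is an assumption about how the two halves could be wired together:

// Illustrative only: how a lexer might feed a *TokenChan. Because the
// channel is unbuffered, each send blocks until the parser consumes the
// token, and closing the channel is what makes ConsumeNext return the
// EOF token.
func NewTokenChan(tokens []Token) TokenStream {
        ch := make(chan Token)
        go func() {
                for _, t := range tokens {
                        ch <- t
                }
                close(ch)
        }()
        return &TokenChan{tokenChan: ch}
}
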
// *TokenSlice implements TokenStream.

rudominer 2015/10/12 18:06:06: I'm not sure anything is going to use TokenSlice.
azani 2015/10/13 00:23:46: Done.
// This implementation uses a slice in which the tokens to be streamed are
// placed. This is most useful for testing when there isn't a lexer to produce
// tokens.
type TokenSlice []Token

// See TokenStream.
func (slice *TokenSlice) PeekNext() (token Token) {
        if len(*slice) == 0 {
                token = eofToken
                return
        }
        token = (*slice)[0]
        return
}

// See TokenStream.
func (slice *TokenSlice) ConsumeNext() {
        if len(*slice) == 0 {
                return
        }
        *slice = (*slice)[1:]
}
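Finally, a sketch of the testing use mentioned above, written as if it lived in this package's test file (import "testing"). The token kind SOMEKIND is hypothetical; only EOF is defined in this file:

// Illustrative only: driving a TokenSlice directly in a test, with no
// lexer involved. SOMEKIND stands in for any non-EOF token kind.
func TestTokenSlice(t *testing.T) {
        stream := &TokenSlice{{Kind: SOMEKIND}, {Kind: SOMEKIND}}
        stream.ConsumeNext() // drop the first token
        if got := stream.PeekNext().Kind; got != SOMEKIND {
                t.Errorf("expected SOMEKIND, got %v", got)
        }
        stream.ConsumeNext() // now the slice is empty
        if got := stream.PeekNext().Kind; got != EOF {
                t.Errorf("expected EOF, got %v", got)
        }
}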