Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(110)

Side by Side Diff: tools/lexer_generator/rule_parser.py

Issue 64913011: Experimental parser: implement skip (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 7 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « tools/lexer_generator/code_generator.py ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 # Copyright 2013 the V8 project authors. All rights reserved. 1 # Copyright 2013 the V8 project authors. All rights reserved.
2 # Redistribution and use in source and binary forms, with or without 2 # Redistribution and use in source and binary forms, with or without
3 # modification, are permitted provided that the following conditions are 3 # modification, are permitted provided that the following conditions are
4 # met: 4 # met:
5 # 5 #
6 # * Redistributions of source code must retain the above copyright 6 # * Redistributions of source code must retain the above copyright
7 # notice, this list of conditions and the following disclaimer. 7 # notice, this list of conditions and the following disclaimer.
8 # * Redistributions in binary form must reproduce the above 8 # * Redistributions in binary form must reproduce the above
9 # copyright notice, this list of conditions and the following 9 # copyright notice, this list of conditions and the following
10 # disclaimer in the documentation and/or other materials provided 10 # disclaimer in the documentation and/or other materials provided
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
45 self.transitions = set() 45 self.transitions = set()
46 46
47 def parse(self, string): 47 def parse(self, string):
48 return RuleParser.parse(string, self) 48 return RuleParser.parse(string, self)
49 49
50 class RuleParser: 50 class RuleParser:
51 51
52 tokens = RuleLexer.tokens 52 tokens = RuleLexer.tokens
53 __rule_precedence_counter = 0 53 __rule_precedence_counter = 0
54 __keyword_transitions = set([ 54 __keyword_transitions = set([
55 'continue', 'break', 'terminate', 'terminate_illegal']) 55 'continue', 'break', 'terminate', 'terminate_illegal', 'skip'])
56 56
57 def __init__(self): 57 def __init__(self):
58 self.__state = None 58 self.__state = None
59 59
60 def p_statements(self, p): 60 def p_statements(self, p):
61 'statements : aliases rules' 61 'statements : aliases rules'
62 62
63 def p_aliases(self, p): 63 def p_aliases(self, p):
64 '''aliases : alias_rule aliases 64 '''aliases : alias_rule aliases
65 | empty''' 65 | empty'''
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
104 | DEFAULT_ACTION code_or_token empty 104 | DEFAULT_ACTION code_or_token empty
105 | CATCH_ALL empty action''' 105 | CATCH_ALL empty action'''
106 transition = p[3] 106 transition = p[3]
107 if transition and not transition in self.__keyword_transitions: 107 if transition and not transition in self.__keyword_transitions:
108 assert not transition == 'default' 108 assert not transition == 'default'
109 self.__state.transitions.add(transition) 109 self.__state.transitions.add(transition)
110 RuleParser.__rule_precedence_counter += 1 110 RuleParser.__rule_precedence_counter += 1
111 rules = self.__state.rules[self.__state.current_state] 111 rules = self.__state.rules[self.__state.current_state]
112 code = p[2] 112 code = p[2]
113 if p[1] == 'default_action': 113 if p[1] == 'default_action':
114 assert self.__state.current_state == 'default'
114 assert not rules['default_action'] 115 assert not rules['default_action']
115 rules['default_action'] = code 116 rules['default_action'] = code
116 elif p[1] == 'catch_all': 117 elif p[1] == 'catch_all':
117 assert not rules['catch_all'] 118 assert not rules['catch_all']
118 rules['catch_all'] = transition 119 rules['catch_all'] = transition
119 else: 120 else:
120 rule = (p[1], RuleParser.__rule_precedence_counter, code, transition) 121 rule = (p[1], RuleParser.__rule_precedence_counter, code, transition)
121 rules['regex'].append(rule) 122 rules['regex'].append(rule)
122 123
123 def p_code_or_token(self, p): 124 def p_code_or_token(self, p):
(...skipping 157 matching lines...) Expand 10 before | Expand all | Expand 10 after
281 (code_type, code_value) = code if code else default_code 282 (code_type, code_value) = code if code else default_code
282 action = Action(code_type, code_value, precedence) 283 action = Action(code_type, code_value, precedence)
283 graph = NfaBuilder.add_action(graph, action) 284 graph = NfaBuilder.add_action(graph, action)
284 if not transition or transition == 'break': 285 if not transition or transition == 'break':
285 pass 286 pass
286 elif transition == 'continue': 287 elif transition == 'continue':
287 assert not k == 'default' 288 assert not k == 'default'
288 continues += 1 289 continues += 1
289 graph = NfaBuilder.add_continue(graph) 290 graph = NfaBuilder.add_continue(graph)
290 elif (transition == 'terminate' or 291 elif (transition == 'terminate' or
291 transition == 'terminate_illegal'): 292 transition == 'terminate_illegal' or
293 transition == 'skip'):
292 assert not code 294 assert not code
293 graph = NfaBuilder.add_action(graph, Action(transition, None, -1)) 295 graph = NfaBuilder.add_action(graph, Action(transition, None, -1))
294 else: 296 else:
295 assert k == 'default' 297 assert k == 'default'
296 subgraph_modifier = '*' if code else None 298 subgraph_modifier = '*' if code else None
297 graph = NfaBuilder.join_subgraph( 299 graph = NfaBuilder.join_subgraph(
298 graph, transition, rule_map[transition], subgraph_modifier) 300 graph, transition, rule_map[transition], subgraph_modifier)
299 graphs.append(graph) 301 graphs.append(graph)
300 if continues == len(graphs): 302 if continues == len(graphs):
301 graphs.append(NfaBuilder.epsilon()) 303 graphs.append(NfaBuilder.epsilon())
302 if v['catch_all']: 304 if v['catch_all']:
303 assert v['catch_all'] == 'continue' 305 assert v['catch_all'] == 'continue'
304 graphs.append(NfaBuilder.add_continue(NfaBuilder.catch_all())) 306 graphs.append(NfaBuilder.add_continue(NfaBuilder.catch_all()))
305 graph = NfaBuilder.or_graphs(graphs) 307 graph = NfaBuilder.or_graphs(graphs)
306 rule_map[k] = graph 308 rule_map[k] = graph
307 # process first the subgraphs, then the default graph 309 # process first the subgraphs, then the default graph
308 for k, v in parser_state.rules.items(): 310 for k, v in parser_state.rules.items():
309 if k == 'default': continue 311 if k == 'default': continue
310 process(k, v) 312 process(k, v)
311 process('default', parser_state.rules['default']) 313 process('default', parser_state.rules['default'])
312 # build the automata 314 # build the automata
313 for rule_name, graph in rule_map.items(): 315 for rule_name, graph in rule_map.items():
314 self.__automata[rule_name] = RuleProcessor.Automata(builder, graph) 316 self.__automata[rule_name] = RuleProcessor.Automata(builder, graph)
315 317
316 default_action = parser_state.rules['default']['default_action'] 318 default_action = parser_state.rules['default']['default_action']
317 self.default_action = Action(default_action[0], default_action[1]) if default_action else None 319 self.default_action = Action(default_action[0], default_action[1]) if default_action else None
OLDNEW
« no previous file with comments | « tools/lexer_generator/code_generator.py ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698