Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(72)

Side by Side Diff: tools/lexer_generator/rule_parser.py

Issue 69953022: Experimental parser: faster dfa minimization (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 7 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « tools/lexer_generator/generator.py ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 # Copyright 2013 the V8 project authors. All rights reserved. 1 # Copyright 2013 the V8 project authors. All rights reserved.
2 # Redistribution and use in source and binary forms, with or without 2 # Redistribution and use in source and binary forms, with or without
3 # modification, are permitted provided that the following conditions are 3 # modification, are permitted provided that the following conditions are
4 # met: 4 # met:
5 # 5 #
6 # * Redistributions of source code must retain the above copyright 6 # * Redistributions of source code must retain the above copyright
7 # notice, this list of conditions and the following disclaimer. 7 # notice, this list of conditions and the following disclaimer.
8 # * Redistributions in binary form must reproduce the above 8 # * Redistributions in binary form must reproduce the above
9 # copyright notice, this list of conditions and the following 9 # copyright notice, this list of conditions and the following
10 # disclaimer in the documentation and/or other materials provided 10 # disclaimer in the documentation and/or other materials provided
(...skipping 217 matching lines...) Expand 10 before | Expand all | Expand 10 after
228 return RuleProcessor(parser_state) 228 return RuleProcessor(parser_state)
229 229
def automata_iter(self):
  """Return an iterator over (rule_set_name, Automata) pairs."""
  automata_map = self.__automata
  return iter(automata_map.items())
232 232
def default_automata(self):
  """Return the Automata built for the 'default' rule set."""
  automata_map = self.__automata
  return automata_map['default']
235 235
class Automata(object):
  """Lazily-built automata for one rule set.

  Holds an NfaBuilder and the rule graph, and constructs the NFA, the
  DFA, and the minimal DFA only on first request, caching each result.
  This avoids paying for DFA construction/minimization of rule sets
  that are never queried.
  """

  def __init__(self, builder, graph):
    # builder: NfaBuilder used to turn the graph into an NFA on demand.
    # graph: the composed rule graph for this rule set.
    self.__builder = builder
    self.__graph = graph
    # Caches; None means "not built yet". Use `is None` checks below so
    # a falsy-but-valid cached object would not be rebuilt.
    self.__nfa = None
    self.__dfa = None
    # Fixed typo: was `__minimial_dfa` (internal name only; no external
    # callers can reach a name-mangled private attribute).
    self.__minimal_dfa = None

  def nfa(self):
    """Return the NFA for this rule set, building it on first call."""
    if self.__nfa is None:
      self.__nfa = self.__builder.nfa(self.__graph)
    return self.__nfa

  def dfa(self):
    """Return the DFA, computed from the NFA on first call."""
    if self.__dfa is None:
      (start, dfa_nodes) = self.nfa().compute_dfa()
      self.__dfa = Dfa(start, dfa_nodes)
    return self.__dfa

  def minimal_dfa(self):
    """Return the minimized DFA, minimizing on first call."""
    if self.__minimal_dfa is None:
      self.__minimal_dfa = self.dfa().minimize()
    return self.__minimal_dfa
254 260
255 def __process_parser_state(self, parser_state): 261 def __process_parser_state(self, parser_state):
256 rule_map = {} 262 rule_map = {}
257 builder = NfaBuilder() 263 builder = NfaBuilder()
258 builder.set_character_classes(parser_state.character_classes) 264 builder.set_character_classes(parser_state.character_classes)
259 assert 'default' in parser_state.rules 265 assert 'default' in parser_state.rules
260 def process(k, v): 266 def process(k, v):
261 graphs = [] 267 graphs = []
262 continues = 0 268 continues = 0
(...skipping 25 matching lines...) Expand all
288 graphs.append(NfaBuilder.add_continue(NfaBuilder.catch_all())) 294 graphs.append(NfaBuilder.add_continue(NfaBuilder.catch_all()))
289 graph = NfaBuilder.or_graphs(graphs) 295 graph = NfaBuilder.or_graphs(graphs)
290 rule_map[k] = graph 296 rule_map[k] = graph
291 # process first the subgraphs, then the default graph 297 # process first the subgraphs, then the default graph
292 for k, v in parser_state.rules.items(): 298 for k, v in parser_state.rules.items():
293 if k == 'default': continue 299 if k == 'default': continue
294 process(k, v) 300 process(k, v)
295 process('default', parser_state.rules['default']) 301 process('default', parser_state.rules['default'])
296 # build the automata 302 # build the automata
297 for rule_name, graph in rule_map.items(): 303 for rule_name, graph in rule_map.items():
298 nfa = builder.nfa(graph) 304 self.__automata[rule_name] = RuleProcessor.Automata(builder, graph)
299 self.__automata[rule_name] = RuleProcessor.Automata(nfa)
300 self.default_action = parser_state.rules['default']['default_action'] 305 self.default_action = parser_state.rules['default']['default_action']
OLDNEW
« no previous file with comments | « tools/lexer_generator/generator.py ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698