Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(72)

Side by Side Diff: tools/lexer_generator/nfa_builder.py

Issue 138973007: Experimental parser: support subgraph inlining (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « tools/gyp/v8.gyp ('k') | tools/lexer_generator/regex_lexer.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 # Copyright 2013 the V8 project authors. All rights reserved. 1 # Copyright 2013 the V8 project authors. All rights reserved.
2 # Redistribution and use in source and binary forms, with or without 2 # Redistribution and use in source and binary forms, with or without
3 # modification, are permitted provided that the following conditions are 3 # modification, are permitted provided that the following conditions are
4 # met: 4 # met:
5 # 5 #
6 # * Redistributions of source code must retain the above copyright 6 # * Redistributions of source code must retain the above copyright
7 # notice, this list of conditions and the following disclaimer. 7 # notice, this list of conditions and the following disclaimer.
8 # * Redistributions in binary form must reproduce the above 8 # * Redistributions in binary form must reproduce the above
9 # copyright notice, this list of conditions and the following 9 # copyright notice, this list of conditions and the following
10 # disclaimer in the documentation and/or other materials provided 10 # disclaimer in the documentation and/or other materials provided
(...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after
140 action = Action.from_term(action_term) 140 action = Action.from_term(action_term)
141 end = self.__new_state() 141 end = self.__new_state()
142 self.__patch_ends(ends, end) 142 self.__patch_ends(ends, end)
143 end.set_action(action) 143 end.set_action(action)
144 # Force all match actions to be terminal. 144 # Force all match actions to be terminal.
145 if action.match_action(): 145 if action.match_action():
146 global_end = self.__global_end() 146 global_end = self.__global_end()
147 end.add_epsilon_transition(global_end) 147 end.add_epsilon_transition(global_end)
148 return (start, [end]) 148 return (start, [end])
149 149
150 def __continue(self, subtree): 150 def __continue(self, subtree, depth):
151 'add an epsilon transitions to the start node of the current subtree' 151 'add an epsilon transitions to the start node of the current subtree'
152 (start, ends) = self.__process(subtree) 152 (start, ends) = self.__process(subtree)
153 state = self.__peek_state() 153 index = -1 - min(int(depth), len(self.__states) - 1)
154 state = self.__states[index]
154 if not state['start_node']: 155 if not state['start_node']:
155 state['start_node'] = self.__new_state() 156 state['start_node'] = self.__new_state()
156 self.__patch_ends(ends, state['start_node']) 157 self.__patch_ends(ends, state['start_node'])
157 return (start, []) 158 return (start, [])
158 159
159 def __unique_key(self, name): 160 def __unique_key(self, name):
160 return self.__key_state(TransitionKey.unique(name)) 161 return self.__key_state(TransitionKey.unique(name))
161 162
162 def __join(self, tree, name): 163 def __join(self, tree, name):
163 (subtree_start, subtree_end, nodes_in_subtree) = self.__nfa(name) 164 (subtree_start, subtree_end, nodes_in_subtree) = self.__nfa(name)
164 (start, ends) = self.__process(tree) 165 if tree:
165 self.__patch_ends(ends, subtree_start) 166 (start, ends) = self.__process(tree)
167 self.__patch_ends(ends, subtree_start)
168 else:
169 start = subtree_start
166 if subtree_end: 170 if subtree_end:
167 return (start, [subtree_end]) 171 return (start, [subtree_end])
168 else: 172 else:
169 return (start, []) 173 return (start, [])
170 174
171 def __get_method(self, name): 175 def __get_method(self, name):
172 'lazily build map of all private bound methods and return the one requested' 176 'lazily build map of all private bound methods and return the one requested'
173 if not self.__operation_map: 177 if not self.__operation_map:
174 prefix = "_NfaBuilder__" 178 prefix = "_NfaBuilder__"
175 self.__operation_map = {name[len(prefix):] : f for (name, f) in 179 self.__operation_map = {name[len(prefix):] : f for (name, f) in
(...skipping 13 matching lines...) Expand all
189 for state in self.__states: 193 for state in self.__states:
190 assert state['name'] != name, 'recursive subgraph' 194 assert state['name'] != name, 'recursive subgraph'
191 self.__states.append({ 195 self.__states.append({
192 'start_node' : None, 196 'start_node' : None,
193 'name' : name 197 'name' : name
194 }) 198 })
195 199
196 def __pop_state(self): 200 def __pop_state(self):
197 return self.__states.pop() 201 return self.__states.pop()
198 202
  def __peek_state(self):
    'return the innermost subgraph state without removing it'
    return self.__states[-1]
201
202 def __nfa(self, name): 203 def __nfa(self, name):
203 tree = self.__subtree_map[name] 204 tree = self.__subtree_map[name]
204 start_node_number = self.__node_number 205 start_node_number = self.__node_number
205 self.__push_state(name) 206 self.__push_state(name)
206 (start, ends) = self.__process(tree) 207 (start, ends) = self.__process(tree)
207 state = self.__pop_state() 208 state = self.__pop_state()
208 # move saved start node into start 209 # move saved start node into start
209 if state['start_node']: 210 if state['start_node']:
210 state['start_node'].close(start) 211 state['start_node'].close(start)
211 start = state['start_node'] 212 start = state['start_node']
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
264 self.__compute_epsilon_closures(start) 265 self.__compute_epsilon_closures(start)
265 f = lambda node, state: self.__replace_catch_all(self.__encoding, node) 266 f = lambda node, state: self.__replace_catch_all(self.__encoding, node)
266 Automaton.visit_states(set([start]), f) 267 Automaton.visit_states(set([start]), f)
267 return Nfa(self.__encoding, start, global_end_to_return, nodes_created) 268 return Nfa(self.__encoding, start, global_end_to_return, nodes_created)
268 269
269 @staticmethod 270 @staticmethod
270 def add_action(term, action): 271 def add_action(term, action):
271 return Term('ACTION', term, action.to_term()) 272 return Term('ACTION', term, action.to_term())
272 273
  @staticmethod
  def add_continue(tree, depth):
    'wrap `tree` in a CONTINUE term targeting the state `depth` levels up'
    return Term('CONTINUE', tree, depth)
276 277
  @staticmethod
  def unique_key(name):
    'build a UNIQUE_KEY term for `name`'
    return Term('UNIQUE_KEY', name)
280 281
  @staticmethod
  def join_subtree(tree, subtree_name):
    'build a JOIN term that splices the named subgraph in after `tree`'
    return Term('JOIN', tree, subtree_name)
284 285
285 @staticmethod 286 @staticmethod
286 def or_terms(terms): 287 def or_terms(terms):
287 return reduce(lambda acc, g: Term('OR', acc, g), terms) 288 return reduce(lambda acc, g: Term('OR', acc, g), terms)
288 289
289 @staticmethod 290 @staticmethod
290 def cat_terms(terms): 291 def cat_terms(terms):
291 return reduce(lambda acc, g: Term('CAT', acc, g), terms) 292 return reduce(lambda acc, g: Term('CAT', acc, g), terms)
292 293
293 __modifer_map = { 294 __modifer_map = {
294 '+': 'ONE_OR_MORE', 295 '+': 'ONE_OR_MORE',
295 '?': 'ZERO_OR_ONE', 296 '?': 'ZERO_OR_ONE',
296 '*': 'ZERO_OR_MORE', 297 '*': 'ZERO_OR_MORE',
297 } 298 }
298 299
299 @staticmethod 300 @staticmethod
300 def apply_modifier(modifier, term): 301 def apply_modifier(modifier, term):
301 return Term(NfaBuilder.__modifer_map[modifier], term) 302 return Term(NfaBuilder.__modifer_map[modifier], term)
OLDNEW
« no previous file with comments | « tools/gyp/v8.gyp ('k') | tools/lexer_generator/regex_lexer.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698