Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(95)

Side by Side Diff: tools/lexer_generator/dfa.py

Issue 153993014: Experimental parser: cleanup dfa.py (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « tools/lexer_generator/action.py ('k') | tools/lexer_generator/nfa.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 # Copyright 2013 the V8 project authors. All rights reserved. 1 # Copyright 2013 the V8 project authors. All rights reserved.
2 # Redistribution and use in source and binary forms, with or without 2 # Redistribution and use in source and binary forms, with or without
3 # modification, are permitted provided that the following conditions are 3 # modification, are permitted provided that the following conditions are
4 # met: 4 # met:
5 # 5 #
6 # * Redistributions of source code must retain the above copyright 6 # * Redistributions of source code must retain the above copyright
7 # notice, this list of conditions and the following disclaimer. 7 # notice, this list of conditions and the following disclaimer.
8 # * Redistributions in binary form must reproduce the above 8 # * Redistributions in binary form must reproduce the above
9 # copyright notice, this list of conditions and the following 9 # copyright notice, this list of conditions and the following
10 # disclaimer in the documentation and/or other materials provided 10 # disclaimer in the documentation and/or other materials provided
(...skipping 13 matching lines...) Expand all
24 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 from itertools import chain 28 from itertools import chain
29 from automaton import * 29 from automaton import *
30 from transition_keys import TransitionKey 30 from transition_keys import TransitionKey
31 31
class DfaState(AutomatonState):
  """A single state of a deterministic finite automaton.

  The transition map is constructed by the owning Dfa and handed in at
  construction time; this class only provides read access to it.
  """

  def __init__(self, name, action, transitions):
    super(DfaState, self).__init__()
    self.__name = name
    self.__transitions = transitions
    self.__action = action
    assert isinstance(action, Action)

  def name(self):
    """Returns the name identifying this state."""
    return self.__name

  def action(self):
    """Returns the Action attached to this state."""
    return self.__action

  def epsilon_closure_iter(self):
    """A DFA has no epsilon transitions, so the closure is always empty."""
    return iter([])

  def transition_state_for_key(self, value):
    """Returns the unique successor state for |value|, or None.

    Determinism guarantees at most one transition key matches.
    """
    successors = list(self.transition_state_iter_for_key(value))
    assert len(successors) <= 1
    if successors:
      return successors[0]
    return None

  def key_state_iter(
      self,
      key_filter = lambda x: True,
      state_filter = lambda x: True,
      match_func = lambda x, y: True,
      yield_func = lambda x, y: (x, y)):
    """Yields yield_func(key, state) for every transition passing the filters."""
    for transition_key, target in self.__transitions.items():
      if not key_filter(transition_key):
        continue
      if not state_filter(target):
        continue
      if match_func(transition_key, target):
        yield yield_func(transition_key, target)
71 64
72 class Dfa(Automaton): 65 class Dfa(Automaton):
73 66
67 @staticmethod
68 def __add_transition(transitions, key, state):
69 assert key != None
70 assert not key == TransitionKey.epsilon()
71 assert not transitions.has_key(key)
72 transitions[key] = state
73
74 def __init__(self, encoding, start_name, mapping): 74 def __init__(self, encoding, start_name, mapping):
75 super(Dfa, self).__init__(encoding) 75 super(Dfa, self).__init__(encoding)
76 self.__terminal_set = set() 76 self.__terminal_set = set()
77 name_map = {} 77 name_map = {}
78 for name, node_data in mapping.items(): 78 for name, node_data in mapping.items():
79 node = DfaState(name, node_data['action']) 79 transitions = {}
80 name_map[name] = node 80 node = DfaState(name, node_data['action'], transitions)
81 name_map[name] = (node, transitions)
81 if node_data['terminal']: 82 if node_data['terminal']:
82 self.__terminal_set.add(node) 83 self.__terminal_set.add(node)
83 for name, node_data in mapping.items(): 84 for name, node_data in mapping.items():
84 node = name_map[name] 85 (node, transitions) = name_map[name]
85 inversion = {} 86 inversion = {}
86 for key, state in node_data['transitions'].items(): 87 for key, state in node_data['transitions'].items():
87 if not state in inversion: 88 if not state in inversion:
88 inversion[state] = [] 89 inversion[state] = []
89 inversion[state].append(key) 90 inversion[state].append(key)
90 for state, keys in inversion.items(): 91 for state, keys in inversion.items():
91 merged_key = TransitionKey.merged_key(encoding, keys) 92 merged_key = TransitionKey.merged_key(encoding, keys)
92 node.add_transition(merged_key, name_map[state]) 93 self.__add_transition(transitions, merged_key, name_map[state][0])
93 self.__start = name_map[start_name] 94 self.__start = name_map[start_name][0]
94 self.__node_count = len(mapping) 95 self.__node_count = len(mapping)
95 self.__verify() 96 self.__verify()
96 97
97 def __verify(self): 98 def __verify(self):
98 assert self.__terminal_set 99 assert self.__terminal_set
99 state_count = self.visit_all_states(lambda state, count: count + 1, 0) 100 state_count = self.visit_all_states(lambda state, count: count + 1, 0)
100 assert self.__node_count == state_count 101 assert self.__node_count == state_count
101 102
102 def node_count(self): 103 def node_count(self):
103 return self.__node_count 104 return self.__node_count
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after
172 initial_partitions = {} 173 initial_partitions = {}
173 terminal_set = self.__dfa.terminal_set() 174 terminal_set = self.__dfa.terminal_set()
174 all_keys = [] # Will contain all TransitionKeys in the dfa. 175 all_keys = [] # Will contain all TransitionKeys in the dfa.
175 176
176 # f fills in initial_partitions, id_to_state and all_keys. 177 # f fills in initial_partitions, id_to_state and all_keys.
177 def f(state, visitor_state): 178 def f(state, visitor_state):
178 state_id = len(id_to_state) 179 state_id = len(id_to_state)
179 id_to_state[state_id] = state 180 id_to_state[state_id] = state
180 action = state.action() 181 action = state.action()
181 all_keys.append(state.key_iter()) 182 all_keys.append(state.key_iter())
182 if action: 183 if state in terminal_set:
183 if state not in terminal_set: 184 key = ("terminal set", action)
184 # Match actions are valid only if we have matched the whole token, not
185 # just some sub-part of it.
186 assert not action.match_action()
187 key = ("nonterminal action", action)
188 else:
189 key = ("terminal action", action)
190 elif state in terminal_set:
191 key = ("terminal set",)
192 else: 185 else:
193 key = ("nonterminal set",) 186 # Match actions are valid only if we have matched the whole token, not
187 # just some sub-part of it.
188 assert not action.match_action()
189 key = ("nonterminal set", action)
194 if not key in initial_partitions: 190 if not key in initial_partitions:
195 initial_partitions[key] = set() 191 initial_partitions[key] = set()
196 initial_partitions[key].add(state_id) 192 initial_partitions[key].add(state_id)
197 self.__dfa.visit_all_states(f) 193 self.__dfa.visit_all_states(f)
198 partitions = set() 194 partitions = set()
199 working_set = set() 195 working_set = set()
200 196
201 # Create StatePartition objects: 197 # Create StatePartition objects:
202 for k, p in initial_partitions.items(): 198 for k, p in initial_partitions.items():
203 p = StatePartition(p) 199 p = StatePartition(p)
(...skipping 175 matching lines...) Expand 10 before | Expand all | Expand 10 after
379 elif len(intersection) <= len(difference): 375 elif len(intersection) <= len(difference):
380 working_set.add(intersection) 376 working_set.add(intersection)
381 else: 377 else:
382 working_set.add(difference) 378 working_set.add(difference)
383 if old_partitions: 379 if old_partitions:
384 partitions -= old_partitions 380 partitions -= old_partitions
385 if new_partitions: 381 if new_partitions:
386 partitions |= new_partitions 382 partitions |= new_partitions
387 (start_name, mapping) = self.__create_states_from_partitions(partitions) 383 (start_name, mapping) = self.__create_states_from_partitions(partitions)
388 return Dfa(self.__dfa.encoding(), start_name, mapping) 384 return Dfa(self.__dfa.encoding(), start_name, mapping)
OLDNEW
« no previous file with comments | « tools/lexer_generator/action.py ('k') | tools/lexer_generator/nfa.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698