Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(34)

Side by Side Diff: tools/lexer_generator/generator.py

Issue 170253007: Experimental parser: always apply default transitions (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « tools/lexer_generator/dot_utilities.py ('k') | tools/lexer_generator/nfa.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 # Copyright 2013 the V8 project authors. All rights reserved. 1 # Copyright 2013 the V8 project authors. All rights reserved.
2 # Redistribution and use in source and binary forms, with or without 2 # Redistribution and use in source and binary forms, with or without
3 # modification, are permitted provided that the following conditions are 3 # modification, are permitted provided that the following conditions are
4 # met: 4 # met:
5 # 5 #
6 # * Redistributions of source code must retain the above copyright 6 # * Redistributions of source code must retain the above copyright
7 # notice, this list of conditions and the following disclaimer. 7 # notice, this list of conditions and the following disclaimer.
8 # * Redistributions in binary form must reproduce the above 8 # * Redistributions in binary form must reproduce the above
9 # copyright notice, this list of conditions and the following 9 # copyright notice, this list of conditions and the following
10 # disclaimer in the documentation and/or other materials provided 10 # disclaimer in the documentation and/or other materials provided
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
59 %s 59 %s
60 </script> 60 </script>
61 ''' 61 '''
62 62
63 load_template = ''' draw('%s', '%s');''' 63 load_template = ''' draw('%s', '%s');'''
64 64
65 load_outer_template = ''' <script> 65 load_outer_template = ''' <script>
66 %s 66 %s
67 </script>''' 67 </script>'''
68 68
def generate_html(rule_processor, minimize_default, merge):
  """Render every automaton known to rule_processor as an HTML page.

  For each automaton the NFA and DFA are always emitted; the minimized DFA
  is emitted only when it was computed and actually differs in node count
  from the plain DFA.

  Args:
    rule_processor: object exposing automata_iter() yielding (name, automata)
      pairs, where each automata has nfa(), dfa() and minimal_dfa().
    minimize_default: if False, skip minimizing the automaton named
      'default' (minimizing it can be expensive).
    merge: forwarded to automaton_to_dot for DFA/minimal-DFA rendering
      (controls edge merging in the dot output; the NFA is always rendered
      without it).

  Returns:
    A complete HTML document (file_template with scripts and load calls
    substituted in).
  """
  scripts = []
  loads = []
  # automata_iter() is consumed exactly once; no need to materialize a list.
  for i, (name, automata) in enumerate(rule_processor.automata_iter()):
    (nfa, dfa) = (automata.nfa(), automata.dfa())
    mdfa = None
    # Minimizing the (large) default automaton is optional.
    if name != 'default' or minimize_default:
      mdfa = automata.minimal_dfa()
    (nfa_i, dfa_i, mdfa_i) = ("nfa_%d" % i, "dfa_%d" % i, "mdfa_%d" % i)
    scripts.append(script_template % (nfa_i, automaton_to_dot(nfa)))
    loads.append(load_template % ("nfa [%s]" % name, nfa_i))
    scripts.append(script_template % (dfa_i, automaton_to_dot(dfa, merge)))
    loads.append(load_template % ("dfa [%s]" % name, dfa_i))
    # Only show the minimized DFA when minimization actually shrank it.
    if mdfa and mdfa.node_count() != dfa.node_count():
      scripts.append(script_template % (mdfa_i, automaton_to_dot(mdfa, merge)))
      loads.append(load_template % ("mdfa [%s]" % name, mdfa_i))
  body = "\n".join(scripts) + (load_outer_template % "\n".join(loads))
  return file_template % body
87 87
88 def generate_rule_tree_html(rule_processor): 88 def generate_rule_tree_html(rule_processor):
89 scripts = [] 89 scripts = []
90 loads = [] 90 loads = []
91 mapper = lambda x : map_characters(rule_processor.encoding(), x) 91 mapper = lambda x : map_characters(rule_processor.encoding(), x)
92 for i, (name, alias) in enumerate(list(rule_processor.alias_iter())): 92 for i, (name, alias) in enumerate(list(rule_processor.alias_iter())):
93 alias_i = "alias_%d" % i 93 alias_i = "alias_%d" % i
(...skipping 27 matching lines...) Expand all
121 s = StringIO.StringIO() 121 s = StringIO.StringIO()
122 sortby = 'cumulative' 122 sortby = 'cumulative'
123 ps = pstats.Stats(pr, stream=s).sort_stats(sortby) 123 ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
124 ps.print_stats() 124 ps.print_stats()
125 print s.getvalue() 125 print s.getvalue()
126 126
127 if __name__ == '__main__': 127 if __name__ == '__main__':
128 128
129 parser = argparse.ArgumentParser() 129 parser = argparse.ArgumentParser()
130 parser.add_argument('--html') 130 parser.add_argument('--html')
131 parser.add_argument('--no-merge-html', action='store_true')
131 parser.add_argument('--re', default='src/lexer/lexer_py.re') 132 parser.add_argument('--re', default='src/lexer/lexer_py.re')
132 parser.add_argument('--input') 133 parser.add_argument('--input')
133 parser.add_argument('--code') 134 parser.add_argument('--code')
134 parser.add_argument('--encoding', default='latin1') 135 parser.add_argument('--encoding', default='latin1')
135 parser.add_argument('--no-optimize-default', action='store_true') 136 parser.add_argument('--no-optimize-default', action='store_true')
136 parser.add_argument('--no-minimize-default', action='store_true') 137 parser.add_argument('--no-minimize-default', action='store_true')
137 parser.add_argument('--no-verify-default', action='store_true') 138 parser.add_argument('--no-verify-default', action='store_true')
138 parser.add_argument('--no-inline', action='store_true') 139 parser.add_argument('--no-inline', action='store_true')
139 parser.add_argument('--verbose', action='store_true') 140 parser.add_argument('--verbose', action='store_true')
140 parser.add_argument('--debug-code', action='store_true') 141 parser.add_argument('--debug-code', action='store_true')
(...skipping 21 matching lines...) Expand all
162 DfaMinimizer.set_verify(False) 163 DfaMinimizer.set_verify(False)
163 dfa = rule_processor.default_automata().dfa() 164 dfa = rule_processor.default_automata().dfa()
164 mdfa = rule_processor.default_automata().minimal_dfa() 165 mdfa = rule_processor.default_automata().minimal_dfa()
165 if verbose: 166 if verbose:
166 print "nodes reduced from %s to %s" % ( 167 print "nodes reduced from %s to %s" % (
167 dfa.node_count(), mdfa.node_count()) 168 dfa.node_count(), mdfa.node_count())
168 DfaMinimizer.set_verify(True) 169 DfaMinimizer.set_verify(True)
169 170
170 html_file = args.html 171 html_file = args.html
171 if html_file: 172 if html_file:
172 html = generate_html(rule_processor, minimize_default) 173 html = generate_html(
174 rule_processor, minimize_default, not args.no_merge_html)
173 with open(args.html, 'w') as f: 175 with open(args.html, 'w') as f:
174 f.write(html) 176 f.write(html)
175 if verbose: 177 if verbose:
176 print "wrote html to %s" % html_file 178 print "wrote html to %s" % html_file
177 179
178 rule_html_file = args.rule_html 180 rule_html_file = args.rule_html
179 if rule_html_file: 181 if rule_html_file:
180 html = generate_rule_tree_html(rule_processor) 182 html = generate_rule_tree_html(rule_processor)
181 with open(rule_html_file, 'w') as f: 183 with open(rule_html_file, 'w') as f:
182 f.write(html) 184 f.write(html)
(...skipping 13 matching lines...) Expand all
196 if verbose: 198 if verbose:
197 print "wrote code to %s" % code_file 199 print "wrote code to %s" % code_file
198 200
199 input_file = args.input 201 input_file = args.input
200 if input_file: 202 if input_file:
201 with open(input_file, 'r') as f: 203 with open(input_file, 'r') as f:
202 lex(rule_processor, f.read()) 204 lex(rule_processor, f.read())
203 205
204 if args.profile: 206 if args.profile:
205 stop_profiling(profiler) 207 stop_profiling(profiler)
OLDNEW
« no previous file with comments | « tools/lexer_generator/dot_utilities.py ('k') | tools/lexer_generator/nfa.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698