Chromium Code Reviews

Unified diff: tools/lexer_generator/dfa_optimizer.py

Issue 180213003: Experimental parser: cleanup logging (Closed)
Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 9 months ago
 # Copyright 2013 the V8 project authors. All rights reserved.
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 # * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 # * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following
 # disclaimer in the documentation and/or other materials provided
 # with the distribution.
 # * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived
 # from this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+import logging
 from transition_key import TransitionKey
 from automaton import Term, Action, Automaton
 from dfa import Dfa

 # --- Optimization: Replacing tokens with gotos ---
 #
 # This optimization reduces the code size for grammars which have constructs
 # similar to keywords and identifiers. Consider the following grammar:
 #
 # "baz" <|token(BAZ)|>
(...skipping 94 matching lines...)
 # \--[A-Z]---> [s3]
 #
 # We can add a goto from s1 to s2 (after checking for "a"), because the
 # transitions match: with [b-z], both states transition to s2, and with [A-Z],
 # both states transition to s3. Note that [a-z] is a superkey of [b-z], and the
 # transition from s2 is defined for the superkey, but that is ok.

 class DfaOptimizer(object):

   @staticmethod
-  def optimize(dfa, log):
-    return DfaOptimizer(dfa, log).__replace_tokens_with_gotos()
+  def optimize(dfa):
+    return DfaOptimizer(dfa).__replace_tokens_with_gotos()

-  def __init__(self, dfa, log):
+  def __init__(self, dfa):
     self.__dfa = dfa
-    self.__log = log

   @staticmethod
   def __transistions_match(encoding, incoming_key, incoming_state, state):
     keys = set(state.key_iter())
     keys.add(incoming_key)
     disjoint_keys = TransitionKey.disjoint_keys(encoding, keys)
     for key in disjoint_keys:
       if not incoming_key.is_superset_of_key(key):
         continue
       transition_state = state.transition_state_for_key(key)
(...skipping 189 matching lines...)
         orphanable.add(name(transition_state))
       match_state = DfaOptimizer.__apply_jump(
           counters, state, new_state, target_state)
       if match_state:
         states[name(state) + '_match'] = match_state
     assert not deletions
     self.__dfa.visit_all_states(replace_state)
     start_name = name(self.__dfa.start_state())
     states = self.__remove_orphaned_states(states, orphanable, start_name)
     # dump stats
-    if self.__log:
-      print 'goto_start inserted %s' % counters['goto_start']
-      print 'store_token inserted %s' % (
-          counters['store_token'])
-      print 'store_harmony_token %s' % (
-          counters['store_harmony_token'])
-      print 'transitions removed %s' % counters['removals']
-      print 'states split %s' % counters['split_state']
+    logging.info('goto_start inserted %s' % counters['goto_start'])
+    logging.info('store_token inserted %s' % (counters['store_token']))
+    logging.info('store_harmony_token %s' % (counters['store_harmony_token']))
+    logging.info('transitions removed %s' % counters['removals'])
+    logging.info('states split %s' % counters['split_state'])
     return Dfa(self.__dfa.encoding(), start_name, states)
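
Note on call sites after this change (not part of the patch itself): since optimize() no longer takes a log flag, verbosity is controlled entirely through the standard logging module, which the optimizer now writes to at INFO level. Below is a minimal sketch of what a caller might do, assuming the module is importable as dfa_optimizer; the optimize_with_stats wrapper and its verbose parameter are hypothetical, and only DfaOptimizer.optimize(dfa) and the INFO-level messages come from this patch.

import logging

from dfa_optimizer import DfaOptimizer

def optimize_with_stats(dfa, verbose=False):
  # The optimizer reports its counters via logging.info(), so the caller
  # opts in by configuring the root logger before optimizing.
  logging.basicConfig(level=logging.INFO if verbose else logging.WARNING)
  return DfaOptimizer.optimize(dfa)  # the old 'log' argument is gone

A possible follow-up, not required for this CL: logging.info also accepts lazy formatting arguments, e.g. logging.info('states split %s', counters['split_state']), which skips the string interpolation entirely when INFO logging is disabled.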