X-Git-Url: https://code.kerkeslager.com/?p=fur;a=blobdiff_plain;f=tokenization.py;fp=tokenization.py;h=ff79307194d84c427a8569b9e4aa6d5035b438f4;hp=e6cad0a0bc7c3ac035294f6d0720c5e46a0a56e4;hb=eab88b322191c40168553b8671b968d1b1558084;hpb=59cb91d6dbee0b40416ff265565b426958770d45

diff --git a/tokenization.py b/tokenization.py
index e6cad0a..ff79307 100644
--- a/tokenization.py
+++ b/tokenization.py
@@ -40,9 +40,9 @@ _TOKEN_MATCHERS = [
     ('symbol', r'[a-z]+'),
     ('single_quoted_string_literal', r"'.*?'"),
     ('equality_level_operator', r'(<=|>=|==|!=|<|>)'),
+    ('assignment_operator', r'='),
     ('addition_level_operator', r'(\+|-)'),
     ('multiplication_level_operator', r'(\*|//|%)'),
-    ('assignment_operator', r'='),
 ]
 
 _TOKEN_MATCHERS = list(map(_make_token_matcher, _TOKEN_MATCHERS))
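For context on the ordering: if the matchers are tried in list order at each offset, then '=' must be attempted only after the equality-level pattern, otherwise '==' would be consumed as two '=' tokens. Under that reading, this change keeps '=' after the equality-level operators and only moves it ahead of the arithmetic patterns, whose regexes it does not overlap with. The sketch below is an assumption about how _make_token_matcher and a surrounding tokenizer loop could work; it is not taken from the fur repository, and the driver name _tokenize_sketch is hypothetical.

import re

# Hypothetical sketch (not from the repository): one plausible shape for
# _make_token_matcher, turning a (kind, pattern) pair into a callable that
# tries to match at a given offset and reports the token plus the new offset.
def _make_token_matcher(definition):
    kind, pattern = definition
    regex = re.compile(pattern)

    def matcher(source, index):
        match = regex.match(source, index)
        if match is None:
            return False, None, index
        return True, (kind, match.group()), match.end()

    return matcher

# Hypothetical driver loop illustrating why matcher order matters: matchers
# are tried in list order, so 'equality_level_operator' must be attempted
# before 'assignment_operator' or '==' would tokenize as two '=' tokens.
def _tokenize_sketch(source, matchers):
    index = 0
    tokens = []
    while index < len(source):
        if source[index].isspace():
            index += 1
            continue
        for matcher in matchers:
            success, token, index = matcher(source, index)
            if success:
                tokens.append(token)
                break
        else:
            raise Exception('Unexpected character {!r}'.format(source[index]))
    return tokens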