Make "do" a keyword
diff --git a/tokenization.py b/tokenization.py
index 3c4dc6f..914d05a 100644
--- a/tokenization.py
+++ b/tokenization.py
@@ -32,7 +32,7 @@ def _make_token_matcher(definition):
     return token_matcher
 
 _TOKEN_MATCHERS = [
-    ('keyword',                         r'(def|end)(?![a-z_])'),
+    ('keyword',                         r'(def|do|end)(?![a-z_])'),
     ('open_parenthese',                 r'\('),
     ('close_parenthese',                r'\)'),
     ('comma',                           r','),
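
For reference, the negative lookahead (?![a-z_]) keeps the new keyword from
swallowing identifiers that merely begin with "do". A minimal standalone
sketch of the behavior (the actual _make_token_matcher internals are not
shown here, so this only exercises the pattern itself):

import re

# Same pattern as the updated 'keyword' entry above.
KEYWORD = re.compile(r'(def|do|end)(?![a-z_])')

assert KEYWORD.match('do')            # bare keyword matches
assert KEYWORD.match('do x')          # keyword followed by whitespace matches
assert KEYWORD.match('done') is None  # lookahead rejects identifier prefixes
assert KEYWORD.match('dog') is None   # likewise for 'dog'

Since (?![a-z_]) excludes only lowercase letters and underscores, this
presumably reflects fur identifiers being limited to those characters.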