Set the environment to the same variable name
diff --git a/tokenization.py b/tokenization.py
index a736912..bc8cf85 100644
--- a/tokenization.py
+++ b/tokenization.py
@@ -31,17 +31,22 @@ def _make_token_matcher(definition):
 
     return token_matcher
 
-
 _TOKEN_MATCHERS = [
+    ('keyword',                         r'(def|do|else|end|if)(?![a-z_])'),
+    ('open_bracket',                    r'\['),
+    ('close_bracket',                   r'\]'),
     ('open_parenthese',                 r'\('),
     ('close_parenthese',                r'\)'),
     ('comma',                           r','),
+    ('colon',                           r':'),
+    ('period',                          r'\.'),
     ('integer_literal',                 r'\d+'),
-    ('symbol',                          r'[a-z]+'),
+    ('symbol',                          r'[a-z_]+'),
     ('single_quoted_string_literal',    r"'.*?'"),
-    ('comparison_level_operator',         r'(<=|>=|==|!=|<|>)'),
+    ('double_quoted_string_literal',    r'".*?"'),
+    ('comparison_level_operator',       r'(<=|>=|==|!=|<|>)'),
     ('assignment_operator',             r'='),
-    ('addition_level_operator',         r'(\+|-)'),
+    ('addition_level_operator',         r'(\+\+|\+|-)'),
     ('multiplication_level_operator',   r'(\*|//|%)'),
     ('newline',                         r'\n'),
 ]
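
The body of _make_token_matcher is elided above; here is a minimal sketch of its likely shape, assuming it compiles each (type, regex) pair into a closure that attempts an anchored match at the current index. The Token fields below are hypothetical, not taken from this diff:

import collections
import re

# Hypothetical token shape; the real field names are not shown in this diff.
Token = collections.namedtuple('Token', ('type', 'match', 'index'))

def _make_token_matcher(definition):
    name, regex = definition
    pattern = re.compile(regex)

    def token_matcher(index, source):
        # Anchored match at the current index: returns (matched, new_index, token).
        match = pattern.match(source, index)
        if match is None:
            return False, index, None
        return True, match.end(), Token(type=name, match=match.group(), index=index)

    return token_matcher

Ordering in _TOKEN_MATCHERS matters here: 'keyword' has to precede 'symbol', or r'[a-z_]+' would consume `if` and `def` as plain symbols, and the lookahead (?![a-z_]) keeps identifiers such as `definition` from being split into a keyword followed by a symbol.
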
@@ -58,6 +63,12 @@ def tokenize(source):
             index += 1
             continue
 
+        if source[index] == '#':
+            while index < len(source) and source[index] != '\n':
+                index += 1
+
+            continue
+
         success = False
 
         for matcher in _TOKEN_MATCHERS:
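
Taken together, the new '#' branch discards everything up to (but not including) the newline before any matcher runs, so the newline itself still produces a token. A quick usage sketch, assuming the elided branch just above skips plain whitespace and that tokenize yields the hypothetical Token tuples from the sketch after the first hunk:

source = "x = 'a' ++ 'b' # comments end at the newline\n"

for token in tokenize(source):
    print(token.type, repr(token.match))

# Expected output; the comment text never reaches the matchers:
#   symbol 'x'
#   assignment_operator '='
#   single_quoted_string_literal "'a'"
#   addition_level_operator '++'
#   single_quoted_string_literal "'b'"
#   newline '\n'

Listing \+\+ before \+ in the addition-level alternation is what keeps `++` from tokenizing as two adjacent `+` operators, since Python's re module tries alternation branches left to right.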