Normalize symbol expressions
[fur] / tokenization.py
index e9f536b..3c4dc6f 100644
@@ -31,17 +31,19 @@ def _make_token_matcher(definition):
 
     return token_matcher
 
-
 _TOKEN_MATCHERS = [
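+    # Matched first: the negative lookahead keeps 'def'/'end' from matching
+    # the prefix of a longer symbol such as 'define'.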
+    ('keyword',                         r'(def|end)(?![a-z_])'),
     ('open_parenthese',                 r'\('),
     ('close_parenthese',                r'\)'),
     ('comma',                           r','),
-    ('assignment_operator',             r'='),
     ('integer_literal',                 r'\d+'),
-    ('symbol',                          r'[a-z]+'),
+    ('symbol',                          r'[a-z_]+'),
     ('single_quoted_string_literal',    r"'.*?'"),
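+    # Order matters: the multi-character comparison operators must be tried
+    # before the single-character '=' assignment matcher below, or '=='
+    # would tokenize as two assignments.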
+    ('comparison_level_operator',       r'(<=|>=|==|!=|<|>)'),
+    ('assignment_operator',             r'='),
     ('addition_level_operator',         r'(\+|-)'),
-    ('multiplication_level_operator',  r'(\*|//|%)'),
+    ('multiplication_level_operator',   r'(\*|//|%)'),
+    ('newline',                         r'\n'),
 ]
 
 _TOKEN_MATCHERS = list(map(_make_token_matcher, _TOKEN_MATCHERS))
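+
+# Rough sanity check for the new matchers (assumed behavior, given that the
+# tokenize loop below skips spaces): tokenize("x <= 42") should yield a
+# 'symbol', a 'comparison_level_operator', and an 'integer_literal' token.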
@@ -56,6 +58,12 @@ def tokenize(source):
             index += 1
             continue
 
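+        # Comments run from '#' to the end of the line; stop before the
+        # newline so it is still tokenized as a 'newline' token.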
+        if source[index] == '#':
+            while index < len(source) and source[index] != '\n':
+                index += 1
+
+            continue
+
         success = False
 
         for matcher in _TOKEN_MATCHERS:
@@ -66,11 +74,13 @@ def tokenize(source):
                 break
 
         if not success:
-            raise Exception('Unexpected character "{}"'.format(source[index]))
+            raise Exception('Unexpected character "{}" on line {}'.format(
+                source[index],
+                line,
+            ))
 
-        while index < len(source) and source[index] in set(['\n']):
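+        # Newlines are now tokens in their own right: the line counter is
+        # advanced when a 'newline' token is matched, not while skipping
+        # whitespace.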
+        if token.type == 'newline':
             line += 1
-            index += 1
 
 if __name__ == '__main__':
     import unittest
@@ -197,12 +207,78 @@ if __name__ == '__main__':
                 ),),
             )
 
-        def test_handles_trailing_newline(self):
+        def test_tokenizes_equality_operator(self):
             self.assertEqual(
-                tokenize('print\n'),
+                tokenize('=='),
                 (Token(
-                    type='symbol',
-                    match='print',
+                    type='comparison_level_operator',
+                    match='==',
+                    index=0,
+                    line=1,
+                ),),
+            )
+
+        def test_tokenizes_greater_than_or_equal_operator(self):
+            self.assertEqual(
+                tokenize('>='),
+                (Token(
+                    type='comparison_level_operator',
+                    match='>=',
+                    index=0,
+                    line=1,
+                ),),
+            )
+
+        def test_tokenizes_less_than_or_equal_operator(self):
+            self.assertEqual(
+                tokenize('<='),
+                (Token(
+                    type='comparison_level_operator',
+                    match='<=',
+                    index=0,
+                    line=1,
+                ),),
+            )
+
+        def test_tokenizes_greater_than_operator(self):
+            self.assertEqual(
+                tokenize('>'),
+                (Token(
+                    type='comparison_level_operator',
+                    match='>',
+                    index=0,
+                    line=1,
+                ),),
+            )
+
+        def test_tokenizes_less_than_operator(self):
+            self.assertEqual(
+                tokenize('<'),
+                (Token(
+                    type='comparison_level_operator',
+                    match='<',
+                    index=0,
+                    line=1,
+                ),),
+            )
+
+        def test_tokenizes_not_equal_operator(self):
+            self.assertEqual(
+                tokenize('!='),
+                (Token(
+                    type='comparison_level_operator',
+                    match='!=',
+                    index=0,
+                    line=1,
+                ),),
+            )
+
+        def test_tokenizes_newline(self):
+            self.assertEqual(
+                tokenize('\n'),
+                (Token(
+                    type='newline',
+                    match='\n',
                     index=0,
                     line=1,
                 ),),
@@ -229,6 +305,12 @@ if __name__ == '__main__':
                         index=0,
                         line=1,
                     ),
+                    Token(
+                        type='newline',
+                        match='\n',
+                        index=5,
+                        line=1,
+                    ),
                     Token(
                         type='open_parenthese',
                         match='(',