projects
/
fur
/ blobdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
tree
raw
|
inline
| side by side
Add newlines to the parsing of statements
[fur]
/
tokenization.py
diff --git a/tokenization.py b/tokenization.py
index ff79307..3131c35 100644 (file)
--- a/tokenization.py
+++ b/tokenization.py
@@ -43,6 +43,7 @@
 _TOKEN_MATCHERS = [
     ('assignment_operator', r'='),
     ('addition_level_operator', r'(\+|-)'),
     ('multiplication_level_operator', r'(\*|//|%)'),
+    ('newline', r'\n'),
 ]

 _TOKEN_MATCHERS = list(map(_make_token_matcher, _TOKEN_MATCHERS))
@@ -67,11 +68,13 @@ def tokenize(source):
             break
         if not success:
-            raise Exception('Unexpected character "{}"'.format(source[index]))
+            raise Exception('Unexpected character "{}" on line {}'.format(
+                source[index],
+                line,
+            ))

-        while index < len(source) and source[index] in set(['\n']):
+        if token.type == 'newline':
             line += 1
-            index += 1

 if __name__ == '__main__':
     import unittest
@@ -264,12 +267,12 @@ if __name__ == '__main__':
             ),),
         )

-    def test_handles_trailing_newline(self):
+    def test_tokenizes_newline(self):
         self.assertEqual(
-            tokenize('print\n'),
+            tokenize('\n'),
             (Token(
-                type='symbol',
-                match='print',
+                type='newline',
+                match='\n',
                 index=0,
                 line=1,
             ),),
@@ -296,6 +299,12 @@ if __name__ == '__main__':
                 index=0,
                 line=1,
             ),
+            Token(
+                type='newline',
+                match='\n',
+                index=5,
+                line=1,
+            ),
             Token(
                 type='open_parenthese',
                 match='(',