Some minor refactoring and added a (currently trivial) normalization step
[fur] / main.py
"""Compile a Fur source file (argv[1]) to C.

Pipeline: tokenize -> parse -> normalize -> transform -> generate,
then write the generated C next to the source as '<source>.fur.c'.
"""
import sys

import generation
import normalization
import parsing
import tokenization
import transformation

source_path = sys.argv[1]

# Validate the extension up front, before doing any compilation work.
# A plain `assert` is wrong here: asserts are stripped under `python -O`,
# so the check would silently disappear. Exit with a clear message instead.
if not source_path.endswith('.fur'):
    sys.exit('Expected a .fur source file, got {!r}'.format(source_path))

# Read the whole source as UTF-8 explicitly; without `encoding=` the
# result would depend on the platform's locale default.
with open(source_path, 'r', encoding='utf-8') as f:
    source = f.read()

# Each stage consumes the previous stage's output.
tokens = tokenization.tokenize(source)
parsed = parsing.parse(tokens)
normalized = normalization.normalize(parsed)
transformed = transformation.transform(normalized)
generated = generation.generate(transformed)

destination_path = source_path + '.c'

with open(destination_path, 'w', encoding='utf-8') as f:
    f.write(generated)