Add furfur compiler skeleton: single-character token scanner, parser and generator stubs, and a file-to-file compile driver
[sandbox] / furfur / main.fur
# Token categories produced by the scanner.
# Fixed: added AS, ELSE, IN, and STRUCT — this very file uses all four
# keywords (`struct Token`, `else`, `in`, `with ... as`), but the enum had
# no members for them, so the scanner's token set could not cover its own
# source language. Entries remain in alphabetical order.
enum TokenType
  AS,
  CLOSE_BRACE,
  CLOSE_BRACKET,
  CLOSE_PARENTHESE,
  COLON,
  COMMA,
  DEF,
  DO,
  ELSE,
  END,
  ENUM,
  EQUALS,
  IDENTIFIER,
  IF,
  IN,
  OPEN_BRACE,
  OPEN_BRACKET,
  OPEN_PARENTHESE,
  PERIOD,
  STRING_LITERAL,
  STRUCT,
  WITH,
end
21
# A single lexical token: its category plus the exact source text it was
# scanned from.
struct Token
  # Category of this token (one of the TokenType enum members).
  TokenType tokenType
  # The exact source text that produced this token.
  string lexeme
end
26
# Scans furfur source text into tokens.
# NOTE(review): currently a stub — it builds the single-character token
# table and defines a `match` helper, but `match` is never called and no
# token list is produced for the caller (`compile` passes scan's result
# straight to `parse`). Presumably a scanning loop over `source` is still
# TODO — confirm intent.
def scan(source)
  # Map from a one-character lexeme to its TokenType.
  SINGLE_CHARACTER_TOKENS = {
    '}': TokenType.CLOSE_BRACE,
    ']': TokenType.CLOSE_BRACKET,
    ')': TokenType.CLOSE_PARENTHESE,
    ':': TokenType.COLON,
    ',': TokenType.COMMA,
    '{': TokenType.OPEN_BRACE,
    '[': TokenType.OPEN_BRACKET,
    '(': TokenType.OPEN_PARENTHESE,
    '.': TokenType.PERIOD,
  }

  # Matches the first character of `source` against the single-character
  # token table and builds a Token for it.
  def match(source)
    if source[0] in SINGLE_CHARACTER_TOKENS
      Token {
        tokenType: SINGLE_CHARACTER_TOKENS[source[0]],
        lexeme: source[0]
      }
    else
      # Empty branch: multi-character tokens (keywords, identifiers,
      # string literals listed in TokenType) are not handled yet.
    end
  end
end
50
# Parses a token stream into an AST.
# NOTE(review): stub — no body yet; `compile` feeds its result to `generate`.
def parse(tokens)
end
53
# Generates output code from an AST.
# NOTE(review): stub — no body yet; `compile` returns its result, which is
# then written to the destination file.
def generate(ast)
end
56
# Compiles furfur source text to output code by running the three phases
# in sequence: scan (text -> tokens), parse (tokens -> AST), and
# generate (AST -> output). The generated output is the result.
def compile(source)
  generate(parse(scan(source)))
end
62
# Entry point: compile <source path> <destination path>.
# Command-line arguments: [0] = input file, [1] = output file.
source_file_path = __arguments__[0]
# Fixed: was misspelled `__argument__`, which is not the name used on the
# line above and would fail to resolve the destination path.
destination_file_path = __arguments__[1]

# Read the whole source file up front.
# Fixed: added `do` for consistency with the `with` block below.
with open(source_file_path, 'r') as source_file do
  source = source_file.read()
end

c = compile(source)

# Write the compiled output.
with open(destination_file_path, 'w') as destination_file do
  destination_file.write(c)
end
75
76