--- /dev/null
+# Kinds of lexical tokens the scanner can produce.
+# NOTE(review): comment syntax assumed to be '#' for this language — confirm.
+enum TokenType
+ CLOSE_BRACE,
+ CLOSE_BRACKET,
+ CLOSE_PARENTHESE,
+ COLON,
+ COMMA,
+ DEF,
+ DO,
+ END,
+ ENUM,
+ EQUALS,
+ IDENTIFIER,
+ IF,
+ OPEN_BRACE,
+ OPEN_BRACKET,
+ OPEN_PARENTHESE,
+ PERIOD,
+ STRING_LITERAL,
+ WITH,
+end
+
+# A single lexical token: its kind plus the exact source text it came from.
+struct Token
+ TokenType tokenType
+ string lexeme
+end
+
+# Lexer entry point: should turn `source` (the program text) into tokens.
+# NOTE(review): incomplete stub — the inner `match` is defined but never
+# called, there is no loop over the input, and no token list is built or
+# returned. Multi-character tokens declared in TokenType (DEF, IF, IDENTIFIER,
+# STRING_LITERAL, keywords, EQUALS, ...) are not handled anywhere yet.
+def scan(source)
+ # Lookup table from one-character lexemes to their token kinds.
+ SINGLE_CHARACTER_TOKENS = {
+ '}': TokenType.CLOSE_BRACE,
+ ']': TokenType.CLOSE_BRACKET,
+ ')': TokenType.CLOSE_PARENTHESE,
+ ':': TokenType.COLON,
+ ',': TokenType.COMMA,
+ '{': TokenType.OPEN_BRACE,
+ '[': TokenType.OPEN_BRACKET,
+ '(': TokenType.OPEN_PARENTHESE,
+ '.': TokenType.PERIOD,
+ }
+
+ # Matches one token at the head of `source`; builds a Token only for
+ # single-character lexemes.
+ def match(source)
+ if source[0] in SINGLE_CHARACTER_TOKENS
+ Token {
+ tokenType: SINGLE_CHARACTER_TOKENS[source[0]],
+ lexeme: source[0]
+ }
+ else
+ # NOTE(review): empty branch — anything that is not a single-character
+ # token is silently dropped; match yields no value here.
+ end
+ end
+end
+
+# Parser: should turn the token stream into an AST. TODO: unimplemented stub —
+# returns nothing, so `generate` currently receives an empty value.
+def parse(tokens)
+end
+
+# Code generator: should turn the AST into output text. TODO: unimplemented
+# stub — produces nothing, so the destination file is written empty.
+def generate(ast)
+end
+
+# Full compilation pipeline: source text -> tokens -> AST -> generated output.
+# The result of `generate` is the value of this function.
+def compile(source)
+ generate(parse(scan(source)))
+end
+
+# Command-line driver: compile __arguments__[0] into __arguments__[1].
+source_file_path = __arguments__[0]
+# Fixed: was `__argument__[1]` — inconsistent with `__arguments__` above, which
+# would have left the destination path undefined.
+destination_file_path = __arguments__[1]
+
+# Added `do` for consistency with the `with ... do` form used below.
+with open(source_file_path, 'r') as source_file do
+ source = source_file.read()
+end
+
+# NOTE(review): `source` is assigned inside the `with` block and read after
+# `end` — confirm this language's `with` does not introduce a new scope.
+c = compile(source)
+
+with open(destination_file_path, 'w') as destination_file do
+ destination_file.write(c)
+end
+
+