Compare commits

..

No commits in common. "4f7478d7b2f77a2e1f48514e779daf410cad5046" and "b0e123f83ae54d5c58511333de11646817962a7c" have entirely different histories.

3 changed files with 48 additions and 67 deletions

View file

@@ -10,7 +10,7 @@ pub const NodeType = enum {
}; };
pub const FnDecl = struct { pub const FnDecl = struct {
func_name: Token, func_name: []const u8,
}; };
pub const Node = union(NodeType) { pub const Node = union(NodeType) {

View file

@@ -17,6 +17,7 @@ pub const Parser = struct {
scanner: *Scanner, scanner: *Scanner,
tokens: []Token = undefined, tokens: []Token = undefined,
current: usize = 0,
pub fn init(allocator: *Allocator, scanner: *Scanner) Parser { pub fn init(allocator: *Allocator, scanner: *Scanner) Parser {
return Parser{ .allocator = allocator, .scanner = scanner }; return Parser{ .allocator = allocator, .scanner = scanner };
@@ -31,11 +32,11 @@ pub const Parser = struct {
} }
fn peek(self: *Parser) Token { fn peek(self: *Parser) Token {
return self.tokens[self.tokens.len - 1]; return self.tokens[self.current];
} }
fn previous(self: *Parser) Token { fn previous(self: *Parser) Token {
return self.tokens[self.tokens.len - 2]; return self.tokens[self.current - 1];
} }
fn tokenError(self: *Parser, token: Token, msg: []const u8) Result!void { fn tokenError(self: *Parser, token: Token, msg: []const u8) Result!void {
@@ -52,98 +53,85 @@ pub const Parser = struct {
return self.peek().ttype == .EOF; return self.peek().ttype == .EOF;
} }
fn advance(self: *Parser) Token {
if (!self.isAtEnd()) self.current += 1;
return self.previous();
}
fn check(self: *Parser, ttype: TokenType) bool { fn check(self: *Parser, ttype: TokenType) bool {
if (self.isAtEnd()) return false; if (self.isAtEnd()) return false;
return self.peek().ttype == ttype; return self.peek().ttype == ttype;
} }
fn nextToken(self: *Parser) !Token { fn match(self: *Parser, ttypes: []TokenType) bool {
var token: Token = undefined; for (ttypes) |ttype| {
if (self.check(ttype)) {
while (true) { _ = self.advance();
var next_token_opt = try self.scanner.nextToken(); return true;
if (next_token_opt) |token_nice| {
token = token_nice;
break;
} }
} }
self.tokens = try self.allocator.realloc( return false;
self.tokens,
self.tokens.len + 1,
);
self.tokens[self.tokens.len - 1] = token;
return token;
} }
fn consume(self: *Parser, ttype: TokenType, comptime msg: []const u8) !Token { fn matchSingle(self: *Parser, ttype: TokenType) bool {
if (self.check(ttype)) return try self.nextToken(); if (self.check(ttype)) {
_ = self.advance();
return true;
}
return false;
}
fn consume(self: *Parser, ttype: TokenType, comptime msg: []const u8) Result!Token {
if (self.check(ttype)) return self.advance();
try self.tokenError(self.peek(), msg); try self.tokenError(self.peek(), msg);
return Result.CompileError; return Result.CompileError;
} }
fn consumeSingle(self: *Parser, ttype: TokenType) !Token { fn mkFnDecl(self: *Parser, name: []const u8) !*ast.Node {
std.debug.warn("consume {}, has {}\n", ttype, self.peek().ttype); var node = try self.allocator.create(Node.FnDecl);
node.* = Node.FnDecl{ .name = name };
if (self.check(ttype)) {
var cur = self.peek();
_ = try self.nextToken();
std.debug.warn("now has {}\n", self.peek());
return cur;
}
var buf_main: [1000]u8 = undefined;
var buf = try std.fmt.bufPrint(
buf_main[0..],
"expected {}, got {}",
ttype,
self.peek().ttype,
);
try self.tokenError(self.peek(), buf);
return Result.CompileError;
}
fn mkFnDecl(self: *Parser, name: Token) !*ast.Node {
var node = try self.allocator.create(Node);
node.* = Node{ .FnDecl = ast.FnDecl{ .func_name = name } };
return node; return node;
} }
fn functionDecl(self: *Parser) !*ast.Node { fn functionDecl(self: *Parser) !*ast.Node {
_ = try self.consumeSingle(.Fn); // get the name
var name = try self.consumeSingle(.Identifier); var name = try self.consume(.Identifier, "expected function name");
_ = try self.consumeSingle(.LeftParen);
return try self.mkFnDecl(name); return try self.mkFnDecl(name);
} }
fn processToken(self: *Parser, token: Token) !*ast.Node { fn processToken(self: *Parser, token: Token) Result!ast.Node {
var node = switch (token.ttype) { switch (token.ttype) {
.Fn => try self.functionDecl(), //.Fn => try self.functionDecl(),
else => blk: { else => blk: {
try self.doError("TODO handle {}\n", token.ttype); try self.doError("TODO handle {}\n", token.ttype);
return Result.CompileError; return Result.CompileError;
}, },
}; }
return node;
} }
pub fn parse(self: *Parser) !*ast.Node { pub fn parse(self: *Parser) !*ast.Node {
self.tokens = try self.allocator.alloc(Token, 0); self.tokens = try self.allocator.alloc(Token, 0);
var i: usize = 0;
var root = try ast.mkRoot(self.allocator); var root = try ast.mkRoot(self.allocator);
while (true) { while (true) {
var token = try self.nextToken(); var tok_opt = try self.scanner.nextToken();
if (tok_opt) |token| {
self.tokens = try self.allocator.realloc(self.tokens, i + 1);
self.tokens[i] = token;
i += 1;
if (token.ttype == .EOF) break; if (token.ttype == .EOF) break;
var node = try self.processToken(token); var node = try self.processToken(token);
std.debug.warn("{}\n", node.*); try root.Root.append(&node);
try root.Root.append(node); } else {
continue;
}
} }
return root; return root;

View file

@@ -110,10 +110,6 @@ pub const Scanner = struct {
return self.source[self.current - 1]; return self.source[self.current - 1];
} }
fn rollback(self: *Scanner) void {
self.current -= 1;
}
pub fn currentLexeme(self: *Scanner) []const u8 { pub fn currentLexeme(self: *Scanner) []const u8 {
return self.source[self.start..self.current]; return self.source[self.start..self.current];
} }
@@ -249,9 +245,6 @@ pub const Scanner = struct {
_ = self.advance(); _ = self.advance();
} }
// ugly hack.
self.rollback();
// after reading the identifier, we check // after reading the identifier, we check
// if it is any of our keywords, if it is, then we add // if it is any of our keywords, if it is, then we add
// the specificed keyword type. if not, just .IDENTIFIER // the specificed keyword type. if not, just .IDENTIFIER