// vig/src/parser.zig

const std = @import("std");
const scanners = @import("scanner.zig");
const main = @import("main.zig");
const ast = @import("ast.zig");
const tokens = @import("tokens.zig");
const err = @import("errors.zig");
const Allocator = std.mem.Allocator;
const Scanner = scanners.Scanner;
const Token = tokens.Token;
const TokenType = tokens.TokenType;
const Result = main.Result;
const Node = ast.Node;
/// Recursive-descent parser for vig. Pulls tokens on demand from the
/// Scanner and keeps every fetched token in `tokens`, so the *last*
/// element of `tokens` is always the "current" token.
///
/// NOTE(review): this file is written in a pre-0.6 Zig dialect
/// (`*Allocator`, `std.debug.warn`, varargs `args: ...`); it will not
/// build with modern Zig compilers.
pub const Parser = struct {
// Allocator used for the token list and AST nodes.
allocator: *Allocator,
// Token source; consulted lazily by nextToken().
scanner: *Scanner,
// Every token fetched so far, grown one element at a time via realloc.
// Left `undefined` until parse() allocates the initial empty slice, so
// no other method may be called before parse().
tokens: []Token = undefined,
/// Builds a Parser; does not allocate. Call parse() to run it.
pub fn init(allocator: *Allocator, scanner: *Scanner) Parser {
return Parser{ .allocator = allocator, .scanner = scanner };
}
/// Reports a formatted parser error (tagged with the scanner's current
/// line) to stderr, then always returns Result.CompileError.
fn doError(self: *Parser, comptime fmt: []const u8, args: ...) Result!void {
std.debug.warn("parser error at line {}\n\t", self.scanner.line);
std.debug.warn(fmt, args);
std.debug.warn("\n");
return Result.CompileError;
}
/// Returns the current token, i.e. the most recently fetched one.
/// WARNING: indexes `tokens.len - 1`; must not be called before the
/// first nextToken() in parse() has appended a token.
fn peek(self: *Parser) Token {
return self.tokens[self.tokens.len - 1];
}
/// Returns the token before the current one.
/// NOTE(review): underflows when fewer than two tokens have been
/// fetched; currently unused within this file.
fn previous(self: *Parser) Token {
return self.tokens[self.tokens.len - 2];
}
/// Reports an error anchored at `token` (special-casing EOF) and
/// always returns Result.CompileError, so `try self.tokenError(...)`
/// never falls through to the statement after it.
fn tokenError(self: *Parser, token: Token, msg: []const u8) Result!void {
if (token.ttype == .EOF) {
err.report(token.line, " at end", msg);
} else {
err.reportFmt(token.line, " at '{}': {}", token.lexeme, msg);
}
return Result.CompileError;
}
/// True when the current token is EOF.
fn isAtEnd(self: *Parser) bool {
return self.peek().ttype == .EOF;
}
/// True when the current token has type `ttype`; always false at EOF.
fn check(self: *Parser, ttype: TokenType) bool {
if (self.isAtEnd()) return false;
return self.peek().ttype == ttype;
}
/// Fetches the next significant token from the scanner, appends it to
/// `tokens` (making it the new "current" token), and returns it.
/// The scanner presumably yields null for skipped input such as
/// whitespace -- TODO confirm against scanner.zig; if it could return
/// null forever (without an EOF token) this loop would not terminate.
fn nextToken(self: *Parser) !Token {
var token: Token = undefined;
while (true) {
var next_token_opt = try self.scanner.nextToken();
if (next_token_opt) |token_nice| {
token = token_nice;
break;
}
}
// Grow the token list by one slot; realloc preserves the existing
// contents and may move the allocation.
self.tokens = try self.allocator.realloc(
self.tokens,
self.tokens.len + 1,
);
self.tokens[self.tokens.len - 1] = token;
return token;
}
/// If the current token matches `ttype`, advances and returns the
/// *newly fetched* token; otherwise reports `msg` and errors out.
/// NOTE(review): consumeSingle() returns the *matched* token instead;
/// the two helpers disagree about which token they hand back.
fn consume(self: *Parser, ttype: TokenType, comptime msg: []const u8) !Token {
if (self.check(ttype)) return try self.nextToken();
try self.tokenError(self.peek(), msg);
// Unreachable in practice (tokenError always errors), but the
// compiler cannot prove that, so an explicit return is required.
return Result.CompileError;
}
/// If the current token matches `ttype`, advances and returns the
/// matched token; otherwise reports a formatted mismatch and errors.
fn consumeSingle(self: *Parser, ttype: TokenType) !Token {
// Leftover debug tracing: prints every consume attempt to stderr.
std.debug.warn("consume {}, has {}\n", ttype, self.peek().ttype);
if (self.check(ttype)) {
var cur = self.peek();
_ = try self.nextToken();
std.debug.warn("now has {}\n", self.peek());
return cur;
}
// Build the mismatch message in a fixed stack buffer; 1000 bytes is
// assumed sufficient for any pair of token-type names.
var buf_main: [1000]u8 = undefined;
var buf = try std.fmt.bufPrint(
buf_main[0..],
"expected {}, got {}",
ttype,
self.peek().ttype,
);
try self.tokenError(self.peek(), buf);
// Unreachable in practice; see consume().
return Result.CompileError;
}
/// Heap-allocates a FnDecl AST node naming function `name`.
/// The node is owned by the AST -- presumably freed with the same
/// allocator by whoever tears the tree down; verify against ast.zig.
fn mkFnDecl(self: *Parser, name: Token) !*ast.Node {
var node = try self.allocator.create(Node);
node.* = Node{ .FnDecl = ast.FnDecl{ .func_name = name } };
return node;
}
/// Parses the start of a function declaration: `fn <identifier> (`.
/// Parameters, return type and body are not consumed yet -- the
/// grammar is still a work in progress.
fn functionDecl(self: *Parser) !*ast.Node {
_ = try self.consumeSingle(.Fn);
var name = try self.consumeSingle(.Identifier);
_ = try self.consumeSingle(.LeftParen);
return try self.mkFnDecl(name);
}
/// Dispatches on the type of the current top-level token and returns
/// the AST node it introduces. Any unhandled token type is reported
/// and turned into a compile error.
fn processToken(self: *Parser, token: Token) !*ast.Node {
var node = switch (token.ttype) {
.Fn => try self.functionDecl(),
else => blk: {
try self.doError("TODO handle {}\n", token.ttype);
return Result.CompileError;
},
};
return node;
}
/// Entry point: parses the whole input into a Root node whose children
/// are the top-level declarations. Initializes the token list here, so
/// no other parser method may be used before this one.
/// NOTE(review): `tokens` and the returned tree are not freed here --
/// presumably the caller owns the allocator's lifetime; verify.
pub fn parse(self: *Parser) !*ast.Node {
self.tokens = try self.allocator.alloc(Token, 0);
var root = try ast.mkRoot(self.allocator);
while (true) {
var token = try self.nextToken();
if (token.ttype == .EOF) break;
var node = try self.processToken(token);
std.debug.warn("{}\n", node.*);
try root.Root.append(node);
}
return root;
}
};