//! Parser: turns the token stream produced by scanner.zig into an AST.
const std = @import("std");
const scanners = @import("scanner.zig");
const main = @import("main.zig");
const ast = @import("ast.zig");
const tokens = @import("tokens.zig");
const err = @import("errors.zig");

const Allocator = std.mem.Allocator;
const Scanner = scanners.Scanner;
const Token = tokens.Token;
const TokenType = tokens.TokenType;
const Result = main.Result;
const Node = ast.Node;
|
pub const Parser = struct {
    /// Allocator used for token storage and AST nodes.
    allocator: *Allocator,
    /// Scanner that yields the token stream consumed by `parse`.
    scanner: *Scanner,

    /// All tokens read from the scanner so far; owned by `allocator` and
    /// grown inside `parse`. Undefined until `parse` runs.
    tokens: []Token = undefined,
    /// Index of the next unconsumed token in `tokens`.
    current: usize = 0,

    /// Create a parser over `scanner`. No allocation happens until `parse`.
    pub fn init(allocator: *Allocator, scanner: *Scanner) Parser {
        return Parser{ .allocator = allocator, .scanner = scanner };
    }

    /// Print a formatted parser error at the scanner's current line, then
    /// fail with `Result.CompileError`.
    fn doError(self: *Parser, comptime fmt: []const u8, args: ...) Result!void {
        std.debug.warn("parser error at line {}\n\t", self.scanner.line);
        std.debug.warn(fmt, args);
        std.debug.warn("\n");

        return Result.CompileError;
    }

    /// Return the current token without consuming it.
    fn peek(self: *Parser) Token {
        return self.tokens[self.current];
    }

    /// Return the most recently consumed token.
    fn previous(self: *Parser) Token {
        return self.tokens[self.current - 1];
    }

    /// Report an error anchored to `token`, then fail with
    /// `Result.CompileError`.
    fn tokenError(self: *Parser, token: Token, msg: []const u8) Result!void {
        if (token.ttype == .EOF) {
            err.report(token.line, " at end", msg);
        } else {
            err.reportFmt(token.line, " at '{}': {}", token.lexeme, msg);
        }

        return Result.CompileError;
    }

    /// True when the current token is EOF.
    fn isAtEnd(self: *Parser) bool {
        return self.peek().ttype == .EOF;
    }

    /// Consume and return the current token. EOF is never consumed, so
    /// repeated calls at end-of-input keep returning the EOF token's
    /// predecessor... er, the token before EOF stays `previous`.
    fn advance(self: *Parser) Token {
        if (!self.isAtEnd()) self.current += 1;
        return self.previous();
    }

    /// True when the current token has type `ttype` (always false at EOF).
    fn check(self: *Parser, ttype: TokenType) bool {
        if (self.isAtEnd()) return false;
        return self.peek().ttype == ttype;
    }

    /// Consume the current token when its type is `ttype`; returns whether
    /// a token was consumed.
    fn matchSingle(self: *Parser, ttype: TokenType) bool {
        if (self.check(ttype)) {
            _ = self.advance();
            return true;
        }

        return false;
    }

    /// Consume the current token when it matches any type in `ttypes`.
    fn match(self: *Parser, ttypes: []TokenType) bool {
        // Delegate to matchSingle so the check/advance logic lives in one
        // place (previously duplicated here).
        for (ttypes) |ttype| {
            if (self.matchSingle(ttype)) return true;
        }

        return false;
    }

    /// Consume and return the current token when its type is `ttype`;
    /// otherwise report `msg` against the offending token and fail.
    fn consume(self: *Parser, ttype: TokenType, comptime msg: []const u8) Result!Token {
        if (self.check(ttype)) return self.advance();

        try self.tokenError(self.peek(), msg);
        // tokenError always returns an error, so `try` never falls
        // through; this return only satisfies the type checker.
        return Result.CompileError;
    }

    /// Heap-allocate a function-declaration node named `name`.
    /// NOTE(review): allocates a `Node.FnDecl` but the signature promises
    /// `*ast.Node` — this relies on ast.zig's Node layout; verify the
    /// coercion against ast.zig.
    fn mkFnDecl(self: *Parser, name: []const u8) !*ast.Node {
        var node = try self.allocator.create(Node.FnDecl);
        node.* = Node.FnDecl{ .name = name };
        return node;
    }

    /// Parse a function declaration: `fn` has already been consumed, the
    /// next token must be the function's name.
    fn functionDecl(self: *Parser) !*ast.Node {
        // consume returns the full Token; mkFnDecl wants only the name
        // text (the old code passed the Token itself to a []const u8
        // parameter).
        var name = try self.consume(.Identifier, "expected function name");
        return try self.mkFnDecl(name.lexeme);
    }

    /// Dispatch on a top-level token and build the AST node for it.
    /// Returns a heap-allocated node so callers can store the pointer.
    /// No token types are implemented yet, so this currently always
    /// reports a TODO error and fails with `Result.CompileError`.
    fn processToken(self: *Parser, token: Token) Result!*ast.Node {
        switch (token.ttype) {
            //.Fn => return try self.functionDecl(),
            else => {
                try self.doError("TODO handle {}\n", token.ttype);
                return Result.CompileError;
            },
        }
    }

    /// Drive the scanner to EOF, accumulating tokens into `self.tokens`
    /// and appending one AST node per top-level token to the root node.
    /// Returned root and all appended nodes are owned by `self.allocator`.
    pub fn parse(self: *Parser) !*ast.Node {
        self.tokens = try self.allocator.alloc(Token, 0);
        var root = try ast.mkRoot(self.allocator);

        while (true) {
            // nextToken may yield null (presumably for skipped
            // whitespace/comments — TODO confirm against scanner.zig);
            // just ask again. EOF arrives as a real token.
            var tok_opt = try self.scanner.nextToken();

            if (tok_opt) |token| {
                // TODO: grow geometrically or use std.ArrayList(Token);
                // a +1 realloc per token is O(n^2) total copying.
                const count = self.tokens.len;
                self.tokens = try self.allocator.realloc(self.tokens, count + 1);
                self.tokens[count] = token;

                if (token.ttype == .EOF) break;

                // processToken returns a heap pointer, so the node stored
                // in the root outlives this loop iteration (the old code
                // appended the address of a stack-local value, leaving a
                // dangling pointer in the AST).
                var node = try self.processToken(token);
                try root.Root.append(node);
            }
        }

        return root;
    }
};