const std = @import("std");
const scanners = @import("scanners.zig");

pub const Result = enum {
    Ok,
    TokenizeError,
    ParseError,
    CompileError,
};

pub fn run(allocator: *std.mem.Allocator, slice: []const u8) Result {
    var scan = scanners.Scanner.init(allocator, slice);
    //defer scan.deinit();

    // do a full scan pass, then reset, then do it again (with parser)
    while (true) {
        var tok_opt = scan.nextToken() catch |err| {
            std.debug.warn(
                "error at '{}': {}\n",
                scan.currentLexeme(),
                err,
            );
            return Result.TokenizeError;
        };

        if (tok_opt) |tok| {
            if (tok.typ == .EOF) break;

            // TODO remove
            std.debug.warn("{x}\n", tok);
        }
    }

    // scan.reset();

    //var parser = parsers.Parser.init(allocator, scan);
    //defer parser.deinit();

    return Result.Ok;
}

pub fn main() anyerror!void {
    const allocator = std.heap.direct_allocator;

    var args_it = std.process.args();
    _ = args_it.skip();

    const filepath = try (args_it.next(allocator) orelse @panic("expected file path"));

    var file = try std.fs.File.openRead(filepath);
    defer file.close();

    const total_bytes = try file.getEndPos();
    var slice = try allocator.alloc(u8, total_bytes);
    defer allocator.free(slice);

    _ = try file.read(slice);

    const result = run(allocator, slice);
    //const result = try run(allocator, slice);
    switch (result) {
        .Ok => std.os.exit(0),
        .TokenizeError,
        .ParseError,
        .CompileError,
        => {
            std.debug.warn("error: {}\n", result);
            std.os.exit(1);
        },
    }
}