Compare commits


No commits in common. "9d1d253c9479cbf40e06f72105f118ceb7f05cc5" and "6b9cc575d9960e46f45ed4dba24499010a74e8e4" have entirely different histories.

3 changed files with 11 additions and 160 deletions


@@ -26,29 +26,17 @@ pub const Compiler = struct {
         var scanr = scanner.Scanner.init(self.allocator, self.src);
         var line: usize = 0;
         while (true) {
-            var token_opt = scanr.scanToken() catch |err| {
-                std.debug.warn("Scan Error: {x}\n", err);
-                std.debug.warn(
-                    "line: {}, cur lexeme: {}\n",
-                    scanr.line,
-                    scanr.currentLexeme(),
-                );
-                break;
-            };
-            if (token_opt) |token| {
-                if (token.line != line) {
-                    try self.stdout.print("{} ", token.line);
-                    line = token.line;
-                } else {
-                    try self.stdout.print(" | ");
-                }
-                try self.stdout.print("{} '{}'\n", token.ttype, token.lexeme);
-                if (token.ttype == TokenType.EOF) break;
-            } else {
-                break;
+            var token = scanr.scanToken();
+            if (token.line != line) {
+                try self.stdout.print("{} ", token.line);
+                line = token.line;
+            } else {
+                try self.stdout.print(" | ");
             }
+            try self.stdout.print("{} '{}'\n", token.ttype, token.lexeme);
+            if (token.ttype == TokenType.EOF) break;
         }
     }
 };


@@ -1,16 +1,8 @@
 const std = @import("std");
 const tokens = @import("token.zig");
-const Token = tokens.Token;
-const TokenType = tokens.TokenType;
 const Allocator = std.mem.Allocator;
-pub const TokenError = error{
-    Unexpected,
-    Unterminated,
-};
 pub const Scanner = struct {
     source: []const u8,
@@ -27,136 +19,7 @@ pub const Scanner = struct {
         };
     }
-    fn isAtEnd(self: *Scanner) bool {
-        return self.current >= self.source.len;
-    }
-    fn advance(self: *Scanner) u8 {
-        self.current += 1;
-        return self.source[self.current - 1];
-    }
-    pub fn currentLexeme(self: *Scanner) []const u8 {
-        return self.source[self.start..self.current];
-    }
-    fn makeToken(self: *Scanner, ttype: TokenType) Token {
-        return Token{
-            .ttype = ttype,
-            .lexeme = self.currentLexeme(),
-            .line = self.line,
-        };
-    }
-    /// Check if the next character matches what is expected.
-    fn match(self: *Scanner, expected: u8) bool {
-        if (self.isAtEnd()) return false;
-        if (self.source[self.current] != expected) return false;
-        self.current += 1;
-        return true;
-    }
-    /// Add a SimpleToken of type_match if the next character is
-    /// `expected`. Adds a SimpleToken of type_nomatch when it is not.
-    fn makeMatchToken(
-        self: *Scanner,
-        expected: u8,
-        type_match: TokenType,
-        type_nomatch: TokenType,
-    ) Token {
-        if (self.match(expected)) {
-            return self.makeToken(type_match);
-        } else {
-            return self.makeToken(type_nomatch);
-        }
-    }
-    fn peek(self: *Scanner) u8 {
-        if (self.isAtEnd()) return 0;
-        return self.source[self.current];
-    }
-    fn peekNext(self: *Scanner) u8 {
-        if (self.isAtEnd()) return 0;
-        return self.source[self.current - 1];
-    }
-    fn skipWhitespace(self: *Scanner) void {
-        while (true) {
-            var c = self.peek();
-            switch (c) {
-                ' ', '\r', '\t' => blk: {
-                    _ = self.advance();
-                },
-                '\n' => blk: {
-                    self.line += 1;
-                    _ = self.advance();
-                },
-                else => return,
-            }
-        }
-    }
-    fn doString(self: *Scanner) !Token {
-        // consume entire string
-        while (self.peek() != '"' and !self.isAtEnd()) {
-            if (self.peek() == '\n') self.line += 1;
-            _ = self.advance();
-        }
-        // unterminated string.
-        if (self.isAtEnd()) {
-            return TokenError.Unterminated;
-        }
-        // the closing ".
-        _ = self.advance();
-        // trim the surrounding quotes.
-        return self.makeToken(.STRING);
-    }
-    pub fn scanToken(self: *Scanner) !?Token {
-        self.skipWhitespace();
-        self.start = self.current;
-        if (self.isAtEnd()) return self.makeToken(TokenType.EOF);
-        var c = self.advance();
-        var token = switch (c) {
-            '(' => self.makeToken(.LEFT_PAREN),
-            ')' => self.makeToken(.RIGHT_PAREN),
-            '{' => self.makeToken(.LEFT_BRACE),
-            '}' => self.makeToken(.RIGHT_BRACE),
-            ',' => self.makeToken(.COMMA),
-            '.' => self.makeToken(.DOT),
-            '-' => self.makeToken(.MINUS),
-            '+' => self.makeToken(.PLUS),
-            ';' => self.makeToken(.SEMICOLON),
-            '*' => self.makeToken(.STAR),
-            '!' => self.makeMatchToken('=', .BANG_EQUAL, .BANG),
-            '=' => self.makeMatchToken('=', .EQUAL_EQUAL, .EQUAL),
-            '<' => self.makeMatchToken('=', .LESS_EQUAL, .LESS),
-            '>' => self.makeMatchToken('=', .GREATER_EQUAL, .GREATER),
-            '/' => blk: {
-                if (self.peekNext() == '/') {
-                    while (self.peek() != '\n' and !self.isAtEnd()) {
-                        _ = self.advance();
-                    }
-                    break :blk null;
-                } else {
-                    break :blk self.makeToken(.SLASH);
-                }
-            },
-            '"' => try self.doString(),
-            else => return TokenError.Unexpected,
-        };
-        return token;
+    pub fn scanToken(self: *Scanner) tokens.Token {
+        return tokens.Token{};
     }
 };
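
The right-hand history reduces scanToken to a stub that returns tokens.Token{}. That default initializer only compiles if Token either has no fields or gives every field a default value. token.zig itself is not part of this comparison; a minimal sketch consistent with the fields and TokenType tags referenced in these diffs (ttype, lexeme, line, and the operator, string, and EOF tags) could look like the following. The default values are an assumption for illustration, not code from either commit:

pub const TokenType = enum {
    LEFT_PAREN, RIGHT_PAREN, LEFT_BRACE, RIGHT_BRACE,
    COMMA, DOT, MINUS, PLUS, SEMICOLON, SLASH, STAR,
    BANG, BANG_EQUAL, EQUAL, EQUAL_EQUAL,
    GREATER, GREATER_EQUAL, LESS, LESS_EQUAL,
    STRING, EOF,
};

pub const Token = struct {
    // Assumed defaults so that `tokens.Token{}` compiles in the stubbed scanToken.
    ttype: TokenType = .EOF,
    lexeme: []const u8 = "",
    line: usize = 0,
};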


@@ -98,7 +98,7 @@ pub const Scanner = struct {
         return self.source[self.current - 1];
     }
-    pub fn currentLexeme(self: *Scanner) []u8 {
+    fn currentLexeme(self: *Scanner) []u8 {
         return self.source[self.start..self.current];
     }