forked from luna/jorts
scanner: add basic error handling, strings, comments
parent f4f1fe1fbc
commit 27b04e1612
3 changed files with 64 additions and 11 deletions
@@ -26,17 +26,29 @@ pub const Compiler = struct {
         var scanr = scanner.Scanner.init(self.allocator, self.src);
         var line: usize = 0;
         while (true) {
-            var token = try scanr.scanToken();
+            var token_opt = scanr.scanToken() catch |err| {
+                std.debug.warn("Scan Error: {x}\n", err);
+                std.debug.warn(
+                    "line: {}, cur lexeme: {}\n",
+                    scanr.line,
+                    scanr.currentLexeme(),
+                );
+                break;
+            };

-            if (token.line != line) {
-                try self.stdout.print("{} ", token.line);
-                line = token.line;
+            if (token_opt) |token| {
+                if (token.line != line) {
+                    try self.stdout.print("{} ", token.line);
+                    line = token.line;
+                } else {
+                    try self.stdout.print(" | ");
+                }
+
+                try self.stdout.print("{} '{}'\n", token.ttype, token.lexeme);
+
+                if (token.ttype == TokenType.EOF) break;
             } else {
-                try self.stdout.print(" | ");
+                break;
             }
-
-            try self.stdout.print("{} '{}'\n", token.ttype, token.lexeme);
-
-            if (token.ttype == TokenType.EOF) break;
         }
     }
 };
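Note: the loop above leans on Zig's error-union-of-optional return type (scanToken now returns !?Token, see the scanner hunks below): catch |err| handles the error case and if (token_opt) |token| unwraps the optional. A minimal standalone sketch of that pattern, written in current Zig syntax rather than the older std this repo targets (std.debug.warn), with made-up names (ScanError, scanDigit):

    const std = @import("std");

    const ScanError = error{Unexpected};

    // Error union of an optional: a call can fail, yield nothing, or yield a value.
    fn scanDigit(c: u8) ScanError!?u8 {
        if (c == ' ') return null; // nothing to report, like a skipped comment
        if (c < '0' or c > '9') return ScanError.Unexpected;
        return c - '0';
    }

    pub fn main() void {
        for ("1 2x") |c| {
            const digit_opt = scanDigit(c) catch |err| {
                std.debug.print("scan error: {}\n", .{err});
                break;
            };
            if (digit_opt) |d| {
                std.debug.print("digit: {}\n", .{d});
            }
        }
    }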
@@ -36,10 +36,14 @@ pub const Scanner = struct {
         return self.source[self.current - 1];
     }

+    pub fn currentLexeme(self: *Scanner) []const u8 {
+        return self.source[self.start..self.current];
+    }
+
     fn makeToken(self: *Scanner, ttype: TokenType) Token {
         return Token{
             .ttype = ttype,
-            .lexeme = self.source[self.start..self.current],
+            .lexeme = self.currentLexeme(),
             .line = self.line,
         };
     }
@@ -73,6 +77,11 @@ pub const Scanner = struct {
         return self.source[self.current];
     }

+    fn peekNext(self: *Scanner) u8 {
+        if (self.isAtEnd()) return 0;
+        return self.source[self.current + 1];
+    }
+
     fn skipWhitespace(self: *Scanner) void {
         while (true) {
             var c = self.peek();
@@ -89,7 +98,26 @@ pub const Scanner = struct {
         }
     }

-    pub fn scanToken(self: *Scanner) !tokens.Token {
+    fn doString(self: *Scanner) !Token {
+        // consume entire string
+        while (self.peek() != '"' and !self.isAtEnd()) {
+            if (self.peek() == '\n') self.line += 1;
+            _ = self.advance();
+        }
+
+        // unterminated string.
+        if (self.isAtEnd()) {
+            return TokenError.Unterminated;
+        }
+
+        // the closing ".
+        _ = self.advance();
+
+        // trim the surrounding quotes.
+        return self.makeToken(.STRING);
+    }
+
+    pub fn scanToken(self: *Scanner) !?Token {
         self.skipWhitespace();
         self.start = self.current;
         if (self.isAtEnd()) return self.makeToken(TokenType.EOF);
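Note: TokenError.Unterminated above and TokenError.Unexpected below are referenced but not declared in these hunks; the error set presumably lives in the tokens module, one of the three changed files. A guess at its shape (names beyond the two referenced ones are unknown), not the actual definition:

    pub const TokenError = error{
        Unexpected,
        Unterminated,
    };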
@@ -113,6 +141,19 @@ pub const Scanner = struct {
             '<' => self.makeMatchToken('=', .LESS_EQUAL, .LESS),
             '>' => self.makeMatchToken('=', .GREATER_EQUAL, .GREATER),

+            '/' => blk: {
+                if (self.peekNext() == '/') {
+                    while (self.peek() != '\n' and !self.isAtEnd()) {
+                        _ = self.advance();
+                    }
+                    break :blk null;
+                } else {
+                    break :blk self.makeToken(.SLASH);
+                }
+            },
+
+            '"' => try self.doString(),
+
             else => return TokenError.Unexpected,
         };
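Note: the '/' arm uses a labeled block (blk:) so the switch can yield either null (line comment consumed, no token emitted) or a SLASH token. A small self-contained illustration of that construct in current Zig syntax, using hypothetical values:

    const std = @import("std");

    pub fn main() void {
        const c: u8 = '/';
        const next: u8 = '/';

        // A labeled block is an expression; "break :blk value" sets its result.
        const result: ?u8 = switch (c) {
            '/' => blk: {
                if (next == '/') break :blk null; // a comment: no value produced
                break :blk c;
            },
            else => c,
        };

        std.debug.print("result: {any}\n", .{result});
    }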
@@ -98,7 +98,7 @@ pub const Scanner = struct {
         return self.source[self.current - 1];
     }

-    fn currentLexeme(self: *Scanner) []u8 {
+    pub fn currentLexeme(self: *Scanner) []u8 {
         return self.source[self.start..self.current];
     }