scanner: add keyword handling (copied off the old scanner)
as with most things, lol
parent d62c58a195
commit 7d7aabbdd7
3 changed files with 73 additions and 5 deletions
@@ -23,7 +23,7 @@ pub const Compiler = struct {
     }

     pub fn compile(self: *Compiler) !void {
-        var scanr = scanner.Scanner.init(self.allocator, self.src);
+        var scanr = try scanner.Scanner.init(self.allocator, self.src);
         var line: usize = 0;
         while (true) {
             var token_opt = scanr.scanToken() catch |err| {

@@ -25,8 +25,62 @@ fn isAlphaNumeric(char: u8) bool {
     return isAlpha(char) or isDigit(char);
 }

+pub const KeywordMap = std.AutoHashMap([]const u8, u6);
+
+/// The book does say that C doesn't have hashmaps. but Zig does. and I can
+/// use it here.
+fn initKeywordMap(allocator: *std.mem.Allocator) !KeywordMap {
+    var map = KeywordMap.init(allocator);
+
+    const keywords = [][]const u8{
+        "and"[0..],
+        "class"[0..],
+        "else"[0..],
+        "false"[0..],
+        "for"[0..],
+        "fun"[0..],
+        "if"[0..],
+        "nil"[0..],
+        "or"[0..],
+        "print"[0..],
+        "return"[0..],
+        "super"[0..],
+        "this"[0..],
+        "true"[0..],
+        "var"[0..],
+        "while"[0..],
+    };
+
+    const tags = []TokenType{
+        TokenType.AND,
+        TokenType.CLASS,
+        TokenType.ELSE,
+        TokenType.FALSE,
+        TokenType.FOR,
+        TokenType.FUN,
+        TokenType.IF,
+        TokenType.NIL,
+        TokenType.OR,
+        TokenType.PRINT,
+        TokenType.RETURN,
+        TokenType.SUPER,
+        TokenType.THIS,
+        TokenType.TRUE,
+        TokenType.VAR,
+        TokenType.WHILE,
+    };
+
+    for (keywords) |keyword, idx| {
+        var tag = @enumToInt(tags[idx]);
+        _ = try map.put(keyword, tag);
+    }
+
+    return map;
+}
+
 pub const Scanner = struct {
     source: []const u8,
+    keywords: KeywordMap,

     start: usize = 0,
     current: usize = 0,

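An aside on the table above: the keywords and tags arrays are only linked by position, so the loop that fills the map depends on both lists staying in the same order. Purely as a sketch, and not part of this commit, the same table could pair each keyword with its tag directly; this assumes a newer Zig standard library where std.StringHashMap (rather than the AutoHashMap used here) is the usual map for string keys, and the TokenType stand-in below is invented for the example:

    const std = @import("std");

    // Stand-in enum for illustration only; the real TokenType lives in the scanner module.
    const TokenType = enum { AND, CLASS, WHILE, IDENTIFIER };

    fn initKeywordMap(allocator: std.mem.Allocator) !std.StringHashMap(TokenType) {
        var map = std.StringHashMap(TokenType).init(allocator);
        errdefer map.deinit();

        // Each entry carries its keyword and its tag together, so the two cannot drift apart.
        const entries = .{
            .{ "and", TokenType.AND },
            .{ "class", TokenType.CLASS },
            .{ "while", TokenType.WHILE },
        };
        inline for (entries) |entry| {
            try map.put(entry[0], entry[1]);
        }
        return map;
    }

Storing the enum itself as the value also means a lookup such as map.get("while") comes back as an optional TokenType, with no @enumToInt / @intToEnum round trip.
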
@@ -34,10 +88,11 @@ pub const Scanner = struct {

     allocator: *Allocator,

-    pub fn init(allocator: *Allocator, data: []const u8) Scanner {
+    pub fn init(allocator: *Allocator, data: []const u8) !Scanner {
         return Scanner{
-            .allocator = allocator,
             .source = data,
+            .keywords = try initKeywordMap(allocator),
+            .allocator = allocator,
         };
     }

@@ -150,12 +205,26 @@ pub const Scanner = struct {
         return self.makeToken(.NUMBER);
     }

+    /// Either a keyword or an identifier come out of this.
     fn doIdentifier(self: *Scanner) Token {
         while (isAlphaNumeric(self.peek())) {
             _ = self.advance();
         }

-        return self.makeToken(.IDENTIFIER);
+        // after reading the identifier, we check
+        // if it is any of our keywords, if it is, then we add
+        // the specificed keyword type. if not, just .IDENTIFIER
+        var text = self.source[self.start..self.current];
+        var type_opt = self.keywords.get(text);
+        var toktype: TokenType = undefined;
+
+        if (type_opt) |kv| {
+            toktype = @intToEnum(TokenType, kv.value);
+        } else {
+            toktype = TokenType.IDENTIFIER;
+        }
+
+        return self.makeToken(toktype);
     }

     pub fn scanToken(self: *Scanner) !?Token {

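For what it's worth, the keyword-or-identifier decision above reduces to: take the table hit if there is one, otherwise fall back to IDENTIFIER. A small self-contained check of that fallback, again only a sketch against a newer Zig standard library (where get returns the value as an optional) and not code from this commit:

    const std = @import("std");

    test "keyword lookup falls back to IDENTIFIER" {
        const TokenType = enum { WHILE, IDENTIFIER };

        var keywords = std.StringHashMap(TokenType).init(std.testing.allocator);
        defer keywords.deinit();
        try keywords.put("while", TokenType.WHILE);

        // A hit resolves to the keyword's tag; a miss stays a plain identifier.
        try std.testing.expectEqual(TokenType.WHILE, keywords.get("while") orelse TokenType.IDENTIFIER);
        try std.testing.expectEqual(TokenType.IDENTIFIER, keywords.get("someVar") orelse TokenType.IDENTIFIER);
    }
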
@@ -20,7 +20,6 @@ fn isAlphaNumeric(char: u8) bool {
     return isAlpha(char) or isDigit(char);
 }

-// hashmaps don't work on HashMaps for some reason. anyways.
 pub const KeywordMap = std.AutoHashMap([]const u8, u6);

 fn initKeywordMap(allocator: *std.mem.Allocator) !KeywordMap {