forked from luna/jorts
add keyword map when scanner is initialized
- add basic identifier reading
parent 69aa7b493d
commit 9d4c1249b4

3 changed files with 78 additions and 3 deletions
@@ -9,7 +9,7 @@ fn run(allocator: *Allocator, data: []u8) !void {
     var stdout_file = try std.io.getStdOut();
     const stdout = &stdout_file.outStream().stream;

-    var scanner = Scanner.init(allocator, data);
+    var scanner = try Scanner.init(allocator, data);
     var tokens = try scanner.scanTokens();
     var it = tokens.iterator();

@@ -4,23 +4,88 @@ const token = @import("token.zig");
 const main = @import("main.zig");

 const TokenList = std.ArrayList(token.Token);
+const TokenType = token.TokenType;

 fn isDigit(char: u8) bool {
     return char >= '0' and char <= '9';
 }

+fn isAlpha(c: u8) bool {
+    return (c >= 'a' and c <= 'z') or
+        (c >= 'A' and c <= 'Z') or
+        c == '_';
+}
+
+fn isAlphaNumeric(char: u8) bool {
+    return isAlpha(char) or isDigit(char);
+}
+
+// hashmaps don't work on HashMaps for some reason. anyways.
+pub const KeywordMap = std.AutoHashMap([]const u8, u6);
+
+fn initKeywordMap(allocator: *std.mem.Allocator) !KeywordMap {
+    var map = KeywordMap.init(allocator);
+
+    const keywords = [][]const u8{
+        "and"[0..],
+        "class"[0..],
+        "else"[0..],
+        "false"[0..],
+        "for"[0..],
+        "fun"[0..],
+        "if"[0..],
+        "nil"[0..],
+        "or"[0..],
+        "print"[0..],
+        "return"[0..],
+        "super"[0..],
+        "this"[0..],
+        "true"[0..],
+        "var"[0..],
+        "while"[0..],
+    };
+
+    const tags = []TokenType{
+        TokenType.AND,
+        TokenType.CLASS,
+        TokenType.ELSE,
+        TokenType.FALSE,
+        TokenType.FOR,
+        TokenType.FUN,
+        TokenType.IF,
+        TokenType.NIL,
+        TokenType.OR,
+        TokenType.PRINT,
+        TokenType.RETURN,
+        TokenType.SUPER,
+        TokenType.THIS,
+        TokenType.TRUE,
+        TokenType.VAR,
+        TokenType.WHILE,
+    };
+
+    for (keywords) |keyword, idx| {
+        var tag = @enumToInt(tags[idx]);
+        _ = try map.put(keyword, tag);
+    }
+
+    return map;
+}
+
 pub const Scanner = struct {
     source: []u8,
     tokens: TokenList,
+    keywords: KeywordMap,

     start: usize = 0,
     current: usize = 0,
     line: usize = 1,

-    pub fn init(allocator: *std.mem.Allocator, data: []u8) Scanner {
+    pub fn init(allocator: *std.mem.Allocator, data: []u8) !Scanner {
         return Scanner{
             .source = data,
             .tokens = TokenList.init(allocator),
+            .keywords = try initKeywordMap(allocator),
         };
     }

@@ -161,6 +226,14 @@ pub const Scanner = struct {
         try self.addNumberToken(.NUMBER, num);
     }

+    fn doIdentifier(self: *Scanner) !void {
+        while (isAlphaNumeric(self.peek())) {
+            _ = self.advance();
+        }
+
+        try self.addSimpleToken(.IDENTIFIER);
+    }
+
     /// Scan through our tokens and add them to the Scanner's token list.
     fn scanToken(self: *Scanner) !void {
         var c = self.advance();
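In this commit doIdentifier always emits IDENTIFIER; the keyword map is built in init but not consulted yet. A hedged sketch of how the lookup might later slot into this method (hypothetical, not part of the diff; the exact return shape of HashMap.get differs across std versions of that era):

    // Hypothetical follow-up, assuming the era's HashMap.get returns an
    // optional entry with a .value field.
    fn doIdentifier(self: *Scanner) !void {
        while (isAlphaNumeric(self.peek())) {
            _ = self.advance();
        }

        // Slice the lexeme we just walked over and check it against the map.
        const text = self.source[self.start..self.current];
        if (self.keywords.get(text)) |kv| {
            // kv.value holds @enumToInt(TokenType.X) as a u6.
            try self.addSimpleToken(@intToEnum(TokenType, kv.value));
        } else {
            try self.addSimpleToken(.IDENTIFIER);
        }
    }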
@@ -203,6 +276,8 @@ pub const Scanner = struct {
             else => {
                 if (isDigit(c)) {
                     try self.doNumber();
+                } else if (isAlpha(c)) {
+                    try self.doIdentifier();
                 } else {
                     try main.doError(self.line, "Unexpected character");
                 }
@@ -1,6 +1,6 @@
 const std = @import("std");

-pub const TokenType = enum {
+pub const TokenType = enum(u6) {
     // Single-character tokens.
     LEFT_PAREN,
     RIGHT_PAREN,