Compare commits


3 commits

Author SHA1 Message Date
bba969922f add reading of keywords on doIdentifier 2019-05-31 22:39:53 -03:00
9d4c1249b4 add keyword map when scanner is initialized
- add basic identifier reading
2019-05-31 22:34:10 -03:00
69aa7b493d add number tokens 2019-05-31 22:08:41 -03:00
3 changed files with 148 additions and 4 deletions

View file

@@ -9,7 +9,7 @@ fn run(allocator: *Allocator, data: []u8) !void {
var stdout_file = try std.io.getStdOut();
const stdout = &stdout_file.outStream().stream;
- var scanner = Scanner.init(allocator, data);
+ var scanner = try Scanner.init(allocator, data);
var tokens = try scanner.scanTokens();
var it = tokens.iterator();
@@ -21,6 +21,9 @@ fn run(allocator: *Allocator, data: []u8) !void {
.Slice => |value| {
try value.printToken(stdout);
},
.Number => |value| {
try value.printToken(stdout);
},
}
hadError = false;

View file

@@ -4,19 +4,88 @@ const token = @import("token.zig");
const main = @import("main.zig");
const TokenList = std.ArrayList(token.Token);
const TokenType = token.TokenType;
fn isDigit(char: u8) bool {
return char >= '0' and char <= '9';
}
fn isAlpha(c: u8) bool {
return (c >= 'a' and c <= 'z') or
(c >= 'A' and c <= 'Z') or
c == '_';
}
fn isAlphaNumeric(char: u8) bool {
return isAlpha(char) or isDigit(char);
}
// hashmaps don't seem to work with the enum type directly for some reason, so the map stores its u6 tag instead. anyways.
pub const KeywordMap = std.AutoHashMap([]const u8, u6);
fn initKeywordMap(allocator: *std.mem.Allocator) !KeywordMap {
var map = KeywordMap.init(allocator);
const keywords = [][]const u8{
"and"[0..],
"class"[0..],
"else"[0..],
"false"[0..],
"for"[0..],
"fun"[0..],
"if"[0..],
"nil"[0..],
"or"[0..],
"print"[0..],
"return"[0..],
"super"[0..],
"this"[0..],
"true"[0..],
"var"[0..],
"while"[0..],
};
const tags = []TokenType{
TokenType.AND,
TokenType.CLASS,
TokenType.ELSE,
TokenType.FALSE,
TokenType.FOR,
TokenType.FUN,
TokenType.IF,
TokenType.NIL,
TokenType.OR,
TokenType.PRINT,
TokenType.RETURN,
TokenType.SUPER,
TokenType.THIS,
TokenType.TRUE,
TokenType.VAR,
TokenType.WHILE,
};
for (keywords) |keyword, idx| {
var tag = @enumToInt(tags[idx]);
_ = try map.put(keyword, tag);
}
return map;
}
pub const Scanner = struct {
source: []u8,
tokens: TokenList,
keywords: KeywordMap,
start: usize = 0,
current: usize = 0,
line: usize = 1,
- pub fn init(allocator: *std.mem.Allocator, data: []u8) Scanner {
+ pub fn init(allocator: *std.mem.Allocator, data: []u8) !Scanner {
return Scanner{
.source = data,
.tokens = TokenList.init(allocator),
.keywords = try initKeywordMap(allocator),
};
}
@@ -55,6 +124,18 @@ pub const Scanner = struct {
});
}
/// Keep in mind Lox only has a single number type, and it is a float.
fn addNumberToken(self: *Scanner, ttype: token.TokenType, num: f32) !void {
try self.addToken(token.Token{
.Number = token.NumberToken.init(
ttype,
self.currentLexeme(),
self.line,
num,
),
});
}
fn addToken(
self: *Scanner,
tok: token.Token,
@@ -114,6 +195,58 @@ pub const Scanner = struct {
);
}
fn peekNext(self: *Scanner) u8 {
if (self.current + 1 >= self.source.len) return 0;
return self.source[self.current + 1];
}
/// Consume a number
fn doNumber(self: *Scanner) !void {
while (isDigit(self.peek())) {
_ = self.advance();
}
// check if it's a number like 12.34, where the '.' character
// exists and the one next to it is a digit.
if (self.peek() == '.' and isDigit(self.peekNext())) {
_ = self.advance();
while (isDigit(self.peek())) {
_ = self.advance();
}
}
// after going through all of the number, we can just use fmt.parseFloat
var num = try std.fmt.parseFloat(
f32,
self.source[self.start..self.current],
);
try self.addNumberToken(.NUMBER, num);
}
fn doIdentifier(self: *Scanner) !void {
while (isAlphaNumeric(self.peek())) {
_ = self.advance();
}
// after reading the identifier, we check
// if it is one of our keywords; if it is, we add
// the specified keyword type. if not, just .IDENTIFIER
var text = self.source[self.start..self.current];
var type_opt = self.keywords.get(text);
var toktype: TokenType = undefined;
if (type_opt) |kv| {
toktype = @intToEnum(TokenType, kv.value);
} else {
toktype = TokenType.IDENTIFIER;
}
try self.addSimpleToken(toktype);
}
/// Scan a single token from the source and add it to the Scanner's token list.
fn scanToken(self: *Scanner) !void {
var c = self.advance();
@@ -154,7 +287,13 @@ pub const Scanner = struct {
'"' => try self.doString(),
else => {
if (isDigit(c)) {
try self.doNumber();
} else if (isAlpha(c)) {
try self.doIdentifier();
} else {
try main.doError(self.line, "Unexpected character");
}
},
}
}
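
For reference (not part of the diff), a rough sketch of how the new identifier path resolves keywords; allocator here stands in for whatever allocator the caller already hands to Scanner.init:

    var map = try initKeywordMap(allocator);
    // "while" is a key in the map, so doIdentifier gets its u6 value back and
    // turns it into TokenType.WHILE with @intToEnum.
    const hit = map.get("while"[0..]); // non-null; hit.?.value == @enumToInt(TokenType.WHILE)
    // anything not in the map misses, and the token falls back to .IDENTIFIER.
    const miss = map.get("foo"[0..]); // null
    // the number path ends similarly: "12.34" is sliced out of source, fed to
    // std.fmt.parseFloat(f32, ...), and the result becomes a NUMBER token.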

View file

@@ -1,6 +1,6 @@
const std = @import("std");
- pub const TokenType = enum {
+ pub const TokenType = enum(u6) {
// Single-character tokens.
LEFT_PAREN,
RIGHT_PAREN,
@@ -98,8 +98,10 @@ pub fn TokenFactory(
pub const SimpleToken = TokenFactory(void);
pub const SliceToken = TokenFactory([]u8);
pub const NumberToken = TokenFactory(f32);
pub const Token = union(enum) {
Simple: SimpleToken,
Slice: SliceToken,
Number: NumberToken,
};
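
One aside on the enum(u6) change (also not part of the diff): the explicit u6 tag lines up with the u6 value type of KeywordMap, so the @enumToInt in initKeywordMap and the @intToEnum in doIdentifier round-trip the same integer:

    const tag: u6 = @enumToInt(TokenType.WHILE); // what initKeywordMap stores
    const ttype = @intToEnum(TokenType, tag);    // what doIdentifier recovers; ttype == TokenType.WHILE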