add consumption of comments and strings

Luna 2019-05-31 21:46:18 -03:00
parent 9a2c50a53e
commit d1db7a0bd9
3 changed files with 116 additions and 8 deletions

View file

@@ -14,7 +14,15 @@ fn run(allocator: *Allocator, data: []u8) !void {
var it = tokens.iterator();
while (it.next()) |token| {
try token.Simple.printToken(stdout);
switch (token) {
.Simple => |value| {
try value.printToken(stdout);
},
.Slice => |value| {
try value.printToken(stdout);
},
}
hadError = false;
}
}
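
The loop above used to call token.Simple.printToken unconditionally, which only works while every token is the Simple variant; now that slice-carrying tokens exist, it switches on the union and captures whichever payload is active (the Token type itself becomes a tagged union(enum) in the last file of this commit, which is what allows the switch). A minimal sketch of the pattern, using illustrative names and present-day Zig syntax rather than the repo's actual token module:

    const std = @import("std");

    // Illustrative stand-ins, not the repo's SimpleToken/SliceToken.
    const Tok = union(enum) {
        simple: u8,        // e.g. a single-character lexeme
        slice: []const u8, // e.g. the contents of a string literal
    };

    // A switch on a tagged union matches the active variant and
    // |captures| its payload.
    fn printTok(t: Tok) void {
        switch (t) {
            .simple => |c| std.debug.print("simple '{c}'\n", .{c}),
            .slice => |s| std.debug.print("slice \"{s}\"\n", .{s}),
        }
    }

    pub fn main() void {
        printTok(Tok{ .simple = '+' });
        printTok(Tok{ .slice = "hello" });
    }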

View file

@@ -44,6 +44,17 @@ pub const Scanner = struct {
});
}
fn addSliceToken(self: *Scanner, ttype: token.TokenType, slice: []u8) !void {
try self.addToken(token.Token{
.Slice = token.SliceToken.init(
ttype,
self.currentLexeme(),
self.line,
slice,
),
});
}
fn addToken(
self: *Scanner,
tok: token.Token,
@@ -51,6 +62,59 @@ pub const Scanner = struct {
try self.tokens.append(tok);
}
/// Check if the next character matches what is expected.
fn match(self: *Scanner, expected: u8) bool {
if (self.isAtEnd()) return false;
if (self.source[self.current] != expected) return false;
self.current += 1;
return true;
}
/// Add a SimpleToken of type_match if the next character is
/// `expected`. Adds a SimpleToken of type_nomatch when it is not.
fn addMatchToken(
self: *Scanner,
expected: u8,
type_match: token.TokenType,
type_nomatch: token.TokenType,
) !void {
if (self.match(expected)) {
try self.addSimpleToken(type_match);
} else {
try self.addSimpleToken(type_nomatch);
}
}
fn peek(self: *Scanner) u8 {
if (self.isAtEnd()) return 0;
return self.source[self.current];
}
fn doString(self: *Scanner) !void {
// consume entire string
while (self.peek() != '"' and !self.isAtEnd()) {
if (self.peek() == '\n') self.line += 1;
_ = self.advance();
}
// unterminated string.
if (self.isAtEnd()) {
try main.doError(self.line, "Unterminated string.");
return;
}
// the closing ".
_ = self.advance();
// trim the surrounding quotes.
try self.addSliceToken(
.STRING,
self.source[self.start + 1 .. self.current - 1],
);
}
/// Scan through our tokens and add them to the Scanner's token list.
fn scanToken(self: *Scanner) !void {
var c = self.advance();
@@ -65,6 +129,30 @@ pub const Scanner = struct {
'+' => try self.addSimpleToken(.PLUS),
';' => try self.addSimpleToken(.SEMICOLON),
'*' => try self.addSimpleToken(.STAR),
'!' => try self.addMatchToken('=', .BANG_EQUAL, .BANG),
'=' => try self.addMatchToken('=', .EQUAL_EQUAL, .EQUAL),
'<' => try self.addMatchToken('=', .LESS_EQUAL, .LESS),
'>' => try self.addMatchToken('=', .GREATER_EQUAL, .GREATER),
'/' => blk: {
// consume comments
if (self.match('/')) {
while (self.peek() != '\n' and !self.isAtEnd()) {
_ = self.advance();
}
} else {
try self.addSimpleToken(.SLASH);
}
},
' ', '\r', '\t' => blk: {},
'\n' => blk: {
self.line += 1;
},
'"' => try self.doString(),
else => {
try main.doError(self.line, "Unexpected character");
},

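Taken together, the scanner now has one character of lookahead: match consumes the next byte only when it equals the expected one (used above for the two-character operators !=, ==, <= and >=), while peek inspects it without consuming anything. A // comment is then skipped up to the end of the line without emitting a token, and a string literal is consumed up to its closing quote, bumping the line counter on embedded newlines and storing its contents as a slice of the source with the surrounding quotes trimmed (self.source[self.start + 1 .. self.current - 1]). A small self-contained sketch of the same idea, with illustrative names and present-day Zig syntax rather than the repo's Scanner:

    const std = @import("std");

    // Toy cursor over a byte slice: peek() looks at the next byte without
    // consuming it, advance() consumes it.
    const Mini = struct {
        source: []const u8,
        current: usize = 0,

        fn isAtEnd(self: *Mini) bool {
            return self.current >= self.source.len;
        }

        fn peek(self: *Mini) u8 {
            if (self.isAtEnd()) return 0;
            return self.source[self.current];
        }

        fn advance(self: *Mini) u8 {
            const c = self.source[self.current];
            self.current += 1;
            return c;
        }

        // Skip the rest of the line (a // comment), up to but not
        // including the newline.
        fn skipComment(self: *Mini) void {
            while (!self.isAtEnd() and self.peek() != '\n') _ = self.advance();
        }

        // Consume a string literal starting at the opening quote and return
        // its contents without the surrounding quotes, or null if unterminated.
        fn readString(self: *Mini) ?[]const u8 {
            const start = self.current;
            _ = self.advance(); // opening "
            while (!self.isAtEnd() and self.peek() != '"') _ = self.advance();
            if (self.isAtEnd()) return null;
            _ = self.advance(); // closing "
            return self.source[start + 1 .. self.current - 1];
        }
    };

    pub fn main() void {
        var m = Mini{ .source = "// a comment\n\"hello\"" };
        m.skipComment();
        _ = m.advance(); // the newline
        std.debug.print("{s}\n", .{m.readString().?}); // prints: hello
    }

Returning a slice into the original source buffer means string contents need no allocation or copying, as long as the source outlives the tokens that point into it.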
View file

@@ -76,18 +76,30 @@ pub fn TokenFactory(
}
pub fn printToken(self: Self, stdout: var) !void {
if (T == void) {
try stdout.print(
"Token(type={x}, lexeme='{}', line={}\n",
"Token(type={x}, lexeme='{}', line={})\n",
self.ttype,
self.lexeme,
self.line,
);
} else {
try stdout.print(
"Token(type={x}, lexeme='{}', line={} literal='{}')\n",
self.ttype,
self.lexeme,
self.line,
self.literal,
);
}
}
};
}
pub const SimpleToken = TokenFactory(void);
pub const SliceToken = TokenFactory([]u8);
pub const Token = union {
pub const Token = union(enum) {
Simple: SimpleToken,
Slice: SliceToken,
};
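
Two changes in the token module matter for the rest of the commit: the first format string gains its missing closing parenthesis, and Token becomes a tagged union (union(enum)) so the switch in the main loop can dispatch on it. printToken leans on TokenFactory being a comptime generic: T is known at compile time, so if (T == void) picks the output without a literal for SimpleToken and the output that includes the literal for SliceToken, at no runtime cost. A rough sketch of that pattern, with hypothetical names and present-day Zig syntax:

    const std = @import("std");

    // A generic "factory" returns a struct type; a comptime-known check on
    // the type parameter selects how instances describe themselves.
    fn Box(comptime T: type) type {
        return struct {
            const Self = @This();
            label: []const u8,
            payload: T,

            fn describe(self: Self) void {
                if (T == void) {
                    // Nothing to show for the void specialization.
                    std.debug.print("Box(label='{s}')\n", .{self.label});
                } else {
                    // {any} works for any payload type, including slices.
                    std.debug.print("Box(label='{s}', payload={any})\n", .{ self.label, self.payload });
                }
            }
        };
    }

    const PlainBox = Box(void);
    const SliceBox = Box([]const u8);

    pub fn main() void {
        const plain = PlainBox{ .label = "simple", .payload = {} };
        const slice = SliceBox{ .label = "slice", .payload = "hello" };
        plain.describe();
        slice.describe();
    }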