Compare commits

No commits in common. "a6b928b42b3b55a7b5f36a1d2f892e86c2776968" and "1269aeeac19daa7a5fe9cec627109a8a98b85982" have entirely different histories.

a6b928b42b...1269aeeac1

6 changed files with 171 additions and 264 deletions
@@ -90,8 +90,6 @@ pub fn EndpointRequest(comptime Endpoint: type) type {
 body: Body,
 query: Query,

-mount_path: []const u8,
-
 const args_middleware = //if (Args == void)
 //mdw.injectContext(.{ .args = {} })
 //else
@@ -122,8 +120,6 @@ fn CallApiEndpoint(comptime Endpoint: type) type {
 .args = ctx.args,
 .body = ctx.body,
 .query = ctx.query_params,
-
-.mount_path = if (@hasField(@TypeOf(ctx), "mounted_at")) ctx.mounted_at else "",
 };

 var response = Response{ .headers = http.Fields.init(ctx.allocator), .res = res };
@@ -245,22 +245,8 @@ const drive = struct {
 const info = try srv.driveGet(req.args.path);
 defer util.deepFree(srv.allocator, info);

-var breadcrumbs = std.ArrayList([]const u8).init(srv.allocator);
-defer breadcrumbs.deinit();
-
-var iter = util.PathIter.from(req.args.path);
-while (iter.next()) |p| {
-std.log.debug("breadcrumb: {s}", .{p});
-try breadcrumbs.append(if (p.len != 0) p else continue);
-}
-
 switch (info) {
-.dir => |dir| try res.template(.ok, srv, dir_tmpl, .{
-.dir = dir,
-.breadcrumbs = breadcrumbs.items,
-.mount_path = req.mount_path,
-.base_drive_path = "drive",
-}),
+.dir => |dir| try res.template(.ok, srv, dir_tmpl, .{ .dir = dir }),
 else => unreachable,
 }
 }
@@ -1,52 +1,29 @@
 <div class="drive">
-<ol class="breadcrumbs">
-<li>
-<a href="{.mount_path}/{.base_drive_path}/">
-<i class="fa-solid fa-cloud"></i>
-<span class="directory">/</span>
-</a>
-</li>
-{#for .breadcrumbs |$crumb, $i| =}
-<i class="fa-solid fa-chevron-right"></i>
-<li>
-<a href="{.mount_path}/{.base_drive_path}
-{= #for @slice(.breadcrumbs, 0, $i) |$c|}/{$c}{/for =}
-/{$crumb}">
-{$crumb}
-</a>
-</li>
-{/for =}
-</ol>
 <table class="directory-listing">
 {#for .dir.children.? |$child| =}
 <tr>
 {#switch $child case dir |$dir| =}
-<td class="icons"><i class="fa-solid fa-folder-closed fa-fw"></i></td>
-<td class="name">
-<a href="{.mount_path}/{.base_drive_path}{= #for @slice(.breadcrumbs, 0, .breadcrumbs.len) |$c|}/{$c}{/for =}/{$dir.name.?}">
-{$dir.name.?}
-</a>
-</td>
+<td class="icon"></td>
+<td class="icon"><i class="fa-solid fa-folder-closed"></i></td>
+<td class="name"><a href="./{$dir.name.?}">{$dir.name.?}</a></td>
 {#case file |$file|}
-<td class="icons">
-{#if %user |$u|}
+<td class="icon">
+{= #if %user |$u|}
 {#if $u.avatar_file_id == $file.meta.id =}
-<i class="fa-solid fa-user fa-fw"></i>
+<i class="fa-solid fa-user"></i>
 {= #elif $u.header_file_id == $file.meta.id =}
-<i class="fa-solid fa-heading fa-fw"></i>
+<i class="fa-solid fa-heading"></i>
 {= /if =}
-{= /if}
-{#if $file.meta.sensitive =}
-<i class="fa-solid fa-eye-slash fa-fw"></i>
+{= /if =}
+</td>
+<td class="icon">
+{= #if $file.meta.sensitive =}
+<i class="fa-solid fa-eye-slash"></i>
 {= #else =}
-<i class="fa-solid fa-file fa-fw"></i>
-{= /if}
-</td>
-<td class="name">
-<a href="{.mount_path}/{.base_drive_path}{= #for @slice(.breadcrumbs, 0, .breadcrumbs.len) |$c|}/{$c}{/for =}/{$file.name.?}">
-{$file.name.?}
-</a>
+<i class="fa-solid fa-file"></i>
+{= /if =}
 </td>
+<td class="name"><a href="./{$file.name.?}">{$file.name.?}</a></td>
 <td class="content-type">{#if $file.meta.content_type |$t|}{$t}{/if}</td>
 <td class="size">{$file.meta.size}</td>
 <td class="created-at">{$file.meta.created_at}</td>
@@ -82,15 +82,13 @@ fn executeStatement(
 const iterable = try evaluateExpression(loop.header.iterable, args, captures, context);
 const subtemplate = loop.subtemplate;
 //std.log.debug("{any}", .{subtemplate});
-for (iterable) |v, i| {
-const with_item_capture = addCapture(captures, loop.header.item_capture, v);
-const with_idx_capture = if (comptime loop.header.idx_capture) |name| addCapture(with_item_capture, name, i) else with_item_capture;
+for (iterable) |v| {
 try executeTemplate(
 writer,
 templates,
 subtemplate,
 args,
-with_idx_capture,
+addCapture(captures, loop.header.capture, v),
 context,
 );
 }
@@ -213,19 +211,35 @@ fn print(writer: anytype, arg: anytype) !void {
 try std.fmt.format(writer, "{}", .{arg});
 }

-const ExpressionError = error{ IndexOutOfBounds, NullOptional };
+const DerefError = error{NullOptional};
+fn Deref(comptime T: type, comptime names: []const DerefDecl) type {
+if (names.len == 0) return T;

-fn Deref(comptime T: type, comptime field: []const u8) type {
-if (std.meta.trait.isIndexable(T) and std.mem.eql(u8, field, "len")) return usize;
-switch (@typeInfo(T)) {
-.Pointer => return Deref(std.meta.Child(T), field),
-.Struct => |info| for (info.fields) |f| {
-if (std.mem.eql(u8, field, f.name)) return f.field_type;
-} else @compileError("Field " ++ field ++ " does not exist on type " ++ @typeName(T)),
-else => @compileError("Cannot retrieve field " ++ field ++ " from type " ++ @typeName(T)),
+// Compiler segfaults when I use std.meta to get this info so we search it manually
+const F = switch (names[0]) {
+.field => |name| blk: {
+const field = for (@typeInfo(T).Struct.fields) |f| {
+if (std.mem.eql(u8, f.name, name)) break f;
+} else @compileError("Unknown field " ++ name ++ " in type " ++ @typeName(T));
+break :blk field.field_type;
+},
+.optional_unwrap => std.meta.Child(T),
+};
+
+return Deref(F, names[1..]);
+}
+
+fn deref(arg: anytype, comptime names: []const DerefDecl) DerefError!Deref(@TypeOf(arg), names) {
+if (names.len == 0) return arg;
+
+switch (names[0]) {
+.field => |name| return deref(@field(arg, name), names[1..]),
+.optional_unwrap => return arg orelse error.NullOptional,
 }
 }

+const ExpressionError = error{IndexOutOfBounds} || DerefError;
+
 fn EvaluateExpression(
 comptime expression: Expression,
 comptime Args: type,
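The hunk above replaces the old one-field-at-a-time `Deref` with a `DerefDecl` chain that is resolved in a single recursive pass. For reference, here is a minimal standalone sketch (not part of the diff) that mirrors the new `Deref`/`deref` pair and walks a chain equivalent to the template expression `.dir.children.?`; it assumes the same pre-0.11 Zig as the surrounding code (`field_type`, `@typeInfo(T).Struct`), and the test harness around it is hypothetical.

```zig
const std = @import("std");

// Mirror of the DerefDecl union added in this change.
const DerefDecl = union(enum) {
    field: []const u8,
    optional_unwrap: void,
};

// Comptime type resolution for a deref chain, as in the new Deref above.
fn Deref(comptime T: type, comptime names: []const DerefDecl) type {
    if (names.len == 0) return T;
    const F = switch (names[0]) {
        .field => |name| blk: {
            const f = for (@typeInfo(T).Struct.fields) |sf| {
                if (std.mem.eql(u8, sf.name, name)) break sf;
            } else @compileError("Unknown field " ++ name ++ " in type " ++ @typeName(T));
            break :blk f.field_type;
        },
        .optional_unwrap => std.meta.Child(T),
    };
    return Deref(F, names[1..]);
}

// Runtime walk of the same chain; a trailing `.?` unwraps the optional.
fn deref(arg: anytype, comptime names: []const DerefDecl) error{NullOptional}!Deref(@TypeOf(arg), names) {
    if (names.len == 0) return arg;
    switch (names[0]) {
        .field => |name| return deref(@field(arg, name), names[1..]),
        .optional_unwrap => return arg orelse error.NullOptional,
    }
}

test "deref chain equivalent to .dir.children.?" {
    const Dir = struct { children: ?[]const u8 };
    const args = .{ .dir = Dir{ .children = "abc" } };
    // parseDeref (added further down in this diff) produces this chain for `.dir.children.?`.
    try std.testing.expectEqualStrings("abc", try deref(args, &[_]DerefDecl{
        .{ .field = "dir" },
        .{ .field = "children" },
        .{ .optional_unwrap = {} },
    }));
}
```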
@@ -233,20 +247,14 @@ fn EvaluateExpression(
 comptime Context: type,
 ) type {
 return switch (expression) {
-.args => Args,
-.captures => Captures,
-.context => Context,
-.deref => |expr| {
-const T = EvaluateExpression(expr.container, Args, Captures, Context);
-return Deref(T, expr.field);
-},
+.arg_deref => |names| Deref(Args, names),
+.capture_deref => |names| Deref(Captures, names),
+.context_deref => |names| Deref(Context, names),
 .equals => bool,
 .builtin => |call| switch (call.*) {
 .isTag => bool,
 .slice => |sl| []const std.meta.Elem(EvaluateExpression(sl.iterable, Args, Captures, Context)),
 },
-.optional_unwrap => |expr| std.meta.Child(EvaluateExpression(expr.*, Args, Captures, Context)),
-.int => isize,
 };
 }

@@ -257,15 +265,9 @@ fn evaluateExpression(
 context: anytype,
 ) ExpressionError!EvaluateExpression(expression, @TypeOf(args), @TypeOf(captures), @TypeOf(context)) {
 return switch (expression) {
-.args => args,
-.captures => captures,
-.context => context,
-.deref => |expr| {
-return @field(
-try evaluateExpression(expr.container, args, captures, context),
-expr.field,
-);
-},
+.arg_deref => |names| try deref(args, names),
+.capture_deref => |names| try deref(captures, names),
+.context_deref => |names| try deref(context, names),
 .equals => |eql| {
 const lhs = try evaluateExpression(eql.lhs, args, captures, context);
 const rhs = try evaluateExpression(eql.rhs, args, captures, context);
@@ -283,19 +285,14 @@ fn evaluateExpression(
 },
 .slice => |sl| {
 const iterable = try evaluateExpression(sl.iterable, args, captures, context);
-const start = std.math.cast(usize, try evaluateExpression(sl.start, args, captures, context)) orelse return error.IndexOutOfBounds;
-const end = std.math.cast(usize, try evaluateExpression(sl.end, args, captures, context)) orelse return error.IndexOutOfBounds;
+const start = try evaluateExpression(sl.start, args, captures, context);
+const end = try evaluateExpression(sl.end, args, captures, context);

 if (comptime std.meta.trait.is(.Array)(@TypeOf(iterable))) @compileError("Cannot slice an array, pass a slice or pointer to array instead");
 if (start > iterable.len or end > iterable.len) return error.IndexOutOfBounds;
 return iterable[start..end];
 },
 },
-.optional_unwrap => |expr| {
-const val = try evaluateExpression(expr.*, args, captures, context);
-return val orelse error.NullOptional;
-},
-.int => |i| return i,
 };
 }

@@ -309,14 +306,12 @@ fn AddCapture(comptime Root: type, comptime name: []const u8, comptime Val: type
 .alignment = @alignOf(Val),
 }};

-const Result = @Type(.{ .Struct = .{
+return @Type(.{ .Struct = .{
 .layout = .Auto,
 .fields = fields,
 .decls = &.{},
 .is_tuple = false,
 } });
-
-return Result;
 }

 fn addCapture(root: anytype, comptime name: []const u8, val: anytype) AddCapture(@TypeOf(root), name, @TypeOf(val)) {
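`addCapture`, whose type-building tail appears in the hunk just above, is how the `for` loop in `executeStatement` now introduces its single capture into scope. The following is a hypothetical standalone sketch — the `@Type` construction matches the hunk, but the field-copying body of `addCapture` is assumed, since only part of `AddCapture` is shown in this diff — targeting the same pre-0.11 Zig as the rest of the file.

```zig
const std = @import("std");

// Sketch of AddCapture: a new struct type with all of Root's fields plus one extra field.
fn AddCapture(comptime Root: type, comptime name: []const u8, comptime Val: type) type {
    var fields: []const std.builtin.Type.StructField = @typeInfo(Root).Struct.fields;
    fields = fields ++ [_]std.builtin.Type.StructField{.{
        .name = name,
        .field_type = Val,
        .default_value = null,
        .is_comptime = false,
        .alignment = @alignOf(Val),
    }};
    return @Type(.{ .Struct = .{
        .layout = .Auto,
        .fields = fields,
        .decls = &.{},
        .is_tuple = false,
    } });
}

// Hypothetical runtime half: copy the existing captures, then set the new one.
fn addCapture(root: anytype, comptime name: []const u8, val: anytype) AddCapture(@TypeOf(root), name, @TypeOf(val)) {
    var result: AddCapture(@TypeOf(root), name, @TypeOf(val)) = undefined;
    inline for (@typeInfo(@TypeOf(root)).Struct.fields) |f| {
        @field(result, f.name) = @field(root, f.name);
    }
    @field(result, name) = val;
    return result;
}

test "addCapture introduces a loop capture" {
    // Roughly what executeStatement does per iteration: addCapture(captures, loop.header.capture, v)
    const caps = addCapture(.{}, "v", @as(u8, 7));
    try std.testing.expectEqual(@as(u8, 7), caps.v);
}
```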
@@ -527,7 +522,7 @@ fn parseTemplateTokens(comptime tokens: ControlTokenIter) []const TemplateToken

 while (iter.next()) |token| switch (token) {
 .whitespace => |wsp| items = items ++ [_]TemplateToken{.{ .whitespace = wsp }},
-.number, .text => |text| items = items ++ [_]TemplateToken{.{ .text = text }},
+.text => |text| items = items ++ [_]TemplateToken{.{ .text = text }},
 .open_bracket => {
 const next = iter.next() orelse @compileError("Unexpected end of template");
 if (next == .open_bracket) {
@@ -565,79 +560,33 @@ fn parseTemplateTokens(comptime tokens: ControlTokenIter) []const TemplateToken
 }
 }

-fn tryParseIdentifier(comptime tokens: ControlTokenIter) ?ParseResult(ControlTokenIter, []const u8) {
-comptime {
-var iter = skipWhitespace(tokens);
-
-var ident: []const u8 = "";
-var first: bool = true;
-while (iter.next()) |token| switch (token) {
-.number, .text => |text| {
-if (first and token == .number) return null;
-ident = ident ++ text;
-first = false;
-},
-else => {
-iter.putBack(token);
-break;
-},
-};
-
-if (first) return null;
-
-return ParseResult(ControlTokenIter, []const u8){
-.new_iter = iter,
-.item = ident,
-};
-}
-}
-
 fn parseExpression(comptime tokens: ControlTokenIter) ParseResult(ControlTokenIter, Expression) {
 comptime {
 var iter = tokens;

 var last_valid_iter: ?ControlTokenIter = null;
 var expr: ?Expression = null;
-while (iter.next()) |token| {
-switch (token) {
+while (iter.next()) |token| switch (token) {
 .whitespace => {},
 .period => {
-iter = skipWhitespace(iter);
-if (expr == null) {
-expr = .{ .args = {} };
-if (iter.peek()) |n| if (n == .text) iter.putBack(.{ .period = {} });
-} else if (tryParseIdentifier(iter)) |ident| {
-iter = ident.new_iter;
-
-expr = .{
-.deref = &.{
-.container = expr.?,
-.field = ident.item,
-},
-};
-} else if (iter.peek()) |next| if (next == .question_mark) {
-_ = iter.next();
-expr = .{
-.optional_unwrap = blk: {
-const e = expr.?;
-break :blk &e;
-},
-};
-};
+const names = parseDeref(iter);
+iter = names.new_iter;
+if (expr != null) break;
+expr = .{ .arg_deref = names.item };
 last_valid_iter = iter;
 },
 .dollar => {
+const names = parseDeref(iter);
+iter = names.new_iter;
 if (expr != null) break;
-iter = skipWhitespace(iter);
-expr = .{ .captures = {} };
-if (iter.peek()) |n| if (n == .text) iter.putBack(.{ .period = {} });
+expr = .{ .capture_deref = names.item };
 last_valid_iter = iter;
 },
 .percent => {
+const names = parseDeref(iter);
+iter = names.new_iter;
 if (expr != null) break;
-iter = skipWhitespace(iter);
-expr = .{ .context = {} };
-if (iter.peek()) |n| if (n == .text) iter.putBack(.{ .period = {} });
+expr = .{ .context_deref = names.item };
 last_valid_iter = iter;
 },
 .equals => {
@@ -663,15 +612,8 @@ fn parseExpression(comptime tokens: ControlTokenIt
 expr = .{ .builtin = &builtin.item };
 last_valid_iter = iter;
 },
-.number => |n| {
-if (expr != null) break;
-const num = std.fmt.parseInt(isize, n, 10) catch @compileError("Error parsing integer");
-expr = .{ .int = num };
-last_valid_iter = iter;
-},
 else => break,
-}
-}
+};

 return .{
 .new_iter = last_valid_iter orelse @compileError("Invalid Expression"),
@@ -855,7 +797,7 @@ fn parseControlBlock(comptime tokens: ControlTokenIter) ParseResult(ControlToken
 },
 else => {
 @compileLog(iter.row);
-@compileError("TODO " ++ @tagName(token));
+@compileError("TODO " ++ @tagName(token) ++ " " ++ token.text);
 },
 };

@@ -888,21 +830,32 @@ fn endControlBlock(comptime tokens: ControlTokenIter) ControlTokenIter {
 fn parseForHeader(comptime tokens: ControlTokenIter) ParseResult(ControlTokenIter, ForHeader) {
 comptime {
 const iterable = parseExpression(tokens);
-var iter = skipWhitespace(iterable.new_iter);
+var iter = iterable.new_iter;

-const captures = tryParseCapture(iter) orelse {
-@compileLog(iter.row);
-@compileError("Expected capture");
+iter = skipWhitespace(iter);
+{
+const token = iter.next() orelse @compileError("Unexpected end of template");
+if (token != .pipe) @compileError("Unexpected token");
+}
+{
+const token = iter.next() orelse @compileError("Unexpected end of template");
+if (token != .dollar) @compileError("Unexpected token");
+}
+const capture = blk: {
+const token = iter.next() orelse @compileError("Unexpected end of template");
+if (token != .text) @compileError("Unexpected token");
+break :blk token.text;
 };
-
-if (captures.item.len == 0 or captures.item.len > 2) @compileError("Expected 1 or 2 captures");
+{
+const token = iter.next() orelse @compileError("Unexpected end of template");
+if (token != .pipe) @compileError("Unexpected token");
+}

 return .{
-.new_iter = captures.new_iter,
+.new_iter = iter,
 .item = .{
 .iterable = iterable.item,
-.item_capture = captures.item[0],
-.idx_capture = if (captures.item.len == 2) captures.item[1] else null,
+.capture = capture,
 },
 };
 }
@@ -910,8 +863,9 @@ fn parseForHeader(comptime tokens: ControlTokenIte

 fn tryParseCapture(comptime tokens: ControlTokenIter) ?ParseResult(ControlTokenIter, []const []const u8) {
 comptime {
-var iter = skipWhitespace(tokens);
+var iter = tokens;

+iter = skipWhitespace(iter);
 if ((iter.next() orelse return null) != .pipe) return null;
 var captures: []const []const u8 = &.{};
 while (true) {
@@ -1018,6 +972,40 @@ fn parseSwitchHeader(comptime tokens: ControlTokenIter) ParseResult(ControlToken
 }
 }

+fn parseDeref(comptime tokens: ControlTokenIter) ParseResult(ControlTokenIter, []const DerefDecl) {
+comptime {
+var iter = tokens;
+var fields: []const DerefDecl = &.{};
+var wants = .text;
+while (iter.peek()) |token| {
+switch (token) {
+.whitespace => {},
+.text => |text| {
+if (wants == .period) break;
+fields = fields ++ [1]DerefDecl{.{ .field = text }};
+wants = .period;
+},
+.period => {
+if (wants != .period) @compileError("Unexpected token \".\"");
+wants = .text;
+},
+.question_mark => {
+if (wants == .period) break;
+fields = fields ++ [1]DerefDecl{.{ .optional_unwrap = {} }};
+wants = .period;
+},
+else => if (wants == .period or fields.len == 0) break else @compileError("Unexpected token"),
+}
+_ = iter.next();
+}
+
+return .{
+.new_iter = iter,
+.item = fields,
+};
+}
+}
+
 fn parseCallTemplate(comptime tokens: ControlTokenIter) ParseResult(ControlTokenIter, CallTemplate) {
 comptime {
 var iter = tokens;
@@ -1046,7 +1034,7 @@ fn parseFormat(comptime tokens: ControlTokenIter) ParseResult(ControlTokenIter,
 expectToken(iter.next(), .double_quote);
 var fmt_str: []const u8 = "";
 while (true) switch (iter.next() orelse @compileError("Unexpected end of template")) {
-.text, .number, .whitespace => |t| fmt_str = fmt_str ++ t,
+.text, .whitespace => |t| fmt_str = fmt_str ++ t,
 .open_bracket => fmt_str = fmt_str ++ "{",
 .close_bracket => fmt_str = fmt_str ++ "}",
 .period => fmt_str = fmt_str ++ ".",
@@ -1088,9 +1076,9 @@ const TemplateItem = union(enum) {
 statement: Statement,
 };

-const DerefExpr = struct {
-container: Expression,
+const DerefDecl = union(enum) {
 field: []const u8,
+optional_unwrap: void,
 };

 const EqualsExpr = struct {
@@ -1099,14 +1087,11 @@ const EqualsExpr = struct {
 };

 const Expression = union(enum) {
-args: void,
-captures: void,
-context: void,
-deref: *const DerefExpr,
+arg_deref: []const DerefDecl,
+capture_deref: []const DerefDecl,
+context_deref: []const DerefDecl,
 equals: *const EqualsExpr,
 builtin: *const BuiltinCall,
-optional_unwrap: *const Expression,
-int: isize,
 };

 const For = struct {
@@ -1116,8 +1101,7 @@ const For = struct {

 const ForHeader = struct {
 iterable: Expression,
-item_capture: []const u8,
-idx_capture: ?[]const u8,
+capture: []const u8,
 };

 const If = struct {
@@ -1230,7 +1214,6 @@ const BuiltinCall = union(Builtin) {

 const ControlToken = union(enum) {
 text: []const u8,
-number: []const u8,
 open_bracket: void,
 close_bracket: void,
 period: void,
@@ -1252,29 +1235,14 @@ const ControlToken = union(enum) {
 const ControlTokenIter = struct {
 start: usize = 0,
 text: []const u8,
-peeked_tokens: [2]?ControlToken = [2]?ControlToken{ null, null },
-peeked_token_count: usize = 0,
+peeked_token: ?ControlToken = null,

 row: usize = 0,

-fn isControlChar(ch: u8) bool {
-return switch (ch) {
-'{', '}', '.', '#', '|', '$', '/', '=', '@', ',', '%', '(', ')', '"', '?' => true,
-
-else => false,
-};
-}
-
-fn isTextChar(ch: u8) bool {
-return !std.ascii.isWhitespace(ch) and !std.ascii.isDigit(ch) and !isControlChar(ch);
-}
-
 fn next(self: *ControlTokenIter) ?ControlToken {
-if (self.peeked_token_count != 0) {
-const t = self.peeked_tokens[self.peeked_token_count - 1].?;
-self.peeked_tokens[self.peeked_token_count - 1] = null;
-self.peeked_token_count -= 1;
-return t;
+if (self.peeked_token) |token| {
+self.peeked_token = null;
+return token;
 }

 const remaining = self.text[self.start..];
@@ -1307,16 +1275,9 @@ const ControlTokenIter = struct {
 self.start += idx - 1;
 return .{ .whitespace = remaining[0..idx] };
 },
-'0'...'9' => {
-var idx: usize = 0;
-while (idx < remaining.len and std.ascii.isDigit(remaining[idx])) : (idx += 1) {}
-
-self.start += idx - 1;
-return .{ .number = remaining[0..idx] };
-},
 else => {
 var idx: usize = 0;
-while (idx < remaining.len and isTextChar(remaining[idx])) : (idx += 1) {}
+while (idx < remaining.len and std.mem.indexOfScalar(u8, "{}.#|$/=@,%()\"? \t\n\r", remaining[idx]) == null) : (idx += 1) {}

 self.start += idx - 1;
 return .{ .text = remaining[0..idx] };
@@ -1326,14 +1287,13 @@ const ControlTokenIter = struct {

 fn peek(self: *ControlTokenIter) ?ControlToken {
 const token = self.next();
-if (token) |t| self.putBack(t);
+self.peeked_token = token;
 return token;
 }

 fn putBack(self: *ControlTokenIter, token: ControlToken) void {
-std.debug.assert(self.peeked_token_count < self.peeked_tokens.len);
-self.peeked_tokens[self.peeked_token_count] = token;
-self.peeked_token_count += 1;
+std.debug.assert(self.peeked_token == null);
+self.peeked_token = token;
 }
 };

@@ -1350,8 +1310,8 @@ test "template" {
 try testCase("abcd", .{}, "abcd");
 try testCase("{.val}", .{ .val = 3 }, "3");
 try testCase("{#if .val}1{/if}", .{ .val = true }, "1");
+try testCase("{#for .vals |$v|}{$v}{/for}", .{ .vals = [_]u8{ 1, 2, 3 } }, "123");
 try testCase("{#for .vals |$v|=} {$v} {=/for}", .{ .vals = [_]u8{ 1, 2, 3 } }, "123");
-try testCase("{#for .vals |$val|}{$val}{/for}", .{ .vals = [_]u8{ 1, 2, 3 } }, "123");
 try testCase("{#if .val}1{#else}0{/if}", .{ .val = true }, "1");
 try testCase("{#if .val}1{#else}0{/if}", .{ .val = false }, "0");
 try testCase("{#if .val}1{#elif .foo}2{/if}", .{ .val = false, .foo = true }, "2");
@@ -14,7 +14,6 @@
 {$b}:
 {= /for =}
 {= /for}
-{#for .baz |$f, $i| =}{$i}{/for}
 {#if .quxx == .quxx2}eql{#else}neq{/if}
 {#if .quxx == .qux}eql{#else}neq{/if}
 {#if @isTag(.snap, foo)}foo{/if}
@@ -28,7 +27,6 @@
 {=/if}

 sliced: {#for @slice(.foo, .start, .end) |$s|}{$s}, {/for}
-sliced: {#for @slice(.foo, 1, 3) |$s|}{$s}, {/for}

 format: {#format "s" .x}

|
@ -218,13 +218,3 @@ button:hover, a.button:hover {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
.drive .breadcrumbs {
|
|
||||||
list-style: none;
|
|
||||||
display: flex;
|
|
||||||
}
|
|
||||||
|
|
||||||
.drive .icons {
|
|
||||||
display: flex;
|
|
||||||
justify-content: flex-end;
|
|
||||||
vertical-align: bottom;
|
|
||||||
}
|
|
||||||
|
|