controllers refactor

parent 581159963f
commit 2aa9569050

6 changed files with 530 additions and 270 deletions
@@ -4,6 +4,7 @@ const builtin = @import("builtin");
 const http = @import("http");
 const api = @import("api");
 const util = @import("util");
+const query_utils = @import("./query.zig");

 pub const auth = @import("./controllers/auth.zig");
 pub const communities = @import("./controllers/communities.zig");
@@ -11,166 +12,185 @@ pub const invites = @import("./controllers/invites.zig");
 pub const users = @import("./controllers/users.zig");
 pub const notes = @import("./controllers/notes.zig");

-pub const utils = struct {
+pub const sample_api = struct {
-    const json_options = if (builtin.mode == .Debug) .{
+    const Self = @This();
-        .whitespace = .{
-            .indent = .{ .Space = 2 },
+    pub const method = .POST;
-            .separator = true,
+    pub const path = "/notes/:id/reacts";
-        },
+    pub const content_type = "application/json";
-        .string = .{ .String = .{} },
-    } else .{
+    pub const Args = struct {
-        .whitespace = .{
+        id: []const u8,
-            .indent = .None,
-            .separator = false,
-        },
-        .string = .{ .String = .{} },
     };

-    // Responds to a request with a json value
+    pub const Body = struct {
-    pub fn respondJson(ctx: *http.server.Context, status: http.Status, value: anytype) !void {
+        content: []const u8,
-        var headers = http.Headers.init(ctx.alloc);
+    };
-        defer headers.deinit();

-        // Don't need to free this k/v pair because they aren't dynamically allocated
+    pub const Query = struct {
-        try headers.put("Content-Type", "application/json");
+        arg: []const u8 = "",
+    };

-        var stream = try ctx.openResponse(&headers, status);
+    pub fn handler(ctx: Context(Self), response: *Response, _: api.ApiSource.Conn) !void {
-        defer stream.close();
+        try response.writeJson(.created, ctx.query);

-        const writer = stream.writer();
-        try std.json.stringify(value, json_options, writer);

-        try stream.finish();
-    }

-    pub fn respondError(ctx: *http.server.Context, status: http.Status, err: []const u8) void {
-        respondJson(ctx, status, .{ .@"error" = err }) catch |write_err| {
-            std.log.err("Unable to print error: {}", .{write_err});
-        };
-    }

-    pub fn parseRequestBody(comptime T: type, ctx: *http.server.Context) !T {
-        const body = ctx.request.body orelse return error.BodyRequired;
-        var tokens = std.json.TokenStream.init(body);
-        const parsed = try std.json.parse(T, &tokens, .{ .allocator = ctx.alloc });

-        return parsed;
-    }

-    pub fn parseQueryParams(comptime T: type, ctx: *http.server.Context) !T {
-        // TODO: clean up parsing
-        const path = ctx.request.path;
-        const start = (std.mem.indexOfScalar(u8, path, '?') orelse return error.NoQuery) + 1;
-        const rest = path[start..];
-        const query = std.mem.sliceTo(rest, '#');

-        const fake_url = util.Url{
-            .scheme = "",
-            .hostport = "",
-            .path = "",
-            .query = query,
-            .fragment = "",
-        };

-        var result: T = .{};
-        inline for (std.meta.fields(T)) |f| {
-            if (fake_url.getQuery(f.name)) |param| {
-                const F = if (comptime @typeInfo(f.field_type) == .Optional) std.meta.Child(f.field_type) else f.field_type;
-                std.log.debug("{}: {s}", .{ F, param });

-                @field(result, f.name) = switch (F) {
-                    []const u8 => param,

-                    else => switch (@typeInfo(F)) {
-                        .Struct => if (@hasDecl(F, "parse")) try F.parse(param) else @compileError("Invalid type " ++ @typeName(F)),
-                        .Enum => std.meta.stringToEnum(F, param) orelse return error.ParseError,
-                        .Int => try std.fmt.parseInt(F, param, 10),

-                        else => {},
-                    },
-                };
-            }
-        }
-        if (false) {
-            inline for (std.meta.fields(T)) |f| {
-                if (fake_url.getQuery(f.name)) |param| {
-                    const F = if (comptime @typeInfo(f.field_type) == .Optional) std.meta.Child(f.field_type) else f.field_type;

-                    switch (F) {
-                        []const u8 => @field(result, f.name) = param,

-                        else => switch (@typeInfo(F)) {
-                            .Struct,
-                            .Opaque,
-                            //.Union,
-                            => if (@hasDecl(F, "parse")) {
-                                @compileLog(F);
-                                if (true) @compileError(F);
-                                @field(result, f.name) = try F.parse(param);
-                            },

-                            //.Int => @field(result, f.name) = try std.fmt.parseInt(F, param),
-                        },
-                    }
-                }
-            }
-        }

-        return result;
-    }

-    fn parseTypeFromQueryParams(comptime T: type, comptime name_prefix: []const u8, url: util.Url) !T {
-        var result: T = .{};
-        inline for (std.meta.fields(T)) |field| {
-            const FieldType = switch (@typeInfo(field.field_type)) {
-                .Optional => |info| info.child,
-                else => field.field_type,
-            };
-            _ = FieldType;
-            _ = result;

-            const qualified_name = name_prefix ++ field.name;
-            if (url.getQuery(qualified_name)) |param| {
-                _ = param;
-            }
-        }
-    }

-    pub fn freeRequestBody(value: anytype, alloc: std.mem.Allocator) void {
-        std.json.parseFree(@TypeOf(value), value, .{ .allocator = alloc });
-    }

-    pub fn getApiConn(srv: *RequestServer, ctx: *http.server.Context) !api.ApiSource.Conn {
-        const host = ctx.request.headers.get("Host") orelse return error.NoHost;

-        return authorizeApiConn(srv, ctx, host) catch |err| switch (err) {
-            error.NoToken => srv.api.connectUnauthorized(host, ctx.alloc),
-            error.InvalidToken => return error.InvalidToken,
-            else => @panic("TODO"), // doing this to resolve some sort of compiler analysis dependency issue
-        };
-    }

-    fn authorizeApiConn(srv: *RequestServer, ctx: *http.server.Context, host: []const u8) !api.ApiSource.Conn {
-        const header = ctx.request.headers.get("authorization") orelse return error.NoToken;

-        if (header.len < ("bearer ").len) return error.InvalidToken;
-        const token = header[("bearer ").len..];

-        return try srv.api.connectToken(host, token, ctx.alloc);
     }
 };

-const RequestServer = root.RequestServer;
+pub fn Context(comptime Route: type) type {
-const RouteArgs = http.RouteArgs;
+    return struct {
+        const Self = @This();

-pub fn healthcheck(_: *RequestServer, ctx: *http.server.Context, _: RouteArgs) !void {
+        pub const Args = if (@hasDecl(Route, "Args")) Route.Args else void;
-    try utils.respondJson(ctx, .ok, .{ .status = "ok" });
+        pub const Body = if (@hasDecl(Route, "Body")) Route.Body else void;
+        pub const Query = if (@hasDecl(Route, "Query")) Route.Query else void;

+        allocator: std.mem.Allocator,

+        method: http.Method,
+        request_line: []const u8,
+        headers: http.Headers,

+        args: Args,
+        body: Body,
+        query: Query,

+        fn parseArgs(path: []const u8) ?Args {
+            var args: Route.Args = undefined;
+            var path_iter = util.PathIter.from(path);
+            comptime var route_iter = util.PathIter.from(Route.path);
+            inline while (comptime route_iter.next()) |route_segment| {
+                const path_segment = path_iter.next() orelse return null;
+                if (route_segment[0] == ':') {
+                    @field(args, route_segment[1..]) = path_segment;
+                } else {
+                    if (!std.ascii.eqlIgnoreCase(route_segment, path_segment)) return null;
+                }
+            }

+            return args;
+        }

+        pub fn matchAndHandle(api_source: *api.ApiSource, ctx: http.server.Context, alloc: std.mem.Allocator) bool {
+            const req = ctx.request;
+            if (req.method != Route.method) return false;
+            var path = std.mem.sliceTo(std.mem.sliceTo(req.path, '#'), '?');
+            var args: Route.Args = parseArgs(path) orelse return false;

+            var response = Response{ .headers = http.Headers.init(alloc), .ctx = ctx };
+            defer response.headers.deinit();

+            var self = Self{
+                .allocator = alloc,

+                .method = req.method,
+                .request_line = req.path,
+                .headers = req.headers,

+                .args = args,
+                .body = undefined,
+                .query = undefined,
+            };

+            self.prepareAndHandle(api_source, req, &response);

+            return true;
+        }

+        fn errorHandler(response: *Response, status: http.Status) void {
+            response.writeStatus(status) catch unreachable;
+        }

+        fn prepareAndHandle(self: *Self, api_source: anytype, req: http.Request, response: *Response) void {
+            self.parseBody(req) catch return errorHandler(response, .bad_request);
+            defer self.freeBody();

+            self.parseQuery() catch return errorHandler(response, .bad_request);

+            var api_conn = self.getApiConn(api_source) catch return errorHandler(response, .internal_server_error); // TODO
+            defer api_conn.close();

+            self.handle(response, api_conn);
+        }

+        fn parseBody(self: *Self, req: http.Request) !void {
+            if (Body != void) {
+                const body = req.body orelse return error.NoBody;
+                var tokens = std.json.TokenStream.init(body);
+                self.body = try std.json.parse(Body, &tokens, .{ .allocator = self.allocator });
+            }
+        }

+        fn freeBody(self: *Self) void {
+            if (Body != void) {
+                std.json.parseFree(Body, self.body, .{ .allocator = self.allocator });
+            }
+        }

+        fn parseQuery(self: *Self) !void {
+            if (Query != void) {
+                const path = std.mem.sliceTo(self.request_line, '?');
+                const q = std.mem.sliceTo(self.request_line[path.len..], '#');

+                self.query = try query_utils.parseQuery(Query, q);
+            }
+        }

+        fn handle(self: Self, response: *Response, api_conn: anytype) void {
+            Route.handler(self, response, api_conn) catch |err| std.log.err("{}", .{err});
+        }

+        fn getApiConn(self: *Self, api_source: anytype) !api.ApiSource.Conn {
+            const host = self.headers.get("Host") orelse return error.NoHost;
+            const auth_header = self.headers.get("Authorization");
+            const token = if (auth_header) |header| blk: {
+                const prefix = "bearer ";
+                if (header.len < prefix.len) break :blk null;
+                if (!std.ascii.eqlIgnoreCase(prefix, header[0..prefix.len])) break :blk null;
+                break :blk header[prefix.len..];
+            } else null;

+            if (token) |t| return try api_source.connectToken(host, t, self.allocator);

+            return try api_source.connectUnauthorized(host, self.allocator);
+        }
+    };
 }

-pub fn notFound(_: *RequestServer, ctx: *http.server.Context) void {
+pub const Response = struct {
-    utils.respondError(ctx, .not_found, "Not Found");
+    const Self = @This();
-}
+    headers: http.Headers,
+    ctx: http.server.Context,

-pub fn internalServerError(_: *RequestServer, ctx: *http.server.Context) void {
+    pub fn writeStatus(self: *Self, status: http.Status) !void {
-    utils.respondError(ctx, .internal_server_error, "Internal Server Error");
+        var stream = try self.ctx.openResponse(&self.headers, status);
-}
+        defer stream.close();
+        try stream.finish();
+    }

+    pub fn writeJson(self: *Self, status: http.Status, response_body: anytype) !void {
+        try self.headers.put("Content-Type", "application/json");

+        var stream = try self.ctx.openResponse(&self.headers, status);
+        defer stream.close();

+        const writer = stream.writer();
+        try std.json.stringify(response_body, json_options, writer);

+        try stream.finish();
+    }
+};

+const json_options = if (builtin.mode == .Debug)
+    .{
+        .whitespace = .{
+            .indent = .{ .Space = 2 },
+            .separator = true,
+        },
+        .string = .{ .String = .{} },
+    } else .{
+        .whitespace = .{
+            .indent = .None,
+            .separator = false,
+        },
+        .string = .{ .String = .{} },
+    };
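For orientation, a minimal route in the new style could look like the sketch below. It is written as if it lived next to sample_api in the same controllers file (so Context, Response and api are in scope); the get_note name, the path and the handler body are illustrative only and are not part of this commit.

pub const get_note = struct {
    const Self = @This();

    pub const method = .GET;
    pub const path = "/notes/:id";

    pub const Args = struct {
        id: []const u8,
    };

    pub fn handler(ctx: Context(Self), response: *Response, _: api.ApiSource.Conn) !void {
        // Echo the matched path argument back as JSON.
        try response.writeJson(.ok, .{ .id = ctx.args.id });
    }
};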
@@ -15,19 +15,19 @@ const Route = Router.Route;
 const RouteArgs = http.RouteArgs;
 const router = Router{
     .routes = &[_]Route{
-        Route.new(.GET, "/healthcheck", &c.healthcheck),
+        //Route.new(.GET, "/healthcheck", &c.healthcheck),

-        prepare(c.auth.login),
+        //prepare(c.auth.login),
-        prepare(c.auth.verify_login),
+        //prepare(c.auth.verify_login),

-        prepare(c.communities.create),
+        //prepare(c.communities.create),

-        prepare(c.invites.create),
+        //prepare(c.invites.create),

-        prepare(c.users.create),
+        //prepare(c.users.create),

-        prepare(c.notes.create),
+        //prepare(c.notes.create),
-        prepare(c.notes.get),
+        //prepare(c.notes.get),

         //prepare(c.communities.query),
@@ -72,6 +72,9 @@ pub const RequestServer = struct {
             var ctx = try srv.accept(alloc);
             defer ctx.close();

+            _ = c.Context(c.sample_api).matchAndHandle(self.api, ctx, self.alloc);
+            if (true) continue;

             router.dispatch(self, &ctx, ctx.request.method, ctx.request.path) catch |err| switch (err) {
                 error.NotFound, error.RouteNotApplicable => c.notFound(self, &ctx),
                 else => {
@@ -1,53 +1,178 @@
 const std = @import("std");
-const ParamIter = struct {
-    remaining: []const u8,
-    target: []const u8,

-    fn next(self: *ParamIter) ?[]const u8 {
+const QueryIter = @import("util").QueryIter;
-        //
-        _ = self;
+/// Parses a set of query parameters described by the struct `T`.
-        unreachable;
+///
+/// To specify query parameters, provide a struct similar to the following:
+/// ```
+/// struct {
+///     foo: bool = false,
+///     bar: ?[]const u8 = null,
+///     baz: usize = 10,
+///     qux: enum { quux, snap } = .quux,
+/// }
+/// ```
+///
+/// This will allow it to parse a query string like the following:
+/// `?foo&bar=abc&qux=snap`
+///
+/// Every parameter must have a default value that will be used when the
+/// parameter is not provided, and parameter keys.
+/// Numbers are parsed from their string representations, and a parameter
+/// provided in the query string without a value is parsed either as a bool
+/// `true` flag or as `null` depending on the type of its param.
+///
+/// Parameter types supported:
+/// - []const u8
+/// - numbers (both integer and float)
+///   + Numbers are parsed in base 10
+/// - bool
+///   + See below for detals
+/// - exhaustive enums
+///   + Enums are treated as strings with values equal to the enum fields
+/// - ?F (where isScalar(F) and F != bool)
+/// - Any type that implements:
+///   + pub fn parse([]const u8) !F
+///
+/// Boolean Parameters:
+/// The following query strings will all parse a `true` value for the
+/// parameter `foo: bool = false`:
+/// - `?foo`
+/// - `?foo=true`
+/// - `?foo=t`
+/// - `?foo=yes`
+/// - `?foo=y`
+/// - `?foo=1`
+/// And the following query strings all parse a `false` value:
+/// - `?`
+/// - `?foo=false`
+/// - `?foo=f`
+/// - `?foo=no`
+/// - `?foo=n`
+/// - `?foo=0`
+///
+/// Compound Types:
+/// Compound (struct) types are also supported, with the parameter key
+/// for its parameters consisting of the struct's field + '.' + parameter
+/// field. For example:
+/// ```
+/// struct {
+///     foo: struct {
+///         baz: usize = 0,
+///     } = .{},
+/// }
+/// ```
+/// Would be used to parse a query string like
+/// `?foo.baz=12345`
+///
+/// Compound types cannot currently be nullable, and must be structs.
+///
+/// TODO: values are currently case-sensitive, and are not url-decoded properly.
+/// This should be fixed.
+pub fn parseQuery(comptime T: type, query: []const u8) !T {
+    //if (!std.meta.trait.isContainer(T)) @compileError("T must be a struct");
+    var iter = QueryIter.from(query);
+    var result = T{};
+    while (iter.next()) |pair| {
+        try parseQueryPair(T, &result, pair.key, pair.value);
     }
-};

-pub fn getParam(str: []const u8, param: []const u8) !?[]const u8 {
-    var iter = ParamIter{ .remaining = str, .target = param };
-    const result = iter.next() orelse return null;
-    if (iter.next() != null) return error.TooMany;
     return result;
 }

-fn isScalarType(comptime T: type) bool {
+fn parseQueryPair(comptime T: type, result: *T, key: []const u8, value: ?[]const u8) !void {
-    return switch (T) {
+    const key_part = std.mem.sliceTo(key, '.');
-        []const u8 => true,
+    const field_idx = std.meta.stringToEnum(std.meta.FieldEnum(T), key_part) orelse return error.UnknownField;

-        else => switch (@typeInfo(T)) {
+    inline for (std.meta.fields(T)) |info, idx| {
-            .Int, .Float, .Bool => true,
+        if (@enumToInt(field_idx) == idx) {
+            if (comptime isScalar(info.field_type)) {
+                if (key_part.len == key.len) {
+                    @field(result, info.name) = try parseQueryValue(info.field_type, value);
+                    return;
+                } else {
+                    return error.UnknownField;
+                }
+            } else {
+                const remaining = std.mem.trimLeft(u8, key[key_part.len..], ".");
+                return try parseQueryPair(info.field_type, &@field(result, info.name), remaining, value);
+            }
+        }
+    }

-            .Optional => |info| isScalarType(info.child),
+    return error.UnknownField;
-            .Enum => |info| if (info.is_exhaustive)
+}
-                true
-            else
-                @compileError("Unsupported type " ++ @typeName(T)),

-            .Struct => false,
+fn parseQueryValue(comptime T: type, value: ?[]const u8) !T {
-            else => @compileError("Unsupported type " ++ @typeName(T)),
+    const is_optional = comptime std.meta.trait.is(.Optional)(T);
-        },
+    // If param is present, but without an associated value
+    if (value == null) {
+        return if (is_optional)
+            null
+        else if (T == bool)
+            true
+        else
+            error.InvalidValue;
+    }

+    return try parseQueryValueNotNull(if (is_optional) std.meta.Child(T) else T, value.?);
+}

+const bool_map = std.ComptimeStringMap(bool, .{
+    .{ "true", true },
+    .{ "t", true },
+    .{ "yes", true },
+    .{ "y", true },
+    .{ "1", true },

+    .{ "false", false },
+    .{ "f", false },
+    .{ "no", false },
+    .{ "n", false },
+    .{ "0", false },
+});

+fn parseQueryValueNotNull(comptime T: type, value: []const u8) !T {
+    if (comptime std.meta.trait.isZigString(T)) return value;
+    if (comptime std.meta.trait.isIntegral(T)) return try std.fmt.parseInt(T, value, 0);
+    if (comptime std.meta.trait.isFloat(T)) return try std.fmt.parseFloat(T, value);
+    if (comptime std.meta.trait.is(.Enum)(T)) return std.meta.stringToEnum(T, value) orelse error.InvalidEnumValue;
+    if (T == bool) return bool_map.get(value) orelse error.InvalidBool;
+    if (comptime std.meta.trait.hasFn("parse")(T)) return try T.parse(value);

+    @compileError("Invalid type " ++ @typeName(T));
+}

+fn isScalar(comptime T: type) bool {
+    if (comptime std.meta.trait.isZigString(T)) return true;
+    if (comptime std.meta.trait.isIntegral(T)) return true;
+    if (comptime std.meta.trait.isFloat(T)) return true;
+    if (comptime std.meta.trait.is(.Enum)(T)) return true;
+    if (T == bool) return true;
+    if (comptime std.meta.trait.hasFn("parse")(T)) return true;

+    if (comptime std.meta.trait.is(.Optional)(T) and isScalar(std.meta.Child(T))) return true;

+    return false;
+}

+test {
+    const TestQuery = struct {
+        int: usize = 3,
+        boolean: bool = false,
+        str_enum: ?enum { foo, bar } = null,
     };
-}
+    try std.testing.expectEqual(TestQuery{
-pub fn parseQueryArgs(comptime T: type, str: []const u8) !T {
+        .int = 3,
-    var result = std.mem.zeroInit(T, .{});
+        .boolean = false,
-    _ = str;
+        .str_enum = null,
+    }, try parseQuery(TestQuery, ""));
-    for (std.meta.fields(T)) |field| {
-        const ParseType = switch (@typeInfo(field.field_type)) {
+    try std.testing.expectEqual(TestQuery{
-            .Optional => |info| info.child,
+        .int = 5,
-            else => field.field_type,
+        .boolean = true,
-        };
+        .str_enum = .foo,
+    }, try parseQuery(TestQuery, "?int=5&boolean=yes&str_enum=foo"));
-        _ = ParseType;
-    }

-    return result;
 }
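A small usage sketch of the parseQuery API added above, in the spirit of the file's own test; the NotesQuery struct and the query string are illustrative only, not part of this commit.

const std = @import("std");
const query_utils = @import("./query.zig");

// Hypothetical parameter set for a notes listing endpoint.
const NotesQuery = struct {
    max_items: usize = 20,
    include_replies: bool = false,
    before: ?[]const u8 = null,
};

test "parseQuery usage sketch" {
    // A flag given without a value parses as `true`; missing params keep their defaults.
    const q = try query_utils.parseQuery(NotesQuery, "?include_replies&max_items=50");
    try std.testing.expectEqual(@as(usize, 50), q.max_items);
    try std.testing.expect(q.include_replies);
    try std.testing.expectEqual(@as(?[]const u8, null), q.before);
}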
@@ -1,68 +0,0 @@
-const Self = @This();
-
-const std = @import("std");
-
-is_first: bool,
-path: []const u8,
-
-pub fn from(path: []const u8) Self {
-    return .{ .path = path, .is_first = true };
-}
-
-pub fn next(self: *Self) ?[]const u8 {
-    if (self.path.len == 0) {
-        if (self.is_first) {
-            self.is_first = false;
-            return self.path;
-        } else {
-            return null;
-        }
-    }
-
-    var start: usize = 0;
-    var end: usize = start;
-    while (end < self.path.len) : (end += 1) {
-        // skip leading slash
-        if (end == start and self.path[start] == '/') {
-            start += 1;
-            continue;
-        } else if (self.path[end] == '/') {
-            break;
-        }
-    }
-
-    if (start == end) {
-        self.path = self.path[end..end];
-        self.is_first = false;
-        return null;
-    }
-
-    const result = self.path[start..end];
-    self.path = self.path[end..];
-    self.is_first = false;
-    return result;
-}
-
-test "PathIter /ab/cd/" {
-    const path = "/ab/cd/";
-    var it = from(path);
-    try std.testing.expectEqualStrings("ab", it.next().?);
-    try std.testing.expectEqualStrings("cd", it.next().?);
-    try std.testing.expectEqual(@as(?[]const u8, null), it.next());
-}
-
-test "PathIter ''" {
-    const path = "";
-    var it = from(path);
-    try std.testing.expectEqualStrings("", it.next().?);
-    try std.testing.expectEqual(@as(?[]const u8, null), it.next());
-}
-
-test "PathIter ab/c//defg/" {
-    const path = "ab/c//defg/";
-    var it = from(path);
-    try std.testing.expectEqualStrings("ab", it.next().?);
-    try std.testing.expectEqualStrings("c", it.next().?);
-    try std.testing.expectEqualStrings("defg", it.next().?);
-    try std.testing.expectEqual(@as(?[]const u8, null), it.next());
-}
src/util/iters.zig (new file, 178 lines)
@@ -0,0 +1,178 @@
+const std = @import("std");
+
+pub fn Separator(comptime separator: u8) type {
+    return struct {
+        const Self = @This();
+        str: []const u8,
+        pub fn from(str: []const u8) Self {
+            return .{ .str = std.mem.trim(u8, str, &.{separator}) };
+        }
+
+        pub fn next(self: *Self) ?[]const u8 {
+            if (self.str.len == 0) return null;
+
+            const part = std.mem.sliceTo(self.str, separator);
+            self.str = std.mem.trimLeft(u8, self.str[part.len..], &.{separator});
+
+            return part;
+        }
+    };
+}
+
+pub const QueryIter = struct {
+    const Pair = struct {
+        key: []const u8,
+        value: ?[]const u8,
+    };
+
+    iter: Separator('&'),
+
+    pub fn from(q: []const u8) QueryIter {
+        return QueryIter{ .iter = Separator('&').from(std.mem.trimLeft(u8, q, "?")) };
+    }
+
+    pub fn next(self: *QueryIter) ?Pair {
+        const part = self.iter.next() orelse return null;
+
+        const key = std.mem.sliceTo(part, '=');
+        if (key.len == part.len) return Pair{
+            .key = key,
+            .value = null,
+        };
+
+        return Pair{
+            .key = key,
+            .value = part[key.len + 1 ..],
+        };
+    }
+};
+
+pub const PathIter = struct {
+    is_first: bool,
+    iter: Separator('/'),
+
+    pub fn from(path: []const u8) PathIter {
+        return .{ .is_first = true, .iter = Separator('/').from(path) };
+    }
+
+    pub fn next(self: *PathIter) ?[]const u8 {
+        if (self.is_first) {
+            self.is_first = false;
+            return self.iter.next() orelse "";
+        }
+
+        return self.iter.next();
+    }
+};
+
+test "QueryIter" {
+    const t = @import("std").testing;
+    if (true) return error.SkipZigTest;
+    {
+        var iter = QueryIter.from("");
+        try t.expect(iter.next() == null);
+        try t.expect(iter.next() == null);
+    }
+
+    {
+        var iter = QueryIter.from("?");
+        try t.expect(iter.next() == null);
+        try t.expect(iter.next() == null);
+    }
+
+    {
+        var iter = QueryIter.from("?abc");
+        try t.expectEqual(QueryIter.Pair{
+            .key = "abc",
+            .value = null,
+        }, iter.next().?);
+        try t.expect(iter.next() == null);
+        try t.expect(iter.next() == null);
+    }
+
+    {
+        var iter = QueryIter.from("?abc=");
+        try t.expectEqual(QueryIter.Pair{
+            .key = "abc",
+            .value = "",
+        }, iter.next().?);
+        try t.expect(iter.next() == null);
+        try t.expect(iter.next() == null);
+    }
+
+    {
+        var iter = QueryIter.from("?abc=def");
+        try t.expectEqual(QueryIter.Pair{
+            .key = "abc",
+            .value = "def",
+        }, iter.next().?);
+        try t.expect(iter.next() == null);
+        try t.expect(iter.next() == null);
+    }
+
+    {
+        var iter = QueryIter.from("?abc=def&");
+        try t.expectEqual(QueryIter.Pair{
+            .key = "abc",
+            .value = "def",
+        }, iter.next().?);
+        try t.expect(iter.next() == null);
+        try t.expect(iter.next() == null);
+    }
+
+    {
+        var iter = QueryIter.from("?abc=def&foo&bar=baz&qux=");
+        try t.expectEqual(QueryIter.Pair{
+            .key = "abc",
+            .value = "def",
+        }, iter.next().?);
+        try t.expectEqual(QueryIter.Pair{
+            .key = "foo",
+            .value = null,
+        }, iter.next().?);
+        try t.expectEqual(QueryIter.Pair{
+            .key = "bar",
+            .value = "baz",
+        }, iter.next().?);
+        try t.expectEqual(QueryIter.Pair{
+            .key = "qux",
+            .value = "",
+        }, iter.next().?);
+        try t.expect(iter.next() == null);
+        try t.expect(iter.next() == null);
+    }
+
+    {
+        var iter = QueryIter.from("?=def&");
+        try t.expectEqual(QueryIter.Pair{
+            .key = "",
+            .value = "def",
+        }, iter.next().?);
+        try t.expect(iter.next() == null);
+        try t.expect(iter.next() == null);
+    }
+}
+
+test "PathIter /ab/cd/" {
+    const path = "/ab/cd/";
+    var it = PathIter.from(path);
+    try std.testing.expectEqualStrings("ab", it.next().?);
+    try std.testing.expectEqualStrings("cd", it.next().?);
+    try std.testing.expectEqual(@as(?[]const u8, null), it.next());
+}
+
+test "PathIter ''" {
+    const path = "";
+    var it = PathIter.from(path);
+    try std.testing.expectEqualStrings("", it.next().?);
+    try std.testing.expectEqual(@as(?[]const u8, null), it.next());
+}
+
+test "PathIter ab/c//defg/" {
+    const path = "ab/c//defg/";
+    var it = PathIter.from(path);
+    try std.testing.expectEqualStrings("ab", it.next().?);
+    try std.testing.expectEqualStrings("c", it.next().?);
+    try std.testing.expectEqualStrings("defg", it.next().?);
+    try std.testing.expectEqual(@as(?[]const u8, null), it.next());
+}
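Since Separator is a general single-character splitter, it could also back other delimiters; a brief illustrative sketch, not part of this commit:

const std = @import("std");
const iters = @import("./iters.zig");

test "Separator on a comma-separated list (illustrative)" {
    // Empty segments and surrounding separators are skipped, as with PathIter.
    var it = iters.Separator(',').from("a,,b,c,");
    try std.testing.expectEqualStrings("a", it.next().?);
    try std.testing.expectEqualStrings("b", it.next().?);
    try std.testing.expectEqualStrings("c", it.next().?);
    try std.testing.expectEqual(@as(?[]const u8, null), it.next());
}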
@@ -1,10 +1,12 @@
 const std = @import("std");
+const iters = @import("./iters.zig");

 pub const ciutf8 = @import("./ciutf8.zig");
 pub const Uuid = @import("./Uuid.zig");
 pub const DateTime = @import("./DateTime.zig");
-pub const PathIter = @import("./PathIter.zig");
 pub const Url = @import("./Url.zig");
+pub const PathIter = iters.PathIter;
+pub const QueryIter = iters.QueryIter;

 /// Joins an array of strings, prefixing every entry with `prefix`,
 /// and putting `separator` in between each pair