Add union support to query parser

This commit is contained in:
jaina heartles 2022-11-13 20:40:13 -08:00
parent 8694516180
commit bfd73b7a1f
4 changed files with 140 additions and 79 deletions

View file

@ -38,11 +38,7 @@ fn writeStatusLine(writer: anytype, status: Status) !void {
fn writeFields(writer: anytype, headers: *const Fields) !void { fn writeFields(writer: anytype, headers: *const Fields) !void {
var iter = headers.iterator(); var iter = headers.iterator();
std.log.debug("{any}", .{headers});
std.log.debug("{any}", .{iter});
while (iter.next()) |header| { while (iter.next()) |header| {
std.log.debug("{any}", .{headers});
std.log.debug("{any}", .{iter});
for (header.value_ptr.*) |ch| { for (header.value_ptr.*) |ch| {
if (ch == '\r' or ch == '\n') @panic("newlines not yet supported in headers"); if (ch == '\r' or ch == '\n') @panic("newlines not yet supported in headers");
} }

View file

@ -153,7 +153,6 @@ pub fn Context(comptime Route: type) type {
if (Body != void) { if (Body != void) {
var stream = req.body orelse return error.NoBody; var stream = req.body orelse return error.NoBody;
const body = try stream.reader().readAllAlloc(self.allocator, 1 << 16); const body = try stream.reader().readAllAlloc(self.allocator, 1 << 16);
std.log.debug("{s}", .{body});
errdefer self.allocator.free(body); errdefer self.allocator.free(body);
self.body = try json_utils.parse(Body, body, self.allocator); self.body = try json_utils.parse(Body, body, self.allocator);
self.body_buf = body; self.body_buf = body;

View file

@ -28,7 +28,7 @@ pub const query = struct {
// NOTE: This has to match QueryArgs // NOTE: This has to match QueryArgs
// TODO: Support union fields in query strings natively, so we don't // TODO: Support union fields in query strings natively, so we don't
// have to keep these in sync // have to keep these in sync
pub const Query = struct { pub const QueryOld = struct {
const OrderBy = QueryArgs.OrderBy; const OrderBy = QueryArgs.OrderBy;
const Direction = QueryArgs.Direction; const Direction = QueryArgs.Direction;
const PageDirection = QueryArgs.PageDirection; const PageDirection = QueryArgs.PageDirection;
@ -62,41 +62,10 @@ pub const query = struct {
pub const format = formatQueryParams; pub const format = formatQueryParams;
}; };
pub const Query = QueryArgs;
pub fn handler(req: anytype, res: anytype, srv: anytype) !void { pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
const q = req.query; const results = try srv.queryCommunities(req.query);
const query_matches = if (q.prev.id) |_| switch (q.order_by) {
.name => q.prev.name != null and q.prev.host == null and q.prev.created_at == null,
.host => q.prev.name == null and q.prev.host != null and q.prev.created_at == null,
.created_at => q.prev.name == null and q.prev.host == null and q.prev.created_at != null,
} else (q.prev.name == null and q.prev.host == null and q.prev.created_at == null);
if (!query_matches) return res.err(.bad_request, "prev.* parameters do not match", {});
const prev_arg: ?QueryArgs.Prev = if (q.prev.id) |id| .{
.id = id,
.order_val = switch (q.order_by) {
.name => .{ .name = q.prev.name.? },
.host => .{ .host = q.prev.host.? },
.created_at => .{ .created_at = q.prev.created_at.? },
},
} else null;
const query_args = QueryArgs{
.max_items = q.max_items,
.owner_id = q.owner_id,
.like = q.like,
.created_before = q.created_before,
.created_after = q.created_after,
.order_by = q.order_by,
.direction = q.direction,
.prev = prev_arg,
.page_direction = q.page_direction,
};
const results = try srv.queryCommunities(query_args);
var link = std.ArrayList(u8).init(req.allocator); var link = std.ArrayList(u8).init(req.allocator);
const link_writer = link.writer(); const link_writer = link.writer();
@ -187,26 +156,28 @@ fn formatRecursive(comptime prefix: []const u8, params: anytype, writer: anytype
} }
/// Convert wire-format query arguments into the controller's query type.
///
/// The query parser now supports union fields natively, so `query.Query`
/// is an alias for `QueryArgs` and this conversion is the identity.
/// Kept only so existing call sites keep compiling; remove once callers
/// use `QueryArgs` directly.
fn queryArgsToControllerQuery(args: QueryArgs) query.Query {
    return args;
}

View file

@ -71,37 +71,132 @@ const QueryIter = @import("util").QueryIter;
/// TODO: values are currently case-sensitive, and are not url-decoded properly. /// TODO: values are currently case-sensitive, and are not url-decoded properly.
/// This should be fixed. /// This should be fixed.
/// Parse a URL query string into a value of type `T`.
///
/// Each `key=value` pair is matched case-insensitively against the
/// flattened field paths of `T` (see `recursiveFieldPaths`); union fields
/// contribute their variants directly into the parent namespace, so at
/// most one variant of a union may appear in the query.
///
/// Errors: anything `parse`/`parseQueryValue` raises, plus
/// `error.PartiallySpecifiedStruct` when nothing usable was supplied
/// (instead of the `.?` panic this used to hit).
pub fn parseQuery(comptime T: type, query: []const u8) !T {
    if (comptime !std.meta.trait.isContainer(T)) @compileError("T must be a struct");
    var iter = QueryIter.from(query);

    var fields = Intermediary(T){};
    while (iter.next()) |pair| {
        // TODO: Hash map — this comptime-unrolled scan is O(#fields) per
        // query parameter.
        inline for (std.meta.fields(Intermediary(T))) |field| {
            // Intermediary field names carry a ".." prefix; strip it
            // before comparing against the raw query key.
            if (std.ascii.eqlIgnoreCase(field.name[2..], pair.key)) {
                @field(fields, field.name) = if (pair.value) |v| .{ .value = v } else .{ .no_value = {} };
                break;
            }
        } else std.log.debug("unknown param {s}", .{pair.key});
    }

    // `parse` yields null when no field of T (and no default) was
    // specified; surface that as an error rather than unwrapping.
    return (try parse(T, "", "", fields)) orelse error.PartiallySpecifiedStruct;
}
/// Convert the query parameter recorded under `name` in the intermediary
/// table into a `T`. Returns null when the key never appeared in the
/// query string at all.
fn parseScalar(comptime T: type, comptime name: []const u8, fields: anytype) !?T {
    switch (@field(fields, name)) {
        // Key absent from the query string entirely.
        .not_specified => return null,
        // Key present as a bare flag with no "=value" part.
        .no_value => return try parseQueryValue(T, null),
        // Key present with an explicit value.
        .value => |raw| return try parseQueryValue(T, raw),
    }
}
/// Recursively assemble a value of type `T` from the flat parameter table
/// built by `parseQuery`.
///
/// `prefix`/`name` locate this value in the dotted field-path namespace
/// that `Intermediary` uses for its field names. Returns null when no
/// parameter in this subtree was specified.
///
/// Errors:
/// - `error.DuplicateUnionField`: more than one variant of a union given.
/// - `error.PartiallySpecifiedStruct`: some but not all fields of a
///   nested struct (counting defaults) were given.
fn parse(comptime T: type, comptime prefix: []const u8, comptime name: []const u8, fields: anytype) !?T {
    if (comptime isScalar(T)) return parseScalar(T, prefix ++ "." ++ name, fields);
    switch (@typeInfo(T)) {
        .Union => |info| {
            // Union variants share the parent's namespace (no extra path
            // segment), so try every variant and require at most one hit.
            var result: ?T = null;
            inline for (info.fields) |field| {
                const F = field.field_type;
                const maybe_value = try parse(F, prefix, field.name, fields);
                if (maybe_value) |value| {
                    if (result != null) return error.DuplicateUnionField;
                    result = @unionInit(T, field.name, value);
                }
            }
            return result;
        },
        .Struct => |info| {
            var result: T = undefined;
            var fields_specified: usize = 0;
            inline for (info.fields) |field| {
                const F = field.field_type;
                var maybe_value: ?F = null;
                if (try parse(F, prefix ++ "." ++ name, field.name, fields)) |v| {
                    maybe_value = v;
                } else if (field.default_value) |default| {
                    // Not in the query: fall back to the declared default.
                    maybe_value = @ptrCast(*const F, @alignCast(@alignOf(F), default)).*;
                }
                if (maybe_value) |v| {
                    fields_specified += 1;
                    @field(result, field.name) = v;
                }
            }
            if (fields_specified == 0) {
                return null;
            } else if (fields_specified != info.fields.len) {
                return error.PartiallySpecifiedStruct;
            } else {
                return result;
            }
        },
        // Only applies to non-scalar optionals
        .Optional => |info| return try parse(info.child, prefix, name, fields),
        else => @compileError("Unsupported query parameter type " ++ @typeName(T)),
    }
}
/// Flatten `T` into the list of dotted field paths used as field names of
/// `Intermediary(T)`. Scalar fields contribute their own path; struct
/// fields recurse with "path." prepended; union fields recurse with the
/// parent prefix unchanged (their variants live in the parent namespace);
/// optionals delegate to their child type.
fn recursiveFieldPaths(comptime T: type, comptime prefix: []const u8) []const []const u8 {
    comptime {
        if (std.meta.trait.is(.Optional)(T)) return recursiveFieldPaths(std.meta.Child(T), prefix);

        var paths: []const []const u8 = &.{};
        for (std.meta.fields(T)) |field| {
            const path = prefix ++ field.name;
            if (isScalar(field.field_type)) {
                paths = paths ++ @as([]const []const u8, &.{path});
                continue;
            }
            // Unions flatten into the parent namespace; structs nest.
            const child_prefix = if (@typeInfo(field.field_type) == .Union) prefix else path ++ ".";
            paths = paths ++ recursiveFieldPaths(field.field_type, child_prefix);
        }
        return paths;
    }
}
// Tri-state record of a single query-string parameter exactly as it was
// read from the URL, before conversion to its destination type.
const QueryParam = union(enum) {
// parameter did not appear in the query string
not_specified: void,
// parameter appeared as a bare key with no "=value" part
no_value: void,
// parameter appeared with an explicit (still raw/encoded) value
value: []const u8,
};
/// Build a struct type with one `QueryParam` field per flattened field
/// path of `T`. Every field defaults to `.not_specified`, so
/// `Intermediary(T){}` starts out empty and is filled in while the query
/// string is scanned.
fn Intermediary(comptime T: type) type {
    const paths = recursiveFieldPaths(T, "..");

    var struct_fields: [paths.len]std.builtin.Type.StructField = undefined;
    for (paths) |path, idx| {
        struct_fields[idx] = std.builtin.Type.StructField{
            .name = path,
            .field_type = QueryParam,
            .default_value = &QueryParam{ .not_specified = {} },
            .is_comptime = false,
            .alignment = @alignOf(QueryParam),
        };
    }

    return @Type(.{ .Struct = .{
        .layout = .Auto,
        .fields = &struct_fields,
        .decls = &.{},
        .is_tuple = false,
    } });
}
fn parseQueryValue(comptime T: type, value: ?[]const u8) !T { fn parseQueryValue(comptime T: type, value: ?[]const u8) !T {