Add support for slices in serialization module
commit cece277eec
parent d4cd0e6618
3 changed files with 196 additions and 60 deletions

@@ -185,16 +185,10 @@ fn Deserializer(comptime Result: type) type {
 pub fn parseFormData(comptime T: type, allow_unknown_fields: bool, boundary: []const u8, reader: anytype, alloc: std.mem.Allocator) !T {
     var form = openForm(try openMultipart(boundary, reader));
 
-    var ds = Deserializer(T){};
-    defer {
-        var iter = ds.iterator();
-        while (iter.next()) |pair| {
-            util.deepFree(alloc, pair.value);
-        }
-    }
+    var ds = Deserializer(T).init(alloc);
+    defer ds.deinit();
     while (true) {
-        var part = (try form.next(alloc)) orelse break;
-        errdefer util.deepFree(alloc, part);
+        var part = (try form.next(ds.arena.allocator())) orelse break;
 
         ds.setSerializedField(part.name, part) catch |err| switch (err) {
             error.UnknownField => if (allow_unknown_fields) {

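For orientation, a minimal caller sketch (the `formdata` import name and the `Upload` type are assumptions, not part of this commit): the parts yielded by `form.next` are now allocated from the deserializer's arena, so the single `ds.deinit()` above replaces the removed per-pair `util.deepFree` loop.

const std = @import("std");
const formdata = @import("formdata.zig"); // module path assumed

const Upload = struct { // illustrative result type
    description: []const u8,
};

fn handleUpload(alloc: std.mem.Allocator, boundary: []const u8, reader: anytype) !Upload {
    // Intermediate multipart parts live in the deserializer's arena and are
    // released by its deinit; only the returned Upload remains for the caller.
    return formdata.parseFormData(Upload, false, boundary, reader, alloc);
}
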
@@ -101,7 +101,8 @@ pub const Iter = struct {
 pub fn parse(alloc: std.mem.Allocator, allow_unknown_fields: bool, comptime T: type, query: []const u8) !T {
     var iter = Iter.from(query);
 
-    var deserializer = Deserializer(T){};
+    var deserializer = Deserializer(T).init(alloc);
+    defer deserializer.deinit();
 
     while (iter.next()) |pair| {
         try deserializer.setSerializedField(pair.key, pair.value);

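A hedged usage sketch of the new capability (module path and types are assumptions; the `field[index].subfield` key grammar is taken from the `setSerializedField` parser added later in this commit): slice-typed fields can now be filled from indexed query keys.

const std = @import("std");
const query = @import("query.zig"); // module path assumed

// Illustrative types: indexed keys address fields inside each element,
// so the slice's child type is a struct.
const Tag = struct { value: []const u8 };
const Form = struct {
    title: []const u8,
    tags: []const Tag,
};

fn example(gpa: std.mem.Allocator) !void {
    var arena = std.heap.ArenaAllocator.init(gpa);
    defer arena.deinit();

    // "tags[0].value" and "tags[1].value" are routed to the dynamic
    // intermediary and come back as a two-element slice.
    const form = try query.parse(arena.allocator(), false, Form,
        "title=hi&tags[0].value=a&tags[1].value=b");
    std.debug.assert(form.tags.len == 2);
}
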
@@ -38,14 +38,15 @@ pub fn deserializeString(allocator: std.mem.Allocator, comptime T: type, value:
     @compileError("Invalid type " ++ @typeName(T));
 }
 
-pub fn getRecursiveFieldList(comptime T: type, comptime prefix: FieldRef, comptime options: SerializationOptions) []const FieldRef {
+fn getStaticFieldList(comptime T: type, comptime prefix: FieldRef, comptime options: SerializationOptions) []const FieldRef {
     comptime {
         if (std.meta.trait.is(.Union)(T) and prefix.len == 0 and options.embed_unions) {
             @compileError("Cannot embed a union into nothing");
         }
 
         if (options.isScalar(T)) return &.{prefix};
-        if (std.meta.trait.is(.Optional)(T)) return getRecursiveFieldList(std.meta.Child(T), prefix, options);
+        if (std.meta.trait.is(.Optional)(T)) return getStaticFieldList(std.meta.Child(T), prefix, options);
+        if (std.meta.trait.isSlice(T) and !std.meta.trait.isZigString(T)) return &.{};
 
         const eff_prefix: FieldRef = if (std.meta.trait.is(.Union)(T) and options.embed_unions)
             prefix[0 .. prefix.len - 1]

@@ -57,13 +58,47 @@ pub fn getRecursiveFieldList(comptime T: type, comptime prefix: FieldRef, compti
         for (std.meta.fields(T)) |f| {
             const new_prefix = eff_prefix ++ &[_][]const u8{f.name};
             const F = f.field_type;
-            fields = fields ++ getRecursiveFieldList(F, new_prefix, options);
+            fields = fields ++ getStaticFieldList(F, new_prefix, options);
         }
 
         return fields;
     }
 }
 
+fn getDynamicFieldList(comptime T: type, comptime prefix: FieldRef, comptime options: SerializationOptions) []const DynamicField {
+    comptime {
+        if (std.meta.trait.is(.Union)(T) and prefix.len == 0 and options.embed_unions) {
+            @compileError("Cannot embed a union into nothing");
+        }
+
+        if (options.isScalar(T)) return &.{};
+        if (std.meta.trait.is(.Optional)(T)) return getDynamicFieldList(std.meta.Child(T), prefix, options);
+        if (std.meta.trait.isSlice(T) and !std.meta.trait.isZigString(T)) return &.{
+            .{ .ref = prefix, .child_type = std.meta.Child(T) },
+        };
+
+        const eff_prefix: FieldRef = if (std.meta.trait.is(.Union)(T) and options.embed_unions)
+            prefix[0 .. prefix.len - 1]
+        else
+            prefix;
+
+        var fields: []const DynamicField = &.{};
+
+        for (std.meta.fields(T)) |f| {
+            const new_prefix = eff_prefix ++ &[_][]const u8{f.name};
+            const F = f.field_type;
+            fields = fields ++ getDynamicFieldList(F, new_prefix, options);
+        }
+
+        return fields;
+    }
+}
+
+const DynamicField = struct {
+    ref: FieldRef,
+    child_type: type,
+};
+
 pub const SerializationOptions = struct {
     embed_unions: bool,
     isScalar: fn (type) bool,

@@ -74,10 +109,10 @@ pub const default_options = SerializationOptions{
     .isScalar = defaultIsScalar,
 };
 
-fn Intermediary(comptime Result: type, comptime From: type, comptime options: SerializationOptions) type {
-    const field_refs = getRecursiveFieldList(Result, &.{}, options);
+fn StaticIntermediary(comptime Result: type, comptime From: type, comptime options: SerializationOptions) type {
+    const field_refs = getStaticFieldList(Result, &.{}, options);
 
-    var fields: [field_refs.len]std.builtin.Type.StructField = undefined;
+    var fields: [field_refs.len + 1]std.builtin.Type.StructField = undefined;
     for (field_refs) |ref, i| {
         fields[i] = .{
             .name = util.comptimeJoin(".", ref),

@@ -88,6 +123,13 @@ fn Intermediary(comptime Result: type, comptime From: type, comptime options: Se
         };
     }
 
+    fields[fields.len - 1] = .{
+        .name = "__dummy",
+        .default_value = &1,
+        .field_type = usize,
+        .is_comptime = false,
+        .alignment = @alignOf(usize),
+    };
     return @Type(.{ .Struct = .{
         .layout = .Auto,
         .fields = &fields,

@@ -96,6 +138,109 @@ fn Intermediary(comptime Result: type, comptime From: type, comptime options: Se
     } });
 }
 
+fn DynamicIntermediary(comptime Result: type, comptime From: type, comptime options: SerializationOptions) type {
+    const field_refs = getDynamicFieldList(Result, &.{}, options);
+
+    var fields: [field_refs.len + 1]std.builtin.Type.StructField = undefined;
+    for (field_refs) |f, i| {
+        const T = std.ArrayListUnmanaged(Intermediary(f.child_type, From, options));
+        fields[i] = .{
+            .name = util.comptimeJoin(".", f.ref),
+            .default_value = &T{},
+            .field_type = T,
+            .is_comptime = false,
+            .alignment = @alignOf(T),
+        };
+    }
+
+    fields[fields.len - 1] = .{
+        .name = "__dummy",
+        .default_value = &1,
+        .field_type = usize,
+        .is_comptime = false,
+        .alignment = @alignOf(usize),
+    };
+    return @Type(.{ .Struct = .{
+        .layout = .Auto,
+        .fields = &fields,
+        .decls = &.{},
+        .is_tuple = false,
+    } });
+}
+
+const SerializationInfo = struct {
+    max_slice_len: usize = 16,
+};
+
+fn getSerializationInfo(
+    comptime info: anytype,
+    comptime field_name: []const u8,
+    comptime info_key: std.meta.FieldEnum(SerializationInfo),
+) std.meta.fieldInfo(SerializationInfo, info_key).field_type {
+    if (@hasDecl(info, "serialization_info") and
+        @hasDecl(info.serialization_info, field_name) and
+        @hasDecl(@field(info.serialization_info, field_name), @tagName(info_key)))
+    {
+        return @field(@field(info.serialization_info, field_name), @tagName(info_key));
+    } else return switch (info_key) {
+        .max_slice_len => 16,
+    };
+}
+
+fn Intermediary(comptime Result: type, comptime From: type, comptime options: SerializationOptions) type {
+    return struct {
+        const StaticData = StaticIntermediary(Result, From, options);
+        const DynamicData = DynamicIntermediary(Result, From, options);
+        static: StaticData = .{},
+        dynamic: DynamicData = .{},
+
+        fn setSerializedField(self: *@This(), allocator: std.mem.Allocator, key: []const u8, value: From) !void {
+            var split = std.mem.split(u8, key, "[");
+            const first = split.first();
+            const rest = split.rest();
+            if (rest.len == 0) {
+                const field = std.meta.stringToEnum(std.meta.FieldEnum(StaticData), key) orelse return error.UnknownField;
+                inline for (comptime std.meta.fieldNames(StaticData)) |field_name| {
+                    @setEvalBranchQuota(10000);
+                    const f = comptime std.meta.stringToEnum(std.meta.FieldEnum(StaticData), field_name).?;
+                    if (f != .__dummy and field == f) {
+                        @field(self.static, field_name) = value;
+                        return;
+                    }
+                }
+
+                unreachable;
+            } else {
+                split = std.mem.split(u8, rest, "]");
+                const idx_str = split.first();
+                const idx = try std.fmt.parseInt(usize, idx_str, 10);
+                var next = split.rest();
+                if (next.len == 0 or next[0] != '.') return error.UnknownField;
+                next = next[1..];
+
+                std.log.debug("{s} {s} {s}", .{ first, idx_str, next });
+
+                const field = std.meta.stringToEnum(std.meta.FieldEnum(DynamicData), first) orelse return error.UnknownField;
+                inline for (comptime std.meta.fieldNames(DynamicData)) |field_name| {
+                    @setEvalBranchQuota(10000);
+                    const f = comptime std.meta.stringToEnum(std.meta.FieldEnum(DynamicData), field_name).?;
+                    if (f != .__dummy and field == f) {
+                        const limits = getSerializationInfo(Result, field_name, .max_slice_len);
+                        if (idx >= limits) return error.SliceTooLong;
+                        const list = &@field(self.dynamic, field_name);
+                        while (idx >= list.items.len) {
+                            try list.append(allocator, .{});
+                        }
+
+                        try list.items[idx].setSerializedField(allocator, next, value);
+                        return;
+                    }
+                }
+            }
+        }
+    };
+}
+
 pub fn Deserializer(comptime Result: type) type {
     return DeserializerContext(Result, []const u8, struct {
         const options = default_options;

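The `@hasDecl` chain in `getSerializationInfo` implies that a result type can override `max_slice_len` per field by exposing a nested `serialization_info` namespace. A sketch of that shape, with invented names (`Poll`, `options`):

// Hypothetical result type raising the cap on its `options` slice from the
// default of 16 to 64; getSerializationInfo(Poll, "options", .max_slice_len)
// would resolve to 64 for this type.
const Poll = struct {
    question: []const u8,
    options: []const Option,

    const Option = struct { text: []const u8 };

    pub const serialization_info = struct {
        pub const options = struct {
            pub const max_slice_len = 64;
        };
    };
};
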
@@ -109,50 +254,21 @@ pub fn DeserializerContext(comptime Result: type, comptime From: type, comptime
     return struct {
         const Data = Intermediary(Result, From, Context.options);
 
+        arena: std.heap.ArenaAllocator,
+
         data: Data = .{},
         context: Context = .{},
 
+        pub fn init(alloc: std.mem.Allocator) @This() {
+            return .{ .arena = std.heap.ArenaAllocator.init(alloc) };
+        }
+
+        pub fn deinit(self: *@This()) void {
+            self.arena.deinit();
+        }
+
         pub fn setSerializedField(self: *@This(), key: []const u8, value: From) !void {
-            const field = std.meta.stringToEnum(std.meta.FieldEnum(Data), key) orelse return error.UnknownField;
-            inline for (comptime std.meta.fieldNames(Data)) |field_name| {
-                @setEvalBranchQuota(10000);
-                const f = comptime std.meta.stringToEnum(std.meta.FieldEnum(Data), field_name);
-                if (field == f) {
-                    @field(self.data, field_name) = value;
-                    return;
-                }
-            }
-
-            unreachable;
-        }
-
-        pub const Iter = struct {
-            data: *const Data,
-            field_index: usize,
-
-            const Item = struct {
-                key: []const u8,
-                value: From,
-            };
-
-            pub fn next(self: *Iter) ?Item {
-                while (self.field_index < std.meta.fields(Data).len) {
-                    const idx = self.field_index;
-                    self.field_index += 1;
-                    inline for (comptime std.meta.fieldNames(Data)) |field, i| {
-                        if (i == idx) {
-                            const maybe_value = @field(self.data.*, field);
-                            if (maybe_value) |value| return Item{ .key = field, .value = value };
-                        }
-                    }
-                }
-
-                return null;
-            }
-        };
-
-        pub fn iterator(self: *const @This()) Iter {
-            return .{ .data = &self.data, .field_index = 0 };
+            try self.data.setSerializedField(self.arena.allocator(), key, value);
         }
 
         pub fn finishFree(_: *@This(), allocator: std.mem.Allocator, val: anytype) void {

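A sketch of the deserializer lifecycle this hunk introduces (the `serialize` import and `MyResult` type are illustrative): internal bookkeeping now lives in the arena, while `finish` builds the final value with whatever allocator the caller passes.

const std = @import("std");
const serialize = @import("serialize.zig"); // module path assumed

const MyResult = struct { name: []const u8 }; // example type

fn decode(gpa: std.mem.Allocator) !MyResult {
    var ds = serialize.Deserializer(MyResult).init(gpa);
    // Frees the arena used for internal bookkeeping, e.g. the per-index
    // lists that back slice fields.
    defer ds.deinit();

    try ds.setSerializedField("name", "zig");
    // finish builds the result with the allocator given here, so it is
    // intended to outlive the deserializer's arena.
    return ds.finish(gpa);
}
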
@@ -160,7 +276,7 @@ pub fn DeserializerContext(comptime Result: type, comptime From: type, comptime
         }
 
         pub fn finish(self: *@This(), allocator: std.mem.Allocator) !Result {
-            return (try self.deserialize(allocator, Result, &.{})) orelse error.MissingField;
+            return (try self.deserialize(allocator, Result, self.data, &.{})) orelse error.MissingField;
         }
 
         fn getSerializedField(self: *@This(), comptime field_ref: FieldRef) ?From {

@@ -172,9 +288,16 @@ pub fn DeserializerContext(comptime Result: type, comptime From: type, comptime
             util.deepFree(allocator, val);
         }
 
-        fn deserialize(self: *@This(), allocator: std.mem.Allocator, comptime T: type, comptime field_ref: FieldRef) !?T {
+        fn deserialize(
+            self: *@This(),
+            allocator: std.mem.Allocator,
+            comptime T: type,
+            intermediary: anytype,
+            comptime field_ref: FieldRef,
+        ) !?T {
             if (comptime Context.options.isScalar(T)) {
-                return try self.context.deserializeScalar(allocator, T, self.getSerializedField(field_ref) orelse return null);
+                const val = @field(intermediary.static, util.comptimeJoin(".", field_ref));
+                return try self.context.deserializeScalar(allocator, T, val orelse return null);
             }
 
             switch (@typeInfo(T)) {

@@ -188,7 +311,7 @@ pub fn DeserializerContext(comptime Result: type, comptime From: type, comptime
                     inline for (info.fields) |field| {
                         const F = field.field_type;
                         const new_field_ref = union_ref ++ &[_][]const u8{field.name};
-                        const maybe_value = try self.deserialize(allocator, F, new_field_ref);
+                        const maybe_value = try self.deserialize(allocator, F, intermediary, new_field_ref);
                         if (maybe_value) |value| {
                             // TODO: errdefer cleanup
                             errdefer self.deserializeFree(allocator, value);

@@ -211,7 +334,7 @@ pub fn DeserializerContext(comptime Result: type, comptime From: type, comptime
                     inline for (info.fields) |field, i| {
                         const F = field.field_type;
                         const new_field_ref = field_ref ++ &[_][]const u8{field.name};
-                        const maybe_value = try self.deserialize(allocator, F, new_field_ref);
+                        const maybe_value = try self.deserialize(allocator, F, intermediary, new_field_ref);
                         if (maybe_value) |v| {
                             @field(result, field.name) = v;
                             fields_alloced[i] = true;

@@ -233,8 +356,26 @@ pub fn DeserializerContext(comptime Result: type, comptime From: type, comptime
                     return result;
                 },
 
+                .Pointer => |info| switch (info.size) {
+                    .Slice => {
+                        const name = comptime util.comptimeJoin(".", field_ref);
+                        const data = @field(self.data.dynamic, name);
+
+                        const result = try allocator.alloc(info.child, data.items.len);
+                        errdefer allocator.free(result);
+                        var count: usize = 0;
+                        errdefer for (result[0..count]) |res| util.deepFree(allocator, res);
+                        for (data.items) |sub, i| {
+                            result[i] = (try self.deserialize(allocator, info.child, sub, &.{})) orelse return error.SparseSlice;
+                        }
+
+                        return result;
+                    },
+                    else => @compileError("Unsupported type"),
+                },
+
                 // Specifically non-scalar optionals
-                .Optional => |info| return try self.deserialize(allocator, info.child, field_ref),
+                .Optional => |info| return try self.deserialize(allocator, info.child, intermediary, field_ref),
 
                 else => @compileError("Unsupported type"),
             }

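Two consequences of the slice path above, shown as a hedged sketch (module path and types are assumptions): an index at or above the field's `max_slice_len` (default 16) is rejected up front with `error.SliceTooLong`, and a skipped index leaves a default-initialized element that the slice loop reports as `error.SparseSlice` rather than silently shrinking the result.

const std = @import("std");
const query = @import("query.zig"); // module path assumed

const Tag = struct { value: []const u8 };
const Form = struct { tags: []const Tag };

fn sliceLimit(gpa: std.mem.Allocator) !void {
    var arena = std.heap.ArenaAllocator.init(gpa);
    defer arena.deinit();

    // Index 16 trips the default max_slice_len check in setSerializedField
    // before anything is appended to the dynamic list. (Providing only
    // tags[0] and tags[2] would instead leave a hole, which the slice path
    // surfaces as error.SparseSlice once that element fails to deserialize.)
    try std.testing.expectError(error.SliceTooLong,
        query.parse(arena.allocator(), false, Form, "tags[16].value=x"));
}
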