Compare commits

4 commits: 03a5112036 ... 795f7983f8

| SHA1 |
|---|
| 795f7983f8 |
| cece277eec |
| d4cd0e6618 |
| 9f0cac0ed3 |

10 changed files with 479 additions and 75 deletions
@@ -41,14 +41,27 @@ pub const InviteOptions = struct {

pub const LoginResponse = services.auth.LoginResult;

pub const ProfileField = services.actors.ProfileField;
pub const UserResponse = struct {
    id: Uuid,

    username: []const u8,
    host: []const u8,

    display_name: ?[]const u8,
    bio: []const u8,

    avatar_file_id: ?Uuid,
    header_file_id: ?Uuid,

    profile_fields: []const ProfileField,

    community_id: Uuid,

    created_at: DateTime,
    updated_at: DateTime,
};
pub const PartialUserProfile = services.actors.PartialProfile;

pub const NoteResponse = struct {
    id: Uuid,

@@ -429,6 +442,7 @@ fn ApiConn(comptime DbConn: type) type {

        pub fn getUser(self: *Self, user_id: Uuid) !UserResponse {
            const user = try services.actors.get(self.db, user_id, self.allocator);
            errdefer util.deepFree(self.allocator, user);

            if (self.user_id == null) {
                if (!Uuid.eql(self.community.id, user.community_id)) return error.NotFound;

@@ -436,9 +450,22 @@ fn ApiConn(comptime DbConn: type) type {

            return UserResponse{
                .id = user.id,

                .username = user.username,
                .host = user.host,

                .display_name = user.display_name,
                .bio = user.bio,

                .avatar_file_id = user.avatar_file_id,
                .header_file_id = user.header_file_id,

                .profile_fields = user.profile_fields,

                .community_id = user.community_id,

                .created_at = user.created_at,
                .updated_at = user.updated_at,
            };
        }

@@ -715,5 +742,10 @@ fn ApiConn(comptime DbConn: type) type {

                .data = try services.files.deref(self.allocator, id),
            };
        }

        pub fn updateUserProfile(self: *Self, id: Uuid, data: PartialUserProfile) !void {
            if (!Uuid.eql(id, self.user_id orelse return error.NoToken)) return error.AccessDenied;
            try services.actors.updateProfile(self.db, id, data, self.allocator);
        }
    };
}
@@ -1,7 +1,11 @@

const std = @import("std");
const util = @import("util");
const sql = @import("sql");
const auth = @import("./auth.zig");
const common = @import("./common.zig");
const files = @import("./files.zig");

const Partial = common.Partial;
const Uuid = util.Uuid;
const DateTime = util.DateTime;

@@ -17,7 +21,13 @@ pub const ActorDetailed = struct {

    id: Uuid,
    username: []const u8,
    host: []const u8,
    display_name: ?[]const u8,
    bio: []const u8,
    avatar_file_id: ?Uuid,
    header_file_id: ?Uuid,
    profile_fields: ProfileField,
    created_at: DateTime,
    updated_at: DateTime,
};

pub const LookupError = error{

@@ -90,15 +100,33 @@ pub fn create(

    return id;
}

pub const ProfileField = struct {
    key: []const u8,
    value: []const u8,
};

pub const Actor = struct {
    id: Uuid,

    username: []const u8,
    host: []const u8,

    display_name: ?[]const u8,
    bio: []const u8,

    avatar_file_id: ?Uuid,
    header_file_id: ?Uuid,

    profile_fields: []const ProfileField,

    community_id: Uuid,

    created_at: DateTime,
    updated_at: DateTime,

    pub const sql_serialize = struct {
        pub const profile_fields = .json;
    };
};

pub const GetError = error{ NotFound, DatabaseFailure };

@@ -109,8 +137,14 @@ pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Actor {

        \\ actor.id,
        \\ actor.username,
        \\ community.host,
        \\ actor.display_name,
        \\ actor.bio,
        \\ actor.avatar_file_id,
        \\ actor.header_file_id,
        \\ actor.profile_fields,
        \\ actor.community_id,
-       \\ actor.created_at
+       \\ actor.created_at,
+       \\ actor.updated_at
        \\FROM actor JOIN community
        \\ ON actor.community_id = community.id
        \\WHERE actor.id = $1

@@ -123,3 +157,72 @@ pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Actor {

        else => error.DatabaseFailure,
    };
}

pub const PartialProfile = Partial(Profile);
pub const Profile = struct {
    display_name: ?[]const u8,
    bio: []const u8,
    avatar_file_id: ?Uuid,
    header_file_id: ?Uuid,
    profile_fields: []const ProfileField,
};
pub const max_fields = 32;
pub const max_display_name_len = 128;
pub const max_bio = 1 << 16;

pub fn updateProfile(db: anytype, id: Uuid, new: PartialProfile, alloc: std.mem.Allocator) !void {
    var builder = sql.QueryBuilder.init(alloc);
    defer builder.deinit();

    try builder.appendSlice("UPDATE actor");

    if (new.display_name) |_| try builder.set("display_name", "$2");
    if (new.bio) |_| try builder.set("bio", "$3");
    if (new.avatar_file_id) |_| try builder.set("avatar_file_id", "$4");
    if (new.header_file_id) |_| try builder.set("header_file_id", "$5");
    if (new.profile_fields) |_| try builder.set("profile_fields", "$6");

    if (builder.set_statements_appended == 0) return error.NoChange;

    try builder.set("updated_at", "$7");

    try builder.andWhere("id = $1");

    const profile_fields = if (new.profile_fields) |pf| try std.json.stringifyAlloc(alloc, pf, .{}) else null;
    defer if (profile_fields) |pf| alloc.free(pf);

    const tx = try db.begin();
    errdefer tx.rollback();

    if (new.display_name) |maybe_dn| if (maybe_dn) |dn| {
        if (dn.len > max_display_name_len) return error.DisplayNameTooLong;
    };
    if (new.bio) |b| if (b.len > max_bio) return error.BioTooLong;
    if (new.avatar_file_id) |maybe_file_id| if (maybe_file_id) |file_id| {
        const info = try files.get(tx, file_id, alloc);
        defer util.deepFree(alloc, info);

        if (!Uuid.eql(id, info.owner_id)) return error.FileAccessDenied;
        if (info.status != .uploaded) return error.FileNotReady;
    };
    if (new.header_file_id) |maybe_file_id| if (maybe_file_id) |file_id| {
        const info = try files.get(tx, file_id, alloc);
        defer util.deepFree(alloc, info);

        if (!Uuid.eql(id, info.owner_id)) return error.FileAccessDenied;
        if (info.status != .uploaded) return error.FileNotReady;
    };
    if (new.profile_fields) |f| if (f.len > max_fields) return error.TooManyFields;

    try tx.execWithOptions(try builder.terminate(), .{
        id,
        new.display_name orelse null,
        new.bio orelse null,
        new.avatar_file_id orelse null,
        new.header_file_id orelse null,
        profile_fields,
        DateTime.now(),
    }, .{ .allocator = alloc, .ignore_unused_arguments = true });

    try tx.commit();
}
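A note on `Partial`: it comes from `common.zig`, which this diff doesn't show. Judging by the double unwrap in `updateProfile` (`if (new.display_name) |maybe_dn| if (maybe_dn) |dn|`), it plausibly wraps every field of `Profile` in a further optional, so `null` means "leave this column alone" while a present value (which may itself be null, e.g. clearing `display_name`) means "write it". A minimal sketch under that assumption, mirroring the `@Type` construction style used elsewhere in this change:

```zig
const std = @import("std");

// Hypothetical reconstruction of common.Partial: each field F becomes ?F
// with a null default. Not part of the diff; an illustration only.
fn Partial(comptime T: type) type {
    var fields: [std.meta.fields(T).len]std.builtin.Type.StructField = undefined;
    for (std.meta.fields(T)) |f, i| {
        const F = ?f.field_type;
        fields[i] = .{
            .name = f.name,
            .field_type = F,
            .default_value = &@as(F, null), // omitted fields mean "no change"
            .is_comptime = false,
            .alignment = @alignOf(F),
        };
    }
    return @Type(.{ .Struct = .{
        .layout = .Auto,
        .fields = &fields,
        .decls = &.{},
        .is_tuple = false,
    } });
}
```

Under that reading, a `PartialProfile` with every field omitted is rejected by `updateProfile` with `error.NoChange`, since no `SET` clause gets appended.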
@@ -185,16 +185,10 @@ fn Deserializer(comptime Result: type) type {

pub fn parseFormData(comptime T: type, allow_unknown_fields: bool, boundary: []const u8, reader: anytype, alloc: std.mem.Allocator) !T {
    var form = openForm(try openMultipart(boundary, reader));

-   var ds = Deserializer(T){};
-   defer {
-       var iter = ds.iterator();
-       while (iter.next()) |pair| {
-           util.deepFree(alloc, pair.value);
-       }
-   }
+   var ds = Deserializer(T).init(alloc);
+   defer ds.deinit();
    while (true) {
-       var part = (try form.next(alloc)) orelse break;
-       errdefer util.deepFree(alloc, part);
+       var part = (try form.next(ds.arena.allocator())) orelse break;

        ds.setSerializedField(part.name, part) catch |err| switch (err) {
            error.UnknownField => if (allow_unknown_fields) {
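The `parseFormData` change above swaps ownership models: instead of tracking each parsed part and `deepFree`ing it through an iterator, the deserializer now owns an `ArenaAllocator`, parts are allocated from `ds.arena.allocator()`, and a single `ds.deinit()` reclaims everything, including on error paths. A small sketch of that pattern (illustrative names, not from the diff):

```zig
const std = @import("std");

test "arena releases every intermediate allocation in one call" {
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    // One deinit frees all allocations made below, so no per-part
    // errdefer/deepFree bookkeeping is needed while parsing.
    defer arena.deinit();

    const alloc = arena.allocator();
    _ = try alloc.dupe(u8, "display_name");
    _ = try alloc.dupe(u8, "Cool User");
}
```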
@@ -101,7 +101,8 @@ pub const Iter = struct {

pub fn parse(alloc: std.mem.Allocator, allow_unknown_fields: bool, comptime T: type, query: []const u8) !T {
    var iter = Iter.from(query);

-   var deserializer = Deserializer(T){};
+   var deserializer = Deserializer(T).init(alloc);
+   defer deserializer.deinit();

    while (iter.next()) |pair| {
        try deserializer.setSerializedField(pair.key, pair.value);
@@ -17,6 +17,8 @@ pub const routes = .{

    controllers.apiEndpoint(communities.query),
    controllers.apiEndpoint(invites.create),
    controllers.apiEndpoint(users.create),
+   controllers.apiEndpoint(users.get),
+   controllers.apiEndpoint(users.update_profile),
    controllers.apiEndpoint(notes.create),
    controllers.apiEndpoint(notes.get),
    //controllers.apiEndpoint(streaming.streaming),
@@ -1,3 +1,4 @@

+const util = @import("util");
const api = @import("api");

pub const create = struct {

@@ -24,3 +25,40 @@ pub const create = struct {

        try res.json(.created, user);
    }
};

pub const get = struct {
    pub const method = .GET;
    pub const path = "/users/:id";

    pub const Args = struct {
        id: util.Uuid,
    };

    pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
        const result = try srv.getUser(req.args.id);
        defer util.deepFree(srv.allocator, result);

        try res.json(.ok, result);
    }
};

pub const update_profile = struct {
    pub const method = .PUT;
    pub const path = "/users/:id";

    pub const Args = struct {
        id: util.Uuid,
    };

    pub const Body = api.PartialUserProfile;

    // TODO: I don't like that the request body and response body are substantially different
    pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
        try srv.updateUserProfile(req.args.id, req.body);

        const result = try srv.getUser(req.args.id);
        defer util.deepFree(srv.allocator, result);

        try res.json(.ok, result);
    }
};
@@ -343,4 +343,23 @@ const migrations: []const Migration = &.{

        ,
        .down = "",
    },
    .{
        .name = "user profiles",
        .up =
        \\ALTER TABLE actor ADD COLUMN bio TEXT NOT NULL DEFAULT '';
        \\ALTER TABLE actor ADD COLUMN display_name TEXT;
        \\ALTER TABLE actor ADD COLUMN avatar_file_id UUID REFERENCES file_upload(id);
        \\ALTER TABLE actor ADD COLUMN header_file_id UUID REFERENCES file_upload(id);
        \\ALTER TABLE actor ADD COLUMN profile_fields JSON DEFAULT '[]';
        \\ALTER TABLE actor ADD COLUMN updated_at TIMESTAMPTZ DEFAULT 0;
        ,
        .down =
        \\ALTER TABLE actor DROP COLUMN bio;
        \\ALTER TABLE actor DROP COLUMN display_name;
        \\ALTER TABLE actor DROP COLUMN avatar_file_id;
        \\ALTER TABLE actor DROP COLUMN header_file_id;
        \\ALTER TABLE actor DROP COLUMN profile_fields;
        \\ALTER TABLE actor DROP COLUMN updated_at;
        ,
    },
};
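The new `profile_fields JSON` column stores the field list as JSON text; `updateProfile` produces that text with `std.json.stringifyAlloc` before binding it as parameter `$6`. A quick check of the expected stored value (the test name is mine):

```zig
const std = @import("std");

const ProfileField = struct {
    key: []const u8,
    value: []const u8,
};

test "profile_fields serialize to the JSON stored in the new column" {
    const alloc = std.testing.allocator;
    const fields = [_]ProfileField{
        .{ .key = "Pronouns", .value = "they/them" },
    };

    // Mirrors updateProfile: the slice is flattened to JSON text for $6.
    const json = try std.json.stringifyAlloc(alloc, @as([]const ProfileField, &fields), .{});
    defer alloc.free(json);

    try std.testing.expectEqualStrings(
        \\[{"key":"Pronouns","value":"they/them"}]
    , json);
}
```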
@@ -140,7 +140,8 @@ const RawResults = union(Engine) {

    }
};

-fn FieldPtr(comptime Ptr: type, comptime names: []const []const u8) type {
+const FieldRef = []const []const u8;
+fn FieldPtr(comptime Ptr: type, comptime names: FieldRef) type {
    if (names.len == 0) return Ptr;

    const T = std.meta.Child(Ptr);

@@ -152,18 +153,48 @@ fn FieldPtr(comptime Ptr: type, comptime names: []const []const u8) type {

    return FieldPtr(*field.field_type, names[1..]);
}

-fn fieldPtr(ptr: anytype, comptime names: []const []const u8) FieldPtr(@TypeOf(ptr), names) {
+fn fieldPtr(ptr: anytype, comptime names: FieldRef) FieldPtr(@TypeOf(ptr), names) {
    if (names.len == 0) return ptr;

    return fieldPtr(&@field(ptr.*, names[0]), names[1..]);
}

fn getRecursiveFieldList(comptime T: type, comptime prefix: FieldRef, comptime options: anytype) []const FieldRef {
    comptime {
        if (std.meta.trait.is(.Union)(T) and prefix.len == 0 and options.embed_unions) {
            @compileError("Cannot embed a union into nothing");
        }

        if (options.isScalar(T)) return &.{prefix};
        if (std.meta.trait.is(.Optional)(T)) return getRecursiveFieldList(std.meta.Child(T), prefix, options);

        const eff_prefix: FieldRef = if (std.meta.trait.is(.Union)(T) and options.embed_unions)
            prefix[0 .. prefix.len - 1]
        else
            prefix;

        var fields: []const FieldRef = &.{};

        for (std.meta.fields(T)) |f| {
            const new_prefix = eff_prefix ++ &[_][]const u8{f.name};
            if (@hasDecl(T, "sql_serialize") and @hasDecl(T.sql_serialize, f.name) and @field(T.sql_serialize, f.name) == .json) {
                fields = fields ++ &[_]FieldRef{new_prefix};
            } else {
                const F = f.field_type;
                fields = fields ++ getRecursiveFieldList(F, new_prefix, options);
            }
        }

        return fields;
    }
}

// Represents a set of results.
// row() must be called until it returns null, or the query may not complete
// Must be deallocated by a call to finish()
pub fn Results(comptime T: type) type {
    // would normally make this a declaration of the struct, but it causes the compiler to crash
-   const fields = if (T == void) .{} else util.serialize.getRecursiveFieldList(
+   const fields = if (T == void) .{} else getRecursiveFieldList(
        T,
        &.{},
        util.serialize.default_options,

@@ -223,11 +254,35 @@ pub fn Results(comptime T: type) type {

                //const F = @TypeOf(@field(result, f.name));
                const F = std.meta.Child(FieldPtr(*@TypeOf(result), f));
                const ptr = fieldPtr(&result, f);
-               const name = util.comptimeJoin(".", f);
-               ptr.* = row_val.get(F, self.column_indices[i], alloc) catch |err| {
-                   std.log.err("SQL: Error getting column {s} of type {}", .{ name, F });
-                   return err;
-               };
+               const name = comptime util.comptimeJoin(".", f);
+
+               const mode = comptime if (@hasDecl(T, "sql_serialize")) blk: {
+                   if (@hasDecl(T.sql_serialize, name)) {
+                       break :blk @field(T.sql_serialize, name);
+                   }
+                   break :blk .default;
+               } else .default;
+               switch (mode) {
+                   .default => ptr.* = row_val.get(F, self.column_indices[i], alloc) catch |err| {
+                       std.log.err("SQL: Error getting column {s} of type {}", .{ name, F });
+                       return err;
+                   },
+                   .json => {
+                       const str = row_val.get([]const u8, self.column_indices[i], alloc) catch |err| {
+                           std.log.err("SQL: Error getting column {s} of type {}", .{ name, F });
+                           return err;
+                       };
+                       const a = alloc orelse return error.AllocatorRequired;
+                       defer a.free(str);
+
+                       var ts = std.json.TokenStream.init(str);
+                       ptr.* = std.json.parse(F, &ts, .{ .allocator = a }) catch |err| {
+                           std.log.err("SQL: Error parsing column {s} of type {}: {}", .{ name, F, err });
+                           return error.ResultTypeMismatch;
+                       };
+                   },
+                   else => @compileError("unknown mode"),
+               }
                fields_allocated += 1;
            }

@@ -488,7 +543,16 @@ fn Tx(comptime tx_level: u8) type {

            args: anytype,
            alloc: ?std.mem.Allocator,
        ) !void {
-           try self.execInternal(sql, args, alloc, true);
+           try self.execInternal(sql, args, .{ .allocator = alloc }, true);
        }

        pub fn execWithOptions(
            self: Self,
            sql: [:0]const u8,
            args: anytype,
            options: QueryOptions,
        ) !void {
            try self.execInternal(sql, args, options, true);
        }

        pub fn queryWithOptions(

@@ -644,17 +708,17 @@ fn Tx(comptime tx_level: u8) type {

            self: Self,
            sql: [:0]const u8,
            args: anytype,
-           alloc: ?std.mem.Allocator,
+           options: QueryOptions,
            comptime check_tx: bool,
        ) !void {
-           var results = try self.runSql(sql, args, .{ .allocator = alloc }, check_tx);
+           var results = try self.runSql(sql, args, options, check_tx);
            defer results.finish();

            while (try results.row()) |_| {}
        }

        fn rollbackUnchecked(self: Self) !void {
-           try self.execInternal("ROLLBACK", {}, null, false);
+           try self.execInternal("ROLLBACK", {}, .{}, false);
        }
    };
}
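The row loop above now picks a per-column decode mode at comptime: if the result type declares an `sql_serialize` struct with a decl matching the joined column name, that decl's value (`.json`) reroutes the column text through `std.json.parse`; otherwise the engine's native getter is used. A self-contained sketch of the lookup half, reusing the same `@hasDecl` chain (the `Row` type is invented for illustration):

```zig
const std = @import("std");

const Row = struct {
    name: []const u8,
    profile_fields: []const u8,

    // Same convention as Actor.sql_serialize in this diff:
    // one marker decl per column that needs special decoding.
    pub const sql_serialize = struct {
        pub const profile_fields = .json;
    };
};

test "per-column serialization mode lookup" {
    inline for (std.meta.fields(Row)) |f| {
        const mode = comptime if (@hasDecl(Row, "sql_serialize") and @hasDecl(Row.sql_serialize, f.name))
            @field(Row.sql_serialize, f.name)
        else
            .default;

        if (comptime std.mem.eql(u8, f.name, "profile_fields")) {
            try std.testing.expect(mode == .json);
        } else {
            try std.testing.expect(mode == .default);
        }
    }
}
```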
@@ -95,7 +95,17 @@ pub fn deepFree(alloc: ?std.mem.Allocator, val: anytype) void {

        else => @compileError("Many and C-style pointers not supported by deepfree"),
    },
    .Optional => if (val) |v| deepFree(alloc, v) else {},
-   .Struct => |struct_info| inline for (struct_info.fields) |field| deepFree(alloc, @field(val, field.name)),
+   .Struct => |struct_info| inline for (struct_info.fields) |field| {
+       const v = @field(val, field.name);
+       const should_free = if (field.default_value) |opaque_ptr| blk: {
+           const aligned = if (field.alignment != 0) @alignCast(field.alignment, opaque_ptr) else opaque_ptr;
+           const ptr = @ptrCast(*const field.field_type, aligned);
+           if (std.meta.eql(v, ptr.*)) break :blk false;
+           break :blk true;
+       } else true;
+
+       if (should_free) deepFree(alloc, @field(val, field.name));
+   },
    .Union => |union_info| inline for (union_info.fields) |field| {
        const tag = @field(std.meta.Tag(T), field.name);
        if (@as(std.meta.Tag(T), val) == tag) {

@@ -114,7 +124,7 @@ pub fn deepFree(alloc: ?std.mem.Allocator, val: anytype) void {

/// Clones a struct/array/slice/etc and all its submembers.
/// Assumes that there are no self-referential pointers within and that
/// every pointer should be followed.
-pub fn deepClone(alloc: std.mem.Allocator, val: anytype) !@TypeOf(val) {
+pub fn deepClone(alloc: std.mem.Allocator, val: anytype) std.mem.Allocator.Error!@TypeOf(val) {
    const T = @TypeOf(val);
    var result: T = undefined;
    switch (@typeInfo(T)) {
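The widened `.Struct` branch in `deepFree` skips any field whose current value still equals its declared default, recovered by casting the field's opaque `default_value` pointer back to its real type. That guard matters for types like the intermediaries later in this change, whose defaults (`null`, an empty `ArrayListUnmanaged`, the `__dummy` marker) were never heap-allocated. A sketch of the pointer-recovery trick on its own (illustrative struct):

```zig
const std = @import("std");

test "recovering a field's declared default from type info" {
    const Example = struct {
        count: usize = 1,
    };

    const field = std.meta.fields(Example)[0];
    // Same cast chain as the new deepFree branch: opaque pointer ->
    // aligned pointer -> typed pointer to the default value.
    const aligned = @alignCast(field.alignment, field.default_value.?);
    const ptr = @ptrCast(*const field.field_type, aligned);

    const val = Example{};
    try std.testing.expect(std.meta.eql(val.count, ptr.*));
}
```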
@@ -38,14 +38,15 @@ pub fn deserializeString(allocator: std.mem.Allocator, comptime T: type, value:

    @compileError("Invalid type " ++ @typeName(T));
}

-pub fn getRecursiveFieldList(comptime T: type, comptime prefix: FieldRef, comptime options: SerializationOptions) []const FieldRef {
+fn getStaticFieldList(comptime T: type, comptime prefix: FieldRef, comptime options: SerializationOptions) []const FieldRef {
    comptime {
        if (std.meta.trait.is(.Union)(T) and prefix.len == 0 and options.embed_unions) {
            @compileError("Cannot embed a union into nothing");
        }

        if (options.isScalar(T)) return &.{prefix};
-       if (std.meta.trait.is(.Optional)(T)) return getRecursiveFieldList(std.meta.Child(T), prefix, options);
+       if (std.meta.trait.is(.Optional)(T)) return getStaticFieldList(std.meta.Child(T), prefix, options);
+       if (std.meta.trait.isSlice(T) and !std.meta.trait.isZigString(T)) return &.{};

        const eff_prefix: FieldRef = if (std.meta.trait.is(.Union)(T) and options.embed_unions)
            prefix[0 .. prefix.len - 1]

@@ -57,13 +58,47 @@ pub fn getRecursiveFieldList(comptime T: type, comptime prefix: FieldRef, comptime options: SerializationOptions) []const FieldRef {

        for (std.meta.fields(T)) |f| {
            const new_prefix = eff_prefix ++ &[_][]const u8{f.name};
            const F = f.field_type;
-           fields = fields ++ getRecursiveFieldList(F, new_prefix, options);
+           fields = fields ++ getStaticFieldList(F, new_prefix, options);
        }

        return fields;
    }
}

fn getDynamicFieldList(comptime T: type, comptime prefix: FieldRef, comptime options: SerializationOptions) []const DynamicField {
    comptime {
        if (std.meta.trait.is(.Union)(T) and prefix.len == 0 and options.embed_unions) {
            @compileError("Cannot embed a union into nothing");
        }

        if (options.isScalar(T)) return &.{};
        if (std.meta.trait.is(.Optional)(T)) return getDynamicFieldList(std.meta.Child(T), prefix, options);
        if (std.meta.trait.isSlice(T) and !std.meta.trait.isZigString(T)) return &.{
            .{ .ref = prefix, .child_type = std.meta.Child(T) },
        };

        const eff_prefix: FieldRef = if (std.meta.trait.is(.Union)(T) and options.embed_unions)
            prefix[0 .. prefix.len - 1]
        else
            prefix;

        var fields: []const DynamicField = &.{};

        for (std.meta.fields(T)) |f| {
            const new_prefix = eff_prefix ++ &[_][]const u8{f.name};
            const F = f.field_type;
            fields = fields ++ getDynamicFieldList(F, new_prefix, options);
        }

        return fields;
    }
}

const DynamicField = struct {
    ref: FieldRef,
    child_type: type,
};

pub const SerializationOptions = struct {
    embed_unions: bool,
    isScalar: fn (type) bool,

@@ -74,10 +109,10 @@ pub const default_options = SerializationOptions{

    .isScalar = defaultIsScalar,
};

-fn Intermediary(comptime Result: type, comptime From: type, comptime options: SerializationOptions) type {
-   const field_refs = getRecursiveFieldList(Result, &.{}, options);
+fn StaticIntermediary(comptime Result: type, comptime From: type, comptime options: SerializationOptions) type {
+   const field_refs = getStaticFieldList(Result, &.{}, options);

-   var fields: [field_refs.len]std.builtin.Type.StructField = undefined;
+   var fields: [field_refs.len + 1]std.builtin.Type.StructField = undefined;
    for (field_refs) |ref, i| {
        fields[i] = .{
            .name = util.comptimeJoin(".", ref),

@@ -88,6 +123,13 @@ fn Intermediary(comptime Result: type, comptime From: type, comptime options: SerializationOptions) type {

        };
    }

    fields[fields.len - 1] = .{
        .name = "__dummy",
        .default_value = &1,
        .field_type = usize,
        .is_comptime = false,
        .alignment = @alignOf(usize),
    };
    return @Type(.{ .Struct = .{
        .layout = .Auto,
        .fields = &fields,

@@ -96,6 +138,109 @@ fn Intermediary(comptime Result: type, comptime From: type, comptime options: SerializationOptions) type {

    } });
}

fn DynamicIntermediary(comptime Result: type, comptime From: type, comptime options: SerializationOptions) type {
    const field_refs = getDynamicFieldList(Result, &.{}, options);

    var fields: [field_refs.len + 1]std.builtin.Type.StructField = undefined;
    for (field_refs) |f, i| {
        const T = std.ArrayListUnmanaged(Intermediary(f.child_type, From, options));
        fields[i] = .{
            .name = util.comptimeJoin(".", f.ref),
            .default_value = &T{},
            .field_type = T,
            .is_comptime = false,
            .alignment = @alignOf(T),
        };
    }

    fields[fields.len - 1] = .{
        .name = "__dummy",
        .default_value = &1,
        .field_type = usize,
        .is_comptime = false,
        .alignment = @alignOf(usize),
    };
    return @Type(.{ .Struct = .{
        .layout = .Auto,
        .fields = &fields,
        .decls = &.{},
        .is_tuple = false,
    } });
}

const SerializationInfo = struct {
    max_slice_len: usize = 16,
};

fn getSerializationInfo(
    comptime info: anytype,
    comptime field_name: []const u8,
    comptime info_key: std.meta.FieldEnum(SerializationInfo),
) std.meta.fieldInfo(SerializationInfo, info_key).field_type {
    if (@hasDecl(info, "serialization_info") and
        @hasDecl(info.serialization_info, field_name) and
        @hasDecl(@field(info.serialization_info, field_name), @tagName(info_key)))
    {
        return @field(@field(info.serialization_info, field_name), @tagName(info_key));
    } else return switch (info_key) {
        .max_slice_len => 16,
    };
}

fn Intermediary(comptime Result: type, comptime From: type, comptime options: SerializationOptions) type {
    return struct {
        const StaticData = StaticIntermediary(Result, From, options);
        const DynamicData = DynamicIntermediary(Result, From, options);
        static: StaticData = .{},
        dynamic: DynamicData = .{},

        fn setSerializedField(self: *@This(), allocator: std.mem.Allocator, key: []const u8, value: From) !void {
            var split = std.mem.split(u8, key, "[");
            const first = split.first();
            const rest = split.rest();
            if (rest.len == 0) {
                const field = std.meta.stringToEnum(std.meta.FieldEnum(StaticData), key) orelse return error.UnknownField;
                inline for (comptime std.meta.fieldNames(StaticData)) |field_name| {
                    @setEvalBranchQuota(10000);
                    const f = comptime std.meta.stringToEnum(std.meta.FieldEnum(StaticData), field_name).?;
                    if (f != .__dummy and field == f) {
                        @field(self.static, field_name) = value;
                        return;
                    }
                }

                unreachable;
            } else {
                split = std.mem.split(u8, rest, "]");
                const idx_str = split.first();
                const idx = try std.fmt.parseInt(usize, idx_str, 10);
                var next = split.rest();
                if (next.len == 0 or next[0] != '.') return error.UnknownField;
                next = next[1..];

                std.log.debug("{s} {s} {s}", .{ first, idx_str, next });

                const field = std.meta.stringToEnum(std.meta.FieldEnum(DynamicData), first) orelse return error.UnknownField;
                inline for (comptime std.meta.fieldNames(DynamicData)) |field_name| {
                    @setEvalBranchQuota(10000);
                    const f = comptime std.meta.stringToEnum(std.meta.FieldEnum(DynamicData), field_name).?;
                    if (f != .__dummy and field == f) {
                        const limits = getSerializationInfo(Result, field_name, .max_slice_len);
                        if (idx >= limits) return error.SliceTooLong;
                        const list = &@field(self.dynamic, field_name);
                        while (idx >= list.items.len) {
                            try list.append(allocator, .{});
                        }

                        try list.items[idx].setSerializedField(allocator, next, value);
                        return;
                    }
                }
            }
        }
    };
}

pub fn Deserializer(comptime Result: type) type {
    return DeserializerContext(Result, []const u8, struct {
        const options = default_options;

@@ -109,50 +254,21 @@ pub fn DeserializerContext(comptime Result: type, comptime From: type, comptime Context: type) type {

    return struct {
        const Data = Intermediary(Result, From, Context.options);

+       arena: std.heap.ArenaAllocator,
+
        data: Data = .{},
        context: Context = .{},

-       pub fn setSerializedField(self: *@This(), key: []const u8, value: From) !void {
-           const field = std.meta.stringToEnum(std.meta.FieldEnum(Data), key) orelse return error.UnknownField;
-           inline for (comptime std.meta.fieldNames(Data)) |field_name| {
-               @setEvalBranchQuota(10000);
-               const f = comptime std.meta.stringToEnum(std.meta.FieldEnum(Data), field_name);
-               if (field == f) {
-                   @field(self.data, field_name) = value;
-                   return;
-               }
-           }
-
-           unreachable;
+       pub fn init(alloc: std.mem.Allocator) @This() {
+           return .{ .arena = std.heap.ArenaAllocator.init(alloc) };
        }

-       pub const Iter = struct {
-           data: *const Data,
-           field_index: usize,
+       pub fn deinit(self: *@This()) void {
+           self.arena.deinit();
+       }

-           const Item = struct {
-               key: []const u8,
-               value: From,
-           };

-           pub fn next(self: *Iter) ?Item {
-               while (self.field_index < std.meta.fields(Data).len) {
-                   const idx = self.field_index;
-                   self.field_index += 1;
-                   inline for (comptime std.meta.fieldNames(Data)) |field, i| {
-                       if (i == idx) {
-                           const maybe_value = @field(self.data.*, field);
-                           if (maybe_value) |value| return Item{ .key = field, .value = value };
-                       }
-                   }
-               }

-               return null;
-           }
-       };

-       pub fn iterator(self: *const @This()) Iter {
-           return .{ .data = &self.data, .field_index = 0 };
+       pub fn setSerializedField(self: *@This(), key: []const u8, value: From) !void {
+           try self.data.setSerializedField(self.arena.allocator(), key, value);
        }

        pub fn finishFree(_: *@This(), allocator: std.mem.Allocator, val: anytype) void {

@@ -160,7 +276,7 @@ pub fn DeserializerContext(comptime Result: type, comptime From: type, comptime Context: type) type {

        }

        pub fn finish(self: *@This(), allocator: std.mem.Allocator) !Result {
-           return (try self.deserialize(allocator, Result, &.{})) orelse error.MissingField;
+           return (try self.deserialize(allocator, Result, self.data, &.{})) orelse error.MissingField;
        }

        fn getSerializedField(self: *@This(), comptime field_ref: FieldRef) ?From {

@@ -172,9 +288,16 @@ pub fn DeserializerContext(comptime Result: type, comptime From: type, comptime Context: type) type {

            util.deepFree(allocator, val);
        }

-       fn deserialize(self: *@This(), allocator: std.mem.Allocator, comptime T: type, comptime field_ref: FieldRef) !?T {
+       fn deserialize(
+           self: *@This(),
+           allocator: std.mem.Allocator,
+           comptime T: type,
+           intermediary: anytype,
+           comptime field_ref: FieldRef,
+       ) !?T {
            if (comptime Context.options.isScalar(T)) {
-               return try self.context.deserializeScalar(allocator, T, self.getSerializedField(field_ref) orelse return null);
+               const val = @field(intermediary.static, util.comptimeJoin(".", field_ref));
+               return try self.context.deserializeScalar(allocator, T, val orelse return null);
            }

            switch (@typeInfo(T)) {

@@ -188,7 +311,7 @@ pub fn DeserializerContext(comptime Result: type, comptime From: type, comptime Context: type) type {

                    inline for (info.fields) |field| {
                        const F = field.field_type;
                        const new_field_ref = union_ref ++ &[_][]const u8{field.name};
-                       const maybe_value = try self.deserialize(allocator, F, new_field_ref);
+                       const maybe_value = try self.deserialize(allocator, F, intermediary, new_field_ref);
                        if (maybe_value) |value| {
                            // TODO: errdefer cleanup
                            errdefer self.deserializeFree(allocator, value);

@@ -211,7 +334,7 @@ pub fn DeserializerContext(comptime Result: type, comptime From: type, comptime Context: type) type {

                    inline for (info.fields) |field, i| {
                        const F = field.field_type;
                        const new_field_ref = field_ref ++ &[_][]const u8{field.name};
-                       const maybe_value = try self.deserialize(allocator, F, new_field_ref);
+                       const maybe_value = try self.deserialize(allocator, F, intermediary, new_field_ref);
                        if (maybe_value) |v| {
                            @field(result, field.name) = v;
                            fields_alloced[i] = true;

@@ -233,8 +356,26 @@ pub fn DeserializerContext(comptime Result: type, comptime From: type, comptime Context: type) type {

                    return result;
                },

                .Pointer => |info| switch (info.size) {
                    .Slice => {
                        const name = comptime util.comptimeJoin(".", field_ref);
                        const data = @field(self.data.dynamic, name);

                        const result = try allocator.alloc(info.child, data.items.len);
                        errdefer allocator.free(result);
                        var count: usize = 0;
                        errdefer for (result[0..count]) |res| util.deepFree(allocator, res);
                        for (data.items) |sub, i| {
                            result[i] = (try self.deserialize(allocator, info.child, sub, &.{})) orelse return error.SparseSlice;
                        }

                        return result;
                    },
                    else => @compileError("Unsupported type"),
                },

                // Specifically non-scalar optionals
-               .Optional => |info| return try self.deserialize(allocator, info.child, field_ref),
+               .Optional => |info| return try self.deserialize(allocator, info.child, intermediary, field_ref),

                else => @compileError("Unsupported type"),
            }
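Taken together, the serializer changes split the intermediary into a static half (scalar fields, addressed by their dotted path) and a dynamic half (slice fields, grown on demand up to `max_slice_len`). The key grammar the dynamic half accepts appears to be `name[index].rest`, based on the splitting logic in `setSerializedField`; a standalone sketch of that split:

```zig
const std = @import("std");

// Sketch of the assumed key grammar: "name[index].rest" addresses one
// element of a slice-typed field, recursing on "rest".
test "splitting an indexed form key" {
    const key = "profile_fields[3].value";

    var split = std.mem.split(u8, key, "[");
    const first = split.first(); // field name
    const rest = split.rest(); // "3].value"

    split = std.mem.split(u8, rest, "]");
    const idx = try std.fmt.parseInt(usize, split.first(), 10);
    const next = split.rest()[1..]; // drop the leading '.'

    try std.testing.expectEqualStrings("profile_fields", first);
    try std.testing.expectEqual(@as(usize, 3), idx);
    try std.testing.expectEqualStrings("value", next);
}
```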