Move types into shared file

jaina heartles 2022-12-21 07:19:13 -08:00
parent d8e4d6c82b
commit f52b82b506
11 changed files with 533 additions and 342 deletions

View File

@ -18,6 +18,17 @@ const services = struct {
const follows = @import("./services/follows.zig");
};
const types = @import("./services/types.zig");
pub const QueryResult = types.QueryResult;
pub const Account = types.Account;
pub const Actor = types.Actor;
pub const Community = types.Community;
pub const Invite = types.Invite;
pub const Note = types.Note;
pub const Token = types.Token;
pub const ClusterMeta = struct {
community_count: usize,
user_count: usize,
@ -30,7 +41,7 @@ pub const RegistrationOptions = struct {
};
pub const InviteOptions = struct {
pub const Kind = services.invites.Kind;
pub const Kind = Invite.Kind;
name: ?[]const u8 = null,
lifespan: ?DateTime.Duration = null,
@ -41,9 +52,6 @@ pub const InviteOptions = struct {
to_community: ?Uuid = null,
};
pub const LoginResponse = services.auth.LoginResult;
pub const ProfileField = services.actors.ProfileField;
pub const UserResponse = struct {
id: Uuid,
@ -59,14 +67,13 @@ pub const UserResponse = struct {
header_file_id: ?Uuid,
header_url: ?[]const u8,
profile_fields: []const ProfileField,
profile_fields: []const Actor.ProfileField,
community_id: Uuid,
created_at: DateTime,
updated_at: DateTime,
};
pub const PartialUserProfile = services.actors.PartialProfile;
pub const NoteResponse = struct {
id: Uuid,
@ -80,15 +87,9 @@ pub const NoteResponse = struct {
created_at: DateTime,
};
pub const Community = services.communities.Community;
pub const CommunityQueryArgs = services.communities.QueryArgs;
pub const CommunityQueryResult = services.communities.QueryResult;
pub const NoteQueryArgs = services.notes.QueryArgs;
pub const TimelineArgs = struct {
pub const PageDirection = NoteQueryArgs.PageDirection;
pub const Prev = NoteQueryArgs.Prev;
pub const PageDirection = Note.QueryArgs.PageDirection;
pub const Prev = Note.QueryArgs.Prev;
max_items: usize = 20,
@ -99,7 +100,7 @@ pub const TimelineArgs = struct {
page_direction: PageDirection = .forward,
fn from(args: NoteQueryArgs) TimelineArgs {
fn from(args: Note.QueryArgs) TimelineArgs {
return .{
.max_items = args.max_items,
.created_before = args.created_before,
@ -111,7 +112,7 @@ pub const TimelineArgs = struct {
};
pub const TimelineResult = struct {
items: []services.notes.NoteDetailed,
items: []Note,
prev_page: TimelineArgs,
next_page: TimelineArgs,
@ -191,7 +192,7 @@ pub const DriveEntry = union(enum) {
},
};
pub const FileUpload = services.files.FileUpload;
pub const FileUpload = types.FileUpload;
pub const DriveGetResult = union(services.drive.Kind) {
dir: struct {
@ -205,13 +206,13 @@ pub const DriveGetResult = union(services.drive.Kind) {
};
pub const FileResult = struct {
meta: services.files.FileUpload,
meta: FileUpload,
data: []const u8,
};
pub const InviteResponse = struct {
code: []const u8,
kind: services.invites.Kind,
kind: Invite.Kind,
name: []const u8,
creator: UserResponse,
@ -315,9 +316,9 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
const Self = @This();
db: DbConn,
token_info: ?models.auth.TokenInfo = null,
token_info: ?Token.Info = null,
user_id: ?Uuid = null,
community: models.communities.Community,
community: Community,
allocator: std.mem.Allocator,
pub fn close(self: *Self) void {
@ -331,7 +332,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
return self.user_id != null and self.community.kind == .admin;
}
pub fn login(self: *Self, username: []const u8, password: []const u8) !LoginResponse {
pub fn login(self: *Self, username: []const u8, password: []const u8) !Token {
return models.auth.login(
self.db,
username,
@ -370,7 +371,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
return error.TokenRequired;
}
pub fn createCommunity(self: *Self, origin: []const u8, name: ?[]const u8) !models.communities.Community {
pub fn createCommunity(self: *Self, origin: []const u8, name: ?[]const u8) !Community {
if (!self.isAdmin()) {
return error.PermissionDenied;
}
@ -412,8 +413,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
// Users can only make user invites
if (options.kind != .user and !self.isAdmin()) return error.PermissionDenied;
const invite_id = try models.invites.create(self.db, user_id, community_id, .{
.name = options.name,
const invite_id = try models.invites.create(self.db, user_id, community_id, options.name orelse "", .{
.lifespan = options.lifespan,
.max_uses = options.max_uses,
.kind = options.kind,
@ -454,7 +454,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
};
}
fn isInviteValid(invite: models.invites.Invite) bool {
fn isInviteValid(invite: Invite) bool {
if (invite.max_uses != null and invite.times_used >= invite.max_uses.?) return false;
if (invite.expires_at != null and DateTime.now().isAfter(invite.expires_at.?)) return false;
return true;
@ -567,7 +567,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
return user;
}
pub fn createNote(self: *Self, content: []const u8) !NoteResponse {
pub fn createNote(self: *Self, content: []const u8) !Note {
// You cannot post on admin accounts
if (self.community.kind == .admin) return error.WrongCommunity;
@ -581,34 +581,25 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
};
}
pub fn getNote(self: *Self, note_id: Uuid) !NoteResponse {
pub fn getNote(self: *Self, note_id: Uuid) !Note {
const note = try models.notes.get(self.db, note_id, self.allocator);
const user = try models.actors.get(self.db, note.author_id, self.allocator);
errdefer util.deepFree(self.allocator, note);
// Only serve community-specific notes on unauthenticated requests
if (self.user_id == null) {
if (!Uuid.eql(self.community.id, user.community_id)) return error.NotFound;
if (!Uuid.eql(self.community.id, note.author.community_id)) return error.NotFound;
}
return NoteResponse{
.id = note.id,
.author = .{
.id = user.id,
.username = user.username,
.host = user.host,
},
.content = note.content,
.created_at = note.created_at,
};
return note;
}
pub fn queryCommunities(self: *Self, args: models.communities.QueryArgs) !CommunityQueryResult {
pub fn queryCommunities(self: *Self, args: Community.QueryArgs) !QueryResult(Community) {
if (!self.isAdmin()) return error.PermissionDenied;
return try models.communities.query(self.db, args, self.allocator);
}
pub fn globalTimeline(self: *Self, args: TimelineArgs) !TimelineResult {
const all_args = std.mem.zeroInit(NoteQueryArgs, args);
const all_args = std.mem.zeroInit(Note.QueryArgs, args);
const result = try models.notes.query(self.db, all_args, self.allocator);
return TimelineResult{
.items = result.items,
@ -618,7 +609,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
}
pub fn localTimeline(self: *Self, args: TimelineArgs) !TimelineResult {
var all_args = std.mem.zeroInit(NoteQueryArgs, args);
var all_args = std.mem.zeroInit(Note.QueryArgs, args);
all_args.community_id = self.community.id;
const result = try models.notes.query(self.db, all_args, self.allocator);
return TimelineResult{
@ -631,7 +622,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
pub fn homeTimeline(self: *Self, args: TimelineArgs) !TimelineResult {
if (self.user_id == null) return error.NoToken;
var all_args = std.mem.zeroInit(models.notes.QueryArgs, args);
var all_args = std.mem.zeroInit(Note.QueryArgs, args);
all_args.followed_by = self.user_id;
const result = try models.notes.query(self.db, all_args, self.allocator);
return TimelineResult{
@ -815,7 +806,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
return try self.backendDriveEntryToFrontend(entry, true);
}
pub fn driveUpdate(self: *Self, path: []const u8, meta: models.files.PartialMeta) !DriveEntry {
pub fn driveUpdate(self: *Self, path: []const u8, meta: FileUpload.UpdateArgs) !DriveEntry {
const user_id = self.user_id orelse return error.NoToken;
std.log.debug("{s}", .{path});
const entry = try models.drive.stat(self.db, user_id, path, self.allocator);
@ -837,7 +828,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
};
}
pub fn updateUserProfile(self: *Self, id: Uuid, data: PartialUserProfile) !void {
pub fn updateUserProfile(self: *Self, id: Uuid, data: Actor.ProfileUpdateArgs) !void {
if (!Uuid.eql(id, self.user_id orelse return error.NoToken)) return error.AccessDenied;
try models.actors.updateProfile(self.db, id, data, self.allocator);
}
@ -888,3 +879,53 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
}
};
}
// test "register" {
// const TestDb = void;
// const exp_code = "abcd";
// const exp_community = Uuid.parse("a210c035-c9e1-4361-82a2-aaeac8e40dc6") catch unreachable;
// var conn = ApiConn(TestDb, struct {
// const invites = struct {
// fn getByCode(_: TestDb, code: []const u8, community_id: Uuid, alloc: std.mem.Allocator) !services.invites.Invite {
// try std.testing.expectEqualStrings(exp_code, code);
// try std.testing.expectEqual(exp_community, community_id);
// return try util.deepClone(alloc, services.invites.Invite{
// .id = Uuid.parse("eac18f43-4dcc-489f-9fb5-4c1633e7b4e0") catch unreachable,
// .created_by = Uuid.parse("6d951fcc-1c9f-497b-9c96-31dfb9873708") catch unreachable,
// .community_id = exp_community,
// .name = "test invite",
// .code = exp_code,
// .created_at = DateTime.parse("2022-12-21T09:05:50Z") catch unreachable,
// .times_used = 0,
// .expires_at = null,
// .max_uses = null,
// });
// }
// };
// const auth = struct {
// fn register(
// _: TestDb,
// username: []const u8,
// password: []const u8,
// community_id: Uuid,
// _: RegistrationOptions,
// _: std.mem.Allocator,
// ) !Uuid {
// try std.testing.expectEqualStrings("root", username);
// try std.testing.expectEqualStrings("password", password);
// try std.testing.expectEqual(exp_community, community_id);
// return Uuid.parse("6d951fcc-1c9f-497b-9c96-31dfb9873708") catch unreachable;
// }
// };
// }){};
// defer conn.close();
// const result = try conn.register("root", "password", .{});
// try std.allocator.
// }
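
With the aliases above routed through types.zig, callers reach query types via the model structs themselves. A minimal caller-side sketch, assuming an `api` module import and an already-open connection (`conn` is hypothetical):

const std = @import("std");
const api = @import("api");

// Hypothetical admin-side helper: list communities through the re-exported
// api.Community.QueryArgs rather than the old api.CommunityQueryArgs alias.
fn listCommunityHosts(conn: anytype) !void {
    const args = api.Community.QueryArgs{
        .max_items = 50,
        .order_by = .host,
        .direction = .ascending,
    };
    const page = try conn.queryCommunities(args); // PermissionDenied unless isAdmin()
    for (page.items) |community| {
        std.log.info("{s} ({s})", .{ community.name, community.host });
    }
}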

View File

@ -1,13 +1,13 @@
const std = @import("std");
const util = @import("util");
const sql = @import("sql");
const auth = @import("./auth.zig");
const common = @import("./common.zig");
const files = @import("./files.zig");
const types = @import("./types.zig");
const Partial = common.Partial;
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const Actor = types.Actor;
pub const CreateError = error{
UsernameTaken,
@ -17,19 +17,6 @@ pub const CreateError = error{
DatabaseFailure,
};
pub const ActorDetailed = struct {
id: Uuid,
username: []const u8,
host: []const u8,
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: ProfileField,
created_at: DateTime,
updated_at: DateTime,
};
pub const LookupError = error{
DatabaseFailure,
};
@ -101,35 +88,6 @@ pub fn create(
return id;
}
pub const ProfileField = struct {
key: []const u8,
value: []const u8,
};
pub const Actor = struct {
id: Uuid,
username: []const u8,
host: []const u8,
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: []const ProfileField,
community_id: Uuid,
created_at: DateTime,
updated_at: DateTime,
pub const sql_serialize = struct {
pub const profile_fields = .json;
};
};
pub const GetError = error{ NotFound, DatabaseFailure };
pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Actor {
return db.queryRow(
@ -162,19 +120,11 @@ pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Actor {
};
}
pub const PartialProfile = Partial(Profile);
pub const Profile = struct {
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: []const ProfileField,
};
pub const max_fields = 32;
pub const max_display_name_len = 128;
pub const max_bio = 1 << 16;
pub fn updateProfile(db: anytype, id: Uuid, new: PartialProfile, alloc: std.mem.Allocator) !void {
pub fn updateProfile(db: anytype, id: Uuid, new: Actor.ProfileUpdateArgs, alloc: std.mem.Allocator) !void {
var builder = sql.QueryBuilder.init(alloc);
defer builder.deinit();
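
`Actor.ProfileUpdateArgs` (defined in types.zig below) wraps each nullable column in a second optional, so a caller can presumably distinguish "leave unchanged" from "set to NULL". A rough sketch of building such an update; the exact semantics are an assumption carried over from the old `Partial(Profile)`:

const util = @import("util");
const types = @import("./types.zig");

// Hypothetical update: set a new bio, clear the header image, touch nothing else.
const update_args = types.Actor.ProfileUpdateArgs{
    .display_name = null, // outer null: do not touch this column (assumed)
    .bio = "Moving types into a shared file.",
    .avatar_file_id = null,
    .header_file_id = @as(?util.Uuid, null), // wrapped null: set the column to NULL (assumed)
    .profile_fields = null,
};

A value like this would then be handed to updateProfile above.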

View File

@ -1,6 +1,8 @@
const std = @import("std");
const util = @import("util");
const actors = @import("./actors.zig");
const types = @import("./types.zig");
const Token = types.Token;
const Uuid = util.Uuid;
const DateTime = util.DateTime;
@ -85,7 +87,7 @@ pub fn login(
community_id: Uuid,
password: []const u8,
alloc: std.mem.Allocator,
) LoginError!LoginResult {
) LoginError!Token {
std.log.debug("user: {s}, community_id: {}", .{ username, community_id });
const info = db.queryRow(
struct { account_id: Uuid, hash: []const u8 },
@ -136,36 +138,36 @@ pub fn login(
if (!std.mem.eql(u8, info.hash, updated_info.hash)) return error.InvalidLogin;
}
const now = DateTime.now();
tx.insert("token", .{
.account_id = info.account_id,
.hash = token_hash,
.issued_at = DateTime.now(),
.issued_at = now,
}, alloc) catch return error.DatabaseFailure;
tx.commit() catch return error.DatabaseFailure;
return LoginResult{
.token = token,
.user_id = info.account_id,
return Token{
.value = token,
.info = .{
.user_id = info.account_id,
.issued_at = now,
},
};
}
pub const VerifyTokenError = error{ InvalidToken, DatabaseFailure, OutOfMemory };
pub const TokenInfo = struct {
user_id: Uuid,
issued_at: DateTime,
};
pub fn verifyToken(
db: anytype,
token: []const u8,
community_id: Uuid,
alloc: std.mem.Allocator,
) VerifyTokenError!TokenInfo {
) VerifyTokenError!Token.Info {
const hash = try hashToken(token, alloc);
defer alloc.free(hash);
return db.queryRow(
TokenInfo,
Token.Info,
\\SELECT token.account_id as user_id, token.issued_at
\\FROM token
\\ JOIN account

View File

@ -2,41 +2,15 @@ const std = @import("std");
const builtin = @import("builtin");
const util = @import("util");
const sql = @import("sql");
const common = @import("./common.zig");
const actors = @import("./actors.zig");
const types = @import("./types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
pub const Community = struct {
pub const Kind = enum {
admin,
local,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const Scheme = enum {
https,
http,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
id: Uuid,
owner_id: ?Uuid,
host: []const u8,
name: []const u8,
scheme: Scheme,
kind: Kind,
created_at: DateTime,
};
pub const CreateOptions = struct {
name: ?[]const u8 = null,
kind: Community.Kind = .local,
};
const Community = types.Community;
const CreateOptions = Community.CreateOptions;
const QueryArgs = Community.QueryArgs;
const QueryResult = types.QueryResult(Community);
pub const CreateError = error{
UnsupportedScheme,
@ -165,61 +139,6 @@ pub fn transferOwnership(db: anytype, community_id: Uuid, new_owner: Uuid) !void
) catch return error.DatabaseFailure;
}
pub const QueryArgs = struct {
pub const OrderBy = enum {
name,
host,
created_at,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const Direction = common.Direction;
pub const PageDirection = common.PageDirection;
pub const Prev = std.meta.Child(std.meta.fieldInfo(QueryArgs, .prev).field_type);
pub const OrderVal = std.meta.fieldInfo(Prev, .order_val).field_type;
// Max items to fetch
max_items: usize = 20,
// Selection filters
owner_id: ?Uuid = null, // searches for communities owned by this user
like: ?[]const u8 = null, // searches for communities with host or name LIKE '%?%'
created_before: ?DateTime = null,
created_after: ?DateTime = null,
// Ordering parameter
order_by: OrderBy = .created_at,
direction: Direction = .ascending,
// Page start parameter
// This struct is a reference to the last value scanned
// If prev is present, then prev.order_val must have the same tag as order_by
// "prev" here refers to it being the previous value returned. It may be that
// prev refers to the item directly after the results you are about to recieve,
// if you are querying the previous page.
prev: ?struct {
id: Uuid,
order_val: union(OrderBy) {
name: []const u8,
host: []const u8,
created_at: DateTime,
},
} = null,
// What direction to scan the page window
// If "forward", then "prev" is interpreted as the item directly before the items
// to query, in the direction of "direction" above. If "backward", then the opposite
page_direction: PageDirection = .forward,
};
pub const QueryResult = struct {
items: []const Community,
prev_page: QueryArgs,
next_page: QueryArgs,
};
const max_max_items = 100;
pub const QueryError = error{

View File

@ -1,41 +1,11 @@
const std = @import("std");
const sql = @import("sql");
const util = @import("util");
const types = @import("./types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
pub const FileStatus = enum {
uploading,
uploaded,
external,
deleted,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const FileUpload = struct {
id: Uuid,
owner_id: Uuid,
size: usize,
filename: []const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: bool,
status: FileStatus,
created_at: DateTime,
updated_at: DateTime,
};
pub const FileMeta = struct {
filename: []const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: bool,
};
const FileUpload = types.FileUpload;
pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) !FileUpload {
return try db.queryRow(
@ -60,26 +30,7 @@ pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) !FileUpload {
);
}
pub const PartialMeta = Partial(FileMeta);
pub fn Partial(comptime T: type) type {
const t_fields = std.meta.fields(T);
var fields: [t_fields.len]std.builtin.Type.StructField = undefined;
for (std.meta.fields(T)) |f, i| fields[i] = .{
.name = f.name,
.field_type = ?f.field_type,
.default_value = &@as(?f.field_type, null),
.is_comptime = false,
.alignment = @alignOf(?f.field_type),
};
return @Type(.{ .Struct = .{
.layout = .Auto,
.fields = &fields,
.decls = &.{},
.is_tuple = false,
} });
}
pub fn update(db: anytype, id: Uuid, meta: PartialMeta, alloc: std.mem.Allocator) !void {
pub fn update(db: anytype, id: Uuid, meta: FileUpload.UpdateArgs, alloc: std.mem.Allocator) !void {
var builder = sql.QueryBuilder.init(alloc);
defer builder.deinit();
@ -106,7 +57,7 @@ pub fn update(db: anytype, id: Uuid, meta: PartialMeta, alloc: std.mem.Allocator
}, alloc);
}
pub fn create(db: anytype, owner_id: Uuid, meta: FileMeta, data: []const u8, alloc: std.mem.Allocator) !Uuid {
pub fn create(db: anytype, owner_id: Uuid, meta: FileUpload.CreateOptions, data: []const u8, alloc: std.mem.Allocator) !Uuid {
const id = Uuid.randV4(util.getThreadPrng());
const now = DateTime.now();
try db.insert("file_upload", .{
@ -120,7 +71,7 @@ pub fn create(db: anytype, owner_id: Uuid, meta: FileMeta, data: []const u8, all
.content_type = meta.content_type,
.sensitive = meta.sensitive,
.status = FileStatus.uploading,
.status = FileUpload.Status.uploading,
.created_at = now,
.updated_at = now,

View File

@ -1,9 +1,11 @@
const std = @import("std");
const builtin = @import("builtin");
const util = @import("util");
const types = @import("./types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const Invite = types.Invite;
// 9 random bytes = 12 random b64
const rand_len = 8;
@ -12,40 +14,14 @@ const code_len = 12;
const Encoder = std.base64.url_safe.Encoder;
const Decoder = std.base64.url_safe.Decoder;
pub const Kind = enum {
system,
community_owner,
user,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
const InviteCount = usize;
pub const Invite = struct {
id: Uuid,
created_by: Uuid, // User ID
pub fn create(
db: anytype,
created_by: Uuid,
community_id: Uuid,
name: []const u8,
code: []const u8,
created_at: DateTime,
times_used: InviteCount,
expires_at: ?DateTime,
max_uses: ?InviteCount,
kind: Kind,
};
pub const InviteOptions = struct {
name: ?[]const u8 = null,
max_uses: ?InviteCount = null,
lifespan: ?DateTime.Duration = null,
kind: Kind = .user,
};
pub fn create(db: anytype, created_by: Uuid, community_id: ?Uuid, options: InviteOptions, alloc: std.mem.Allocator) !Uuid {
options: Invite.InternalCreateOptions,
alloc: std.mem.Allocator,
) !Uuid {
const id = Uuid.randV4(util.getThreadPrng());
var code_bytes: [rand_len]u8 = undefined;
@ -55,7 +31,6 @@ pub fn create(db: anytype, created_by: Uuid, community_id: ?Uuid, options: Invit
defer alloc.free(code);
_ = Encoder.encode(code, &code_bytes);
const name = options.name orelse code;
const created_at = DateTime.now();
try db.insert(

View File

@ -2,28 +2,13 @@ const std = @import("std");
const util = @import("util");
const sql = @import("sql");
const common = @import("./common.zig");
const types = @import("./types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
pub const Note = struct {
id: Uuid,
author_id: Uuid,
content: []const u8,
created_at: DateTime,
};
pub const NoteDetailed = struct {
id: Uuid,
author: struct {
id: Uuid,
username: []const u8,
},
content: []const u8,
created_at: DateTime,
};
const Note = types.Note;
const QueryArgs = Note.QueryArgs;
const QueryResult = types.QueryResult(Note);
pub const CreateError = error{
DatabaseFailure,
@ -58,10 +43,27 @@ const selectStarFromNote = std.fmt.comptimePrint(
pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Note {
return db.queryRow(
Note,
selectStarFromNote ++
\\WHERE id = $1
\\LIMIT 1
,
\\SELECT
\\ note.id,
\\ note.content,
\\ note.created_at,
\\ actor.id AS "author.id",
\\ actor.username AS "author.username",
\\ community.host AS "author.host",
\\ actor.display_name AS "author.display_name",
\\ actor.bio AS "author.bio",
\\ actor.avatar_file_id AS "author.avatar_file_id",
\\ actor.header_file_id AS "author.header_file_id",
\\ actor.profile_fields AS "author.profile_fields",
\\ actor.community_id AS "author.community_id",
\\ actor.created_at AS "author.created_at",
\\ actor.updated_at AS "author.updated_at"
\\FROM note
\\ JOIN actor ON actor.id = note.author_id
\\ JOIN community ON community.id = actor.community_id
\\WHERE id = $1
\\LIMIT 1
,
.{id},
alloc,
) catch |err| switch (err) {
@ -72,40 +74,29 @@ pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Note {
const max_max_items = 100;
pub const QueryArgs = struct {
pub const PageDirection = common.PageDirection;
pub const Prev = std.meta.Child(std.meta.fieldInfo(@This(), .prev).field_type);
max_items: usize = 20,
created_before: ?DateTime = null,
created_after: ?DateTime = null,
community_id: ?Uuid = null,
followed_by: ?Uuid = null,
prev: ?struct {
id: Uuid,
created_at: DateTime,
} = null,
page_direction: PageDirection = .forward,
};
pub const QueryResult = struct {
items: []NoteDetailed,
prev_page: QueryArgs,
next_page: QueryArgs,
};
pub fn query(db: anytype, args: QueryArgs, alloc: std.mem.Allocator) !QueryResult {
var builder = sql.QueryBuilder.init(alloc);
defer builder.deinit();
try builder.appendSlice(
\\SELECT note.id, note.content, note.created_at, actor.id AS "author.id", actor.username AS "author.username"
\\SELECT
\\ note.id,
\\ note.content,
\\ note.created_at,
\\ actor.id AS "author.id",
\\ actor.username AS "author.username",
\\ community.host AS "author.host",
\\ actor.display_name AS "author.display_name",
\\ actor.bio AS "author.bio",
\\ actor.avatar_file_id AS "author.avatar_file_id",
\\ actor.header_file_id AS "author.header_file_id",
\\ actor.profile_fields AS "author.profile_fields",
\\ actor.community_id AS "author.community_id",
\\ actor.created_at AS "author.created_at",
\\ actor.updated_at AS "author.updated_at"
\\FROM note
\\ JOIN actor ON actor.id = note.author_id
\\ JOIN community ON community.id = actor.community_id
\\
);
@ -153,7 +144,7 @@ pub fn query(db: anytype, args: QueryArgs, alloc: std.mem.Allocator) !QueryResul
};
const results = try db.queryRowsWithOptions(
NoteDetailed,
Note,
try builder.terminate(),
query_args,
max_items,

src/api/services/types.zig (new file, +362 lines)
View File

@ -0,0 +1,362 @@
const util = @import("util");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const common = struct {
const Direction = enum {
ascending,
descending,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
const PageDirection = enum {
forward,
backward,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
};
pub fn QueryResult(comptime T: type) type {
return QueryResultArguments(T, T.QueryArgs);
}
pub fn QueryResultArguments(comptime T: type, comptime A: type) type {
return struct {
items: []T,
next_page: A,
prev_page: A,
};
}
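// So QueryResult(Community) is shorthand for
// QueryResultArguments(Community, Community.QueryArgs): one page of items plus
// the QueryArgs values that would fetch the previous and next pages.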
pub const Account = struct {
pub const Auth = struct {
password_hash: []const u8,
updated_at: DateTime,
};
pub const Kind = enum {
user,
admin,
};
id: Uuid,
invite_id: ?Uuid,
email: ?[]const u8,
kind: Kind,
};
pub const Actor = struct {
pub const ProfileField = struct {
key: []const u8,
value: []const u8,
};
id: Uuid,
username: []const u8,
host: []const u8,
community_id: Uuid,
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: []const ProfileField,
created_at: DateTime,
updated_at: DateTime,
// TODO: get rid of this
pub const Profile = struct {
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: []const ProfileField,
pub const sql_serialize = struct {
pub const profile_fields = .json;
};
};
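// In ProfileUpdateArgs below, every field is an optional of the column type:
// an outer null presumably means "leave this column alone", while a present
// value (possibly an inner null) is written through, mirroring the old
// Partial(Profile) behaviour.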
pub const ProfileUpdateArgs = struct {
display_name: ??[]const u8,
bio: ?[]const u8,
avatar_file_id: ??Uuid,
header_file_id: ??Uuid,
profile_fields: ?[]const ProfileField,
pub const sql_serialize = struct {
pub const profile_fields = .json;
};
};
pub const sql_serialize = struct {
pub const profile_fields = .json;
};
};
pub const Community = struct {
pub const Kind = enum {
admin,
local,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const Scheme = enum {
https,
http,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
id: Uuid,
owner_id: ?Uuid,
host: []const u8,
name: []const u8,
scheme: Scheme,
kind: Kind,
created_at: DateTime,
pub const CreateOptions = struct {
name: ?[]const u8 = null,
kind: Kind = .local,
};
pub const QueryArgs = struct {
pub const OrderBy = enum {
name,
host,
created_at,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const Direction = common.Direction;
pub const PageDirection = common.PageDirection;
pub const Prev = struct {
id: Uuid,
order_val: OrderVal,
};
pub const OrderVal = union(OrderBy) {
name: []const u8,
host: []const u8,
created_at: DateTime,
};
// Max items to fetch
max_items: usize = 20,
// Selection filters
owner_id: ?Uuid = null, // searches for communities owned by this user
like: ?[]const u8 = null, // searches for communities with host or name LIKE '%?%'
created_before: ?DateTime = null,
created_after: ?DateTime = null,
// Ordering parameter
order_by: OrderBy = .created_at,
direction: Direction = .ascending,
// Page start parameter(s)
// This struct is a reference to the last value scanned
// If prev is present, then prev.order_val must have the same tag as order_by
// "prev" here refers to it being the previous value returned. It may be that
// prev refers to the item directly after the results you are about to receive,
// if you are querying the previous page.
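//   e.g. with order_by = .host, a matching prev could be
//   .{ .id = last_item.id, .order_val = .{ .host = last_item.host } }
//   for some previously returned community last_item (hypothetical).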
prev: ?Prev = null,
// What direction to scan the page window
// If "forward", then "prev" is interpreted as the item directly before the items
// to query, in the direction of "direction" above. If "backward", then the opposite
page_direction: PageDirection = .forward,
};
};
pub const DriveEntry = struct {
pub const Kind = enum {
dir,
file,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
id: Uuid,
owner_id: Uuid,
name: ?[]const u8,
path: []const u8,
parent_directory_id: ?Uuid,
file_id: ?Uuid,
kind: Kind,
};
pub const FileUpload = struct {
pub const Status = enum {
uploading,
uploaded,
external,
deleted,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
id: Uuid,
owner_id: Uuid,
size: usize,
filename: []const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: bool,
status: Status,
created_at: DateTime,
updated_at: DateTime,
pub const CreateOptions = struct {
filename: []const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: bool,
};
pub const UpdateArgs = struct {
filename: ?[]const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: ?bool,
};
};
pub const Invite = struct {
const UseCount = usize;
pub const Kind = enum {
system,
community_owner,
user,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
id: Uuid,
created_by: Uuid, // User ID
community_id: Uuid,
name: []const u8,
code: []const u8,
created_at: DateTime,
times_used: UseCount,
expires_at: ?DateTime,
max_uses: ?UseCount,
kind: Kind,
pub const CreateOptions = struct {
name: ?[]const u8 = null,
max_uses: ?UseCount = null,
lifespan: ?DateTime.Duration = null,
kind: Kind = .user,
to_community: ?Uuid = null,
};
pub const InternalCreateOptions = struct {
name: ?[]const u8 = null,
max_uses: ?UseCount = null,
lifespan: ?DateTime.Duration = null,
kind: Kind = .user,
};
};
pub const Follow = struct {
id: Uuid,
followed_by_id: Uuid,
followee_id: Uuid,
created_at: DateTime,
pub const QueryArgs = struct {
pub const OrderBy = enum {
created_at,
};
pub const Direction = common.Direction;
pub const PageDirection = common.PageDirection;
pub const Prev = struct {
id: Uuid,
order_val: union(OrderBy) {
created_at: DateTime,
},
};
max_items: usize = 20,
followed_by_id: ?Uuid = null,
followee_id: ?Uuid = null,
order_by: OrderBy = .created_at,
direction: Direction = .descending,
prev: ?Prev = null,
page_direction: PageDirection = .forward,
};
};
pub const Note = struct {
id: Uuid,
author: Actor,
content: []const u8,
created_at: DateTime,
pub const QueryArgs = struct {
pub const PageDirection = common.PageDirection;
pub const Prev = struct {
id: Uuid,
created_at: DateTime,
};
max_items: usize = 20,
created_before: ?DateTime = null,
created_after: ?DateTime = null,
community_id: ?Uuid = null,
followed_by: ?Uuid = null,
prev: ?Prev = null,
page_direction: PageDirection = .forward,
};
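// Rows for Note are selected with "author.*"-aliased columns (see the joined
// SELECT in notes.zig), which presumably get mapped onto the nested `author`
// field; profile_fields arrives as JSON, hence the override below.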
// TODO: This sucks
pub const sql_serialize = struct {
pub const @"author.profile_fields" = .json;
};
};
pub const Token = struct {
pub const Info = struct {
user_id: Uuid,
issued_at: DateTime,
};
value: []const u8,
info: Info,
};
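
With the shared structs in place, pagination looks the same across services: every result carries the arguments for its neighboring pages. A rough sketch of walking the local timeline through the API layer (`conn` is hypothetical, and TimelineArgs is assumed to default-initialize):

const std = @import("std");
const api = @import("api");

// Hypothetical walk over the local timeline: each TimelineResult carries the
// TimelineArgs for the neighboring pages, so paging is just re-calling with
// next_page. (Freeing each page is omitted for brevity.)
fn dumpLocalTimeline(conn: anytype) !void {
    var page = try conn.localTimeline(.{});
    while (page.items.len != 0) {
        for (page.items) |note| {
            std.log.info("{s}: {s}", .{ note.author.username, note.content });
        }
        page = try conn.localTimeline(page.next_page);
    }
}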

View File

@ -2,7 +2,7 @@ const api = @import("api");
const util = @import("util");
const controller_utils = @import("../../controllers.zig").helpers;
const QueryArgs = api.CommunityQueryArgs;
const QueryArgs = api.Community.QueryArgs;
pub const create = struct {
pub const method = .POST;
@ -25,9 +25,9 @@ pub const query = struct {
pub const path = "/communities";
pub const Query = struct {
const OrderBy = api.CommunityQueryArgs.OrderBy;
const Direction = api.CommunityQueryArgs.Direction;
const PageDirection = api.CommunityQueryArgs.PageDirection;
const OrderBy = api.Community.QueryArgs.OrderBy;
const Direction = api.Community.QueryArgs.Direction;
const PageDirection = api.Community.QueryArgs.PageDirection;
// Max items to fetch
max_items: usize = 20,
@ -80,7 +80,7 @@ pub const query = struct {
});
const convert = struct {
fn func(args: api.CommunityQueryArgs) Query {
fn func(args: api.Community.QueryArgs) Query {
return .{
.max_items = args.max_items,
.owner_id = args.owner_id,

View File

@ -50,7 +50,7 @@ pub const update_profile = struct {
id: util.Uuid,
};
pub const Body = api.PartialUserProfile;
pub const Body = api.Actor.ProfileUpdateArgs;
// TODO: I don't like that the request body and response body are substantially different
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {

View File

@ -96,7 +96,7 @@ const login = struct {
try res.headers.put("Location", index.path);
var buf: [64]u8 = undefined;
const cookie_name = try std.fmt.bufPrint(&buf, "token.{s}", .{req.body.username});
try res.headers.setCookie(cookie_name, token.token, .{});
try res.headers.setCookie(cookie_name, token.value, .{});
try res.headers.setCookie("active_account", req.body.username, .{ .HttpOnly = false });
try res.status(.see_other);
@ -190,7 +190,7 @@ const signup = struct {
try res.headers.put("Location", index.path);
var buf: [64]u8 = undefined;
const cookie_name = try std.fmt.bufPrint(&buf, "token.{s}", .{req.body.username});
try res.headers.setCookie(cookie_name, token.token, .{});
try res.headers.setCookie(cookie_name, token.value, .{});
try res.headers.setCookie("active_account", req.body.username, .{ .HttpOnly = false });
try res.status(.see_other);