Compare commits

..

No commits in common. "cc4badae218b526323af6229d897cc7f947fdccb" and "b0db514adceb9869fd1c6cd6299bc8fc1286e092" have entirely different histories.

18 changed files with 748 additions and 1062 deletions

View file

@ -116,12 +116,7 @@ pub fn build(b: *std.build.Builder) !void {
const unittest_template_cmd = b.step("unit:template", "Run tests for template package");
const unittest_template = b.addTest("src/template/lib.zig");
unittest_template_cmd.dependOn(&unittest_template.step);
const unittest_api_cmd = b.step("unit:api", "Run tests for api package");
const unittest_api = b.addTest("src/api/lib.zig");
unittest_api_cmd.dependOn(&unittest_api.step);
unittest_api.addPackage(pkgs.util);
unittest_api.addPackage(pkgs.sql);
//unittest_template.addPackage(pkgs.util);
//const util_tests = b.addTest("src/util/lib.zig");
//const sql_tests = b.addTest("src/sql/lib.zig");
@ -134,7 +129,6 @@ pub fn build(b: *std.build.Builder) !void {
unittest_all.dependOn(unittest_util_cmd);
unittest_all.dependOn(unittest_sql_cmd);
unittest_all.dependOn(unittest_template_cmd);
unittest_all.dependOn(unittest_api_cmd);
const api_integration = b.addTest("./tests/api_integration/lib.zig");
api_integration.addPackage(pkgs.opts);

View file

@ -8,33 +8,16 @@ const Uuid = util.Uuid;
const default_avatar = "static/default_avi.png";
const services = struct {
pub const communities = @import("./services/communities.zig");
pub const actors = @import("./services/actors.zig");
pub const drive = @import("./services/drive.zig");
pub const files = @import("./services/files.zig");
pub const invites = @import("./services/invites.zig");
pub const notes = @import("./services/notes.zig");
pub const follows = @import("./services/follows.zig");
pub const accounts = @import("./services/accounts.zig");
pub const tokens = @import("./services/tokens.zig");
const communities = @import("./services/communities.zig");
const actors = @import("./services/actors.zig");
const auth = @import("./services/auth.zig");
const drive = @import("./services/drive.zig");
const files = @import("./services/files.zig");
const invites = @import("./services/invites.zig");
const notes = @import("./services/notes.zig");
const follows = @import("./services/follows.zig");
};
test {
_ = @import("./methods/auth.zig");
}
const types = @import("./types.zig");
pub const QueryResult = types.QueryResult;
pub const Account = types.Account;
pub const Actor = types.Actor;
pub const Community = types.Community;
pub const Invite = types.Invite;
pub const Note = types.Note;
pub const Token = types.Token;
pub const ClusterMeta = struct {
community_count: usize,
user_count: usize,
@ -47,7 +30,7 @@ pub const RegistrationOptions = struct {
};
pub const InviteOptions = struct {
pub const Kind = Invite.Kind;
pub const Kind = services.invites.Kind;
name: ?[]const u8 = null,
lifespan: ?DateTime.Duration = null,
@ -58,6 +41,9 @@ pub const InviteOptions = struct {
to_community: ?Uuid = null,
};
pub const LoginResponse = services.auth.LoginResult;
pub const ProfileField = services.actors.ProfileField;
pub const UserResponse = struct {
id: Uuid,
@ -73,13 +59,14 @@ pub const UserResponse = struct {
header_file_id: ?Uuid,
header_url: ?[]const u8,
profile_fields: []const Actor.ProfileField,
profile_fields: []const ProfileField,
community_id: Uuid,
created_at: DateTime,
updated_at: DateTime,
};
pub const PartialUserProfile = services.actors.PartialProfile;
pub const NoteResponse = struct {
id: Uuid,
@ -93,9 +80,15 @@ pub const NoteResponse = struct {
created_at: DateTime,
};
pub const Community = services.communities.Community;
pub const CommunityQueryArgs = services.communities.QueryArgs;
pub const CommunityQueryResult = services.communities.QueryResult;
pub const NoteQueryArgs = services.notes.QueryArgs;
pub const TimelineArgs = struct {
pub const PageDirection = Note.QueryArgs.PageDirection;
pub const Prev = Note.QueryArgs.Prev;
pub const PageDirection = NoteQueryArgs.PageDirection;
pub const Prev = NoteQueryArgs.Prev;
max_items: usize = 20,
@ -106,7 +99,7 @@ pub const TimelineArgs = struct {
page_direction: PageDirection = .forward,
fn from(args: Note.QueryArgs) TimelineArgs {
fn from(args: NoteQueryArgs) TimelineArgs {
return .{
.max_items = args.max_items,
.created_before = args.created_before,
@ -118,7 +111,7 @@ pub const TimelineArgs = struct {
};
pub const TimelineResult = struct {
items: []Note,
items: []services.notes.NoteDetailed,
prev_page: TimelineArgs,
next_page: TimelineArgs,
@ -198,7 +191,7 @@ pub const DriveEntry = union(enum) {
},
};
pub const FileUpload = types.FileUpload;
pub const FileUpload = services.files.FileUpload;
pub const DriveGetResult = union(services.drive.Kind) {
dir: struct {
@ -212,13 +205,13 @@ pub const DriveGetResult = union(services.drive.Kind) {
};
pub const FileResult = struct {
meta: FileUpload,
meta: services.files.FileUpload,
data: []const u8,
};
pub const InviteResponse = struct {
code: []const u8,
kind: Invite.Kind,
kind: services.invites.Kind,
name: []const u8,
creator: UserResponse,
@ -257,14 +250,7 @@ pub fn setupAdmin(db: sql.Db, origin: []const u8, username: []const u8, password
arena.allocator(),
);
const user = try @import("./methods/auth.zig").methods(services).createLocalAccount(
tx,
username,
password,
community_id,
.{ .role = .admin },
arena.allocator(),
);
const user = try services.auth.register(tx, username, password, community_id, .{ .kind = .admin }, arena.allocator());
try services.communities.transferOwnership(tx, community_id, user);
@ -279,7 +265,7 @@ pub fn setupAdmin(db: sql.Db, origin: []const u8, username: []const u8, password
pub const ApiSource = struct {
db_conn_pool: *sql.ConnPool,
pub const Conn = ApiConn(sql.Db, services);
pub const Conn = ApiConn(sql.Db);
const root_username = "root";
@ -296,47 +282,63 @@ pub const ApiSource = struct {
return Conn{
.db = db,
.context = .{
.community = community,
},
.user_id = null,
.community = community,
.allocator = alloc,
};
}
pub fn connectToken(self: *ApiSource, host: []const u8, token: []const u8, alloc: std.mem.Allocator) !Conn {
var conn = try self.connectUnauthorized(host, alloc);
errdefer conn.close();
conn.context.token_info = try conn.verifyToken(token);
return conn;
const db = try self.db_conn_pool.acquire();
errdefer db.releaseConnection();
const community = try services.communities.getByHost(db, host, alloc);
const token_info = try services.auth.verifyToken(
db,
token,
community.id,
alloc,
);
return Conn{
.db = db,
.token_info = token_info,
.user_id = token_info.user_id,
.community = community,
.allocator = alloc,
};
}
};
pub const ApiContext = struct {
token_info: ?Token.Info = null,
community: Community,
pub fn userId(self: ApiContext) ?Uuid {
if (self.token_info) |t| return t.user_id else return null;
}
};
fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
fn ApiConn(comptime DbConn: type) type {
return struct {
const Self = @This();
db: DbConn,
context: ApiContext,
token_info: ?services.auth.TokenInfo = null,
user_id: ?Uuid = null,
community: services.communities.Community,
allocator: std.mem.Allocator,
pub fn close(self: *Self) void {
util.deepFree(self.allocator, self.context.community);
if (self.context.token_info) |info| util.deepFree(self.allocator, info);
util.deepFree(self.allocator, self.community);
if (self.token_info) |info| util.deepFree(self.allocator, info);
self.db.releaseConnection();
}
fn isAdmin(self: *Self) bool {
// TODO
return self.context.userId() != null and self.context.community.kind == .admin;
return self.user_id != null and self.community.kind == .admin;
}
pub fn login(self: *Self, username: []const u8, password: []const u8) !LoginResponse {
return services.auth.login(
self.db,
username,
self.community.id,
password,
self.allocator,
);
}
pub const AuthorizationInfo = struct {
@ -348,8 +350,8 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
issued_at: DateTime,
};
pub fn verifyAuthorization(self: *Self) !AuthorizationInfo {
if (self.context.token_info) |info| {
const user = try models.actors.get(self.db, info.user_id, self.allocator);
if (self.token_info) |info| {
const user = try services.actors.get(self.db, info.user_id, self.allocator);
defer util.deepFree(self.allocator, user);
const username = try util.deepClone(self.allocator, user.username);
@ -358,8 +360,8 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
return AuthorizationInfo{
.id = user.id,
.username = username,
.community_id = self.context.community.id,
.host = try util.deepClone(self.allocator, self.context.community.host),
.community_id = self.community.id,
.host = try util.deepClone(self.allocator, self.community.host),
.issued_at = info.issued_at,
};
@ -368,21 +370,21 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
return error.TokenRequired;
}
pub fn createCommunity(self: *Self, origin: []const u8, name: ?[]const u8) !Community {
pub fn createCommunity(self: *Self, origin: []const u8, name: ?[]const u8) !services.communities.Community {
if (!self.isAdmin()) {
return error.PermissionDenied;
}
const tx = try self.db.begin();
errdefer tx.rollback();
const community_id = try models.communities.create(
const community_id = try services.communities.create(
tx,
origin,
.{ .name = name },
self.allocator,
);
const community = models.communities.get(
const community = services.communities.get(
tx,
community_id,
self.allocator,
@ -398,29 +400,30 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
pub fn createInvite(self: *Self, options: InviteOptions) !InviteResponse {
// Only logged in users can make invites
const user_id = self.context.userId() orelse return error.TokenRequired;
const user_id = self.user_id orelse return error.TokenRequired;
const community_id = if (options.to_community) |id| blk: {
// Only admins can send invites for other communities
if (!self.isAdmin()) return error.PermissionDenied;
break :blk id;
} else self.context.community.id;
} else self.community.id;
// Users can only make user invites
if (options.kind != .user and !self.isAdmin()) return error.PermissionDenied;
const invite_id = try models.invites.create(self.db, user_id, community_id, options.name orelse "", .{
const invite_id = try services.invites.create(self.db, user_id, community_id, .{
.name = options.name,
.lifespan = options.lifespan,
.max_uses = options.max_uses,
.kind = options.kind,
}, self.allocator);
const invite = try models.invites.get(self.db, invite_id, self.allocator);
const invite = try services.invites.get(self.db, invite_id, self.allocator);
errdefer util.deepFree(self.allocator, invite);
const url = if (options.to_community) |cid| blk: {
const community = try models.communities.get(self.db, cid, self.allocator);
const community = try services.communities.get(self.db, cid, self.allocator);
defer util.deepFree(self.allocator, community);
break :blk try std.fmt.allocPrint(
@ -431,7 +434,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
} else try std.fmt.allocPrint(
self.allocator,
"{s}://{s}/invite/{s}",
.{ @tagName(self.context.community.scheme), self.context.community.host, invite.code },
.{ @tagName(self.community.scheme), self.community.host, invite.code },
);
errdefer util.deepFree(self.allocator, url);
@ -451,81 +454,80 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
};
}
fn isInviteValid(invite: Invite) bool {
fn isInviteValid(invite: services.invites.Invite) bool {
if (invite.max_uses != null and invite.times_used >= invite.max_uses.?) return false;
if (invite.expires_at != null and DateTime.now().isAfter(invite.expires_at.?)) return false;
return true;
}
pub usingnamespace @import("./methods/auth.zig").methods(models);
// pub fn register(self: *Self, username: []const u8, password: []const u8, opt: RegistrationOptions) !UserResponse {
// const tx = try self.db.beginOrSavepoint();
// const maybe_invite = if (opt.invite_code) |code|
// try models.invites.getByCode(tx, code, self.context.community.id, self.allocator)
// else
// null;
// defer if (maybe_invite) |inv| util.deepFree(self.allocator, inv);
pub fn register(self: *Self, username: []const u8, password: []const u8, opt: RegistrationOptions) !UserResponse {
const tx = try self.db.beginOrSavepoint();
const maybe_invite = if (opt.invite_code) |code|
try services.invites.getByCode(tx, code, self.community.id, self.allocator)
else
null;
defer if (maybe_invite) |inv| util.deepFree(self.allocator, inv);
// if (maybe_invite) |invite| {
// if (!Uuid.eql(invite.community_id, self.context.community.id)) return error.WrongCommunity;
// if (!isInviteValid(invite)) return error.InvalidInvite;
// }
if (maybe_invite) |invite| {
if (!Uuid.eql(invite.community_id, self.community.id)) return error.WrongCommunity;
if (!isInviteValid(invite)) return error.InvalidInvite;
}
// const invite_kind = if (maybe_invite) |inv| inv.kind else .user;
const invite_kind = if (maybe_invite) |inv| inv.kind else .user;
// if (self.context.community.kind == .admin) @panic("Unimplmented");
if (self.community.kind == .admin) @panic("Unimplmented");
// const user_id = try models.auth.register(
// tx,
// username,
// password,
// self.context.community.id,
// .{
// .invite_id = if (maybe_invite) |inv| @as(?Uuid, inv.id) else null,
// .email = opt.email,
// },
// self.allocator,
// );
const user_id = try services.auth.register(
tx,
username,
password,
self.community.id,
.{
.invite_id = if (maybe_invite) |inv| @as(?Uuid, inv.id) else null,
.email = opt.email,
},
self.allocator,
);
// switch (invite_kind) {
// .user => {},
// .system => @panic("System user invites unimplemented"),
// .community_owner => {
// try models.communities.transferOwnership(tx, self.context.community.id, user_id);
// },
// }
switch (invite_kind) {
.user => {},
.system => @panic("System user invites unimplemented"),
.community_owner => {
try services.communities.transferOwnership(tx, self.community.id, user_id);
},
}
// const user = self.getUserUnchecked(tx, user_id) catch |err| switch (err) {
// error.NotFound => return error.Unexpected,
// else => |e| return e,
// };
// errdefer util.deepFree(self.allocator, user);
const user = self.getUserUnchecked(tx, user_id) catch |err| switch (err) {
error.NotFound => return error.Unexpected,
else => |e| return e,
};
errdefer util.deepFree(self.allocator, user);
// try tx.commit();
// return user;
// }
try tx.commit();
return user;
}
fn getUserUnchecked(self: *Self, db: anytype, user_id: Uuid) !UserResponse {
const user = try models.actors.get(db, user_id, self.allocator);
const user = try services.actors.get(db, user_id, self.allocator);
const avatar_url = if (user.avatar_file_id) |fid|
try std.fmt.allocPrint(
self.allocator,
"{s}://{s}/media/{}",
.{ @tagName(self.context.community.scheme), self.context.community.host, fid },
.{ @tagName(self.community.scheme), self.community.host, fid },
)
else
try std.fmt.allocPrint(
self.allocator,
"{s}://{s}/{s}",
.{ @tagName(self.context.community.scheme), self.context.community.host, default_avatar },
.{ @tagName(self.community.scheme), self.community.host, default_avatar },
);
errdefer self.allocator.free(avatar_url);
const header_url = if (user.header_file_id) |fid|
try std.fmt.allocPrint(
self.allocator,
"{s}://{s}/media/{}",
.{ @tagName(self.context.community.scheme), self.context.community.host, fid },
.{ @tagName(self.community.scheme), self.community.host, fid },
)
else
null;
@ -558,20 +560,20 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
const user = try self.getUserUnchecked(self.db, user_id);
errdefer util.deepFree(self.allocator, user);
if (self.context.userId() == null) {
if (!Uuid.eql(self.context.community.id, user.community_id)) return error.NotFound;
if (self.user_id == null) {
if (!Uuid.eql(self.community.id, user.community_id)) return error.NotFound;
}
return user;
}
pub fn createNote(self: *Self, content: []const u8) !Note {
pub fn createNote(self: *Self, content: []const u8) !NoteResponse {
// You cannot post on admin accounts
if (self.context.community.kind == .admin) return error.WrongCommunity;
if (self.community.kind == .admin) return error.WrongCommunity;
// Only authenticated users can post
const user_id = self.context.userId() orelse return error.TokenRequired;
const note_id = try models.notes.create(self.db, user_id, content, self.allocator);
const user_id = self.user_id orelse return error.TokenRequired;
const note_id = try services.notes.create(self.db, user_id, content, self.allocator);
return self.getNote(note_id) catch |err| switch (err) {
error.NotFound => error.Unexpected,
@ -579,26 +581,35 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
};
}
pub fn getNote(self: *Self, note_id: Uuid) !Note {
const note = try models.notes.get(self.db, note_id, self.allocator);
errdefer util.deepFree(self.allocator, note);
pub fn getNote(self: *Self, note_id: Uuid) !NoteResponse {
const note = try services.notes.get(self.db, note_id, self.allocator);
const user = try services.actors.get(self.db, note.author_id, self.allocator);
// Only serve community-specific notes on unauthenticated requests
if (self.context.userId() == null) {
if (!Uuid.eql(self.context.community.id, note.author.community_id)) return error.NotFound;
if (self.user_id == null) {
if (!Uuid.eql(self.community.id, user.community_id)) return error.NotFound;
}
return note;
return NoteResponse{
.id = note.id,
.author = .{
.id = user.id,
.username = user.username,
.host = user.host,
},
.content = note.content,
.created_at = note.created_at,
};
}
pub fn queryCommunities(self: *Self, args: Community.QueryArgs) !QueryResult(Community) {
pub fn queryCommunities(self: *Self, args: services.communities.QueryArgs) !CommunityQueryResult {
if (!self.isAdmin()) return error.PermissionDenied;
return try models.communities.query(self.db, args, self.allocator);
return try services.communities.query(self.db, args, self.allocator);
}
pub fn globalTimeline(self: *Self, args: TimelineArgs) !TimelineResult {
const all_args = std.mem.zeroInit(Note.QueryArgs, args);
const result = try models.notes.query(self.db, all_args, self.allocator);
const all_args = std.mem.zeroInit(NoteQueryArgs, args);
const result = try services.notes.query(self.db, all_args, self.allocator);
return TimelineResult{
.items = result.items,
.prev_page = TimelineArgs.from(result.prev_page),
@ -607,9 +618,9 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
}
pub fn localTimeline(self: *Self, args: TimelineArgs) !TimelineResult {
var all_args = std.mem.zeroInit(Note.QueryArgs, args);
all_args.community_id = self.context.community.id;
const result = try models.notes.query(self.db, all_args, self.allocator);
var all_args = std.mem.zeroInit(NoteQueryArgs, args);
all_args.community_id = self.community.id;
const result = try services.notes.query(self.db, all_args, self.allocator);
return TimelineResult{
.items = result.items,
.prev_page = TimelineArgs.from(result.prev_page),
@ -618,11 +629,11 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
}
pub fn homeTimeline(self: *Self, args: TimelineArgs) !TimelineResult {
if (self.context.userId() == null) return error.NoToken;
if (self.user_id == null) return error.NoToken;
var all_args = std.mem.zeroInit(Note.QueryArgs, args);
all_args.followed_by = self.context.userId();
const result = try models.notes.query(self.db, all_args, self.allocator);
var all_args = std.mem.zeroInit(services.notes.QueryArgs, args);
all_args.followed_by = self.user_id;
const result = try services.notes.query(self.db, all_args, self.allocator);
return TimelineResult{
.items = result.items,
.prev_page = TimelineArgs.from(result.prev_page),
@ -631,9 +642,9 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
}
pub fn queryFollowers(self: *Self, user_id: Uuid, args: FollowerQueryArgs) !FollowerQueryResult {
var all_args = std.mem.zeroInit(models.follows.QueryArgs, args);
var all_args = std.mem.zeroInit(services.follows.QueryArgs, args);
all_args.followee_id = user_id;
const result = try models.follows.query(self.db, all_args, self.allocator);
const result = try services.follows.query(self.db, all_args, self.allocator);
return FollowerQueryResult{
.items = result.items,
.prev_page = FollowQueryArgs.from(result.prev_page),
@ -642,9 +653,9 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
}
pub fn queryFollowing(self: *Self, user_id: Uuid, args: FollowingQueryArgs) !FollowingQueryResult {
var all_args = std.mem.zeroInit(models.follows.QueryArgs, args);
var all_args = std.mem.zeroInit(services.follows.QueryArgs, args);
all_args.followed_by_id = user_id;
const result = try models.follows.query(self.db, all_args, self.allocator);
const result = try services.follows.query(self.db, all_args, self.allocator);
return FollowingQueryResult{
.items = result.items,
.prev_page = FollowQueryArgs.from(result.prev_page),
@ -653,12 +664,12 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
}
pub fn follow(self: *Self, followee: Uuid) !void {
const result = try models.follows.create(self.db, self.context.userId() orelse return error.NoToken, followee, self.allocator);
const result = try services.follows.create(self.db, self.user_id orelse return error.NoToken, followee, self.allocator);
defer util.deepFree(self.allocator, result);
}
pub fn unfollow(self: *Self, followee: Uuid) !void {
const result = try models.follows.delete(self.db, self.context.userId() orelse return error.NoToken, followee, self.allocator);
const result = try services.follows.delete(self.db, self.user_id orelse return error.NoToken, followee, self.allocator);
defer util.deepFree(self.allocator, result);
}
@ -679,7 +690,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
);
}
fn backendDriveEntryToFrontend(self: *Self, entry: models.drive.Entry, recurse: bool) !DriveEntry {
fn backendDriveEntryToFrontend(self: *Self, entry: services.drive.Entry, recurse: bool) !DriveEntry {
return if (entry.file_id) |file_id| .{
.file = .{
.id = entry.id,
@ -688,7 +699,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
.path = entry.path,
.parent_directory_id = entry.parent_directory_id,
.meta = try models.files.get(self.db, file_id, self.allocator),
.meta = try services.files.get(self.db, file_id, self.allocator),
},
} else .{
.dir = .{
@ -701,7 +712,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
.children = blk: {
if (!recurse) break :blk null;
const children = try models.drive.list(self.db, entry.id, self.allocator);
const children = try services.drive.list(self.db, entry.id, self.allocator);
const result = self.allocator.alloc(DriveEntry, children.len) catch |err| {
util.deepFree(self.allocator, children);
@ -718,7 +729,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
errdefer self.allocator.free(result);
for (children) |child, i| {
result[i] = try backendDriveEntryToFrontend(self, child, false);
result[i] = try self.backendDriveEntryToFrontend(child, false);
count += 1;
}
@ -729,8 +740,8 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
}
pub fn driveUpload(self: *Self, meta: UploadFileArgs, body: []const u8) !DriveEntry {
const user_id = self.context.userId() orelse return error.NoToken;
const file_id = try models.files.create(self.db, user_id, .{
const user_id = self.user_id orelse return error.NoToken;
const file_id = try services.files.create(self.db, user_id, .{
.filename = meta.filename,
.description = meta.description,
.content_type = meta.content_type,
@ -738,11 +749,11 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
}, body, self.allocator);
const entry = entry: {
errdefer models.files.delete(self.db, file_id, self.allocator) catch |err| {
errdefer services.files.delete(self.db, file_id, self.allocator) catch |err| {
std.log.err("Unable to delete file {}: {}", .{ file_id, err });
};
break :entry models.drive.create(
break :entry services.drive.create(
self.db,
user_id,
meta.dir,
@ -757,7 +768,7 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
const name = split.rest();
const new_name = try std.fmt.bufPrint(&buf, "{s}.{s}.{s}", .{ name, file_id, ext });
break :entry try models.drive.create(
break :entry try services.drive.create(
self.db,
user_id,
meta.dir,
@ -775,67 +786,67 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
}
pub fn driveMkdir(self: *Self, parent_path: []const u8, name: []const u8) !DriveEntry {
const user_id = self.context.userId() orelse return error.NoToken;
const entry = try models.drive.create(self.db, user_id, parent_path, name, null, self.allocator);
const user_id = self.user_id orelse return error.NoToken;
const entry = try services.drive.create(self.db, user_id, parent_path, name, null, self.allocator);
errdefer util.deepFree(self.allocator, entry);
return try self.backendDriveEntryToFrontend(entry, true);
}
pub fn driveDelete(self: *Self, path: []const u8) !void {
const user_id = self.context.userId() orelse return error.NoToken;
const entry = try models.drive.stat(self.db, user_id, path, self.allocator);
const user_id = self.user_id orelse return error.NoToken;
const entry = try services.drive.stat(self.db, user_id, path, self.allocator);
defer util.deepFree(self.allocator, entry);
try models.drive.delete(self.db, entry.id, self.allocator);
if (entry.file_id) |file_id| try models.files.delete(self.db, file_id, self.allocator);
try services.drive.delete(self.db, entry.id, self.allocator);
if (entry.file_id) |file_id| try services.files.delete(self.db, file_id, self.allocator);
}
pub fn driveMove(self: *Self, src: []const u8, dest: []const u8) !DriveEntry {
const user_id = self.context.userId() orelse return error.NoToken;
try models.drive.move(self.db, user_id, src, dest, self.allocator);
const user_id = self.user_id orelse return error.NoToken;
try services.drive.move(self.db, user_id, src, dest, self.allocator);
return try self.driveGet(dest);
}
pub fn driveGet(self: *Self, path: []const u8) !DriveEntry {
const user_id = self.context.userId() orelse return error.NoToken;
const entry = try models.drive.stat(self.db, user_id, path, self.allocator);
const user_id = self.user_id orelse return error.NoToken;
const entry = try services.drive.stat(self.db, user_id, path, self.allocator);
errdefer util.deepFree(self.allocator, entry);
return try self.backendDriveEntryToFrontend(entry, true);
}
pub fn driveUpdate(self: *Self, path: []const u8, meta: FileUpload.UpdateArgs) !DriveEntry {
const user_id = self.context.userId() orelse return error.NoToken;
pub fn driveUpdate(self: *Self, path: []const u8, meta: services.files.PartialMeta) !DriveEntry {
const user_id = self.user_id orelse return error.NoToken;
std.log.debug("{s}", .{path});
const entry = try models.drive.stat(self.db, user_id, path, self.allocator);
const entry = try services.drive.stat(self.db, user_id, path, self.allocator);
defer util.deepFree(self.allocator, entry);
std.log.debug("{}", .{entry.id});
try models.files.update(self.db, entry.file_id orelse return error.NotAFile, meta, self.allocator);
try services.files.update(self.db, entry.file_id orelse return error.NotAFile, meta, self.allocator);
return try self.driveGet(path);
}
pub fn fileDereference(self: *Self, id: Uuid) !FileResult {
const meta = try models.files.get(self.db, id, self.allocator);
const meta = try services.files.get(self.db, id, self.allocator);
errdefer util.deepFree(self.allocator, meta);
return FileResult{
.meta = meta,
.data = try models.files.deref(self.allocator, id),
.data = try services.files.deref(self.allocator, id),
};
}
pub fn updateUserProfile(self: *Self, id: Uuid, data: Actor.ProfileUpdateArgs) !void {
if (!Uuid.eql(id, self.context.userId() orelse return error.NoToken)) return error.AccessDenied;
try models.actors.updateProfile(self.db, id, data, self.allocator);
pub fn updateUserProfile(self: *Self, id: Uuid, data: PartialUserProfile) !void {
if (!Uuid.eql(id, self.user_id orelse return error.NoToken)) return error.AccessDenied;
try services.actors.updateProfile(self.db, id, data, self.allocator);
}
pub fn validateInvite(self: *Self, code: []const u8) !InviteResponse {
const invite = models.invites.getByCode(
const invite = services.invites.getByCode(
self.db,
code,
self.context.community.id,
self.community.id,
self.allocator,
) catch |err| switch (err) {
error.NotFound => return error.InvalidInvite,
@ -843,13 +854,13 @@ fn ApiConn(comptime DbConn: type, comptime models: anytype) type {
};
errdefer util.deepFree(self.allocator, invite);
if (!Uuid.eql(invite.community_id, self.context.community.id)) return error.InvalidInvite;
if (!Uuid.eql(invite.community_id, self.community.id)) return error.InvalidInvite;
if (!isInviteValid(invite)) return error.InvalidInvite;
const url = try std.fmt.allocPrint(
self.allocator,
"{s}://{s}/invite/{s}",
.{ @tagName(self.context.community.scheme), self.context.community.host, invite.code },
.{ @tagName(self.community.scheme), self.community.host, invite.code },
);
errdefer util.deepFree(self.allocator, url);

View file

@ -1,327 +0,0 @@
const std = @import("std");
const util = @import("util");
const types = @import("../types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const Invite = @import("../lib.zig").Invite;
pub const Token = types.Token;
pub const RegistrationOptions = struct {
invite_code: ?[]const u8 = null,
email: ?[]const u8 = null,
};
pub const AccountCreateOptions = @import("../services/accounts.zig").CreateOptions;
pub fn methods(comptime models: type) type {
return struct {
fn isInviteValid(invite: Invite) bool {
if (invite.max_uses != null and invite.times_used >= invite.max_uses.?) return false;
if (invite.expires_at != null and DateTime.now().isAfter(invite.expires_at.?)) return false;
return true;
}
pub fn register(self: anytype, username: []const u8, password: []const u8, opt: RegistrationOptions) !types.Actor {
const tx = try self.db.beginOrSavepoint();
const maybe_invite = if (opt.invite_code) |code|
try models.invites.getByCode(tx, code, self.context.community.id, self.allocator)
else
null;
defer if (maybe_invite) |inv| util.deepFree(self.allocator, inv);
if (maybe_invite) |invite| {
if (!Uuid.eql(invite.community_id, self.context.community.id)) return error.WrongCommunity;
if (!isInviteValid(invite)) return error.InvalidInvite;
}
const invite_kind = if (maybe_invite) |inv| inv.kind else .user;
if (self.context.community.kind == .admin) @panic("Unimplmented");
const user_id = try createLocalAccount(
tx,
username,
password,
self.context.community.id,
.{
.invite_id = if (maybe_invite) |inv| @as(?Uuid, inv.id) else null,
.email = opt.email,
},
self.allocator,
);
switch (invite_kind) {
.user => {},
.system => @panic("System user invites unimplemented"),
.community_owner => {
try models.communities.transferOwnership(tx, self.context.community.id, user_id);
},
}
const user = models.actors.get(tx, user_id, self.allocator) catch |err| switch (err) {
error.NotFound => return error.Unexpected,
else => |e| return e,
};
errdefer util.deepFree(self.allocator, user);
try tx.commitOrRelease();
return user;
}
// Only for internal use
pub fn createLocalAccount(
db: anytype,
username: []const u8,
password: []const u8,
community_id: Uuid,
opt: AccountCreateOptions,
alloc: std.mem.Allocator,
) !Uuid {
const tx = try db.beginOrSavepoint();
errdefer tx.rollback();
const hash = try hashPassword(password, alloc);
defer alloc.free(hash);
const id = try models.actors.create(tx, username, community_id, false, alloc);
try models.accounts.create(tx, id, hash, opt, alloc);
try tx.commitOrRelease();
return id;
}
/// Authenticates `username`/`password` against the current community and,
/// on success, persists and returns a fresh login token.
/// Caller owns the returned Token; release it with util.deepFree.
pub fn login(self: anytype, username: []const u8, password: []const u8) !Token {
    const community_id = self.context.community.id;
    // Fetch the stored credentials (account id + password hash) for this
    // username within the current community.
    const credentials = try models.accounts.getCredentialsByUsername(
        self.db,
        username,
        community_id,
        self.allocator,
    );
    defer util.deepFree(self.allocator, credentials);
    try verifyPassword(credentials.password_hash, password, self.allocator);
    // Generate the raw token and derive the hash to persist; only the hash
    // ever touches the database.
    const token = try generateToken(self.allocator);
    errdefer util.deepFree(self.allocator, token);
    const token_hash = hashToken(token, self.allocator) catch |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        // hashToken only rejects malformed base64, and we just generated
        // this token ourselves, so other errors are impossible here.
        else => unreachable,
    };
    defer util.deepFree(self.allocator, token_hash);
    const tx = try self.db.begin();
    errdefer tx.rollback();
    // ensure that the password has not changed in the meantime
    // (the verification above ran outside this transaction, so re-read the
    // hash inside it to close the race window)
    {
        const updated_info = try models.accounts.getCredentialsByUsername(
            tx,
            username,
            community_id,
            self.allocator,
        );
        defer util.deepFree(self.allocator, updated_info);
        if (!std.mem.eql(u8, credentials.password_hash, updated_info.password_hash)) return error.InvalidLogin;
    }
    try models.tokens.create(tx, credentials.account_id, token_hash, self.allocator);
    try tx.commit();
    // Read the persisted row back to report the issue timestamp.
    const info = try models.tokens.getByHash(self.db, token_hash, community_id, self.allocator);
    defer util.deepFree(self.allocator, info);
    return .{
        .value = token,
        .info = .{
            .user_id = info.account_id,
            .issued_at = info.issued_at,
        },
    };
}
/// Resolves a raw login token to its metadata within the current
/// community; unknown tokens surface as error.InvalidToken (mapped by
/// models.tokens.getByHash).
pub fn verifyToken(self: anytype, token: []const u8) !Token.Info {
    const alloc = self.allocator;

    const token_hash = try hashToken(token, alloc);
    defer alloc.free(token_hash);

    const row = try models.tokens.getByHash(self.db, token_hash, self.context.community.id, alloc);
    defer util.deepFree(alloc, row);

    return Token.Info{
        .user_id = row.account_id,
        .issued_at = row.issued_at,
    };
}
};
}
// We use scrypt, a password hashing algorithm that attempts to slow down
// GPU-based cracking approaches by using large amounts of memory, for
// password hashing.
// Attempting to calculate/verify a hash will use about 50mb of work space.
const scrypt = std.crypto.pwhash.scrypt;
const password_hash_len = 128;
/// Checks `password` against a stored PHC-format scrypt `hash`.
/// A mismatch yields error.InvalidLogin; any other hashing problem
/// (e.g. malformed hash string, allocation failure) yields
/// error.HashFailure.
fn verifyPassword(
    hash: []const u8,
    password: []const u8,
    alloc: std.mem.Allocator,
) !void {
    scrypt.strVerify(hash, password, .{ .allocator = alloc }) catch |err| {
        if (err == error.PasswordVerificationFailed) return error.InvalidLogin;
        return error.HashFailure;
    };
}
/// Hashes `password` with scrypt and returns the PHC-encoded hash string.
/// Caller owns the returned slice and frees it with `alloc`.
fn hashPassword(password: []const u8, alloc: std.mem.Allocator) ![]const u8 {
    // Hash into a stack buffer first: strHash returns a slice covering only
    // the bytes it actually wrote. The previous code handed the caller a
    // sub-slice of a 128-byte heap allocation, so the caller's
    // `alloc.free(hash)` freed with the wrong length. Duplicating the exact
    // result guarantees the caller frees precisely what was allocated.
    var buf: [password_hash_len]u8 = undefined;
    const hash = scrypt.strHash(
        password,
        .{
            .allocator = alloc,
            .params = scrypt.Params.interactive,
            .encoding = .phc,
        },
        &buf,
    ) catch return error.HashFailure;
    return try alloc.dupe(u8, hash);
}
/// A raw token is a sequence of N random bytes, base64 encoded.
/// When the token is generated:
/// - The hash of the token is calculated by:
/// 1. Decoding the base64 text
/// 2. Calculating the SHA256 hash of this text
/// 3. Encoding the hash back as base64
/// - The b64 encoded hash is stored in the database
/// - The original token is returned to the user
/// * The user will treat it as opaque text
/// When the token is verified:
/// - The hash of the token is taken as shown above
/// - The database is scanned for a token matching this hash
/// - If none can be found, the token is invalid
const Sha256 = std.crypto.hash.sha2.Sha256;
const Base64Encoder = std.base64.standard.Encoder;
const Base64Decoder = std.base64.standard.Decoder;
const token_len = 12;
/// Produces a fresh random login token as standard base64 text
/// (token_len random bytes, encoded). Caller owns the returned slice.
fn generateToken(alloc: std.mem.Allocator) ![]const u8 {
    var raw: [token_len]u8 = undefined;
    std.crypto.random.bytes(&raw);

    const encoded = try alloc.alloc(u8, Base64Encoder.calcSize(raw.len));
    return Base64Encoder.encode(encoded, &raw);
}
/// Derives the storable hash of a raw base64 token: decode the base64,
/// SHA-256 the raw bytes, re-encode the digest as base64.
/// Malformed or wrong-length input yields error.InvalidToken.
/// Caller owns the returned slice.
fn hashToken(token_b64: []const u8, alloc: std.mem.Allocator) ![]const u8 {
    const decoded_len = Base64Decoder.calcSizeForSlice(token_b64) catch return error.InvalidToken;
    if (decoded_len != token_len) return error.InvalidToken;

    var raw: [token_len]u8 = undefined;
    Base64Decoder.decode(&raw, token_b64) catch return error.InvalidToken;

    var digest: [Sha256.digest_length]u8 = undefined;
    Sha256.hash(&raw, &digest, .{});

    const out = try alloc.alloc(u8, Base64Encoder.calcSize(digest.len));
    return Base64Encoder.encode(out, &digest);
}
/// Minimal stand-in for a transaction handle, used by the tests below.
/// It only tracks nesting depth and whether a commit/rollback happened;
/// it performs no storage.
const TestDb = struct {
    // Current transaction/savepoint nesting depth.
    tx_level: usize = 0,
    // Set once rollback() has been called.
    rolled_back: bool = false,
    // Set once commitOrRelease() has been called.
    committed: bool = false,
    fn beginOrSavepoint(self: *TestDb) !*TestDb {
        self.tx_level += 1;
        return self;
    }
    fn rollback(self: *TestDb) void {
        self.rolled_back = true;
        self.tx_level -= 1;
    }
    fn commitOrRelease(self: *TestDb) !void {
        self.committed = true;
        self.tx_level -= 1;
    }
};
// Unit test for registration: wires mock services into methods() and
// checks that the happy path commits exactly one transaction on TestDb.
test "register" {
    comptime var exp_code = "code";
    comptime var exp_community = Uuid.parse("a210c035-c9e1-4361-82a2-aaeac8e40dc6") catch unreachable;
    comptime var uid = Uuid.parse("6d951fcc-1c9f-497b-9c96-31dfb9873708") catch unreachable;
    const MockSvc = struct {
        // Returns a never-expiring, unused .user invite for exp_code.
        const invites = struct {
            fn getByCode(db: *TestDb, code: []const u8, community_id: Uuid, alloc: std.mem.Allocator) !Invite {
                try std.testing.expectEqual(db.tx_level, 1);
                try std.testing.expectEqualStrings(exp_code, code);
                try std.testing.expectEqual(exp_community, community_id);
                return try util.deepClone(alloc, Invite{
                    .id = Uuid.parse("eac18f43-4dcc-489f-9fb5-4c1633e7b4e0") catch unreachable,
                    .created_by = Uuid.parse("6d951fcc-1c9f-497b-9c96-31dfb9873708") catch unreachable,
                    .community_id = exp_community,
                    .name = "test invite",
                    .code = exp_code,
                    .kind = .user,
                    .created_at = DateTime.parse("2022-12-21T09:05:50Z") catch unreachable,
                    .times_used = 0,
                    .expires_at = null,
                    .max_uses = null,
                });
            }
        };
        // Asserts registration happens inside the transaction and with the
        // exact credentials passed to methods().register.
        const auth = struct {
            fn register(
                db: *TestDb,
                username: []const u8,
                password: []const u8,
                community_id: Uuid,
                _: AccountCreateOptions,
                _: std.mem.Allocator,
            ) anyerror!Uuid {
                try std.testing.expectEqual(db.tx_level, 1);
                try std.testing.expectEqualStrings("root", username);
                try std.testing.expectEqualStrings("password", password);
                try std.testing.expectEqual(exp_community, community_id);
                return uid;
            }
        };
        // Echoes back a zero-initialized actor for the newly created id.
        const actors = struct {
            fn get(_: *TestDb, id: Uuid, alloc: std.mem.Allocator) anyerror!types.Actor {
                try std.testing.expectEqual(uid, id);
                return try util.deepClone(alloc, std.mem.zeroInit(types.Actor, .{
                    .id = id,
                    .username = "root",
                    .host = "example.com",
                    .community_id = exp_community,
                }));
            }
        };
        const communities = struct {
            fn transferOwnership(_: *TestDb, _: Uuid, _: Uuid) anyerror!void {}
        };
    };
    var db = TestDb{};
    util.deepFree(std.testing.allocator, try methods(MockSvc).register(.{
        .db = &db,
        .allocator = std.testing.allocator,
        .community = .{
            .id = exp_community,
            .kind = .local,
        },
    }, "root", "password", .{}));
    // The happy path must commit (never roll back) and fully unwind the
    // transaction nesting.
    try std.testing.expectEqual(false, db.rolled_back);
    try std.testing.expectEqual(true, db.committed);
    try std.testing.expectEqual(@as(usize, 0), db.tx_level);
}

View file

@ -1,66 +0,0 @@
const std = @import("std");
const util = @import("util");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
pub const Role = enum {
user,
admin,
};
pub const CreateOptions = struct {
invite_id: ?Uuid = null,
email: ?[]const u8 = null,
role: Role = .user,
};
/// Creates a local account with the given information.
/// Inserts the `account` row and its initial `password` row inside a
/// single transaction/savepoint so the two can never diverge.
pub fn create(
    db: anytype,
    for_actor: Uuid,
    password_hash: []const u8,
    options: CreateOptions,
    alloc: std.mem.Allocator,
) !void {
    const tx = try db.beginOrSavepoint();
    // errdefer also fires on the explicit `return error.DatabaseFailure`
    // branches below, rolling the savepoint back.
    errdefer tx.rollback();
    tx.insert("account", .{
        .id = for_actor,
        .invite_id = options.invite_id,
        .email = options.email,
        .kind = options.role,
    }, alloc) catch return error.DatabaseFailure;
    tx.insert("password", .{
        .account_id = for_actor,
        .hash = password_hash,
        .changed_at = DateTime.now(),
    }, alloc) catch return error.DatabaseFailure;
    tx.commitOrRelease() catch return error.DatabaseFailure;
}
pub const Credentials = struct {
account_id: Uuid,
password_hash: []const u8,
};
/// Looks up the account id and stored password hash for `username` within
/// `community_id`. "No such row" is mapped to error.InvalidLogin so
/// callers cannot distinguish an unknown user from a bad password.
/// Caller owns the returned Credentials.
pub fn getCredentialsByUsername(db: anytype, username: []const u8, community_id: Uuid, alloc: std.mem.Allocator) !Credentials {
    return db.queryRow(
        Credentials,
        \\SELECT account.id as account_id, password.hash as password_hash
        \\FROM password
        \\ JOIN account
        \\ JOIN actor
        \\ ON password.account_id = account.id AND account.id = actor.id
        \\WHERE actor.username = $1
        \\ AND actor.community_id = $2
        \\LIMIT 1
    ,
        .{ username, community_id },
        alloc,
    ) catch |err| return switch (err) {
        error.NoRows => error.InvalidLogin,
        else => |e| return e,
    };
}

View file

@ -1,13 +1,13 @@
const std = @import("std");
const util = @import("util");
const sql = @import("sql");
const auth = @import("./auth.zig");
const common = @import("./common.zig");
const files = @import("./files.zig");
const types = @import("../types.zig");
const Partial = common.Partial;
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const Actor = types.Actor;
pub const CreateError = error{
UsernameTaken,
@ -17,6 +17,19 @@ pub const CreateError = error{
DatabaseFailure,
};
pub const ActorDetailed = struct {
id: Uuid,
username: []const u8,
host: []const u8,
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: ProfileField,
created_at: DateTime,
updated_at: DateTime,
};
pub const LookupError = error{
DatabaseFailure,
};
@ -88,6 +101,35 @@ pub fn create(
return id;
}
pub const ProfileField = struct {
key: []const u8,
value: []const u8,
};
pub const Actor = struct {
id: Uuid,
username: []const u8,
host: []const u8,
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: []const ProfileField,
community_id: Uuid,
created_at: DateTime,
updated_at: DateTime,
pub const sql_serialize = struct {
pub const profile_fields = .json;
};
};
pub const GetError = error{ NotFound, DatabaseFailure };
pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Actor {
return db.queryRow(
@ -120,11 +162,19 @@ pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Actor {
};
}
pub const PartialProfile = Partial(Profile);
pub const Profile = struct {
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: []const ProfileField,
};
pub const max_fields = 32;
pub const max_display_name_len = 128;
pub const max_bio = 1 << 16;
pub fn updateProfile(db: anytype, id: Uuid, new: Actor.ProfileUpdateArgs, alloc: std.mem.Allocator) !void {
pub fn updateProfile(db: anytype, id: Uuid, new: PartialProfile, alloc: std.mem.Allocator) !void {
var builder = sql.QueryBuilder.init(alloc);
defer builder.deinit();

259
src/api/services/auth.zig Normal file
View file

@ -0,0 +1,259 @@
const std = @import("std");
const util = @import("util");
const actors = @import("./actors.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
pub const RegistrationError = error{
PasswordTooShort,
DatabaseFailure,
HashFailure,
OutOfMemory,
} || actors.CreateError;
pub const min_password_chars = 12;
pub const Kind = enum {
user,
admin,
};
pub const RegistrationOptions = struct {
invite_id: ?Uuid = null,
email: ?[]const u8 = null,
kind: Kind = .user,
};
/// Creates a local account with the given information and returns the
/// account id.
/// Validates the password length and username first (cheap checks before
/// the expensive hash), then inserts the actor, account, password, and
/// root drive_entry rows in one transaction/savepoint.
pub fn register(
    db: anytype,
    username: []const u8,
    password: []const u8,
    community_id: Uuid,
    options: RegistrationOptions,
    alloc: std.mem.Allocator,
) RegistrationError!Uuid {
    if (password.len < min_password_chars) return error.PasswordTooShort;
    // perform pre-validation to avoid having to hash the password if it fails
    try actors.validateUsername(username, false);
    // Hash before opening the transaction; scrypt is deliberately slow and
    // there is no reason to hold a transaction open for it.
    const hash = try hashPassword(password, alloc);
    defer alloc.free(hash);
    const tx = db.beginOrSavepoint() catch return error.DatabaseFailure;
    errdefer tx.rollback();
    const id = try actors.create(tx, username, community_id, false, alloc);
    tx.insert("account", .{
        .id = id,
        .invite_id = options.invite_id,
        .email = options.email,
        .kind = options.kind,
    }, alloc) catch return error.DatabaseFailure;
    tx.insert("password", .{
        .account_id = id,
        .hash = hash,
        .changed_at = DateTime.now(),
    }, alloc) catch return error.DatabaseFailure;
    // Every account gets a root drive entry whose id mirrors the account id.
    tx.insert("drive_entry", .{
        .id = id,
        .owner_id = id,
    }, alloc) catch return error.DatabaseFailure;
    tx.commitOrRelease() catch return error.DatabaseFailure;
    return id;
}
pub const LoginError = error{
InvalidLogin,
HashFailure,
DatabaseFailure,
OutOfMemory,
};
pub const LoginResult = struct {
token: []const u8,
user_id: Uuid,
};
/// Attempts to login to the account `@username@community` and creates
/// a login token/cookie for the user.
/// On success the caller owns the returned raw token string.
pub fn login(
    db: anytype,
    username: []const u8,
    community_id: Uuid,
    password: []const u8,
    alloc: std.mem.Allocator,
) LoginError!LoginResult {
    std.log.debug("user: {s}, community_id: {}", .{ username, community_id });
    // Fetch the account id and stored scrypt hash for this username within
    // the community; a missing row becomes InvalidLogin so callers cannot
    // distinguish an unknown user from a bad password.
    const info = db.queryRow(
        struct { account_id: Uuid, hash: []const u8 },
        \\SELECT account.id as account_id, password.hash
        \\FROM password
        \\ JOIN account
        \\ JOIN actor
        \\ ON password.account_id = account.id AND account.id = actor.id
        \\WHERE actor.username = $1
        \\ AND actor.community_id = $2
        \\LIMIT 1
    ,
        .{ username, community_id },
        alloc,
    ) catch |err| return switch (err) {
        error.NoRows => error.InvalidLogin,
        else => error.DatabaseFailure,
    };
    defer alloc.free(info.hash);
    try verifyPassword(info.hash, password, alloc);
    // Generate the raw token and derive its hash; only the hash is ever
    // written to the database.
    const token = try generateToken(alloc);
    errdefer util.deepFree(alloc, token);
    const token_hash = hashToken(token, alloc) catch |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        // hashToken only rejects malformed base64, which we just generated
        // ourselves, so any other error is impossible here.
        else => unreachable,
    };
    defer util.deepFree(alloc, token_hash);
    const tx = db.begin() catch return error.DatabaseFailure;
    errdefer tx.rollback();
    // ensure that the password has not changed in the meantime
    // (the verification above ran outside this transaction, so re-read the
    // hash inside it to close the race window)
    {
        const updated_info = tx.queryRow(
            struct { hash: []const u8 },
            \\SELECT hash
            \\FROM password
            \\WHERE account_id = $1
            \\LIMIT 1
        ,
            .{info.account_id},
            alloc,
        ) catch return error.DatabaseFailure;
        defer util.deepFree(alloc, updated_info);
        if (!std.mem.eql(u8, info.hash, updated_info.hash)) return error.InvalidLogin;
    }
    tx.insert("token", .{
        .account_id = info.account_id,
        .hash = token_hash,
        .issued_at = DateTime.now(),
    }, alloc) catch return error.DatabaseFailure;
    tx.commit() catch return error.DatabaseFailure;
    return LoginResult{
        .token = token,
        .user_id = info.account_id,
    };
}
pub const VerifyTokenError = error{ InvalidToken, DatabaseFailure, OutOfMemory };
pub const TokenInfo = struct {
user_id: Uuid,
issued_at: DateTime,
};
/// Resolves a raw client token to the owning account within `community_id`.
/// Unknown tokens yield error.InvalidToken.
pub fn verifyToken(
    db: anytype,
    token: []const u8,
    community_id: Uuid,
    alloc: std.mem.Allocator,
) VerifyTokenError!TokenInfo {
    // Only the hash of a token is stored, so hash before querying.
    const hash = try hashToken(token, alloc);
    defer alloc.free(hash);
    return db.queryRow(
        TokenInfo,
        \\SELECT token.account_id as user_id, token.issued_at
        \\FROM token
        \\ JOIN account
        \\ JOIN actor
        \\ ON token.account_id = account.id AND account.id = actor.id
        \\WHERE token.hash = $1 AND actor.community_id = $2
        \\LIMIT 1
    ,
        .{ hash, community_id },
        alloc,
    ) catch |err| switch (err) {
        error.NoRows => error.InvalidToken,
        else => error.DatabaseFailure,
    };
}
// We use scrypt, a password hashing algorithm that attempts to slow down
// GPU-based cracking approaches by using large amounts of memory, for
// password hashing.
// Attempting to calculate/verify a hash will use about 50mb of work space.
const scrypt = std.crypto.pwhash.scrypt;
const password_hash_len = 128;
/// Verifies `password` against a stored PHC-format scrypt `hash`.
/// Mismatches surface as error.InvalidLogin; any other hashing problem
/// becomes error.HashFailure.
fn verifyPassword(
    hash: []const u8,
    password: []const u8,
    alloc: std.mem.Allocator,
) LoginError!void {
    scrypt.strVerify(hash, password, .{ .allocator = alloc }) catch |err| {
        if (err == error.PasswordVerificationFailed) return error.InvalidLogin;
        return error.HashFailure;
    };
}
/// Hashes `password` with scrypt and returns the PHC-encoded hash string.
/// Caller owns the returned slice and frees it with `alloc`.
fn hashPassword(password: []const u8, alloc: std.mem.Allocator) ![]const u8 {
    // Hash into a stack buffer first: strHash returns a slice covering only
    // the bytes it actually wrote. The previous code handed the caller a
    // sub-slice of a 128-byte heap allocation, so the caller's
    // `alloc.free(hash)` freed with the wrong length. Duplicating the exact
    // result guarantees the caller frees precisely what was allocated.
    var buf: [password_hash_len]u8 = undefined;
    const hash = scrypt.strHash(
        password,
        .{
            .allocator = alloc,
            .params = scrypt.Params.interactive,
            .encoding = .phc,
        },
        &buf,
    ) catch return error.HashFailure;
    return try alloc.dupe(u8, hash);
}
/// A raw token is a sequence of N random bytes, base64 encoded.
/// When the token is generated:
/// - The hash of the token is calculated by:
/// 1. Decoding the base64 text
/// 2. Calculating the SHA256 hash of this text
/// 3. Encoding the hash back as base64
/// - The b64 encoded hash is stored in the database
/// - The original token is returned to the user
/// * The user will treat it as opaque text
/// When the token is verified:
/// - The hash of the token is taken as shown above
/// - The database is scanned for a token matching this hash
/// - If none can be found, the token is invalid
const Sha256 = std.crypto.hash.sha2.Sha256;
const Base64Encoder = std.base64.standard.Encoder;
const Base64Decoder = std.base64.standard.Decoder;
const token_len = 12;
/// Produces a fresh random login token as standard base64 text
/// (token_len random bytes, encoded). Caller owns the returned slice.
fn generateToken(alloc: std.mem.Allocator) ![]const u8 {
    var raw: [token_len]u8 = undefined;
    std.crypto.random.bytes(&raw);

    const encoded = try alloc.alloc(u8, Base64Encoder.calcSize(raw.len));
    return Base64Encoder.encode(encoded, &raw);
}
/// Derives the storable hash of a raw base64 token: decode the base64,
/// SHA-256 the raw bytes, re-encode the digest as base64.
/// Malformed or wrong-length input yields error.InvalidToken.
/// Caller owns the returned slice.
fn hashToken(token_b64: []const u8, alloc: std.mem.Allocator) ![]const u8 {
    const decoded_len = Base64Decoder.calcSizeForSlice(token_b64) catch return error.InvalidToken;
    if (decoded_len != token_len) return error.InvalidToken;

    var raw: [token_len]u8 = undefined;
    Base64Decoder.decode(&raw, token_b64) catch return error.InvalidToken;

    var digest: [Sha256.digest_length]u8 = undefined;
    Sha256.hash(&raw, &digest, .{});

    const out = try alloc.alloc(u8, Base64Encoder.calcSize(digest.len));
    return Base64Encoder.encode(out, &digest);
}

View file

@ -2,15 +2,41 @@ const std = @import("std");
const builtin = @import("builtin");
const util = @import("util");
const sql = @import("sql");
const common = @import("./common.zig");
const actors = @import("./actors.zig");
const types = @import("../types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const Community = types.Community;
const CreateOptions = Community.CreateOptions;
const QueryArgs = Community.QueryArgs;
const QueryResult = types.QueryResult(Community);
pub const Community = struct {
pub const Kind = enum {
admin,
local,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const Scheme = enum {
https,
http,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
id: Uuid,
owner_id: ?Uuid,
host: []const u8,
name: []const u8,
scheme: Scheme,
kind: Kind,
created_at: DateTime,
};
pub const CreateOptions = struct {
name: ?[]const u8 = null,
kind: Community.Kind = .local,
};
pub const CreateError = error{
UnsupportedScheme,
@ -139,6 +165,61 @@ pub fn transferOwnership(db: anytype, community_id: Uuid, new_owner: Uuid) !void
) catch return error.DatabaseFailure;
}
pub const QueryArgs = struct {
pub const OrderBy = enum {
name,
host,
created_at,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const Direction = common.Direction;
pub const PageDirection = common.PageDirection;
pub const Prev = std.meta.Child(std.meta.fieldInfo(QueryArgs, .prev).field_type);
pub const OrderVal = std.meta.fieldInfo(Prev, .order_val).field_type;
// Max items to fetch
max_items: usize = 20,
// Selection filters
owner_id: ?Uuid = null, // searches for communities owned by this user
like: ?[]const u8 = null, // searches for communities with host or name LIKE '%?%'
created_before: ?DateTime = null,
created_after: ?DateTime = null,
// Ordering parameter
order_by: OrderBy = .created_at,
direction: Direction = .ascending,
// Page start parameter
// This struct is a reference to the last value scanned
// If prev is present, then prev.order_val must have the same tag as order_by
// "prev" here refers to it being the previous value returned. It may be that
// prev refers to the item directly after the results you are about to recieve,
// if you are querying the previous page.
prev: ?struct {
id: Uuid,
order_val: union(OrderBy) {
name: []const u8,
host: []const u8,
created_at: DateTime,
},
} = null,
// What direction to scan the page window
// If "forward", then "prev" is interpreted as the item directly before the items
// to query, in the direction of "direction" above. If "backward", then the opposite
page_direction: PageDirection = .forward,
};
pub const QueryResult = struct {
items: []const Community,
prev_page: QueryArgs,
next_page: QueryArgs,
};
const max_max_items = 100;
pub const QueryError = error{

View file

@ -1,11 +1,41 @@
const std = @import("std");
const sql = @import("sql");
const util = @import("util");
const types = @import("../types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const FileUpload = types.FileUpload;
pub const FileStatus = enum {
uploading,
uploaded,
external,
deleted,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const FileUpload = struct {
id: Uuid,
owner_id: Uuid,
size: usize,
filename: []const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: bool,
status: FileStatus,
created_at: DateTime,
updated_at: DateTime,
};
pub const FileMeta = struct {
filename: []const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: bool,
};
pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) !FileUpload {
return try db.queryRow(
@ -30,7 +60,26 @@ pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) !FileUpload {
);
}
pub fn update(db: anytype, id: Uuid, meta: FileUpload.UpdateArgs, alloc: std.mem.Allocator) !void {
pub const PartialMeta = Partial(FileMeta);
/// Builds a struct type mirroring `T` with every field wrapped in an
/// optional and defaulted to null — used for PATCH-style partial updates
/// where only the provided fields are written.
pub fn Partial(comptime T: type) type {
    const t_fields = std.meta.fields(T);
    var fields: [t_fields.len]std.builtin.Type.StructField = undefined;
    // Wrap each field's type in `?field_type` with a null default.
    for (std.meta.fields(T)) |f, i| fields[i] = .{
        .name = f.name,
        .field_type = ?f.field_type,
        .default_value = &@as(?f.field_type, null),
        .is_comptime = false,
        .alignment = @alignOf(?f.field_type),
    };
    return @Type(.{ .Struct = .{
        .layout = .Auto,
        .fields = &fields,
        .decls = &.{},
        .is_tuple = false,
    } });
}
pub fn update(db: anytype, id: Uuid, meta: PartialMeta, alloc: std.mem.Allocator) !void {
var builder = sql.QueryBuilder.init(alloc);
defer builder.deinit();
@ -57,7 +106,7 @@ pub fn update(db: anytype, id: Uuid, meta: FileUpload.UpdateArgs, alloc: std.mem
}, alloc);
}
pub fn create(db: anytype, owner_id: Uuid, meta: FileUpload.CreateOptions, data: []const u8, alloc: std.mem.Allocator) !Uuid {
pub fn create(db: anytype, owner_id: Uuid, meta: FileMeta, data: []const u8, alloc: std.mem.Allocator) !Uuid {
const id = Uuid.randV4(util.getThreadPrng());
const now = DateTime.now();
try db.insert("file_upload", .{
@ -71,7 +120,7 @@ pub fn create(db: anytype, owner_id: Uuid, meta: FileUpload.CreateOptions, data:
.content_type = meta.content_type,
.sensitive = meta.sensitive,
.status = FileUpload.Status.uploading,
.status = FileStatus.uploading,
.created_at = now,
.updated_at = now,

View file

@ -1,11 +1,9 @@
const std = @import("std");
const builtin = @import("builtin");
const util = @import("util");
const types = @import("../types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const Invite = types.Invite;
// 9 random bytes = 12 random b64
const rand_len = 8;
@ -14,14 +12,40 @@ const code_len = 12;
const Encoder = std.base64.url_safe.Encoder;
const Decoder = std.base64.url_safe.Decoder;
pub fn create(
db: anytype,
created_by: Uuid,
pub const Kind = enum {
system,
community_owner,
user,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
const InviteCount = usize;
pub const Invite = struct {
id: Uuid,
created_by: Uuid, // User ID
community_id: Uuid,
name: []const u8,
options: Invite.InternalCreateOptions,
alloc: std.mem.Allocator,
) !Uuid {
code: []const u8,
created_at: DateTime,
times_used: InviteCount,
expires_at: ?DateTime,
max_uses: ?InviteCount,
kind: Kind,
};
pub const InviteOptions = struct {
name: ?[]const u8 = null,
max_uses: ?InviteCount = null,
lifespan: ?DateTime.Duration = null,
kind: Kind = .user,
};
pub fn create(db: anytype, created_by: Uuid, community_id: ?Uuid, options: InviteOptions, alloc: std.mem.Allocator) !Uuid {
const id = Uuid.randV4(util.getThreadPrng());
var code_bytes: [rand_len]u8 = undefined;
@ -31,6 +55,7 @@ pub fn create(
defer alloc.free(code);
_ = Encoder.encode(code, &code_bytes);
const name = options.name orelse code;
const created_at = DateTime.now();
try db.insert(

View file

@ -2,13 +2,28 @@ const std = @import("std");
const util = @import("util");
const sql = @import("sql");
const common = @import("./common.zig");
const types = @import("../types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const Note = types.Note;
const QueryArgs = Note.QueryArgs;
const QueryResult = types.QueryResult(Note);
pub const Note = struct {
id: Uuid,
author_id: Uuid,
content: []const u8,
created_at: DateTime,
};
pub const NoteDetailed = struct {
id: Uuid,
author: struct {
id: Uuid,
username: []const u8,
},
content: []const u8,
created_at: DateTime,
};
pub const CreateError = error{
DatabaseFailure,
@ -43,27 +58,10 @@ const selectStarFromNote = std.fmt.comptimePrint(
pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Note {
return db.queryRow(
Note,
\\SELECT
\\ note.id,
\\ note.content,
\\ note.created_at,
\\ actor.id AS "author.id",
\\ actor.username AS "author.username",
\\ community.host AS "author.host",
\\ actor.display_name AS "author.display_name",
\\ actor.bio AS "author.bio",
\\ actor.avatar_file_id AS "author.avatar_file_id",
\\ actor.header_file_id AS "author.header_file_id",
\\ actor.profile_fields AS "author.profile_fields",
\\ actor.community_id AS "author.community_id",
\\ actor.created_at AS "author.created_at",
\\ actor.updated_at AS "author.updated_at"
\\FROM note
\\ JOIN actor ON actor.id = note.author_id
\\ JOIN community ON community.id = actor.community_id
\\WHERE id = $1
\\LIMIT 1
,
selectStarFromNote ++
\\WHERE id = $1
\\LIMIT 1
,
.{id},
alloc,
) catch |err| switch (err) {
@ -74,29 +72,40 @@ pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Note {
const max_max_items = 100;
pub const QueryArgs = struct {
pub const PageDirection = common.PageDirection;
pub const Prev = std.meta.Child(std.meta.fieldInfo(@This(), .prev).field_type);
max_items: usize = 20,
created_before: ?DateTime = null,
created_after: ?DateTime = null,
community_id: ?Uuid = null,
followed_by: ?Uuid = null,
prev: ?struct {
id: Uuid,
created_at: DateTime,
} = null,
page_direction: PageDirection = .forward,
};
pub const QueryResult = struct {
items: []NoteDetailed,
prev_page: QueryArgs,
next_page: QueryArgs,
};
pub fn query(db: anytype, args: QueryArgs, alloc: std.mem.Allocator) !QueryResult {
var builder = sql.QueryBuilder.init(alloc);
defer builder.deinit();
try builder.appendSlice(
\\SELECT
\\ note.id,
\\ note.content,
\\ note.created_at,
\\ actor.id AS "author.id",
\\ actor.username AS "author.username",
\\ community.host AS "author.host",
\\ actor.display_name AS "author.display_name",
\\ actor.bio AS "author.bio",
\\ actor.avatar_file_id AS "author.avatar_file_id",
\\ actor.header_file_id AS "author.header_file_id",
\\ actor.profile_fields AS "author.profile_fields",
\\ actor.community_id AS "author.community_id",
\\ actor.created_at AS "author.created_at",
\\ actor.updated_at AS "author.updated_at"
\\SELECT note.id, note.content, note.created_at, actor.id AS "author.id", actor.username AS "author.username"
\\FROM note
\\ JOIN actor ON actor.id = note.author_id
\\ JOIN community ON community.id = actor.community_id
\\
);
@ -144,7 +153,7 @@ pub fn query(db: anytype, args: QueryArgs, alloc: std.mem.Allocator) !QueryResul
};
const results = try db.queryRowsWithOptions(
Note,
NoteDetailed,
try builder.terminate(),
query_args,
max_items,

View file

@ -1,40 +0,0 @@
const std = @import("std");
const util = @import("util");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
pub const Token = struct {
account_id: Uuid,
issued_at: DateTime,
hash: []const u8,
};
/// Persists a freshly issued token hash for `account_id`, stamped with
/// the current time.
pub fn create(db: anytype, account_id: Uuid, hash: []const u8, alloc: std.mem.Allocator) !void {
    try db.insert("token", .{
        .account_id = account_id,
        .hash = hash,
        .issued_at = DateTime.now(),
    }, alloc);
}
/// Finds the token row matching `hash` whose owning actor belongs to
/// `community_id`. A missing row is mapped to error.InvalidToken; any
/// other query failure becomes error.DatabaseFailure.
pub fn getByHash(db: anytype, hash: []const u8, community_id: Uuid, alloc: std.mem.Allocator) !Token {
    return db.queryRow(
        Token,
        \\SELECT account_id, issued_at, hash
        \\FROM token
        \\ JOIN account
        \\ JOIN actor
        \\ ON token.account_id = account.id AND account.id = actor.id
        \\WHERE token.hash = $1 AND actor.community_id = $2
        \\LIMIT 1
    ,
        .{ hash, community_id },
        alloc,
    ) catch |err| switch (err) {
        error.NoRows => error.InvalidToken,
        else => error.DatabaseFailure,
    };
}

View file

@ -1,362 +0,0 @@
const util = @import("util");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const common = struct {
const Direction = enum {
ascending,
descending,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
const PageDirection = enum {
forward,
backward,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
};
pub fn QueryResult(comptime T: type) type {
return QueryResultArguments(T, T.QueryArgs);
}
pub fn QueryResultArguments(comptime T: type, comptime A: type) type {
return struct {
items: []T,
next_page: A,
prev_page: A,
};
}
pub const Account = struct {
pub const Auth = struct {
password_hash: []const u8,
updated_at: DateTime,
};
pub const Kind = enum {
user,
admin,
};
id: Uuid,
invite_id: ?Uuid,
email: ?[]const u8,
kind: Kind,
};
pub const Actor = struct {
pub const ProfileField = struct {
key: []const u8,
value: []const u8,
};
id: Uuid,
username: []const u8,
host: []const u8,
community_id: Uuid,
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: []const ProfileField,
created_at: DateTime,
updated_at: DateTime,
// TODO: get rid of this
pub const Profile = struct {
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: []const ProfileField,
pub const sql_serialize = struct {
pub const profile_fields = .json;
};
};
pub const ProfileUpdateArgs = struct {
display_name: ??[]const u8,
bio: ?[]const u8,
avatar_file_id: ??Uuid,
header_file_id: ??Uuid,
profile_fields: ?[]const ProfileField,
pub const sql_serialize = struct {
pub const profile_fields = .json;
};
};
pub const sql_serialize = struct {
pub const profile_fields = .json;
};
};
pub const Community = struct {
pub const Kind = enum {
admin,
local,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const Scheme = enum {
https,
http,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
id: Uuid,
owner_id: ?Uuid,
host: []const u8,
name: []const u8,
scheme: Scheme,
kind: Kind,
created_at: DateTime,
pub const CreateOptions = struct {
name: ?[]const u8 = null,
kind: Kind = .local,
};
pub const QueryArgs = struct {
pub const OrderBy = enum {
name,
host,
created_at,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const Direction = common.Direction;
pub const PageDirection = common.PageDirection;
pub const Prev = struct {
id: Uuid,
order_val: OrderVal,
};
pub const OrderVal = union(OrderBy) {
name: []const u8,
host: []const u8,
created_at: DateTime,
};
// Max items to fetch
max_items: usize = 20,
// Selection filters
owner_id: ?Uuid = null, // searches for communities owned by this user
like: ?[]const u8 = null, // searches for communities with host or name LIKE '%?%'
created_before: ?DateTime = null,
created_after: ?DateTime = null,
// Ordering parameter
order_by: OrderBy = .created_at,
direction: Direction = .ascending,
// Page start parameter(s)
// This struct is a reference to the last value scanned
// If prev is present, then prev.order_val must have the same tag as order_by
// "prev" here refers to it being the previous value returned. It may be that
// prev refers to the item directly after the results you are about to recieve,
// if you are querying the previous page.
prev: ?Prev = null,
// What direction to scan the page window
// If "forward", then "prev" is interpreted as the item directly before the items
// to query, in the direction of "direction" above. If "backward", then the opposite
page_direction: PageDirection = .forward,
};
};
pub const DriveEntry = struct {
pub const Kind = enum {
dir,
file,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
id: Uuid,
owner_id: Uuid,
name: ?[]const u8,
path: []const u8,
parent_directory_id: ?Uuid,
file_id: ?Uuid,
kind: Kind,
};
pub const FileUpload = struct {
pub const Status = enum {
uploading,
uploaded,
external,
deleted,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
id: Uuid,
owner_id: Uuid,
size: usize,
filename: []const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: bool,
status: Status,
created_at: DateTime,
updated_at: DateTime,
pub const CreateOptions = struct {
filename: []const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: bool,
};
pub const UpdateArgs = struct {
filename: ?[]const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: ?bool,
};
};
/// A registration invite code.
pub const Invite = struct {
    const UseCount = usize;
    /// Origin/category of the invite; exact semantics of each variant are not
    /// visible here — see the invites service.
    pub const Kind = enum {
        system,
        community_owner,
        user,
        // Serialize as the enum's name rather than its integer tag.
        pub const jsonStringify = util.jsonSerializeEnumAsString;
    };
    id: Uuid,
    created_by: Uuid, // User ID
    /// Community the invite admits users into.
    community_id: Uuid,
    name: []const u8,
    /// The code a user presents to redeem the invite.
    code: []const u8,
    created_at: DateTime,
    times_used: UseCount,
    /// Null means the invite never expires.
    expires_at: ?DateTime,
    /// Null means unlimited uses.
    max_uses: ?UseCount,
    kind: Kind,
    /// Options accepted from API callers when creating an invite.
    pub const CreateOptions = struct {
        name: ?[]const u8 = null,
        max_uses: ?UseCount = null,
        lifespan: ?DateTime.Duration = null,
        kind: Kind = .user,
        to_community: ?Uuid = null,
    };
    /// Internal variant of CreateOptions without `to_community` —
    /// NOTE(review): near-duplicate of CreateOptions; confirm both are needed.
    pub const InternalCreateOptions = struct {
        name: ?[]const u8 = null,
        max_uses: ?UseCount = null,
        lifespan: ?DateTime.Duration = null,
        kind: Kind = .user,
    };
};
/// A follow relationship between two actors.
pub const Follow = struct {
    id: Uuid,
    /// The actor doing the following.
    followed_by_id: Uuid,
    /// The actor being followed.
    followee_id: Uuid,
    created_at: DateTime,
    /// Filter, ordering, and keyset-pagination parameters for follow queries.
    pub const QueryArgs = struct {
        pub const OrderBy = enum {
            created_at,
        };
        pub const Direction = common.Direction;
        pub const PageDirection = common.PageDirection;
        /// Pagination cursor: the last row scanned on the previous page.
        /// The union tag must match the `order_by` in use.
        pub const Prev = struct {
            id: Uuid,
            order_val: union(OrderBy) {
                created_at: DateTime,
            },
        };
        // Max items to fetch.
        max_items: usize = 20,
        /// Restrict to follows made by this actor.
        followed_by_id: ?Uuid = null,
        /// Restrict to follows targeting this actor.
        followee_id: ?Uuid = null,
        order_by: OrderBy = .created_at,
        direction: Direction = .descending,
        prev: ?Prev = null,
        page_direction: PageDirection = .forward,
    };
};
/// A note (post) with its author embedded.
pub const Note = struct {
    id: Uuid,
    author: Actor,
    content: []const u8,
    created_at: DateTime,
    /// Filter and keyset-pagination parameters for note queries.
    pub const QueryArgs = struct {
        pub const PageDirection = common.PageDirection;
        /// Pagination cursor: the last row scanned on the previous page.
        pub const Prev = struct {
            id: Uuid,
            created_at: DateTime,
        };
        // Max items to fetch.
        max_items: usize = 20,
        created_before: ?DateTime = null,
        created_after: ?DateTime = null,
        /// Restrict to notes from this community.
        community_id: ?Uuid = null,
        /// Presumably restricts to notes by actors this user follows — TODO confirm.
        followed_by: ?Uuid = null,
        prev: ?Prev = null,
        page_direction: PageDirection = .forward,
    };
    // TODO: This sucks
    /// Instructs the SQL layer to (de)serialize this nested column as JSON.
    pub const sql_serialize = struct {
        pub const @"author.profile_fields" = .json;
    };
};
/// An authentication token together with its associated metadata.
pub const Token = struct {
    /// Metadata attached to a token.
    pub const Info = struct {
        /// User the token authenticates as.
        user_id: Uuid,
        issued_at: DateTime,
    };
    /// The opaque token string presented by clients.
    value: []const u8,
    info: Info,
};

View file

@ -282,7 +282,7 @@ pub const Response = struct {
pub fn template(self: *Self, status_code: http.Status, srv: anytype, comptime templ: []const u8, data: anytype) !void {
try self.headers.put("Content-Type", "text/html");
const user = if (srv.context.userId()) |uid| try srv.getUser(uid) else null;
const user = if (srv.user_id) |uid| try srv.getUser(uid) else null;
defer util.deepFree(srv.allocator, user);
var stream = try self.open(status_code);
@ -298,8 +298,9 @@ pub const Response = struct {
@embedFile("./controllers/web/_format.tmpl.html"),
data,
.{
.community = srv.context.community,
.community = srv.community,
.user = user,
.user_id = srv.user_id,
},
);

View file

@ -2,7 +2,7 @@ const api = @import("api");
const util = @import("util");
const controller_utils = @import("../../controllers.zig").helpers;
const QueryArgs = api.Community.QueryArgs;
const QueryArgs = api.CommunityQueryArgs;
pub const create = struct {
pub const method = .POST;
@ -25,9 +25,9 @@ pub const query = struct {
pub const path = "/communities";
pub const Query = struct {
const OrderBy = api.Community.QueryArgs.OrderBy;
const Direction = api.Community.QueryArgs.Direction;
const PageDirection = api.Community.QueryArgs.PageDirection;
const OrderBy = api.CommunityQueryArgs.OrderBy;
const Direction = api.CommunityQueryArgs.Direction;
const PageDirection = api.CommunityQueryArgs.PageDirection;
// Max items to fetch
max_items: usize = 20,
@ -80,7 +80,7 @@ pub const query = struct {
});
const convert = struct {
fn func(args: api.Community.QueryArgs) Query {
fn func(args: api.CommunityQueryArgs) Query {
return .{
.max_items = args.max_items,
.owner_id = args.owner_id,

View file

@ -50,7 +50,7 @@ pub const update_profile = struct {
id: util.Uuid,
};
pub const Body = api.Actor.ProfileUpdateArgs;
pub const Body = api.PartialUserProfile;
// TODO: I don't like that the request body and response body are substantially different
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {

View file

@ -61,7 +61,7 @@ const index = struct {
pub const method = .GET;
pub fn handler(_: anytype, res: anytype, srv: anytype) !void {
if (srv.context.userId() == null) {
if (srv.user_id == null) {
try res.headers.put("Location", about.path);
return res.status(.see_other);
}
@ -96,7 +96,7 @@ const login = struct {
try res.headers.put("Location", index.path);
var buf: [64]u8 = undefined;
const cookie_name = try std.fmt.bufPrint(&buf, "token.{s}", .{req.body.username});
try res.headers.setCookie(cookie_name, token.value, .{});
try res.headers.setCookie(cookie_name, token.token, .{});
try res.headers.setCookie("active_account", req.body.username, .{ .HttpOnly = false });
try res.status(.see_other);
@ -169,7 +169,7 @@ const signup = struct {
error.UsernameEmpty => "Username cannot be empty",
error.UsernameContainsInvalidChar => "Username must be composed of alphanumeric characters and underscore",
error.UsernameTooLong => "Username too long",
//error.PasswordTooShort => "Password too short, must be at least 12 chars",
error.PasswordTooShort => "Password too short, must be at least 12 chars",
error.UsernameTaken => blk: {
status = .unprocessable_entity;
@ -190,7 +190,7 @@ const signup = struct {
try res.headers.put("Location", index.path);
var buf: [64]u8 = undefined;
const cookie_name = try std.fmt.bufPrint(&buf, "token.{s}", .{req.body.username});
try res.headers.setCookie(cookie_name, token.value, .{});
try res.headers.setCookie(cookie_name, token.token, .{});
try res.headers.setCookie("active_account", req.body.username, .{ .HttpOnly = false });
try res.status(.see_other);
@ -208,6 +208,7 @@ const global_timeline = struct {
try res.template(.ok, srv, @embedFile("./web/timelines/global.tmpl.html"), .{
.notes = timeline.items,
.community = srv.community,
});
}
};
@ -369,6 +370,7 @@ const cluster = struct {
pub fn handler(_: anytype, res: anytype, srv: anytype) !void {
const meta = try srv.getClusterMeta();
try res.template(.ok, srv, @embedFile("./web/cluster/overview.tmpl.html"), .{
.community = srv.community,
.meta = meta,
});
}

View file

@ -11,7 +11,7 @@
<nav>
<h1 class="title"><a href="/about">fediglam</a></h1>
<ul>
{#if %user |$_|}
{#if %user_id |$_|}
<li><a class="button" href="/timelines/global">Timeline</a></li>
{#else}
<li><a class="button" href="/about">Sign in</a></li>

View file

@ -1,5 +1,5 @@
<header>
<h2>{ %community.name }</h2>
<h2>{ .community.name }</h2>
</header>
<nav>
<ul>