Compare commits


22 commits

Author SHA1 Message Date
a1a93a7466 Add message after server startup (ci/woodpecker/push/woodpecker: pipeline failed) 2023-01-08 15:36:11 -08:00
3a52aad023 Add tests for registration api call 2023-01-08 15:35:58 -08:00
2571043580 Fix dependencies for unit tests 2023-01-08 15:35:13 -08:00
da558ac40e Add test plug for DateTime.now 2023-01-08 15:34:54 -08:00
303bce771d Fix compile errors 2023-01-04 11:25:08 -08:00
b58266bdd8 Move api code into methods namespace 2023-01-04 11:03:23 -08:00
39565bccf0 fuck 2023-01-02 17:21:14 -08:00
9774f214f3 add stubs to services.zig 2023-01-02 17:17:42 -08:00
494d317ac1 refactor db layer 2023-01-02 12:38:42 -08:00
cc4badae21 refactor (ci/woodpecker/push/woodpecker: pipeline failed) 2023-01-01 23:34:30 -08:00
41ce5f3001 Remove services/auth.zig 2023-01-01 17:32:17 -08:00
df9354d91f Put registration logic in methods 2023-01-01 17:18:05 -08:00
58bc1af969 Move login behavior into methods 2023-01-01 16:28:36 -08:00
ecd844ddd1 Move login to methods/auth.zig 2023-01-01 16:14:04 -08:00
ec4e99c41e Update templates to use new context 2023-01-01 16:01:58 -08:00
6f30696d30 Create API Context 2023-01-01 15:58:17 -08:00
62d47d9d2f start moving api methods 2022-12-26 08:05:26 -08:00
f52b82b506 Move types into shared file 2022-12-21 07:19:13 -08:00
d8e4d6c82b comptime dependency injection for ApiConn 2022-12-21 00:57:36 -08:00
b0db514adc Make helper functions public 2022-12-19 05:42:26 -08:00
7f689c7030 parseQuotedString 2022-12-19 05:41:35 -08:00
4c661672c2 parseToken 2022-12-19 04:31:18 -08:00
39 changed files with 2604 additions and 1416 deletions


@@ -99,10 +99,16 @@ pub fn build(b: *std.build.Builder) !void {
exe.addSystemIncludePath("/usr/include/");
exe.addSystemIncludePath("/usr/include/postgresql"); // HACK
const unittest_options = b.addOptions();
unittest_options.addOption(bool, "enable_sqlite", false);
unittest_options.addOption(bool, "enable_postgres", false);
const unittest_pkgs = makePkgs(b, unittest_options.getPackage("build_options"));
const unittest_http_cmd = b.step("unit:http", "Run tests for http package");
const unittest_http = b.addTest("src/http/lib.zig");
unittest_http_cmd.dependOn(&unittest_http.step);
unittest_http.addPackage(pkgs.util);
unittest_http.addPackage(unittest_pkgs.util);
const unittest_util_cmd = b.step("unit:util", "Run tests for util package");
const unittest_util = b.addTest("src/util/lib.zig");
@@ -111,12 +117,19 @@ pub fn build(b: *std.build.Builder) !void {
const unittest_sql_cmd = b.step("unit:sql", "Run tests for sql package");
const unittest_sql = b.addTest("src/sql/lib.zig");
unittest_sql_cmd.dependOn(&unittest_sql.step);
unittest_sql.addPackage(pkgs.util);
unittest_sql.addPackage(unittest_pkgs.util);
//unittest_sql.linkLibC();
const unittest_template_cmd = b.step("unit:template", "Run tests for template package");
const unittest_template = b.addTest("src/template/lib.zig");
unittest_template_cmd.dependOn(&unittest_template.step);
//unittest_template.addPackage(pkgs.util);
const unittest_api_cmd = b.step("unit:api", "Run tests for api package");
const unittest_api = b.addTest("src/api/lib.zig");
unittest_api_cmd.dependOn(&unittest_api.step);
unittest_api.addPackage(unittest_pkgs.util);
unittest_api.addPackage(unittest_pkgs.sql);
//unittest_api.linkLibC();
//const util_tests = b.addTest("src/util/lib.zig");
//const sql_tests = b.addTest("src/sql/lib.zig");
@@ -129,6 +142,7 @@ pub fn build(b: *std.build.Builder) !void {
unittest_all.dependOn(unittest_util_cmd);
unittest_all.dependOn(unittest_sql_cmd);
unittest_all.dependOn(unittest_template_cmd);
unittest_all.dependOn(unittest_api_cmd);
const api_integration = b.addTest("./tests/api_integration/lib.zig");
api_integration.addPackage(pkgs.opts);
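The unittest_options block above compiles the test packages with both database backends switched off; each suite can then be run individually (e.g. zig build unit:api) or through the aggregate step further down. A rough sketch of how library code might branch on those flags, assuming they surface as the build_options package (the enum and function below are illustrative, not part of this changeset):

const build_options = @import("build_options");

const Backend = enum { postgres, sqlite, none };

// Choose a database backend from the compile-time flags set in build.zig.
fn chooseBackend() Backend {
    if (build_options.enable_postgres) return .postgres;
    if (build_options.enable_sqlite) return .sqlite;
    return .none;
}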

File diff suppressed because it is too large


@@ -0,0 +1,79 @@
const std = @import("std");
const util = @import("util");
const services = @import("../services.zig");
const pkg = @import("../lib.zig");
const Uuid = util.Uuid;
const ApiContext = pkg.ApiContext;
const default_avatar = "static/default_avi.png";
pub fn get(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
id: Uuid,
) !pkg.Actor {
const actor = try svcs.getActor(alloc, id);
errdefer util.deepFree(alloc, actor);
if (!Uuid.eql(actor.community_id, ctx.community.id) and ctx.userId() == null) {
return error.NotFound;
}
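// Build an absolute URL for the avatar: either the uploaded file under /media/
// (e.g. "https://example.host/media/<file id>", host and id illustrative) or the
// bundled default avatar.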
const avatar_url = if (actor.avatar_file_id) |fid|
try std.fmt.allocPrint(
alloc,
"{s}://{s}/media/{}",
.{ @tagName(ctx.community.scheme), ctx.community.host, fid },
)
else
try std.fmt.allocPrint(
alloc,
"{s}://{s}/{s}",
.{ @tagName(ctx.community.scheme), ctx.community.host, default_avatar },
);
errdefer alloc.free(avatar_url);
const header_url = if (actor.header_file_id) |fid|
try std.fmt.allocPrint(
alloc,
"{s}://{s}/media/{}",
.{ @tagName(ctx.community.scheme), ctx.community.host, fid },
)
else
null;
errdefer alloc.free(header_url);
return pkg.Actor{
.id = actor.id,
.username = actor.username,
.host = actor.host,
.display_name = actor.display_name,
.bio = actor.bio,
.avatar_file_id = actor.avatar_file_id,
.avatar_url = avatar_url,
.header_file_id = actor.header_file_id,
.header_url = header_url,
.profile_fields = actor.profile_fields,
.community_id = actor.community_id,
.created_at = actor.created_at,
.updated_at = actor.updated_at,
};
}
pub fn updateProfile(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
id: Uuid,
data: pkg.actors.ProfileUpdateArgs,
) !void {
if (!Uuid.eql(id, ctx.userId() orelse return error.NoToken)) return error.AccessDenied;
try svcs.updateActorProfile(alloc, id, data);
}
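These method functions take their service layer as anytype, so any value providing the calls they make will do: the Services(Db) wrapper from services.zig in production, or a hand-written stub in tests. A minimal sketch of a call site, with the import paths and function name assumed for illustration:

const std = @import("std");
const util = @import("util");
const ApiContext = @import("./lib.zig").ApiContext; // path assumed
const actors = @import("./methods/actors.zig"); // path assumed

fn logProfile(alloc: std.mem.Allocator, ctx: ApiContext, svcs: anytype, id: util.Uuid) !void {
    // Only the service methods actually called (here svcs.getActor) need to exist on svcs.
    const actor = try actors.get(alloc, ctx, svcs, id);
    defer util.deepFree(alloc, actor);
    std.log.info("{s}: {s}", .{ actor.username, actor.avatar_url });
}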

src/api/methods/auth.zig (new file, 437 lines added)

@@ -0,0 +1,437 @@
const std = @import("std");
const util = @import("util");
const pkg = @import("../lib.zig");
const services = @import("../services.zig");
const invites = @import("./invites.zig");
const Allocator = std.mem.Allocator;
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const ApiContext = pkg.ApiContext;
const Token = pkg.tokens.Token;
const RegistrationOptions = pkg.auth.RegistrationOptions;
const Invite = services.invites.Invite;
pub fn register(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
opt: RegistrationOptions,
) !Uuid {
const tx = try svcs.beginTx();
errdefer tx.rollbackTx();
const maybe_invite = if (opt.invite_code) |code|
tx.getInviteByCode(alloc, code, ctx.community.id) catch |err| switch (err) {
error.NotFound => return error.InvalidInvite,
else => |e| return e,
}
else
null;
defer if (maybe_invite) |inv| util.deepFree(alloc, inv);
if (maybe_invite) |invite| {
if (!Uuid.eql(invite.community_id, ctx.community.id)) return error.WrongCommunity;
if (!invites.isValid(invite)) return error.InvalidInvite;
}
const invite_kind = if (maybe_invite) |inv| inv.kind else .user;
if (ctx.community.kind == .admin) @panic("Unimplemented");
const account_id = try createLocalAccount(
alloc,
tx,
.{
.username = opt.username,
.password = opt.password,
.community_id = ctx.community.id,
.invite_id = if (maybe_invite) |inv| @as(?Uuid, inv.id) else null,
.email = opt.email,
},
);
switch (invite_kind) {
.user => {},
.system => @panic("System user invites unimplemented"),
.community_owner => {
try tx.transferCommunityOwnership(ctx.community.id, account_id);
},
}
try tx.commitTx();
return account_id;
}
pub const AccountCreateArgs = struct {
username: []const u8,
password: []const u8,
community_id: Uuid,
invite_id: ?Uuid = null,
email: ?[]const u8 = null,
role: services.accounts.Role = .user,
};
pub fn createLocalAccount(
alloc: std.mem.Allocator,
svcs: anytype,
args: AccountCreateArgs,
) !Uuid {
const tx = try svcs.beginTx();
errdefer tx.rollbackTx();
const hash = try hashPassword(args.password, alloc);
defer alloc.free(hash);
const id = try tx.createActor(alloc, args.username, args.community_id, false);
try tx.createAccount(alloc, .{
.for_actor = id,
.password_hash = hash,
.invite_id = args.invite_id,
.email = args.email,
.role = args.role,
});
try tx.commitTx();
return id;
}
pub fn verifyToken(alloc: std.mem.Allocator, ctx: ApiContext, svcs: anytype, token: []const u8) !Token.Info {
const hash = try hashToken(token, alloc);
defer alloc.free(hash);
const info = try svcs.getTokenByHash(alloc, hash, ctx.community.id);
defer util.deepFree(alloc, info);
return .{ .account_id = info.account_id, .issued_at = info.issued_at };
}
pub fn login(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
username: []const u8,
password: []const u8,
) !Token {
const community_id = ctx.community.id;
const credentials = try svcs.getCredentialsByUsername(
alloc,
username,
community_id,
);
defer util.deepFree(alloc, credentials);
try verifyPassword(credentials.password_hash, password, alloc);
const token = try generateToken(alloc);
errdefer util.deepFree(alloc, token);
const token_hash = hashToken(token, alloc) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
else => unreachable,
};
defer util.deepFree(alloc, token_hash);
const tx = try svcs.beginTx();
errdefer tx.rollbackTx();
// ensure that the password has not changed in the meantime
{
const updated_info = try tx.getCredentialsByUsername(
alloc,
username,
community_id,
);
defer util.deepFree(alloc, updated_info);
if (!std.mem.eql(u8, credentials.password_hash, updated_info.password_hash)) return error.InvalidLogin;
}
try tx.createToken(alloc, credentials.account_id, token_hash);
try tx.commitTx();
const info = try svcs.getTokenByHash(alloc, token_hash, community_id);
defer util.deepFree(alloc, info);
return .{
.value = token,
.info = .{
.account_id = info.account_id,
.issued_at = info.issued_at,
},
};
}
// We use scrypt for password hashing; it attempts to slow down GPU-based
// cracking by requiring large amounts of memory.
// Calculating or verifying a hash uses about 50 MB of work space.
const scrypt = std.crypto.pwhash.scrypt;
const max_password_hash_len = 128;
fn verifyPassword(
hash: []const u8,
password: []const u8,
alloc: std.mem.Allocator,
) !void {
scrypt.strVerify(
hash,
password,
.{ .allocator = alloc },
) catch |err| return switch (err) {
error.PasswordVerificationFailed => return error.InvalidLogin,
error.OutOfMemory => return error.OutOfMemory,
else => |e| return e,
};
}
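// Use much cheaper scrypt parameters when running under 'zig test' so the suite
// stays fast; real builds keep the interactive preset.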
const scrypt_params = if (!@import("builtin").is_test)
scrypt.Params.interactive
else
scrypt.Params{
.ln = 8,
.r = 8,
.p = 1,
};
fn hashPassword(password: []const u8, alloc: std.mem.Allocator) ![]const u8 {
var buf: [max_password_hash_len]u8 = undefined;
const hash = try scrypt.strHash(
password,
.{
.allocator = alloc,
.params = scrypt_params,
.encoding = .phc,
},
&buf,
);
return util.deepClone(alloc, hash);
}
/// A raw token is a sequence of N random bytes, base64 encoded.
/// When the token is generated:
/// - The hash of the token is calculated by:
/// 1. Decoding the base64 text
/// 2. Calculating the SHA256 hash of this text
/// 3. Encoding the hash back as base64
/// - The b64 encoded hash is stored in the database
/// - The original token is returned to the user
/// * The user will treat it as opaque text
/// When the token is verified:
/// - The hash of the token is taken as shown above
/// - The database is scanned for a token matching this hash
/// - If none can be found, the token is invalid
const Sha256 = std.crypto.hash.sha2.Sha256;
const Base64Encoder = std.base64.standard.Encoder;
const Base64Decoder = std.base64.standard.Decoder;
const token_len = 12;
fn generateToken(alloc: std.mem.Allocator) ![]const u8 {
var token = std.mem.zeroes([token_len]u8);
std.crypto.random.bytes(&token);
const token_b64_len = Base64Encoder.calcSize(token.len);
const token_b64 = try alloc.alloc(u8, token_b64_len);
return Base64Encoder.encode(token_b64, &token);
}
fn hashToken(token_b64: []const u8, alloc: std.mem.Allocator) ![]const u8 {
const decoded_token_len = Base64Decoder.calcSizeForSlice(token_b64) catch return error.InvalidToken;
if (decoded_token_len != token_len) return error.InvalidToken;
var token = std.mem.zeroes([token_len]u8);
Base64Decoder.decode(&token, token_b64) catch return error.InvalidToken;
var hash = std.mem.zeroes([Sha256.digest_length]u8);
Sha256.hash(&token, &hash, .{});
const hash_b64_len = Base64Encoder.calcSize(hash.len);
const hash_b64 = try alloc.alloc(u8, hash_b64_len);
return Base64Encoder.encode(hash_b64, &hash);
}
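A small sketch of the round trip the comment above describes: hashing is deterministic, so the hash computed when the user later presents their token matches the one stored at login, and only the hash ever needs to be persisted (the test is illustrative, not part of this changeset):

test "token hashing round trip (illustrative)" {
    const alloc = std.testing.allocator;
    const token = try generateToken(alloc);
    defer alloc.free(token);
    // What the server stores when the token is issued.
    const stored = try hashToken(token, alloc);
    defer alloc.free(stored);
    // What the server computes when the same token is presented later.
    const presented = try hashToken(token, alloc);
    defer alloc.free(presented);
    try std.testing.expectEqualStrings(stored, presented);
}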
test "register" {
const testCase = struct {
const test_invite_code = "xyz";
const test_invite_id = Uuid.parse("d24e7f2a-7e6e-4e2a-8e9d-987538a04a40") catch unreachable;
const test_acc_id = Uuid.parse("e8e21e1d-7b80-4e48-876d-9929326af511") catch unreachable;
const test_community_id = Uuid.parse("8bf88bd7-fb07-492d-a89a-6350c036183f") catch unreachable;
const Args = struct {
username: []const u8 = "username",
password: []const u8 = "password1234",
use_invite: bool = false,
invite_community_id: Uuid = test_community_id,
invite_kind: services.invites.Kind = .user,
invite_max_uses: ?usize = null,
invite_current_uses: usize = 0,
invite_expires_at: ?DateTime = null,
get_invite_error: ?anyerror = null,
create_account_error: ?anyerror = null,
create_actor_error: ?anyerror = null,
transfer_error: ?anyerror = null,
expect_error: ?anyerror = null,
expect_transferred: bool = false,
};
fn runCaseOnce(allocator: std.mem.Allocator, test_args: Args) anyerror!void {
const Svc = struct {
test_args: Args,
tx_level: usize = 0,
rolled_back: bool = false,
committed: bool = false,
account_created: bool = false,
actor_created: bool = false,
community_transferred: bool = false,
fn beginTx(self: *@This()) !*@This() {
self.tx_level += 1;
return self;
}
fn rollbackTx(self: *@This()) void {
self.tx_level -= 1;
self.rolled_back = true;
}
fn commitTx(self: *@This()) !void {
self.tx_level -= 1;
self.committed = true;
}
fn getInviteByCode(self: *@This(), alloc: Allocator, code: []const u8, community_id: Uuid) anyerror!services.invites.Invite {
try std.testing.expect(self.tx_level > 0);
try std.testing.expectEqualStrings(test_invite_code, code);
try std.testing.expectEqual(test_community_id, community_id);
if (self.test_args.get_invite_error) |err| return err;
return try util.deepClone(alloc, std.mem.zeroInit(services.invites.Invite, .{
.id = test_invite_id,
.community_id = self.test_args.invite_community_id,
.code = code,
.kind = self.test_args.invite_kind,
.times_used = self.test_args.invite_current_uses,
.max_uses = self.test_args.invite_max_uses,
.expires_at = self.test_args.invite_expires_at,
}));
}
fn createActor(self: *@This(), _: Allocator, username: []const u8, community_id: Uuid, _: bool) anyerror!Uuid {
try std.testing.expect(self.tx_level > 0);
if (self.test_args.create_actor_error) |err| return err;
try std.testing.expectEqualStrings(self.test_args.username, username);
try std.testing.expectEqual(test_community_id, community_id);
self.actor_created = true;
return test_acc_id;
}
fn createAccount(self: *@This(), alloc: Allocator, args: services.accounts.CreateArgs) anyerror!void {
try std.testing.expect(self.tx_level > 0);
if (self.test_args.create_account_error) |err| return err;
try verifyPassword(args.password_hash, self.test_args.password, alloc);
if (self.test_args.use_invite)
try std.testing.expectEqual(@as(?Uuid, test_invite_id), args.invite_id)
else
try std.testing.expect(args.invite_id == null);
try std.testing.expectEqual(services.accounts.Role.user, args.role);
self.account_created = true;
}
fn transferCommunityOwnership(self: *@This(), community_id: Uuid, account_id: Uuid) !void {
try std.testing.expect(self.tx_level > 0);
if (self.test_args.transfer_error) |err| return err;
self.community_transferred = true;
try std.testing.expectEqual(test_community_id, community_id);
try std.testing.expectEqual(test_acc_id, account_id);
}
};
var svc = Svc{ .test_args = test_args };
const community = std.mem.zeroInit(pkg.Community, .{ .kind = .local, .id = test_community_id });
const result = register(
allocator,
.{ .community = community },
&svc,
.{
.username = test_args.username,
.password = test_args.password,
.invite_code = if (test_args.use_invite) test_invite_code else null,
},
// Propagate OutOfMemory so checkAllAllocationFailures can exercise allocation
// failures; other errors are kept as values for the expectations below.
) catch |err| if (err == error.OutOfMemory) return err else err;
if (test_args.expect_error) |err| {
try std.testing.expectError(err, result);
try std.testing.expect(!svc.committed);
if (svc.account_created or svc.actor_created or svc.community_transferred) {
try std.testing.expect(svc.rolled_back);
}
} else {
try std.testing.expectEqual(test_acc_id, try result);
try std.testing.expect(svc.committed);
try std.testing.expect(!svc.rolled_back);
try std.testing.expect(svc.account_created);
try std.testing.expect(svc.actor_created);
try std.testing.expectEqual(test_args.expect_transferred, svc.community_transferred);
}
}
fn case(args: Args) !void {
try std.testing.checkAllAllocationFailures(std.testing.allocator, runCaseOnce, .{args});
}
}.case;
// regular registration
try testCase(.{});
// registration with invite
try testCase(.{ .use_invite = true });
// registration with invite for a different community
try testCase(.{
.invite_community_id = Uuid.parse("11111111-1111-1111-1111-111111111111") catch unreachable,
.use_invite = true,
.expect_error = error.WrongCommunity,
});
// registration as a new community owner
try testCase(.{
.use_invite = true,
.invite_kind = .community_owner,
.expect_transferred = true,
});
// invite with expiration info
try testCase(.{
.use_invite = true,
.invite_max_uses = 100,
.invite_current_uses = 10,
.invite_expires_at = DateTime{ .seconds_since_epoch = DateTime.test_now_timestamp + 3600 },
});
// missing invite
try testCase(.{
.use_invite = true,
.get_invite_error = error.NotFound,
.expect_error = error.InvalidInvite,
});
// expired invite
try testCase(.{
.use_invite = true,
.invite_expires_at = DateTime{ .seconds_since_epoch = DateTime.test_now_timestamp - 3600 },
.expect_error = error.InvalidInvite,
});
// used invite
try testCase(.{
.use_invite = true,
.invite_max_uses = 100,
.invite_current_uses = 110,
.expect_error = error.InvalidInvite,
});
}


@@ -0,0 +1,48 @@
const std = @import("std");
const util = @import("util");
const types = @import("../types.zig");
const pkg = @import("../lib.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const ApiContext = pkg.ApiContext;
const Community = types.communities.Community;
const QueryArgs = types.communities.QueryArgs;
const QueryResult = types.communities.QueryResult;
pub fn create(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
origin: []const u8,
name: ?[]const u8,
) !Uuid {
if (!ctx.isAdmin()) {
return error.PermissionDenied;
}
return try svcs.createCommunity(
alloc,
origin,
.{ .name = name },
);
}
pub fn get(
alloc: std.mem.Allocator,
_: ApiContext,
svcs: anytype,
id: Uuid,
) !Community {
return try svcs.getCommunity(alloc, id);
}
pub fn query(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
args: QueryArgs,
) !QueryResult {
if (!ctx.isAdmin()) return error.PermissionDenied;
return try svcs.queryCommunities(alloc, args);
}

src/api/methods/drive.zig (new file, 206 lines added)

@@ -0,0 +1,206 @@
const std = @import("std");
const util = @import("util");
const pkg = @import("../lib.zig");
const services = @import("../services.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const ApiContext = pkg.ApiContext;
const DriveEntry = pkg.drive.DriveEntry;
pub fn upload(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
args: pkg.drive.UploadArgs,
body: []const u8,
) !Uuid {
const owner = ctx.userId() orelse return error.NoToken;
const file_id = try svcs.createFile(alloc, owner, .{
.filename = args.filename,
.description = args.description,
.content_type = args.content_type,
.sensitive = args.sensitive,
}, body);
const entry_id = entry: {
errdefer svcs.deleteFile(alloc, file_id) catch |err| {
std.log.err("Unable to delete file {}: {}", .{ file_id, err });
};
break :entry svcs.createDriveEntry(
alloc,
owner,
args.dir,
args.filename,
file_id,
) catch |err| switch (err) {
error.PathAlreadyExists => {
var buf: [256]u8 = undefined;
var split = std.mem.splitBackwards(u8, args.filename, ".");
const ext = split.first();
const name = split.rest();
const new_name = try std.fmt.bufPrint(&buf, "{s}.{s}.{s}", .{ name, file_id, ext });
break :entry try svcs.createDriveEntry(
alloc,
owner,
args.dir,
new_name,
file_id,
);
},
else => |e| return e,
};
};
return entry_id;
}
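When the requested name is already taken, upload retries with the file id spliced in before the extension; splitBackwards treats everything after the last dot as the extension. A tiny illustration of that split (the test is illustrative, not part of this changeset):

test "collision rename splits on the last dot (illustrative)" {
    var split = std.mem.splitBackwards(u8, "notes.backup.txt", ".");
    // first() yields the extension and rest() the remaining name, so a clash on
    // "notes.backup.txt" would be retried as "notes.backup.<file id>.txt".
    try std.testing.expectEqualStrings("txt", split.first());
    try std.testing.expectEqualStrings("notes.backup", split.rest());
}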
pub fn mkdir(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
parent_path: []const u8,
name: []const u8,
) !Uuid {
const user_id = ctx.userId() orelse return error.NoToken;
return try svcs.createDriveEntry(alloc, user_id, parent_path, name, null);
}
pub fn delete(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
path: []const u8,
) !void {
const user_id = ctx.userId() orelse return error.NoToken;
const entry = try svcs.statDriveEntry(alloc, user_id, path);
defer util.deepFree(alloc, entry);
try svcs.deleteDriveEntry(alloc, entry.id);
if (entry.file_id) |file_id| try svcs.deleteFile(alloc, file_id);
}
pub fn move(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
src: []const u8,
dest: []const u8,
) !void {
const user_id = ctx.userId() orelse return error.NoToken;
try svcs.moveDriveEntry(alloc, user_id, src, dest);
}
pub fn get(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
path: []const u8,
) !pkg.drive.DriveEntry {
const user_id = ctx.userId() orelse return error.NoToken;
const entry = try svcs.statDriveEntry(alloc, user_id, path);
defer util.deepFree(alloc, entry);
return try convert(alloc, svcs, entry, true);
}
pub fn getById(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
id: Uuid,
) !pkg.drive.DriveEntry {
const user_id = ctx.userId() orelse return error.NoToken;
const entry = try svcs.getDriveEntry(alloc, id);
defer util.deepFree(alloc, entry);
if (!Uuid.eql(entry.owner_id, user_id)) return error.NotFound;
return try convert(alloc, svcs, entry, true);
}
// TODO: These next two functions are more about files than drive entries, consider refactor?
pub fn update(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
path: []const u8,
meta: pkg.files.UpdateArgs,
) !void {
const user_id = ctx.userId() orelse return error.NoToken;
const entry = try svcs.statDriveEntry(alloc, user_id, path);
defer util.deepFree(alloc, entry);
try svcs.updateFileMetadata(alloc, entry.file_id orelse return error.NotAFile, meta);
}
pub fn dereference(
alloc: std.mem.Allocator,
_: ApiContext,
svcs: anytype,
file_id: Uuid,
) !pkg.files.DerefResult {
const meta = try svcs.statFile(alloc, file_id);
errdefer util.deepFree(alloc, meta);
return .{
.meta = meta,
.data = try svcs.derefFile(alloc, file_id),
};
}
fn convert(
alloc: std.mem.Allocator,
svcs: anytype,
entry: services.drive.DriveEntry,
recurse: bool,
) !DriveEntry {
if (entry.file_id) |file_id| return .{
.file = .{
.id = entry.id,
.owner_id = entry.owner_id,
.name = entry.name,
.path = entry.path,
.parent_directory_id = entry.parent_directory_id,
.meta = try svcs.statFile(alloc, file_id),
},
} else return .{
.dir = .{
.id = entry.id,
.owner_id = entry.owner_id,
.name = entry.name,
.path = entry.path,
.parent_directory_id = entry.parent_directory_id,
.children = blk: {
if (!recurse) break :blk null;
const children = try svcs.listDriveEntry(alloc, entry.id);
const result = alloc.alloc(DriveEntry, children.len) catch |err| {
util.deepFree(alloc, children);
return err;
};
var count: usize = 0;
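// On error, free the entries already converted into result[0..count] and the
// raw child entries that were not converted yet.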
errdefer for (children) |child, i| {
if (i < count)
util.deepFree(alloc, result[i])
else
util.deepFree(alloc, child);
};
defer alloc.free(children);
errdefer alloc.free(result);
for (children) |child, i| {
result[i] = try convert(alloc, svcs, child, false);
count += 1;
}
break :blk result;
},
},
};
}


@@ -0,0 +1,83 @@
const std = @import("std");
const util = @import("util");
const pkg = @import("../lib.zig");
const services = @import("../services.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const ApiContext = pkg.ApiContext;
const QueryArgs = services.follows.QueryArgs;
const FollowerQueryArgs = pkg.follows.FollowerQueryArgs;
const FollowingQueryArgs = pkg.follows.FollowingQueryArgs;
pub fn follow(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
followee: Uuid,
) !void {
const user_id = ctx.userId() orelse return error.NoToken;
try svcs.createFollow(alloc, user_id, followee);
}
pub fn unfollow(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
followee: Uuid,
) !void {
const user_id = ctx.userId() orelse return error.NoToken;
try svcs.deleteFollow(alloc, user_id, followee);
}
pub fn queryFollowers(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
of: Uuid,
args: FollowerQueryArgs,
) !pkg.follows.FollowerQueryResult {
const user = try svcs.getActor(alloc, of);
defer util.deepFree(alloc, user);
if (!Uuid.eql(user.community_id, ctx.community.id) and ctx.userId() == null) return error.NotFound;
var all_args = std.mem.zeroInit(QueryArgs, args);
all_args.followee_id = of;
const result = try svcs.queryFollows(alloc, all_args);
return .{
.items = result.items,
.prev_page = convert(FollowerQueryArgs, result.prev_page),
.next_page = convert(FollowerQueryArgs, result.next_page),
};
}
pub fn queryFollowing(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
of: Uuid,
args: FollowingQueryArgs,
) !pkg.follows.FollowingQueryResult {
const user = try svcs.getActor(alloc, of);
defer util.deepFree(alloc, user);
if (!Uuid.eql(user.community_id, ctx.community.id) and ctx.userId() == null) return error.NotFound;
var all_args = std.mem.zeroInit(QueryArgs, args);
all_args.followed_by_id = of;
const result = try svcs.queryFollows(alloc, all_args);
return .{
.items = result.items,
.prev_page = convert(FollowingQueryArgs, result.prev_page),
.next_page = convert(FollowingQueryArgs, result.next_page),
};
}
fn convert(comptime T: type, args: QueryArgs) T {
return .{
.max_items = args.max_items,
.order_by = args.order_by,
.direction = args.direction,
.prev = args.prev,
.page_direction = args.page_direction,
};
}

src/api/methods/invites.zig (new file, 107 lines added)

@@ -0,0 +1,107 @@
const std = @import("std");
const util = @import("util");
const services = @import("../services.zig");
const pkg = @import("../lib.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const ApiContext = pkg.ApiContext;
const CreateOptions = pkg.invites.CreateOptions;
const Invite = pkg.invites.Invite;
pub fn create(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
options: CreateOptions,
) !Uuid {
// Only logged in users can make invites
const user_id = ctx.userId() orelse return error.TokenRequired;
const community_id = if (options.to_community) |id| blk: {
// Only admins can send invites for other communities
if (!ctx.isAdmin()) return error.PermissionDenied;
break :blk id;
} else ctx.community.id;
// Users can only make user invites
if (options.kind != .user and !ctx.isAdmin()) return error.PermissionDenied;
return try svcs.createInvite(alloc, .{
.created_by = user_id,
.community_id = community_id,
.name = options.name,
.lifespan = options.lifespan,
.max_uses = options.max_uses,
.kind = options.kind,
});
}
pub fn isValid(invite: services.invites.Invite) bool {
if (invite.max_uses != null and invite.times_used >= invite.max_uses.?) return false;
if (invite.expires_at != null and DateTime.now().isAfter(invite.expires_at.?)) return false;
return true;
}
fn getInviteImpl(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
invite: services.invites.Invite,
) !Invite {
errdefer util.deepFree(alloc, invite);
if (!Uuid.eql(invite.community_id, ctx.community.id) and !ctx.isAdmin()) return error.NotFound;
if (!isValid(invite)) return error.NotFound;
const community = try svcs.getCommunity(alloc, invite.community_id);
defer util.deepFree(alloc, community);
const url = try std.fmt.allocPrint(
alloc,
"{s}://{s}/invite/{s}",
.{ @tagName(community.scheme), community.host, invite.code },
);
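// e.g. "https://example.community/invite/AbC123xYz_-Q" (host and code illustrative)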
errdefer util.deepFree(alloc, url);
return Invite{
.id = invite.id,
.created_by = invite.created_by,
.community_id = invite.community_id,
.name = invite.name,
.code = invite.code,
.url = url,
.created_at = invite.created_at,
.times_used = invite.times_used,
.expires_at = invite.expires_at,
.max_uses = invite.max_uses,
.kind = invite.kind,
};
}
pub fn get(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
id: Uuid,
) !Invite {
const invite = try svcs.getInvite(alloc, id);
return getInviteImpl(alloc, ctx, svcs, invite);
}
pub fn getByCode(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
code: []const u8,
) !Invite {
const invite = try svcs.getInviteByCode(alloc, code, ctx.community.id);
return getInviteImpl(alloc, ctx, svcs, invite);
}

src/api/methods/notes.zig (new file, 38 lines added)

@@ -0,0 +1,38 @@
const std = @import("std");
const util = @import("util");
const services = @import("../services.zig");
const pkg = @import("../lib.zig");
const Uuid = util.Uuid;
const ApiContext = pkg.ApiContext;
pub fn create(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
content: []const u8,
) !Uuid {
// You cannot post on admin accounts
if (ctx.community.kind == .admin) return error.WrongCommunity;
// Only authenticated users can post
const user_id = ctx.userId() orelse return error.TokenRequired;
return try svcs.createNote(alloc, user_id, content);
}
pub fn get(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
note_id: Uuid,
) !pkg.Note {
const note = try svcs.getNote(alloc, note_id);
errdefer util.deepFree(alloc, note);
// Only serve community-specific notes on unauthenticated requests
if (ctx.userId() == null) {
if (!Uuid.eql(ctx.community.id, note.author.community_id)) return error.NotFound;
}
return note;
}


@@ -0,0 +1,80 @@
const std = @import("std");
const util = @import("util");
const pkg = @import("../lib.zig");
const types = @import("../types.zig");
const ApiContext = pkg.ApiContext;
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const services = @import("../services.zig");
const Note = services.Note;
const QueryArgs = services.notes.QueryArgs;
const TimelineArgs = types.timelines.TimelineArgs;
fn timelineArgs(args: services.notes.QueryArgs) TimelineArgs {
return .{
.max_items = args.max_items,
.created_before = args.created_before,
.created_after = args.created_after,
.prev = args.prev,
.page_direction = args.page_direction,
};
}
pub const TimelineResult = struct {
items: []Note,
prev_page: TimelineArgs,
next_page: TimelineArgs,
};
pub fn globalTimeline(
alloc: std.mem.Allocator,
_: ApiContext,
svcs: anytype,
args: TimelineArgs,
) !TimelineResult {
const all_args = std.mem.zeroInit(QueryArgs, args);
const result = try svcs.queryNotes(alloc, all_args);
return TimelineResult{
.items = result.items,
.prev_page = timelineArgs(result.prev_page),
.next_page = timelineArgs(result.next_page),
};
}
pub fn localTimeline(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
args: TimelineArgs,
) !TimelineResult {
var all_args = std.mem.zeroInit(QueryArgs, args);
all_args.community_id = ctx.community.id;
const result = try svcs.queryNotes(alloc, all_args);
return TimelineResult{
.items = result.items,
.prev_page = timelineArgs(result.prev_page),
.next_page = timelineArgs(result.next_page),
};
}
pub fn homeTimeline(
alloc: std.mem.Allocator,
ctx: ApiContext,
svcs: anytype,
args: TimelineArgs,
) !TimelineResult {
if (ctx.userId() == null) return error.NoToken;
var all_args = std.mem.zeroInit(QueryArgs, args);
all_args.followed_by = ctx.userId();
const result = try svcs.queryNotes(alloc, all_args);
return TimelineResult{
.items = result.items,
.prev_page = timelineArgs(result.prev_page),
.next_page = timelineArgs(result.next_page),
};
}
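The std.mem.zeroInit(QueryArgs, args) calls above copy every field of the timeline argument struct into the matching QueryArgs field and leave the remaining QueryArgs fields at their zero/default values, so each timeline only fills in its own filter afterwards. A minimal illustration with a throwaway struct (not part of this changeset):

test "zeroInit keeps provided fields and zeroes the rest (illustrative)" {
    const Args = struct {
        max_items: usize = 0,
        community_id: u32 = 0,
    };
    const full = std.mem.zeroInit(Args, .{ .max_items = @as(usize, 5) });
    try std.testing.expectEqual(@as(usize, 5), full.max_items);
    try std.testing.expectEqual(@as(u32, 0), full.community_id);
}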

src/api/services.zig (new file, 338 lines added)

@@ -0,0 +1,338 @@
const std = @import("std");
const util = @import("util");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const impl = struct {
const communities = @import("./services/communities.zig");
const actors = @import("./services/actors.zig");
const drive = @import("./services/drive.zig");
const files = @import("./services/files.zig");
const invites = @import("./services/invites.zig");
const notes = @import("./services/notes.zig");
const follows = @import("./services/follows.zig");
const accounts = @import("./services/accounts.zig");
const tokens = @import("./services/tokens.zig");
};
const types = @import("./services/types.zig");
pub usingnamespace types;
pub const Account = types.accounts.Account;
pub const Credentials = types.accounts.Credentials;
pub const Actor = types.actors.Actor;
pub const Community = types.communities.Community;
pub const DriveEntry = types.drive.DriveEntry;
pub const FileUpload = types.files.FileUpload;
pub const Invite = types.invites.Invite;
pub const Note = types.notes.Note;
pub const Token = types.tokens.Token;
pub fn Services(comptime Db: type) type {
return struct {
const Self = @This();
db: Db,
pub fn beginTx(self: Self) !Services(Db.BeginOrSavepoint) {
return Services(Db.BeginOrSavepoint){
.db = try self.db.beginOrSavepoint(),
};
}
pub fn commitTx(self: Self) !void {
return try self.db.commitOrRelease();
}
pub fn rollbackTx(self: Self) void {
return self.db.rollback();
}
pub fn createAccount(
self: Self,
alloc: std.mem.Allocator,
args: types.accounts.CreateArgs,
) !void {
return try impl.accounts.create(self.db, args, alloc);
}
pub fn getCredentialsByUsername(
self: Self,
alloc: std.mem.Allocator,
username: []const u8,
community_id: Uuid,
) !Credentials {
return try impl.accounts.getCredentialsByUsername(self.db, username, community_id, alloc);
}
pub fn createActor(
self: Self,
alloc: std.mem.Allocator,
username: []const u8,
community_id: Uuid,
lax_username: bool, // TODO: remove this
) !Uuid {
return try impl.actors.create(self.db, username, community_id, lax_username, alloc);
}
pub fn getActor(
self: Self,
alloc: std.mem.Allocator,
user_id: Uuid,
) !Actor {
return try impl.actors.get(self.db, user_id, alloc);
}
pub fn lookupActorByUsername(
self: Self,
alloc: std.mem.Allocator,
username: []const u8,
community_id: Uuid,
) !Actor {
return try impl.actors.lookupByUsername(self.db, username, community_id, alloc);
}
pub fn updateActorProfile(
self: Self,
alloc: std.mem.Allocator,
actor_id: Uuid,
new: types.actors.ProfileUpdateArgs,
) !void {
return try impl.actors.updateProfile(self.db, actor_id, new, alloc);
}
pub fn createCommunity(
self: Self,
alloc: std.mem.Allocator,
origin: []const u8,
options: types.communities.CreateOptions,
) !Uuid {
return try impl.communities.create(self.db, origin, options, alloc);
}
pub fn getCommunity(
self: Self,
alloc: std.mem.Allocator,
id: Uuid,
) !Community {
return try impl.communities.get(self.db, id, alloc);
}
pub fn getCommunityByHost(
self: Self,
alloc: std.mem.Allocator,
host: []const u8,
) !Community {
return try impl.communities.getByHost(self.db, host, alloc);
}
pub fn getAdminCommunityId(self: Self) !Uuid {
return try impl.communities.adminCommunityId(self.db);
}
pub fn transferCommunityOwnership(self: Self, community_id: Uuid, owner_id: Uuid) !void {
return try impl.communities.transferOwnership(self.db, community_id, owner_id);
}
pub fn queryCommunities(
self: Self,
alloc: std.mem.Allocator,
args: types.communities.QueryArgs,
) !types.communities.QueryResult {
return try impl.communities.query(self.db, args, alloc);
}
pub fn statDriveEntry(
self: Self,
alloc: std.mem.Allocator,
owner_id: Uuid,
path: []const u8,
) !DriveEntry {
return try impl.drive.stat(self.db, owner_id, path, alloc);
}
pub fn getDriveEntry(
self: Self,
alloc: std.mem.Allocator,
id: Uuid,
) !DriveEntry {
return try impl.drive.get(self.db, id, alloc);
}
pub fn createDriveEntry(
self: Self,
alloc: std.mem.Allocator,
owner_id: Uuid,
containing_path: []const u8,
name: []const u8,
file_id: ?Uuid,
) !Uuid {
return try impl.drive.create(self.db, owner_id, containing_path, name, file_id, alloc);
}
pub fn deleteDriveEntry(
self: Self,
alloc: std.mem.Allocator,
entry_id: Uuid,
) !void {
return try impl.drive.delete(self.db, entry_id, alloc);
}
pub fn moveDriveEntry(
self: Self,
alloc: std.mem.Allocator,
owner_id: Uuid,
src: []const u8,
dest: []const u8,
) !void {
return try impl.drive.move(self.db, owner_id, src, dest, alloc);
}
// TODO: paginate
pub fn listDriveEntry(
self: Self,
alloc: std.mem.Allocator,
entry_id: Uuid,
) ![]DriveEntry {
return try impl.drive.list(self.db, entry_id, alloc);
}
pub fn createFile(
self: Self,
alloc: std.mem.Allocator,
owner_id: Uuid,
meta: types.files.CreateOptions,
data: []const u8,
) !Uuid {
return try impl.files.create(self.db, owner_id, meta, data, alloc);
}
pub fn deleteFile(
self: Self,
alloc: std.mem.Allocator,
id: Uuid,
) !void {
return try impl.files.delete(self.db, id, alloc);
}
pub fn statFile(
self: Self,
alloc: std.mem.Allocator,
id: Uuid,
) !FileUpload {
return try impl.files.get(self.db, id, alloc);
}
pub fn derefFile(
_: Self,
alloc: std.mem.Allocator,
id: Uuid,
) ![]const u8 {
return try impl.files.deref(alloc, id);
}
pub fn updateFileMetadata(
self: Self,
alloc: std.mem.Allocator,
id: Uuid,
meta: types.files.UpdateArgs,
) !void {
return try impl.files.update(self.db, id, meta, alloc);
}
pub fn createFollow(
self: Self,
alloc: std.mem.Allocator,
followed_by: Uuid,
followee: Uuid,
) !void {
return try impl.follows.create(self.db, followed_by, followee, alloc);
}
pub fn deleteFollow(
self: Self,
alloc: std.mem.Allocator,
followed_by: Uuid,
followee: Uuid,
) !void {
return try impl.follows.delete(self.db, followed_by, followee, alloc);
}
pub fn queryFollows(
self: Self,
alloc: std.mem.Allocator,
args: types.follows.QueryArgs,
) !types.follows.QueryResult {
return try impl.follows.query(self.db, args, alloc);
}
pub fn createInvite(
self: Self,
alloc: std.mem.Allocator,
options: types.invites.CreateOptions,
) !Uuid {
return try impl.invites.create(self.db, options, alloc);
}
pub fn getInvite(
self: Self,
alloc: std.mem.Allocator,
invite_id: Uuid,
) !Invite {
return try impl.invites.get(self.db, invite_id, alloc);
}
pub fn createNote(
self: Self,
alloc: std.mem.Allocator,
author_id: Uuid,
content: []const u8,
) !Uuid {
return try impl.notes.create(self.db, author_id, content, alloc);
}
pub fn getNote(
self: Self,
alloc: std.mem.Allocator,
id: Uuid,
) !Note {
return try impl.notes.get(self.db, id, alloc);
}
pub fn queryNotes(
self: Self,
alloc: std.mem.Allocator,
args: types.notes.QueryArgs,
) !types.notes.QueryResult {
return try impl.notes.query(self.db, args, alloc);
}
pub fn getInviteByCode(
self: Self,
alloc: std.mem.Allocator,
code: []const u8,
community_id: Uuid,
) !Invite {
return try impl.invites.getByCode(self.db, code, community_id, alloc);
}
pub fn createToken(
self: Self,
alloc: std.mem.Allocator,
account_id: Uuid,
hash: []const u8,
) !void {
return try impl.tokens.create(self.db, account_id, hash, alloc);
}
pub fn getTokenByHash(
self: Self,
alloc: std.mem.Allocator,
hash: []const u8,
community_id: Uuid,
) !Token {
return try impl.tokens.getByHash(self.db, hash, community_id, alloc);
}
};
}
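A sketch of how this wrapper is typically driven, assuming db is whatever connection type the sql package provides (the function name is illustrative, not part of this changeset):

fn transferWithRollback(db: anytype, community_id: Uuid, new_owner: Uuid) !void {
    const svcs = Services(@TypeOf(db)){ .db = db };
    // beginTx returns the same Services interface wrapped around a transaction
    // or savepoint, so calls look identical inside and outside a transaction.
    const tx = try svcs.beginTx();
    errdefer tx.rollbackTx();
    try tx.transferCommunityOwnership(community_id, new_owner);
    try tx.commitTx();
}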


@@ -0,0 +1,53 @@
const std = @import("std");
const util = @import("util");
const types = @import("./types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const CreateArgs = types.accounts.CreateArgs;
const Credentials = types.accounts.Credentials;
/// Creates a local account with the given information
pub fn create(
db: anytype,
args: CreateArgs,
alloc: std.mem.Allocator,
) !void {
const tx = try db.beginOrSavepoint();
errdefer tx.rollback();
tx.insert("account", .{
.id = args.for_actor,
.invite_id = args.invite_id,
.email = args.email,
.kind = args.role,
}, alloc) catch return error.DatabaseFailure;
tx.insert("password", .{
.account_id = args.for_actor,
.hash = args.password_hash,
.changed_at = DateTime.now(),
}, alloc) catch return error.DatabaseFailure;
tx.commitOrRelease() catch return error.DatabaseFailure;
}
pub fn getCredentialsByUsername(db: anytype, username: []const u8, community_id: Uuid, alloc: std.mem.Allocator) !Credentials {
return db.queryRow(
Credentials,
\\SELECT account.id as account_id, password.hash as password_hash
\\FROM password
\\ JOIN account
\\ JOIN actor
\\ ON password.account_id = account.id AND account.id = actor.id
\\WHERE actor.username = $1
\\ AND actor.community_id = $2
\\LIMIT 1
,
.{ username, community_id },
alloc,
) catch |err| return switch (err) {
error.NoRows => error.InvalidLogin,
else => |e| return e,
};
}


@@ -1,13 +1,13 @@
const std = @import("std");
const util = @import("util");
const sql = @import("sql");
const auth = @import("./auth.zig");
const common = @import("./common.zig");
const files = @import("./files.zig");
const types = @import("./types.zig");
const Partial = common.Partial;
const Uuid = util.Uuid;
const DateTime = util.DateTime;
pub const Actor = types.actors.Actor;
pub const CreateError = error{
UsernameTaken,
@@ -17,19 +17,6 @@ pub const CreateError = error{
DatabaseFailure,
};
pub const ActorDetailed = struct {
id: Uuid,
username: []const u8,
host: []const u8,
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: ProfileField,
created_at: DateTime,
updated_at: DateTime,
};
pub const LookupError = error{
DatabaseFailure,
};
@@ -101,35 +88,6 @@ pub fn create(
return id;
}
pub const ProfileField = struct {
key: []const u8,
value: []const u8,
};
pub const Actor = struct {
id: Uuid,
username: []const u8,
host: []const u8,
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: []const ProfileField,
community_id: Uuid,
created_at: DateTime,
updated_at: DateTime,
pub const sql_serialize = struct {
pub const profile_fields = .json;
};
};
pub const GetError = error{ NotFound, DatabaseFailure };
pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Actor {
return db.queryRow(
@@ -162,19 +120,11 @@ pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Actor {
};
}
pub const PartialProfile = Partial(Profile);
pub const Profile = struct {
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: []const ProfileField,
};
pub const max_fields = 32;
pub const max_display_name_len = 128;
pub const max_bio = 1 << 16;
pub fn updateProfile(db: anytype, id: Uuid, new: PartialProfile, alloc: std.mem.Allocator) !void {
pub fn updateProfile(db: anytype, id: Uuid, new: types.actors.ProfileUpdateArgs, alloc: std.mem.Allocator) !void {
var builder = sql.QueryBuilder.init(alloc);
defer builder.deinit();


@@ -1,259 +0,0 @@
const std = @import("std");
const util = @import("util");
const actors = @import("./actors.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
pub const RegistrationError = error{
PasswordTooShort,
DatabaseFailure,
HashFailure,
OutOfMemory,
} || actors.CreateError;
pub const min_password_chars = 12;
pub const Kind = enum {
user,
admin,
};
pub const RegistrationOptions = struct {
invite_id: ?Uuid = null,
email: ?[]const u8 = null,
kind: Kind = .user,
};
/// Creates a local account with the given information and returns the
/// account id
pub fn register(
db: anytype,
username: []const u8,
password: []const u8,
community_id: Uuid,
options: RegistrationOptions,
alloc: std.mem.Allocator,
) RegistrationError!Uuid {
if (password.len < min_password_chars) return error.PasswordTooShort;
// perform pre-validation to avoid having to hash the password if it fails
try actors.validateUsername(username, false);
const hash = try hashPassword(password, alloc);
defer alloc.free(hash);
const tx = db.beginOrSavepoint() catch return error.DatabaseFailure;
errdefer tx.rollback();
const id = try actors.create(tx, username, community_id, false, alloc);
tx.insert("account", .{
.id = id,
.invite_id = options.invite_id,
.email = options.email,
.kind = options.kind,
}, alloc) catch return error.DatabaseFailure;
tx.insert("password", .{
.account_id = id,
.hash = hash,
.changed_at = DateTime.now(),
}, alloc) catch return error.DatabaseFailure;
tx.insert("drive_entry", .{
.id = id,
.owner_id = id,
}, alloc) catch return error.DatabaseFailure;
tx.commitOrRelease() catch return error.DatabaseFailure;
return id;
}
pub const LoginError = error{
InvalidLogin,
HashFailure,
DatabaseFailure,
OutOfMemory,
};
pub const LoginResult = struct {
token: []const u8,
user_id: Uuid,
};
/// Attempts to login to the account `@username@community` and creates
/// a login token/cookie for the user
pub fn login(
db: anytype,
username: []const u8,
community_id: Uuid,
password: []const u8,
alloc: std.mem.Allocator,
) LoginError!LoginResult {
std.log.debug("user: {s}, community_id: {}", .{ username, community_id });
const info = db.queryRow(
struct { account_id: Uuid, hash: []const u8 },
\\SELECT account.id as account_id, password.hash
\\FROM password
\\ JOIN account
\\ JOIN actor
\\ ON password.account_id = account.id AND account.id = actor.id
\\WHERE actor.username = $1
\\ AND actor.community_id = $2
\\LIMIT 1
,
.{ username, community_id },
alloc,
) catch |err| return switch (err) {
error.NoRows => error.InvalidLogin,
else => error.DatabaseFailure,
};
defer alloc.free(info.hash);
try verifyPassword(info.hash, password, alloc);
const token = try generateToken(alloc);
errdefer util.deepFree(alloc, token);
const token_hash = hashToken(token, alloc) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
else => unreachable,
};
defer util.deepFree(alloc, token_hash);
const tx = db.begin() catch return error.DatabaseFailure;
errdefer tx.rollback();
// ensure that the password has not changed in the meantime
{
const updated_info = tx.queryRow(
struct { hash: []const u8 },
\\SELECT hash
\\FROM password
\\WHERE account_id = $1
\\LIMIT 1
,
.{info.account_id},
alloc,
) catch return error.DatabaseFailure;
defer util.deepFree(alloc, updated_info);
if (!std.mem.eql(u8, info.hash, updated_info.hash)) return error.InvalidLogin;
}
tx.insert("token", .{
.account_id = info.account_id,
.hash = token_hash,
.issued_at = DateTime.now(),
}, alloc) catch return error.DatabaseFailure;
tx.commit() catch return error.DatabaseFailure;
return LoginResult{
.token = token,
.user_id = info.account_id,
};
}
pub const VerifyTokenError = error{ InvalidToken, DatabaseFailure, OutOfMemory };
pub const TokenInfo = struct {
user_id: Uuid,
issued_at: DateTime,
};
pub fn verifyToken(
db: anytype,
token: []const u8,
community_id: Uuid,
alloc: std.mem.Allocator,
) VerifyTokenError!TokenInfo {
const hash = try hashToken(token, alloc);
defer alloc.free(hash);
return db.queryRow(
TokenInfo,
\\SELECT token.account_id as user_id, token.issued_at
\\FROM token
\\ JOIN account
\\ JOIN actor
\\ ON token.account_id = account.id AND account.id = actor.id
\\WHERE token.hash = $1 AND actor.community_id = $2
\\LIMIT 1
,
.{ hash, community_id },
alloc,
) catch |err| switch (err) {
error.NoRows => error.InvalidToken,
else => error.DatabaseFailure,
};
}
// We use scrypt, a password hashing algorithm that attempts to slow down
// GPU-based cracking approaches by using large amounts of memory, for
// password hashing.
// Attempting to calculate/verify a hash will use about 50mb of work space.
const scrypt = std.crypto.pwhash.scrypt;
const password_hash_len = 128;
fn verifyPassword(
hash: []const u8,
password: []const u8,
alloc: std.mem.Allocator,
) LoginError!void {
scrypt.strVerify(
hash,
password,
.{ .allocator = alloc },
) catch |err| return switch (err) {
error.PasswordVerificationFailed => error.InvalidLogin,
else => error.HashFailure,
};
}
fn hashPassword(password: []const u8, alloc: std.mem.Allocator) ![]const u8 {
const buf = try alloc.alloc(u8, password_hash_len);
errdefer alloc.free(buf);
return scrypt.strHash(
password,
.{
.allocator = alloc,
.params = scrypt.Params.interactive,
.encoding = .phc,
},
buf,
) catch error.HashFailure;
}
/// A raw token is a sequence of N random bytes, base64 encoded.
/// When the token is generated:
/// - The hash of the token is calculated by:
/// 1. Decoding the base64 text
/// 2. Calculating the SHA256 hash of this text
/// 3. Encoding the hash back as base64
/// - The b64 encoded hash is stored in the database
/// - The original token is returned to the user
/// * The user will treat it as opaque text
/// When the token is verified:
/// - The hash of the token is taken as shown above
/// - The database is scanned for a token matching this hash
/// - If none can be found, the token is invalid
const Sha256 = std.crypto.hash.sha2.Sha256;
const Base64Encoder = std.base64.standard.Encoder;
const Base64Decoder = std.base64.standard.Decoder;
const token_len = 12;
fn generateToken(alloc: std.mem.Allocator) ![]const u8 {
var token = std.mem.zeroes([token_len]u8);
std.crypto.random.bytes(&token);
const token_b64_len = Base64Encoder.calcSize(token.len);
const token_b64 = try alloc.alloc(u8, token_b64_len);
return Base64Encoder.encode(token_b64, &token);
}
fn hashToken(token_b64: []const u8, alloc: std.mem.Allocator) ![]const u8 {
const decoded_token_len = Base64Decoder.calcSizeForSlice(token_b64) catch return error.InvalidToken;
if (decoded_token_len != token_len) return error.InvalidToken;
var token = std.mem.zeroes([token_len]u8);
Base64Decoder.decode(&token, token_b64) catch return error.InvalidToken;
var hash = std.mem.zeroes([Sha256.digest_length]u8);
Sha256.hash(&token, &hash, .{});
const hash_b64_len = Base64Encoder.calcSize(hash.len);
const hash_b64 = try alloc.alloc(u8, hash_b64_len);
return Base64Encoder.encode(hash_b64, &hash);
}


@@ -2,41 +2,16 @@ const std = @import("std");
const builtin = @import("builtin");
const util = @import("util");
const sql = @import("sql");
const common = @import("./common.zig");
const actors = @import("./actors.zig");
const types = @import("./types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
pub const Community = struct {
pub const Kind = enum {
admin,
local,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const Scheme = enum {
https,
http,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
id: Uuid,
owner_id: ?Uuid,
host: []const u8,
name: []const u8,
scheme: Scheme,
kind: Kind,
created_at: DateTime,
};
pub const CreateOptions = struct {
name: ?[]const u8 = null,
kind: Community.Kind = .local,
};
const Community = types.communities.Community;
const Scheme = types.communities.Scheme;
const CreateOptions = types.communities.CreateOptions;
const QueryArgs = types.communities.QueryArgs;
const QueryResult = types.communities.QueryResult;
pub const CreateError = error{
UnsupportedScheme,
@@ -47,7 +22,7 @@ pub const CreateError = error{
pub fn create(db: anytype, origin: []const u8, options: CreateOptions, alloc: std.mem.Allocator) CreateError!Uuid {
const scheme_len = std.mem.indexOfScalar(u8, origin, ':') orelse return error.InvalidOrigin;
const scheme_str = origin[0..scheme_len];
const scheme = std.meta.stringToEnum(Community.Scheme, scheme_str) orelse return error.UnsupportedScheme;
const scheme = std.meta.stringToEnum(Scheme, scheme_str) orelse return error.UnsupportedScheme;
// host must be in the format "{scheme}://{host}"
if (origin.len <= scheme_len + ("://").len or
@@ -165,61 +140,6 @@ pub fn transferOwnership(db: anytype, community_id: Uuid, new_owner: Uuid) !void
) catch return error.DatabaseFailure;
}
pub const QueryArgs = struct {
pub const OrderBy = enum {
name,
host,
created_at,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const Direction = common.Direction;
pub const PageDirection = common.PageDirection;
pub const Prev = std.meta.Child(std.meta.fieldInfo(QueryArgs, .prev).field_type);
pub const OrderVal = std.meta.fieldInfo(Prev, .order_val).field_type;
// Max items to fetch
max_items: usize = 20,
// Selection filters
owner_id: ?Uuid = null, // searches for communities owned by this user
like: ?[]const u8 = null, // searches for communities with host or name LIKE '%?%'
created_before: ?DateTime = null,
created_after: ?DateTime = null,
// Ordering parameter
order_by: OrderBy = .created_at,
direction: Direction = .ascending,
// Page start parameter
// This struct is a reference to the last value scanned
// If prev is present, then prev.order_val must have the same tag as order_by
// "prev" here refers to it being the previous value returned. It may be that
// prev refers to the item directly after the results you are about to receive,
// if you are querying the previous page.
prev: ?struct {
id: Uuid,
order_val: union(OrderBy) {
name: []const u8,
host: []const u8,
created_at: DateTime,
},
} = null,
// What direction to scan the page window
// If "forward", then "prev" is interpreted as the item directly before the items
// to query, in the direction of "direction" above. If "backward", then the opposite
page_direction: PageDirection = .forward,
};
pub const QueryResult = struct {
items: []const Community,
prev_page: QueryArgs,
next_page: QueryArgs,
};
const max_max_items = 100;
pub const QueryError = error{


@@ -1,48 +1,46 @@
const std = @import("std");
const util = @import("util");
const sql = @import("sql");
const types = @import("./types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const Entry = types.drive.DriveEntry;
pub const DriveOwner = union(enum) {
user_id: Uuid,
community_id: Uuid,
};
pub const Entry = struct {
id: Uuid,
owner_id: Uuid,
name: ?[]const u8,
path: []const u8,
parent_directory_id: ?Uuid,
file_id: ?Uuid,
kind: Kind,
};
pub const Kind = enum {
dir,
file,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub fn stat(db: anytype, owner: Uuid, path: []const u8, alloc: std.mem.Allocator) !Entry {
return (db.queryRow(Entry,
fn doGetQuery(db: anytype, comptime clause: []const u8, args: anytype, alloc: std.mem.Allocator) !Entry {
const q = std.fmt.comptimePrint(
\\SELECT id, path, owner_id, name, file_id, kind, parent_directory_id
\\FROM drive_entry_path
\\WHERE owner_id = $1 AND path = ('/' || $2)
\\WHERE {s}
\\LIMIT 1
, .{
owner,
std.mem.trim(u8, path, "/"),
}, alloc) catch |err| switch (err) {
,
.{clause},
);
return db.queryRow(Entry, q, args, alloc) catch |err| switch (err) {
error.NoRows => return error.NotFound,
else => |e| return e,
});
};
}
pub fn stat(db: anytype, owner: Uuid, path: []const u8, alloc: std.mem.Allocator) !Entry {
return try doGetQuery(
db,
"owner_id = $1 AND path = ('/' || $2)",
.{
owner,
std.mem.trim(u8, path, "/"),
},
alloc,
);
}
pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) !Entry {
return try doGetQuery(db, "id = $1", .{id}, alloc);
}
/// Creates a file or directory
pub fn create(db: anytype, owner: Uuid, dir: []const u8, name: []const u8, file_id: ?Uuid, alloc: std.mem.Allocator) !Entry {
pub fn create(db: anytype, owner: Uuid, dir: []const u8, name: []const u8, file_id: ?Uuid, alloc: std.mem.Allocator) !Uuid {
if (name.len == 0) return error.EmptyName;
const id = Uuid.randV4(util.getThreadPrng());
@@ -66,18 +64,7 @@ pub fn create(db: anytype, owner: Uuid, dir: []const u8, name: []const u8, file_
try tx.commit();
const path = try std.mem.join(alloc, "/", if (dir.len == 0) &.{ "", name } else &.{ "", dir, name });
errdefer alloc.free(path);
return Entry{
.id = id,
.owner_id = owner,
.name = try util.deepClone(alloc, name),
.path = path,
.parent_directory_id = parent.id,
.file_id = file_id,
.kind = if (file_id) |_| .file else .dir,
};
return id;
}
pub fn delete(db: anytype, id: Uuid, alloc: std.mem.Allocator) !void {


@@ -1,41 +1,13 @@
const std = @import("std");
const sql = @import("sql");
const util = @import("util");
const types = @import("./types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
pub const FileStatus = enum {
uploading,
uploaded,
external,
deleted,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const FileUpload = struct {
id: Uuid,
owner_id: Uuid,
size: usize,
filename: []const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: bool,
status: FileStatus,
created_at: DateTime,
updated_at: DateTime,
};
pub const FileMeta = struct {
filename: []const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: bool,
};
const FileUpload = types.files.FileUpload;
const CreateOptions = types.files.CreateOptions;
const UpdateArgs = types.files.UpdateArgs;
pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) !FileUpload {
return try db.queryRow(
@@ -60,26 +32,7 @@ pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) !FileUpload {
);
}
pub const PartialMeta = Partial(FileMeta);
pub fn Partial(comptime T: type) type {
const t_fields = std.meta.fields(T);
var fields: [t_fields.len]std.builtin.Type.StructField = undefined;
for (std.meta.fields(T)) |f, i| fields[i] = .{
.name = f.name,
.field_type = ?f.field_type,
.default_value = &@as(?f.field_type, null),
.is_comptime = false,
.alignment = @alignOf(?f.field_type),
};
return @Type(.{ .Struct = .{
.layout = .Auto,
.fields = &fields,
.decls = &.{},
.is_tuple = false,
} });
}
pub fn update(db: anytype, id: Uuid, meta: PartialMeta, alloc: std.mem.Allocator) !void {
pub fn update(db: anytype, id: Uuid, meta: UpdateArgs, alloc: std.mem.Allocator) !void {
var builder = sql.QueryBuilder.init(alloc);
defer builder.deinit();
@ -106,7 +59,7 @@ pub fn update(db: anytype, id: Uuid, meta: PartialMeta, alloc: std.mem.Allocator
}, alloc);
}
pub fn create(db: anytype, owner_id: Uuid, meta: FileMeta, data: []const u8, alloc: std.mem.Allocator) !Uuid {
pub fn create(db: anytype, owner_id: Uuid, meta: CreateOptions, data: []const u8, alloc: std.mem.Allocator) !Uuid {
const id = Uuid.randV4(util.getThreadPrng());
const now = DateTime.now();
try db.insert("file_upload", .{
@ -120,7 +73,7 @@ pub fn create(db: anytype, owner_id: Uuid, meta: FileMeta, data: []const u8, all
.content_type = meta.content_type,
.sensitive = meta.sensitive,
.status = FileStatus.uploading,
.status = FileUpload.Status.uploading,
.created_at = now,
.updated_at = now,

View file

@ -1,20 +1,13 @@
const std = @import("std");
const util = @import("util");
const sql = @import("sql");
const common = @import("./common.zig");
const types = @import("./types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
pub const Follow = struct {
id: Uuid,
followed_by_id: Uuid,
followee_id: Uuid,
created_at: DateTime,
};
const QueryArgs = types.follows.QueryArgs;
const QueryResult = types.follows.QueryResult;
const Follow = types.follows.Follow;
pub fn create(db: anytype, followed_by_id: Uuid, followee_id: Uuid, alloc: std.mem.Allocator) !void {
if (Uuid.eql(followed_by_id, followee_id)) return error.SelfFollow;
@ -46,41 +39,6 @@ pub fn delete(db: anytype, followed_by_id: Uuid, followee_id: Uuid, alloc: std.m
const max_max_items = 100;
pub const QueryArgs = struct {
pub const Direction = common.Direction;
pub const PageDirection = common.PageDirection;
pub const Prev = std.meta.Child(std.meta.fieldInfo(@This(), .prev).field_type);
pub const OrderBy = enum {
created_at,
};
max_items: usize = 20,
followed_by_id: ?Uuid = null,
followee_id: ?Uuid = null,
order_by: OrderBy = .created_at,
direction: Direction = .descending,
prev: ?struct {
id: Uuid,
order_val: union(OrderBy) {
created_at: DateTime,
},
} = null,
page_direction: PageDirection = .forward,
};
pub const QueryResult = struct {
items: []Follow,
prev_page: QueryArgs,
next_page: QueryArgs,
};
pub fn query(db: anytype, args: QueryArgs, alloc: std.mem.Allocator) !QueryResult {
var builder = sql.QueryBuilder.init(alloc);
defer builder.deinit();

View file

@ -1,9 +1,11 @@
const std = @import("std");
const builtin = @import("builtin");
const util = @import("util");
const types = @import("./types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const Invite = types.invites.Invite;
// 9 random bytes = 12 random b64
const rand_len = 8;
@ -12,40 +14,11 @@ const code_len = 12;
const Encoder = std.base64.url_safe.Encoder;
const Decoder = std.base64.url_safe.Decoder;
pub const Kind = enum {
system,
community_owner,
user,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
const InviteCount = usize;
pub const Invite = struct {
id: Uuid,
created_by: Uuid, // User ID
community_id: Uuid,
name: []const u8,
code: []const u8,
created_at: DateTime,
times_used: InviteCount,
expires_at: ?DateTime,
max_uses: ?InviteCount,
kind: Kind,
};
pub const InviteOptions = struct {
name: ?[]const u8 = null,
max_uses: ?InviteCount = null,
lifespan: ?DateTime.Duration = null,
kind: Kind = .user,
};
pub fn create(db: anytype, created_by: Uuid, community_id: ?Uuid, options: InviteOptions, alloc: std.mem.Allocator) !Uuid {
pub fn create(
db: anytype,
options: types.invites.CreateOptions,
alloc: std.mem.Allocator,
) !Uuid {
const id = Uuid.randV4(util.getThreadPrng());
var code_bytes: [rand_len]u8 = undefined;
@ -55,7 +28,6 @@ pub fn create(db: anytype, created_by: Uuid, community_id: ?Uuid, options: Invit
defer alloc.free(code);
_ = Encoder.encode(code, &code_bytes);
const name = options.name orelse code;
const created_at = DateTime.now();
try db.insert(
@ -63,9 +35,9 @@ pub fn create(db: anytype, created_by: Uuid, community_id: ?Uuid, options: Invit
.{
.id = id,
.created_by = created_by,
.community_id = community_id,
.name = name,
.created_by = options.created_by,
.community_id = options.community_id,
.name = options.name,
.code = code,
.max_uses = options.max_uses,

View file

@ -1,29 +1,13 @@
const std = @import("std");
const util = @import("util");
const sql = @import("sql");
const common = @import("./common.zig");
const types = @import("./types.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
pub const Note = struct {
id: Uuid,
author_id: Uuid,
content: []const u8,
created_at: DateTime,
};
pub const NoteDetailed = struct {
id: Uuid,
author: struct {
id: Uuid,
username: []const u8,
},
content: []const u8,
created_at: DateTime,
};
const Note = types.notes.Note;
const QueryArgs = types.notes.QueryArgs;
const QueryResult = types.notes.QueryResult;
pub const CreateError = error{
DatabaseFailure,
@ -58,10 +42,27 @@ const selectStarFromNote = std.fmt.comptimePrint(
pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Note {
return db.queryRow(
Note,
selectStarFromNote ++
\\WHERE id = $1
\\LIMIT 1
,
\\SELECT
\\ note.id,
\\ note.content,
\\ note.created_at,
\\ actor.id AS "author.id",
\\ actor.username AS "author.username",
\\ community.host AS "author.host",
\\ actor.display_name AS "author.display_name",
\\ actor.bio AS "author.bio",
\\ actor.avatar_file_id AS "author.avatar_file_id",
\\ actor.header_file_id AS "author.header_file_id",
\\ actor.profile_fields AS "author.profile_fields",
\\ actor.community_id AS "author.community_id",
\\ actor.created_at AS "author.created_at",
\\ actor.updated_at AS "author.updated_at"
\\FROM note
\\ JOIN actor ON actor.id = note.author_id
\\ JOIN community ON community.id = actor.community_id
\\WHERE id = $1
\\LIMIT 1
,
.{id},
alloc,
) catch |err| switch (err) {
@ -72,40 +73,29 @@ pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Note {
const max_max_items = 100;
pub const QueryArgs = struct {
pub const PageDirection = common.PageDirection;
pub const Prev = std.meta.Child(std.meta.fieldInfo(@This(), .prev).field_type);
max_items: usize = 20,
created_before: ?DateTime = null,
created_after: ?DateTime = null,
community_id: ?Uuid = null,
followed_by: ?Uuid = null,
prev: ?struct {
id: Uuid,
created_at: DateTime,
} = null,
page_direction: PageDirection = .forward,
};
pub const QueryResult = struct {
items: []NoteDetailed,
prev_page: QueryArgs,
next_page: QueryArgs,
};
pub fn query(db: anytype, args: QueryArgs, alloc: std.mem.Allocator) !QueryResult {
var builder = sql.QueryBuilder.init(alloc);
defer builder.deinit();
try builder.appendSlice(
\\SELECT note.id, note.content, note.created_at, actor.id AS "author.id", actor.username AS "author.username"
\\SELECT
\\ note.id,
\\ note.content,
\\ note.created_at,
\\ actor.id AS "author.id",
\\ actor.username AS "author.username",
\\ community.host AS "author.host",
\\ actor.display_name AS "author.display_name",
\\ actor.bio AS "author.bio",
\\ actor.avatar_file_id AS "author.avatar_file_id",
\\ actor.header_file_id AS "author.header_file_id",
\\ actor.profile_fields AS "author.profile_fields",
\\ actor.community_id AS "author.community_id",
\\ actor.created_at AS "author.created_at",
\\ actor.updated_at AS "author.updated_at"
\\FROM note
\\ JOIN actor ON actor.id = note.author_id
\\ JOIN community ON community.id = actor.community_id
\\
);
@ -153,7 +143,7 @@ pub fn query(db: anytype, args: QueryArgs, alloc: std.mem.Allocator) !QueryResul
};
const results = try db.queryRowsWithOptions(
NoteDetailed,
Note,
try builder.terminate(),
query_args,
max_items,

View file

@ -0,0 +1,35 @@
const std = @import("std");
const util = @import("util");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const Token = @import("./types.zig").tokens.Token;
pub fn create(db: anytype, account_id: Uuid, hash: []const u8, alloc: std.mem.Allocator) !void {
const now = DateTime.now();
try db.insert("token", .{
.account_id = account_id,
.hash = hash,
.issued_at = now,
}, alloc);
}
pub fn getByHash(db: anytype, hash: []const u8, community_id: Uuid, alloc: std.mem.Allocator) !Token {
return db.queryRow(
Token,
\\SELECT account_id, issued_at, hash
\\FROM token
\\ JOIN account
\\ JOIN actor
\\ ON token.account_id = account.id AND account.id = actor.id
\\WHERE token.hash = $1 AND actor.community_id = $2
\\LIMIT 1
,
.{ hash, community_id },
alloc,
) catch |err| switch (err) {
error.NoRows => error.InvalidToken,
else => error.DatabaseFailure,
};
}

370
src/api/services/types.zig Normal file
View file

@ -0,0 +1,370 @@
const util = @import("util");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
const common = struct {
const Direction = enum {
ascending,
descending,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
const PageDirection = enum {
forward,
backward,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
fn QueryResult(comptime R: type, comptime A: type) type {
return struct {
items: []R,
next_page: A,
prev_page: A,
};
}
};
pub const accounts = struct {
pub const Role = enum {
user,
admin,
};
pub const CreateArgs = struct {
for_actor: Uuid,
password_hash: []const u8,
invite_id: ?Uuid = null,
email: ?[]const u8 = null,
role: Role = .user,
};
pub const Credentials = struct {
account_id: Uuid,
password_hash: []const u8,
};
};
pub const actors = struct {
pub const Actor = struct {
id: Uuid,
username: []const u8,
host: []const u8,
community_id: Uuid,
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: []const ProfileField,
created_at: DateTime,
updated_at: DateTime,
pub const sql_serialize = struct {
pub const profile_fields = .json;
};
};
pub const ProfileField = struct {
key: []const u8,
value: []const u8,
};
// TODO: get rid of this
pub const Profile = struct {
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
header_file_id: ?Uuid,
profile_fields: []const ProfileField,
pub const sql_serialize = struct {
pub const profile_fields = .json;
};
};
pub const ProfileUpdateArgs = struct {
display_name: ??[]const u8,
bio: ?[]const u8,
avatar_file_id: ??Uuid,
header_file_id: ??Uuid,
profile_fields: ?[]const ProfileField,
pub const sql_serialize = struct {
pub const profile_fields = .json;
};
};
};
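// Hedged sketch: the double optionals in ProfileUpdateArgs distinguish
// "leave the field unchanged" (outer null) from "clear the field"
// (outer set, inner null). `clear_avatar` is illustrative only.
const clear_avatar = actors.ProfileUpdateArgs{
    .display_name = null, // outer null: leave unchanged
    .bio = null,
    .avatar_file_id = @as(?Uuid, null), // present but null: clear the avatar
    .header_file_id = null,
    .profile_fields = null,
};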
pub const communities = struct {
pub const Community = struct {
id: Uuid,
owner_id: ?Uuid,
host: []const u8,
name: []const u8,
scheme: Scheme,
kind: Kind,
created_at: DateTime,
};
pub const Kind = enum {
admin,
local,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const Scheme = enum {
https,
http,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const CreateOptions = struct {
name: ?[]const u8 = null,
kind: Kind = .local,
};
pub const QueryArgs = struct {
pub const OrderBy = enum {
name,
host,
created_at,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const Direction = common.Direction;
pub const PageDirection = common.PageDirection;
pub const Prev = struct {
id: Uuid,
order_val: OrderVal,
};
pub const OrderVal = union(OrderBy) {
name: []const u8,
host: []const u8,
created_at: DateTime,
};
// Max items to fetch
max_items: usize = 20,
// Selection filters
owner_id: ?Uuid = null, // searches for communities owned by this user
like: ?[]const u8 = null, // searches for communities with host or name LIKE '%?%'
created_before: ?DateTime = null,
created_after: ?DateTime = null,
// Ordering parameter
order_by: OrderBy = .created_at,
direction: Direction = .ascending,
// Page start parameter(s)
// This struct is a reference to the last value scanned
// If prev is present, then prev.order_val must have the same tag as order_by
// "prev" here refers to it being the previous value returned. It may be that
// prev refers to the item directly after the results you are about to receive,
// if you are querying the previous page.
prev: ?Prev = null,
// What direction to scan the page window
// If "forward", then "prev" is interpreted as the item directly before the items
// to query, in the direction of "direction" above. If "backward", then the opposite
page_direction: PageDirection = .forward,
};
pub const QueryResult = common.QueryResult(Community, QueryArgs);
};
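// Hedged sketch: how the keyset-pagination fields above are meant to be used.
// `last` is whatever Community ended the previous page; only QueryArgs and
// Community are real, the helper itself is hypothetical.
fn nextCommunityPage(last: communities.Community) communities.QueryArgs {
    return .{
        .order_by = .created_at,
        .direction = .ascending,
        // prev.order_val must carry the same tag as order_by (see the comment above)
        .prev = .{
            .id = last.id,
            .order_val = .{ .created_at = last.created_at },
        },
        .page_direction = .forward,
    };
}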
pub const drive = struct {
pub const DriveEntry = struct {
pub const Kind = enum {
dir,
file,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
id: Uuid,
owner_id: Uuid,
name: ?[]const u8,
path: []const u8,
parent_directory_id: ?Uuid,
file_id: ?Uuid,
kind: Kind,
};
};
pub const files = struct {
pub const FileUpload = struct {
pub const Status = enum {
uploading,
uploaded,
external,
deleted,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
id: Uuid,
owner_id: Uuid,
size: usize,
filename: []const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: bool,
status: Status,
created_at: DateTime,
updated_at: DateTime,
};
pub const CreateOptions = struct {
filename: []const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: bool,
};
pub const UpdateArgs = struct {
filename: ?[]const u8,
description: ?[]const u8,
content_type: ?[]const u8,
sensitive: ?bool,
};
};
pub const invites = struct {
pub const UseCount = usize;
pub const Invite = struct {
id: Uuid,
created_by: Uuid, // User ID
community_id: Uuid,
name: []const u8,
code: []const u8,
created_at: DateTime,
times_used: UseCount,
expires_at: ?DateTime,
max_uses: ?UseCount,
kind: Kind,
};
pub const Kind = enum {
system,
community_owner,
user,
pub const jsonStringify = util.jsonSerializeEnumAsString;
};
pub const CreateOptions = struct {
created_by: Uuid,
community_id: Uuid,
name: ?[]const u8 = null,
max_uses: ?UseCount = null,
lifespan: ?DateTime.Duration = null,
kind: Kind = .user,
};
};
pub const follows = struct {
pub const Follow = struct {
id: Uuid,
followed_by_id: Uuid,
followee_id: Uuid,
created_at: DateTime,
};
pub const QueryArgs = struct {
pub const OrderBy = enum {
created_at,
};
pub const Direction = common.Direction;
pub const PageDirection = common.PageDirection;
pub const Prev = struct {
id: Uuid,
order_val: union(OrderBy) {
created_at: DateTime,
},
};
max_items: usize = 20,
followed_by_id: ?Uuid = null,
followee_id: ?Uuid = null,
order_by: OrderBy = .created_at,
direction: Direction = .descending,
prev: ?Prev = null,
page_direction: PageDirection = .forward,
};
pub const QueryResult = common.QueryResult(Follow, QueryArgs);
};
pub const notes = struct {
pub const Note = struct {
id: Uuid,
author: actors.Actor, // TODO
content: []const u8,
created_at: DateTime,
// TODO: This sucks
pub const sql_serialize = struct {
pub const @"author.profile_fields" = .json;
};
};
pub const QueryArgs = struct {
pub const PageDirection = common.PageDirection;
pub const Prev = struct {
id: Uuid,
created_at: DateTime,
};
max_items: usize = 20,
created_before: ?DateTime = null,
created_after: ?DateTime = null,
community_id: ?Uuid = null,
followed_by: ?Uuid = null,
prev: ?Prev = null,
page_direction: PageDirection = .forward,
};
pub const QueryResult = common.QueryResult(Note, QueryArgs);
};
pub const tokens = struct {
pub const Token = struct {
account_id: Uuid,
issued_at: DateTime,
hash: []const u8,
};
};

201
src/api/types.zig Normal file
View file

@ -0,0 +1,201 @@
const util = @import("util");
const services = @import("./services.zig");
const Uuid = util.Uuid;
const DateTime = util.DateTime;
fn QueryResult(comptime R: type, comptime A: type) type {
return struct {
items: []R,
next_page: A,
prev_page: A,
};
}
pub const auth = struct {
pub const RegistrationOptions = struct {
username: []const u8,
password: []const u8,
invite_code: ?[]const u8 = null,
email: ?[]const u8 = null,
};
};
pub const actors = struct {
pub const Actor = struct {
id: Uuid,
username: []const u8,
host: []const u8,
display_name: ?[]const u8,
bio: []const u8,
avatar_file_id: ?Uuid,
avatar_url: []const u8,
header_file_id: ?Uuid,
header_url: ?[]const u8,
profile_fields: []const ProfileField,
community_id: Uuid,
created_at: DateTime,
updated_at: DateTime,
};
pub const ProfileField = services.actors.ProfileField;
pub const ProfileUpdateArgs = services.actors.ProfileUpdateArgs;
};
pub const communities = struct {
pub const Community = services.communities.Community;
pub const QueryArgs = services.communities.QueryArgs;
pub const QueryResult = services.communities.QueryResult;
};
pub const drive = struct {
pub const DriveEntry = union(enum) {
pub const Kind = services.drive.DriveEntry.Kind;
dir: struct {
id: Uuid,
owner_id: Uuid,
name: ?[]const u8,
path: []const u8,
parent_directory_id: ?Uuid,
kind: Kind = .dir,
// If null = not enumerated
children: ?[]const DriveEntry,
},
file: struct {
id: Uuid,
owner_id: Uuid,
name: ?[]const u8,
path: []const u8,
parent_directory_id: ?Uuid,
kind: Kind = .file,
meta: files.FileUpload,
},
};
pub const UploadArgs = struct {
filename: []const u8,
dir: []const u8,
description: ?[]const u8,
content_type: []const u8,
sensitive: bool,
};
};
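// Hedged sketch: because DriveEntry is a tagged union, consumers switch on the
// variant instead of checking a `kind` field. `entryPath` is illustrative only.
fn entryPath(entry: drive.DriveEntry) []const u8 {
    return switch (entry) {
        .dir => |d| d.path,
        .file => |f| f.path,
    };
}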
pub const files = struct {
pub const FileUpload = services.files.FileUpload;
pub const UpdateArgs = services.files.UpdateArgs;
pub const DerefResult = struct {
meta: FileUpload,
data: []const u8,
};
};
pub const follows = struct {
pub const Follow = services.follows.Follow;
const QueryArgs = struct {
pub const OrderBy = services.follows.QueryArgs.OrderBy;
pub const Direction = services.follows.QueryArgs.Direction;
pub const PageDirection = services.follows.QueryArgs.PageDirection;
pub const Prev = services.follows.QueryArgs.Prev;
max_items: usize = 20,
order_by: OrderBy = .created_at,
direction: Direction = .descending,
prev: ?Prev = null,
page_direction: PageDirection = .forward,
};
pub const FollowerQueryArgs = QueryArgs;
pub const FollowingQueryArgs = QueryArgs;
pub const FollowerQueryResult = QueryResult(Follow, FollowerQueryArgs);
pub const FollowingQueryResult = QueryResult(Follow, FollowingQueryArgs);
};
pub const invites = struct {
pub const UseCount = services.invites.UseCount;
pub const Invite = struct {
id: Uuid,
created_by: Uuid, // User ID
community_id: Uuid,
name: []const u8,
code: []const u8,
url: []const u8,
created_at: DateTime,
times_used: UseCount,
expires_at: ?DateTime,
max_uses: ?UseCount,
kind: Kind,
};
pub const Kind = services.invites.Kind;
pub const CreateOptions = struct {
name: ?[]const u8 = null,
lifespan: ?DateTime.Duration = null,
max_uses: ?usize = null,
// admin only options
kind: Kind = .user,
to_community: ?Uuid = null,
};
};
pub const notes = struct {
pub const Note = services.notes.Note;
pub const QueryArgs = services.notes.QueryArgs;
};
pub const timelines = struct {
pub const TimelineArgs = struct {
pub const PageDirection = notes.QueryArgs.PageDirection;
pub const Prev = notes.QueryArgs.Prev;
max_items: usize = 20,
created_before: ?DateTime = null,
created_after: ?DateTime = null,
prev: ?Prev = null,
page_direction: PageDirection = .forward,
};
pub const TimelineResult = struct {
items: []notes.Note,
prev_page: TimelineArgs,
next_page: TimelineArgs,
};
};
pub const tokens = struct {
pub const Token = struct {
pub const Info = struct {
account_id: Uuid,
issued_at: DateTime,
};
value: []const u8,
info: Info,
};
};

View file

@ -265,3 +265,207 @@ fn parseEncoding(encoding: ?[]const u8) !Encoding {
if (std.mem.eql(u8, encoding.?, "chunked")) return .chunked;
return error.UnsupportedMediaType;
}
fn isTokenChar(ch: u8) bool {
switch (ch) {
'"', '(', ')', '/', ':', ';', '<', '=', '>', '?', '@', '[', '\\', ']', '{', '}' => return false,
'!', '#', '$', '%', '&', '\'', '*', '+', '-', '.', '^', '_', '`', '|', '~' => return true,
else => return std.ascii.isAlphanumeric(ch),
}
}
// Parses a quoted-string (rfc 9110) off the stream. Backslash escape sequences (quoted-pairs) are unescaped.
// The caller takes responsibility for deallocating the memory returned.
pub fn parseQuotedString(alloc: std.mem.Allocator, peek_stream: anytype) ![]const u8 {
const reader = peek_stream.reader();
var data = std.ArrayList(u8).init(alloc);
errdefer data.deinit();
{
const start = try reader.readByte();
if (start != '"') {
try peek_stream.putBackByte(start);
return error.MissingStartQuote;
}
}
while (true) {
const ch = switch (try reader.readByte()) {
'\t', ' ', '!', 0x23...0x5b, 0x5d...0x7e, 0x80...0xff => |c| c,
'\\' => switch (try reader.readByte()) {
'\t', ' ', 0x21...0x7e, 0x80...0xff => |c| c,
else => return error.UnexpectedChar,
},
'"' => break,
else => return error.UnexpectedChar,
};
try data.append(ch);
}
return data.toOwnedSlice();
}
test "parseQuotedString" {
const testCase = struct {
fn func(data: []const u8, stream_error: ?anyerror, expected: anyerror![]const u8, remaining: []const u8) !void {
var fbs = std.io.fixedBufferStream(data);
var stream = errorReader(stream_error orelse error.EndOfStream, fbs.reader());
var peeker = std.io.peekStream(1, stream.reader());
const result = parseQuotedString(std.testing.allocator, &peeker);
defer if (result) |v| std.testing.allocator.free(v) else |_| {};
if (expected) |val|
try std.testing.expectEqualStrings(val, try result)
else |expected_err|
try std.testing.expectError(expected_err, result);
try std.testing.expect(try peeker.reader().isBytes(remaining));
try std.testing.expectError(stream_error orelse error.EndOfStream, peeker.reader().readByte());
}
}.func;
try testCase("\"abcdefg\"", null, "abcdefg", "");
try testCase("\"abcdefg\"abcd", null, "abcdefg", "abcd");
try testCase("\"xyz\\\"z\"", null, "xyz\"z", "");
try testCase("\"xyz\\\\z\"", null, "xyz\\z", "");
try testCase("\"💯\"", null, "💯", "");
try testCase("abcdefg\"abcd", null, error.MissingStartQuote, "abcdefg\"abcd");
try testCase("\"abcdefg", null, error.EndOfStream, "");
try testCase("\"abcdefg", error.ClosedPipe, error.ClosedPipe, "");
}
// Attempts to parse a token (rfc 9110) off the stream. It stops at the first non-token
// char. Said char remains on the stream. If the token is empty, returns error.EmptyToken.
// The caller takes responsibility for deallocating the memory returned.
pub fn parseToken(alloc: std.mem.Allocator, peek_stream: anytype) ![]const u8 {
var data = std.ArrayList(u8).init(alloc);
errdefer data.deinit();
const reader = peek_stream.reader();
while (reader.readByte()) |ch| {
if (!isTokenChar(ch)) {
try peek_stream.putBackByte(ch);
break;
}
try data.append(ch);
} else |err| if (err != error.EndOfStream) return err;
if (data.items.len == 0) return error.EmptyToken;
return data.toOwnedSlice();
}
test "parseToken" {
const testCase = struct {
fn func(data: []const u8, stream_error: ?anyerror, expected: anyerror![]const u8, remaining: []const u8) !void {
var fbs = std.io.fixedBufferStream(data);
var stream = errorReader(stream_error orelse error.EndOfStream, fbs.reader());
var peeker = std.io.peekStream(1, stream.reader());
const result = parseToken(std.testing.allocator, &peeker);
defer if (result) |v| std.testing.allocator.free(v) else |_| {};
if (expected) |val|
try std.testing.expectEqualStrings(val, try result)
else |expected_err|
try std.testing.expectError(expected_err, result);
try std.testing.expect(try peeker.reader().isBytes(remaining));
try std.testing.expectError(stream_error orelse error.EndOfStream, peeker.reader().readByte());
}
}.func;
try testCase("abcdefg", null, "abcdefg", "");
try testCase("abc defg", null, "abc", " defg");
try testCase("abc;defg", null, "abc", ";defg");
try testCase("abc%defg$; ", null, "abc%defg$", "; ");
try testCase(" ", null, error.EmptyToken, " ");
try testCase(";", null, error.EmptyToken, ";");
try testCase("abcdefg", error.ClosedPipe, error.ClosedPipe, "");
}
// Parses a token or quoted string (rfc 9110) off the stream, as appropriate.
// The caller takes responsibility for deallocating the memory returned.
pub fn parseTokenOrQuotedString(alloc: std.mem.Allocator, peek_stream: anytype) ![]const u8 {
return parseToken(alloc, peek_stream) catch |err| switch (err) {
error.EmptyToken => return try parseQuotedString(alloc, peek_stream),
else => |e| return e,
};
}
test "parseTokenOrQuotedString" {
const testCase = struct {
fn func(data: []const u8, stream_error: ?anyerror, expected: anyerror![]const u8, remaining: []const u8) !void {
var fbs = std.io.fixedBufferStream(data);
var stream = errorReader(stream_error orelse error.EndOfStream, fbs.reader());
var peeker = std.io.peekStream(1, stream.reader());
const result = parseTokenOrQuotedString(std.testing.allocator, &peeker);
defer if (result) |v| std.testing.allocator.free(v) else |_| {};
if (expected) |val|
try std.testing.expectEqualStrings(val, try result)
else |expected_err|
try std.testing.expectError(expected_err, result);
try std.testing.expect(try peeker.reader().isBytes(remaining));
try std.testing.expectError(stream_error orelse error.EndOfStream, peeker.reader().readByte());
}
}.func;
try testCase("abcdefg", null, "abcdefg", "");
try testCase("abc defg", null, "abc", " defg");
try testCase("abc;defg", null, "abc", ";defg");
try testCase("abc%defg$; ", null, "abc%defg$", "; ");
try testCase("\"abcdefg\"", null, "abcdefg", "");
try testCase("\"abcdefg\"abcd", null, "abcdefg", "abcd");
try testCase("\"xyz\\\"z\"", null, "xyz\"z", "");
try testCase("\"xyz\\\\z\"", null, "xyz\\z", "");
try testCase("\"💯\"", null, "💯", "");
try testCase(" ", null, error.MissingStartQuote, " ");
try testCase(";", null, error.MissingStartQuote, ";");
try testCase("\"abcdefg", null, error.EndOfStream, "");
try testCase("abcdefg", error.ClosedPipe, error.ClosedPipe, "");
try testCase("\"abcdefg", error.ClosedPipe, error.ClosedPipe, "");
}
fn ErrorReader(comptime E: type, comptime ReaderType: type) type {
return struct {
inner_reader: ReaderType,
err: E,
pub const Error = ReaderType.Error || E;
pub const Reader = std.io.Reader(*@This(), Error, read);
pub fn read(self: *@This(), dest: []u8) Error!usize {
const count = try self.inner_reader.readAll(dest);
if (count == 0) return self.err;
return dest.len;
}
pub fn reader(self: *@This()) Reader {
return .{ .context = self };
}
};
}
/// Returns the given error after the underlying stream is finished
fn errorReader(err: anytype, reader: anytype) ErrorReader(@TypeOf(err), @TypeOf(reader)) {
return .{
.inner_reader = reader,
.err = err,
};
}

View file

@ -282,7 +282,7 @@ pub const Response = struct {
pub fn template(self: *Self, status_code: http.Status, srv: anytype, comptime templ: []const u8, data: anytype) !void {
try self.headers.put("Content-Type", "text/html");
const user = if (srv.user_id) |uid| try srv.getUser(uid) else null;
const user = if (srv.context.userId()) |uid| try srv.getActor(uid) else null;
defer util.deepFree(srv.allocator, user);
var stream = try self.open(status_code);
@ -298,9 +298,8 @@ pub const Response = struct {
@embedFile("./controllers/web/_format.tmpl.html"),
data,
.{
.community = srv.community,
.community = srv.context.community,
.user = user,
.user_id = srv.user_id,
},
);

View file

@ -24,8 +24,10 @@ pub const verify_login = struct {
pub const path = "/auth/login";
pub fn handler(_: anytype, res: anytype, srv: anytype) !void {
const info = try srv.verifyAuthorization();
try res.json(.ok, info);
if (srv.context.token_info) |token| {
return try res.json(.ok, token);
} else {
return try res.status(.unauthorized);
}
}
};

View file

@ -2,7 +2,7 @@ const api = @import("api");
const util = @import("util");
const controller_utils = @import("../../controllers.zig").helpers;
const QueryArgs = api.CommunityQueryArgs;
const QueryArgs = api.communities.QueryArgs;
pub const create = struct {
pub const method = .POST;
@ -25,9 +25,9 @@ pub const query = struct {
pub const path = "/communities";
pub const Query = struct {
const OrderBy = api.CommunityQueryArgs.OrderBy;
const Direction = api.CommunityQueryArgs.Direction;
const PageDirection = api.CommunityQueryArgs.PageDirection;
const OrderBy = QueryArgs.OrderBy;
const Direction = QueryArgs.Direction;
const PageDirection = QueryArgs.PageDirection;
// Max items to fetch
max_items: usize = 20,
@ -80,7 +80,7 @@ pub const query = struct {
});
const convert = struct {
fn func(args: api.CommunityQueryArgs) Query {
fn func(args: QueryArgs) Query {
return .{
.max_items = args.max_items,
.owner_id = args.owner_id,

View file

@ -98,12 +98,14 @@ pub const update = struct {
};
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
const result = try srv.driveUpdate(req.args.path, .{
try srv.driveUpdate(req.args.path, .{
.filename = req.body.meta.filename,
.description = req.body.meta.description,
.content_type = req.body.meta.content_type,
.sensitive = req.body.meta.sensitive,
});
const result = try srv.driveGet(req.args.path);
defer util.deepFree(srv.allocator, result);
try res.json(.ok, result);
}
@ -117,7 +119,9 @@ pub const move = struct {
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
const destination = req.headers.get("Destination") orelse return error.NoDestination;
const result = try srv.driveMove(req.args.path, destination);
try srv.driveMove(req.args.path, destination);
const result = try srv.driveGet(req.args.path);
defer util.deepFree(srv.allocator, result);
try res.headers.put("Location", destination);

View file

@ -4,7 +4,7 @@ pub const create = struct {
pub const method = .POST;
pub const path = "/invites";
pub const Body = api.InviteOptions;
pub const Body = api.invites.CreateOptions;
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
// No need to free because it will be freed when the api conn

View file

@ -2,11 +2,13 @@ const std = @import("std");
const api = @import("api");
const controller_utils = @import("../../controllers.zig").helpers;
const TimelineArgs = api.timelines.TimelineArgs;
pub const global = struct {
pub const method = .GET;
pub const path = "/timelines/global";
pub const Query = api.TimelineArgs;
pub const Query = TimelineArgs;
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
const results = try srv.globalTimeline(req.query);
@ -18,7 +20,7 @@ pub const local = struct {
pub const method = .GET;
pub const path = "/timelines/local";
pub const Query = api.TimelineArgs;
pub const Query = TimelineArgs;
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
const results = try srv.localTimeline(req.query);
@ -30,7 +32,7 @@ pub const home = struct {
pub const method = .GET;
pub const path = "/timelines/home";
pub const Query = api.TimelineArgs;
pub const Query = TimelineArgs;
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
const results = try srv.homeTimeline(req.query);

View file

@ -14,10 +14,12 @@ pub const create = struct {
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
const options = .{
.username = req.body.username,
.password = req.body.password,
.invite_code = req.body.invite_code,
.email = req.body.email,
};
const user = srv.register(req.body.username, req.body.password, options) catch |err| switch (err) {
const user = srv.register(options) catch |err| switch (err) {
error.UsernameTaken => return res.err(.unprocessable_entity, "Username Unavailable", {}),
else => return err,
};
@ -35,7 +37,7 @@ pub const get = struct {
};
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
const result = try srv.getUser(req.args.id);
const result = try srv.getActor(req.args.id);
defer util.deepFree(srv.allocator, result);
try res.json(.ok, result);
@ -50,13 +52,13 @@ pub const update_profile = struct {
id: util.Uuid,
};
pub const Body = api.PartialUserProfile;
pub const Body = api.actors.ProfileUpdateArgs;
// TODO: I don't like that the request body dn response body are substantially different
// TODO: I don't like that the request body and response body are substantially different
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
try srv.updateUserProfile(req.args.id, req.body);
const result = try srv.getUser(req.args.id);
const result = try srv.getActor(req.args.id);
defer util.deepFree(srv.allocator, result);
try res.json(.ok, result);

View file

@ -42,7 +42,7 @@ pub const query_followers = struct {
id: Uuid,
};
pub const Query = api.FollowingQueryArgs;
pub const Query = api.follows.FollowingQueryArgs;
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
const results = try srv.queryFollowers(req.args.id, req.query);
@ -59,7 +59,7 @@ pub const query_following = struct {
id: Uuid,
};
pub const Query = api.FollowerQueryArgs;
pub const Query = api.follows.FollowerQueryArgs;
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
const results = try srv.queryFollowing(req.args.id, req.query);

View file

@ -61,7 +61,7 @@ const index = struct {
pub const method = .GET;
pub fn handler(_: anytype, res: anytype, srv: anytype) !void {
if (srv.user_id == null) {
if (srv.context.userId() == null) {
try res.headers.put("Location", about.path);
return res.status(.see_other);
}
@ -96,7 +96,7 @@ const login = struct {
try res.headers.put("Location", index.path);
var buf: [64]u8 = undefined;
const cookie_name = try std.fmt.bufPrint(&buf, "token.{s}", .{req.body.username});
try res.headers.setCookie(cookie_name, token.token, .{});
try res.headers.setCookie(cookie_name, token.value, .{});
try res.headers.setCookie("active_account", req.body.username, .{ .HttpOnly = false });
try res.status(.see_other);
@ -114,14 +114,19 @@ const signup = struct {
srv: anytype,
) !void {
const invite = if (invite_code) |code| srv.validateInvite(code) catch |err| switch (err) {
error.InvalidInvite => return servePage(null, "Invite is not valid", .bad_request, res, srv),
//error.InvalidInvite => return servePage(null, "Invite is not valid", .bad_request, res, srv),
else => |e| return e,
} else null;
defer util.deepFree(srv.allocator, invite);
const creator = if (invite) |inv| try srv.getActor(inv.created_by) else null;
defer util.deepFree(srv.allocator, creator);
try res.template(status, srv, tmpl, .{
.error_msg = error_msg,
.invite = invite,
.invite = if (invite) |inv| .{
.meta = inv,
.creator = creator.?,
} else null,
});
}
@ -169,7 +174,7 @@ const signup = struct {
error.UsernameEmpty => "Username cannot be empty",
error.UsernameContainsInvalidChar => "Username must be composed of alphanumeric characters and underscore",
error.UsernameTooLong => "Username too long",
error.PasswordTooShort => "Password too short, must be at least 12 chars",
//error.PasswordTooShort => "Password too short, must be at least 12 chars",
error.UsernameTaken => blk: {
status = .unprocessable_entity;
@ -190,7 +195,7 @@ const signup = struct {
try res.headers.put("Location", index.path);
var buf: [64]u8 = undefined;
const cookie_name = try std.fmt.bufPrint(&buf, "token.{s}", .{req.body.username});
try res.headers.setCookie(cookie_name, token.token, .{});
try res.headers.setCookie(cookie_name, token.value, .{});
try res.headers.setCookie("active_account", req.body.username, .{ .HttpOnly = false });
try res.status(.see_other);
@ -208,7 +213,6 @@ const global_timeline = struct {
try res.template(.ok, srv, @embedFile("./web/timelines/global.tmpl.html"), .{
.notes = timeline.items,
.community = srv.community,
});
}
};
@ -224,7 +228,7 @@ const user_details = struct {
};
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
const user = try srv.getUser(req.args.id);
const user = try srv.getActor(req.args.id);
defer util.deepFree(srv.allocator, user);
try res.template(.ok, srv, tmpl, user);
@ -337,7 +341,7 @@ const drive = struct {
return res.status(.see_other);
},
.upload => |body| {
const entry = try srv.driveUpload(
const entry_id = try srv.driveUpload(
.{
.filename = body.file.filename,
.dir = trimmed_path,
@ -347,6 +351,7 @@ const drive = struct {
},
body.file.data,
);
const entry = try srv.driveGetEntryById(entry_id);
defer util.deepFree(srv.allocator, entry);
const url = try std.fmt.allocPrint(srv.allocator, "{s}/drive/{s}", .{
@ -370,7 +375,6 @@ const cluster = struct {
pub fn handler(_: anytype, res: anytype, srv: anytype) !void {
const meta = try srv.getClusterMeta();
try res.template(.ok, srv, @embedFile("./web/cluster/overview.tmpl.html"), .{
.community = srv.community,
.meta = meta,
});
}
@ -399,15 +403,17 @@ const cluster = struct {
};
pub fn handler(req: anytype, res: anytype, srv: anytype) !void {
const community = try srv.createCommunity(req.body.origin, req.body.name);
const comm_id = try srv.createCommunity(req.body.origin, req.body.name);
const community = try srv.getCommunity(comm_id);
defer util.deepFree(srv.allocator, community);
const invite = try srv.createInvite(.{
const invite_id = try srv.createInvite(.{
.max_uses = 1,
.kind = .community_owner,
.to_community = community.id,
});
const invite = try srv.getInvite(invite_id);
defer util.deepFree(srv.allocator, invite);
try res.template(.ok, srv, success_tmpl, .{ .community = community, .invite = invite });

View file

@ -11,7 +11,7 @@
<nav>
<h1 class="title"><a href="/about">fediglam</a></h1>
<ul>
{#if %user_id |$_|}
{#if %user |$_|}
<li><a class="button" href="/timelines/global">Timeline</a></li>
{#else}
<li><a class="button" href="/about">Sign in</a></li>

View file

@ -1,5 +1,5 @@
<header>
<h2>{ .community.name }</h2>
<h2>{ %community.name }</h2>
</header>
<nav>
<ul>

View file

@ -10,7 +10,7 @@
<div>
<div>You are about to accept an invite from:</div>
{#template mini-user $invite.creator}
{#if @isTag($invite.kind, community_owner) =}
{#if @isTag($invite.meta.kind, community_owner) =}
<div>This act will make your new account the owner of { %community.name }.</div>
{/if =}
</div>
@ -39,7 +39,7 @@
</div>
</label>
{#if .invite |$invite| =}
<input style="display: none" type="text" name="invite_code" value="{$invite.code}" />
<input style="display: none" type="text" name="invite_code" value="{$invite.meta.code}" />
{/if =}
<button type="submit">Sign up</button>
</form>

View file

@ -84,7 +84,11 @@ pub fn main() !void {
var api_src = try api.ApiSource.init(&pool);
var srv = http.Server.init();
defer srv.deinit();
try srv.listen(std.net.Address.parseIp("::1", 8080) catch unreachable);
const addr = "::1";
const port = 8080;
try srv.listen(std.net.Address.parseIp(addr, port) catch unreachable);
std.log.info("Listening on {s}:{}", .{ addr, port });
var i: usize = 0;
while (i < cfg.worker_threads - 1) : (i += 1) {

View file

@ -505,6 +505,7 @@ fn Tx(comptime tx_level: u8) type {
};
}
pub const BeginOrSavepoint = Tx(tx_level + 1);
pub const beginOrSavepoint = if (tx_level == 0) begin else savepoint;
pub const commitOrRelease = if (tx_level < 2) commit else release;

View file

@ -67,7 +67,14 @@ pub fn parseRfc3339(str: []const u8) !DateTime {
};
}
const is_test = @import("builtin").is_test;
const test_utils = struct {
pub threadlocal var test_now_timestamp: i64 = 1356076800;
};
pub usingnamespace if (is_test) test_utils else struct {};
pub fn now() DateTime {
if (comptime is_test) return .{ .seconds_since_epoch = test_utils.test_now_timestamp };
return .{ .seconds_since_epoch = std.time.timestamp() };
}
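// Hedged sketch of how a unit test could use the plug above: under `zig test`,
// is_test is true, so now() reads the thread-local instead of the wall clock.
test "now() returns the test plug timestamp" {
    test_utils.test_now_timestamp = 42;
    try std.testing.expectEqual(@as(i64, 42), now().seconds_since_epoch);
}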