Basic global timeline
This commit is contained in:
parent
7d13b2546b
commit
83da3c914b
6 changed files with 139 additions and 4 deletions
|
@ -351,5 +351,10 @@ fn ApiConn(comptime DbConn: type) type {
|
||||||
if (!self.isAdmin()) return error.PermissionDenied;
|
if (!self.isAdmin()) return error.PermissionDenied;
|
||||||
return try services.communities.query(self.db, args, self.arena.allocator());
|
return try services.communities.query(self.db, args, self.arena.allocator());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns the default page of the global (public) timeline.
/// The notes are allocated from self.arena, so the caller does not
/// free them explicitly; they live until the arena is torn down.
pub fn globalTimeline(self: *Self) ![]services.notes.Note {
    const page = try services.notes.query(self.db, .{}, self.arena.allocator());
    return page.items;
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
@ -239,12 +239,12 @@ pub fn query(db: anytype, args: QueryArgs, alloc: std.mem.Allocator) !QueryResul
|
||||||
}
|
}
|
||||||
switch (args.direction) {
|
switch (args.direction) {
|
||||||
.ascending => switch (args.page_direction) {
|
.ascending => switch (args.page_direction) {
|
||||||
.forward => try builder.andWhere(" > "),
|
.forward => try builder.appendSlice(" > "),
|
||||||
.backward => try builder.andWhere(" < "),
|
.backward => try builder.appendSlice(" < "),
|
||||||
},
|
},
|
||||||
.descending => switch (args.page_direction) {
|
.descending => switch (args.page_direction) {
|
||||||
.forward => try builder.andWhere(" < "),
|
.forward => try builder.appendSlice(" < "),
|
||||||
.backward => try builder.andWhere(" > "),
|
.backward => try builder.appendSlice(" > "),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
const std = @import("std");
|
const std = @import("std");
|
||||||
const util = @import("util");
|
const util = @import("util");
|
||||||
const sql = @import("sql");
|
const sql = @import("sql");
|
||||||
|
const common = @import("./common.zig");
|
||||||
|
|
||||||
const Uuid = util.Uuid;
|
const Uuid = util.Uuid;
|
||||||
const DateTime = util.DateTime;
|
const DateTime = util.DateTime;
|
||||||
|
@ -57,3 +58,113 @@ pub fn get(db: anytype, id: Uuid, alloc: std.mem.Allocator) GetError!Note {
|
||||||
else => error.DatabaseFailure,
|
else => error.DatabaseFailure,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Hard upper bound on page size; requests for more are clamped in query().
const max_max_items = 100;

/// Parameters controlling a keyset-paginated query over notes.
pub const QueryArgs = struct {
    pub const PageDirection = common.PageDirection;
    // Payload type of the optional `prev` cursor field below.
    // FIX: std.meta.field does not exist; std.meta.fieldInfo is the API
    // that returns a field's StructField descriptor (with .field_type).
    pub const Prev = std.meta.Child(std.meta.fieldInfo(@This(), .prev).field_type);

    // Maximum number of notes to return (clamped to max_max_items).
    max_items: usize = 20,

    // Optional absolute creation-time filters.
    created_before: ?DateTime = null,
    created_after: ?DateTime = null,

    // Keyset-pagination cursor: the boundary row of the previous page.
    prev: ?struct {
        id: Uuid,
        created_at: DateTime,
    } = null,

    // Whether to page forward or backward relative to `prev`.
    page_direction: PageDirection = .forward,
};
|
||||||
|
|
||||||
|
/// One page of notes plus ready-made cursors for the adjacent pages.
pub const QueryResult = struct {
    // Notes on this page; allocated by query() with the caller's allocator.
    items: []Note,

    // QueryArgs that, passed back to query(), fetch the previous/next page.
    prev_page: QueryArgs,
    next_page: QueryArgs,
};
|
||||||
|
|
||||||
|
/// Runs a keyset-paginated query over all notes.
/// Filters by optional created_before/created_after bounds and, when a
/// `prev` cursor is present, pages relative to that (created_at, id) pair.
/// Returns at most min(args.max_items, max_max_items) rows, newest first.
/// The returned items are allocated with `alloc`; caller owns them.
pub fn query(db: anytype, args: QueryArgs, alloc: std.mem.Allocator) !QueryResult {
    var builder = sql.QueryBuilder.init(alloc);
    defer builder.deinit();

    try builder.appendSlice(selectStarFromNote);

    // Placeholders $1..$4 may go unused; .ignore_unused_arguments covers that.
    if (args.created_before != null) try builder.andWhere("note.created_at < $1");
    if (args.created_after != null) try builder.andWhere("note.created_at > $2");
    if (args.prev != null) {
        // Row-value comparison implements the keyset cursor.
        try builder.andWhere("(note.created_at, note.id)");

        switch (args.page_direction) {
            .forward => try builder.appendSlice(" < "),
            .backward => try builder.appendSlice(" > "),
        }
        try builder.appendSlice("($3, $4)");
    }

    try builder.appendSlice(
        \\
        \\ORDER BY note.created_at DESC
        \\LIMIT $5
        \\
    );

    // Clamp the page size to the server-side maximum.
    const max_items = if (args.max_items > max_max_items) max_max_items else args.max_items;

    const query_args = blk: {
        const prev_created_at = if (args.prev) |prev| @as(?DateTime, prev.created_at) else null;
        const prev_id = if (args.prev) |prev| @as(?Uuid, prev.id) else null;

        break :blk .{
            args.created_before,
            args.created_after,
            prev_created_at,
            prev_id,
            max_items,
        };
    };

    var results = try db.queryWithOptions(
        Note,
        try builder.terminate(),
        query_args,
        .{ .prep_allocator = alloc, .ignore_unused_arguments = true },
    );
    defer results.finish();

    // FIX: allocate the *clamped* size, not args.max_items. The LIMIT already
    // uses max_items, so the extra capacity could never be filled, and a
    // caller-controlled args.max_items could force an enormous allocation.
    const result_buf = try alloc.alloc(Note, max_items);
    errdefer alloc.free(result_buf);

    var count: usize = 0;
    // On error, free only the rows actually materialized so far.
    errdefer for (result_buf[0..count]) |c| util.deepFree(alloc, c);

    for (result_buf) |*c| {
        c.* = (try results.row(alloc)) orelse break;

        count += 1;
    }

    // Build cursors from the first/last rows of this page.
    var next_page = args;
    var prev_page = args;
    prev_page.page_direction = .backward;
    next_page.page_direction = .forward;
    if (count != 0) {
        prev_page.prev = .{
            .id = result_buf[0].id,
            .created_at = result_buf[0].created_at,
        };

        next_page.prev = .{
            .id = result_buf[count - 1].id,
            .created_at = result_buf[count - 1].created_at,
        };
    }
    // TODO: this will give incorrect links on an empty page

    return QueryResult{
        .items = result_buf[0..count],
        .next_page = next_page,
        .prev_page = prev_page,
    };
}
|
||||||
|
|
|
@ -13,6 +13,7 @@ pub const invites = @import("./controllers/invites.zig");
|
||||||
pub const users = @import("./controllers/users.zig");
|
pub const users = @import("./controllers/users.zig");
|
||||||
pub const notes = @import("./controllers/notes.zig");
|
pub const notes = @import("./controllers/notes.zig");
|
||||||
pub const streaming = @import("./controllers/streaming.zig");
|
pub const streaming = @import("./controllers/streaming.zig");
|
||||||
|
pub const timelines = @import("./controllers/timelines.zig");
|
||||||
|
|
||||||
pub fn routeRequest(api_source: anytype, req: *http.Request, res: *http.Response, alloc: std.mem.Allocator) void {
|
pub fn routeRequest(api_source: anytype, req: *http.Request, res: *http.Response, alloc: std.mem.Allocator) void {
|
||||||
// TODO: hashmaps?
|
// TODO: hashmaps?
|
||||||
|
@ -42,6 +43,7 @@ const routes = .{
|
||||||
notes.create,
|
notes.create,
|
||||||
notes.get,
|
notes.get,
|
||||||
streaming.streaming,
|
streaming.streaming,
|
||||||
|
timelines.global,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub fn Context(comptime Route: type) type {
|
pub fn Context(comptime Route: type) type {
|
||||||
|
|
10
src/main/controllers/timelines.zig
Normal file
10
src/main/controllers/timelines.zig
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
/// Route: GET /timelines/global — the public firehose of all notes.
pub const global = struct {
    pub const method = .GET;
    pub const path = "/timelines/global";

    /// Responds with a JSON array of notes from the global timeline.
    pub fn handler(_: anytype, res: anytype, srv: anytype) !void {
        const notes = try srv.globalTimeline();
        try res.json(.ok, notes);
    }
};
|
|
@ -64,6 +64,13 @@ pub const QueryBuilder = struct {
|
||||||
/// Returns a read-only view of the query text built so far.
/// The slice aliases the builder's internal buffer and is invalidated
/// by further appends. Not NUL-terminated; see terminate() for that.
pub fn str(self: *const QueryBuilder) []const u8 {
    return self.array.items;
}
|
||||||
|
|
||||||
|
/// Appends a terminating NUL (if one is not already present) and returns
/// the built query as a sentinel-terminated string for C-style consumers.
/// Asserts that at least one byte has been written. Safe to call twice.
pub fn terminate(self: *QueryBuilder) ![:0]const u8 {
    std.debug.assert(self.array.items.len != 0);
    if (self.array.items[self.array.items.len - 1] != 0) try self.array.append(0);

    // FIX: assumeSentinel(self.array.items, 0) kept the NUL inside .len and
    // asserted a sentinel at items[len] — one past the list's live data.
    // Slice so the appended NUL *is* the sentinel and .len covers only the
    // query text; the sentinel slice syntax checks items[len - 1] == 0.
    return self.array.items[0 .. self.array.items.len - 1 :0];
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// TODO: make this suck less
|
// TODO: make this suck less
|
||||||
|
|
Loading…
Reference in a new issue