Sadly, the presence API is worse than I hoped

Cadence Ember 2025-02-11 01:37:23 +13:00
parent 15826dcb3f
commit 381861ee8e
7 changed files with 69 additions and 19 deletions

package-lock.json generated

@@ -30,7 +30,7 @@
"get-relative-path": "^1.0.2",
"get-stream": "^6.0.1",
"h3": "^1.12.0",
"heatsync": "^2.5.5",
"heatsync": "^2.6.0",
"lru-cache": "^10.4.3",
"minimist": "^1.2.8",
"node-fetch": "^2.6.7",
@@ -1193,7 +1193,8 @@
"node_modules/backtracker": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/backtracker/-/backtracker-4.0.0.tgz",
"integrity": "sha512-XG2ldN+WDRq9niJMnoZDjLLUnhDOQGhFZc6qZQotN59xj8oOa4KXSCu6YyZQawPqi6gG3HilGFt91zT6Hbdh1w=="
"integrity": "sha512-XG2ldN+WDRq9niJMnoZDjLLUnhDOQGhFZc6qZQotN59xj8oOa4KXSCu6YyZQawPqi6gG3HilGFt91zT6Hbdh1w==",
"license": "MIT"
},
"node_modules/balanced-match": {
"version": "1.0.2",
@@ -1938,9 +1939,10 @@
}
},
"node_modules/heatsync": {
"version": "2.5.5",
"resolved": "https://registry.npmjs.org/heatsync/-/heatsync-2.5.5.tgz",
"integrity": "sha512-Sy2/X2a69W2W1xgp7GBY81naHtWXxwV8N6uzPTJLQXgq4oTMJeL6F/AUlGS+fUa/Pt5ioxzi7gvd8THMJ3GpyA==",
"version": "2.6.0",
"resolved": "https://registry.npmjs.org/heatsync/-/heatsync-2.6.0.tgz",
"integrity": "sha512-UfemOt4Kg1hvhDj/Zz8sYa1pF73ul+tF19MYNisYoOymXoTo4iCZv2BDdCMFE1xvZ6YFjcMoekb/aeBU1uqFjQ==",
"license": "MIT",
"dependencies": {
"backtracker": "^4.0.0"
}


@@ -39,7 +39,7 @@
"get-relative-path": "^1.0.2",
"get-stream": "^6.0.1",
"h3": "^1.12.0",
"heatsync": "^2.5.5",
"heatsync": "^2.6.0",
"lru-cache": "^10.4.3",
"minimist": "^1.2.8",
"node-fetch": "^2.6.7",


@@ -32,7 +32,7 @@ async function createSpace(guild, kstate) {
assert(name)
const memberCount = guild["member_count"] ?? guild.approximate_member_count ?? 0
const enablePresenceByDefault = +(memberCount < 150) // could increase this later on if it doesn't cause any problems
const enablePresenceByDefault = +(memberCount < 50) // scary! all active users in a presence-enabled guild will be pinging the server every <30 seconds to stay online
const globalAdmins = select("member_power", "mxid", {room_id: "*"}).pluck().all()
const roomID = await createRoom.postApplyPowerLevels(kstate, async kstate => {
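For readers skimming the threshold change: the unary plus just turns the boolean comparison into the 0/1 flag stored as the guild's default presence setting. A trivial illustration (the member counts are made up; the 50-member cutoff is the one from the diff above):

// Illustrative only: unary plus converts the comparison result to 0 or 1.
const smallGuildDefault = +(40 < 50)   // 1 -> presence bridged by default
const largeGuildDefault = +(400 < 50)  // 0 -> presence stays off unless enabled in OOYE settings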


@@ -5,9 +5,28 @@ const {sync, select} = passthrough
/** @type {import("../../matrix/api")} */
const api = sync.require("../../matrix/api")
// Adding a debounce to all updates because events are issued multiple times, once for each guild.
// Sometimes a status update is even issued twice in a row for the same user+guild, weird!
/*
We do this in two phases for optimisation reasons.
Discord sends us an event when the presence *changes.*
We need to keep the event data in memory because we need to *repeatedly* send it to Matrix using a long-lived loop.
There are two phases to get it from Discord to Matrix.
The first phase stores Discord presence data in memory.
The second phase loops over the memory and sends it on to Matrix.
In the first phase, for optimisation reasons, we want to do as little work as possible if the presence doesn't actually need to be sent all the way through.
* Presence can be deactivated per-guild in OOYE settings. If it's deactivated for all of a user's guilds, we shouldn't send them to the second phase.
* Presence can be sent for users without sims. In this case, we shouldn't send them to the second phase.
* Presence can be sent multiple times in a row for the same user for each guild we share. We want to batch these up so we only query the mxid and enter the second phase once per user.
*/
// ***** first phase *****
// Delay before querying user details and putting them in memory.
const presenceDelay = 1500
/** @type {Map<string, NodeJS.Timeout>} user ID -> cancelable timeout */
const presenceDelayMap = new Map()
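To make the first phase concrete, here is a minimal sketch of how the debounce map above is likely used. The exact bookkeeping is an assumption based only on the names visible in this diff (presenceDelay, presenceDelayMap, setPresenceCallback), and the per-guild enabled check is omitted for brevity:

// Sketch, not the actual implementation: reset this user's debounce timer on
// every packet, so several PRESENCE_UPDATEs for the same user (one per shared
// guild) collapse into a single second-phase call ~1.5 seconds after the last one.
function setPresenceSketch(userID, guildID, status) {
    const existing = presenceDelayMap.get(userID)
    if (existing) clearTimeout(existing) // assumption: restart the debounce window
    presenceDelayMap.set(userID, setTimeout(() => {
        presenceDelayMap.delete(userID)
        setPresenceCallback(userID, status) // second phase entry point, per this diff
    }, presenceDelay))
}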
@@ -20,8 +39,9 @@ function checkPresenceEnabledGuilds() {
checkPresenceEnabledGuilds()
/**
* This function is called for each Discord presence packet.
* @param {string} userID Discord user ID
* @param {string} guildID Discord guild ID that this presence applies to (really, the same presence applies to every single guild, but is delivered separately)
* @param {string} guildID Discord guild ID that this presence applies to (really, the same presence applies to every single guild, but is delivered separately by Discord for some reason)
* @param {string} status status field from Discord's PRESENCE_UPDATE event
*/
function setPresence(userID, guildID, status) {
@@ -47,11 +67,38 @@ function setPresenceCallback(user_id, status) {
( status === "online" ? "online"
: status === "offline" ? "offline"
: "unavailable") // idle, dnd, and anything else they dream up in the future
api.setPresence(presence, mxid).catch(e => {
console.error("d->m: Skipping presence update failure:")
console.error(e)
})
if (presence === "offline") {
userPresence.delete(mxid) // stop syncing next cycle
} else {
const delay = userPresence.get(mxid)?.delay || presenceLoopInterval * Math.random() // distribute the updates across the presence loop
userPresence.set(mxid, {data: {presence}, delay}) // will be synced next cycle
}
}
// ***** second phase *****
// Synapse expires each user's presence after 30 seconds and makes them offline, so we have to loop every 28 seconds and update each user again.
const presenceLoopInterval = 28e3
/** @type {Map<string, {data: {presence: "online" | "offline" | "unavailable", status_msg?: string}, delay: number}>} mxid -> presence data to send to api */
const userPresence = new Map()
sync.addTemporaryInterval(() => {
for (const [mxid, memory] of userPresence.entries()) {
// I haven't tried, but assuming Synapse explodes if you try to update too many presences at the same time,
// I'll space them out over the whole 28 second cycle.
setTimeout(() => {
const d = new Date().toISOString().slice(0, 19)
api.setPresence(memory.data, mxid).catch(e => {
console.error("d->m: Skipping presence update failure:")
console.error(e)
})
}, memory.delay)
}
}, presenceLoopInterval)
module.exports.setPresence = setPresence
module.exports.checkPresenceEnabledGuilds = checkPresenceEnabledGuilds
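As a worked illustration of the second phase, this is roughly what one user's entry looks like across a cycle. The mxid is made up, and the numbers follow from the constants in this diff (28e3 ms loop, random per-user delay). Note that the offline path above simply deletes the entry, so Synapse's ~30-second expiry is what ends up marking the user offline.

// Hypothetical entry written by the first phase (the mxid is illustrative):
userPresence.set("@_ooye_example:example.org", {
    data: {presence: "online"},
    delay: presenceLoopInterval * Math.random() // e.g. lands ~11s into each cycle
})
// Every 28 seconds the interval re-sends each entry after its own delay, so the
// PUT requests are spread across the cycle instead of bursting all at once, and
// each user is refreshed before Synapse's ~30-second presence timeout fires.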


@@ -409,11 +409,11 @@ async function setAccountData(type, content, mxid) {
}
/**
* @param {"online" | "offline" | "unavailable"} presence
* @param {{presence: "online" | "offline" | "unavailable", status_msg?: string}} data
* @param {string} mxid
*/
async function setPresence(presence, mxid) {
await mreq.mreq("PUT", path(`/client/v3/presence/${mxid}/status`, mxid), {presence})
async function setPresence(data, mxid) {
await mreq.mreq("PUT", path(`/client/v3/presence/${mxid}/status`, mxid), data)
}
module.exports.path = path
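The widened setPresence signature means callers can now pass the whole presence body through to the homeserver. Under the standard Matrix client-server API this endpoint also accepts an optional status_msg next to presence, so a call would look roughly like the following (the status text and mxid are made up):

// Old shape: api.setPresence("unavailable", mxid)
// New shape: the object is forwarded as the request body unchanged.
await api.setPresence({presence: "unavailable", status_msg: "Playing a game"}, "@_ooye_example:example.org")
// -> PUT /_matrix/client/v3/presence/@_ooye_example:example.org/status
//    {"presence": "unavailable", "status_msg": "Playing a game"}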


@@ -4,7 +4,7 @@
* @typedef {Object} Passthrough
* @property {import("repl").REPLServer} repl
* @property {import("./d2m/discord-client")} discord
* @property {import("heatsync").default} sync
* @property {import("heatsync")} sync
* @property {import("better-sqlite3/lib/database")} db
* @property {import("@cloudrac3r/in-your-element").AppService} as
* @property {import("./db/orm").from} from


@@ -1,6 +1,7 @@
#!/usr/bin/env node
// @ts-check
const fs = require("fs")
const sqlite = require("better-sqlite3")
const migrate = require("./src/db/migrate")
const HeatSync = require("heatsync")
@@ -9,7 +10,7 @@ const {reg} = require("./src/matrix/read-registration")
const passthrough = require("./src/passthrough")
const db = new sqlite("ooye.db")
const sync = new HeatSync()
const sync = new HeatSync({watchFunction: fs.watchFile})
Object.assign(passthrough, {sync, db})
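
For context on the watchFunction change: fs.watchFile is Node's polling, stat-based watcher, unlike the event-based fs.watch, so handing it to heatsync presumably trades a little polling overhead for more predictable change detection. A minimal sketch of what the swapped-in watcher does on its own; the file path and poll interval here are made up:

const fs = require("fs")
// Polls the file's stats; the listener receives the current and previous fs.Stats.
fs.watchFile("./some-module.js", {interval: 2000}, (curr, prev) => {
    if (curr.mtimeMs !== prev.mtimeMs) {
        // a hot-reloader like heatsync would re-require the module here
    }
})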