From 57cd37d1ebe95858f492d87bacbce2399fc06c6b Mon Sep 17 00:00:00 2001
From: Essem
Date: Fri, 11 Mar 2022 20:28:35 -0600
Subject: [PATCH] Improve running the bot in a global context

---
 app.js                    |  4 +++-
 commands/general/info.js  |  4 +++-
 commands/general/stats.js |  4 +++-
 shard.js                  | 13 ++++++++-----
 utils/handler.js          |  4 ++--
 utils/image.js            |  2 +-
 utils/misc.js             |  2 +-
 utils/services/image.js   |  4 ++--
 utils/soundplayer.js      |  2 +-
 9 files changed, 24 insertions(+), 15 deletions(-)

diff --git a/app.js b/app.js
index 32296a6..4e9edae 100644
--- a/app.js
+++ b/app.js
@@ -9,8 +9,10 @@ Please refer to step 3 of the setup guide.`);
 }
 
 // load config from .env file
+import { resolve, dirname } from "path";
+import { fileURLToPath } from "url";
 import { config } from "dotenv";
-config();
+config({ path: resolve(dirname(fileURLToPath(import.meta.url)), ".env") });
 
 // main sharding manager
 import { Fleet } from "eris-fleet";
diff --git a/commands/general/info.js b/commands/general/info.js
index 655f457..dad8830 100644
--- a/commands/general/info.js
+++ b/commands/general/info.js
@@ -1,4 +1,6 @@
 import { readFileSync } from "fs";
+import { dirname } from "path";
+import { fileURLToPath } from "url";
 const { version } = JSON.parse(readFileSync(new URL("../../package.json", import.meta.url)));
 import Command from "../../classes/command.js";
 import { exec as baseExec } from "child_process";
@@ -19,7 +21,7 @@ class InfoCommand extends Command {
         description: `This instance is managed by **${owner.username}#${owner.discriminator}**.`,
         fields: [{
           name: "ℹ️ Version:",
-          value: `v${version}${process.env.NODE_ENV === "development" ? `-dev (${(await exec("git rev-parse HEAD")).stdout.substring(0, 7)})` : ""}`
+          value: `v${version}${process.env.NODE_ENV === "development" ? `-dev (${(await exec("git rev-parse HEAD", { cwd: dirname(fileURLToPath(import.meta.url)) })).stdout.substring(0, 7)})` : ""}`
         },
         {
           name: "📝 Credits:",
diff --git a/commands/general/stats.js b/commands/general/stats.js
index afece29..2b2a15e 100644
--- a/commands/general/stats.js
+++ b/commands/general/stats.js
@@ -1,4 +1,6 @@
 import { readFileSync } from "fs";
+import { dirname } from "path";
+import { fileURLToPath } from "url";
 const { version } = JSON.parse(readFileSync(new URL("../../package.json", import.meta.url)));
 import os from "os";
 import Command from "../../classes/command.js";
@@ -23,7 +25,7 @@ class StatsCommand extends Command {
         "color": 16711680,
         "fields": [{
           "name": "Version",
-          "value": `v${version}${process.env.NODE_ENV === "development" ? `-dev (${(await exec("git rev-parse HEAD")).stdout.substring(0, 7)})` : ""}`
+          "value": `v${version}${process.env.NODE_ENV === "development" ? `-dev (${(await exec("git rev-parse HEAD", { cwd: dirname(fileURLToPath(import.meta.url)) })).stdout.substring(0, 7)})` : ""}`
         },
         {
           "name": "Cluster Memory Usage",
diff --git a/shard.js b/shard.js
index 6ed9514..d33e26c 100644
--- a/shard.js
+++ b/shard.js
@@ -3,6 +3,8 @@ import { BaseClusterWorker } from "eris-fleet";
 // path stuff
 import { readdir } from "fs/promises";
 import { readFileSync } from "fs";
+import { resolve, dirname } from "path";
+import { fileURLToPath } from "url";
 // fancy loggings
 import { log, error } from "./utils/logger.js";
 // initialize command loader
@@ -34,7 +36,7 @@
     // register commands and their info
     const soundStatus = await checkStatus();
     log("info", "Attempting to load commands...");
-    for await (const commandFile of this.getFiles("./commands/")) {
+    for await (const commandFile of this.getFiles(resolve(dirname(fileURLToPath(import.meta.url)), "./commands/"))) {
       log("log", `Loading command from ${commandFile}...`);
       try {
         await load(commandFile, soundStatus);
@@ -48,11 +50,11 @@
 
     // register events
     log("info", "Attempting to load events...");
-    for await (const file of this.getFiles("./events/")) {
+    for await (const file of this.getFiles(resolve(dirname(fileURLToPath(import.meta.url)), "./events/"))) {
       log("log", `Loading event from ${file}...`);
       const eventArray = file.split("/");
       const eventName = eventArray[eventArray.length - 1].split(".")[0];
-      const { default: event } = await import(`./${file}`);
+      const { default: event } = await import(file);
       this.bot.on(eventName, event.bind(null, this.bot, this.clusterID, this.workerID, this.ipc));
     }
     log("info", "Finished loading events.");
@@ -123,10 +125,11 @@
   async* getFiles(dir) {
     const dirents = await readdir(dir, { withFileTypes: true });
     for (const dirent of dirents) {
+      const name = dir + (dir.charAt(dir.length - 1) !== "/" ? "/" : "") + dirent.name;
       if (dirent.isDirectory()) {
-        yield* this.getFiles(dir + dirent.name);
+        yield* this.getFiles(name);
       } else if (dirent.name.endsWith(".js")) {
-        yield dir + (dir.charAt(dir.length - 1) !== "/" ? "/" : "") + dirent.name;
+        yield name;
       }
     }
   }
diff --git a/utils/handler.js b/utils/handler.js
index 84a44a7..006adb1 100644
--- a/utils/handler.js
+++ b/utils/handler.js
@@ -5,7 +5,7 @@ let queryValue = 0;
 
 // load command into memory
 export async function load(command, soundStatus) {
-  const { default: props } = await import(`../${command}?v=${queryValue}`);
+  const { default: props } = await import(`${command}?v=${queryValue}`);
   queryValue++;
   if (props.requires.includes("sound") && soundStatus) return log("warn", `Failed to connect to some Lavalink nodes, skipped loading command ${command}...`);
   const commandArray = command.split("/");
@@ -15,7 +15,7 @@
 
   commands.set(commandName, props);
   info.set(commandName, {
-    category: commandArray[2],
+    category: commandArray[commandArray.length - 2],
     description: props.description,
     aliases: props.aliases,
     params: props.arguments,
diff --git a/utils/image.js b/utils/image.js
index 17a40fa..ef2f6df 100644
--- a/utils/image.js
+++ b/utils/image.js
@@ -8,7 +8,7 @@ export const jobs = {};
 
 export const connections = new Map();
 
-export const servers = JSON.parse(fs.readFileSync("./servers.json", { encoding: "utf8" })).image;
+export const servers = JSON.parse(fs.readFileSync(new URL("../servers.json", import.meta.url), { encoding: "utf8" })).image;
 
 export async function getType(image, extraReturnTypes) {
   if (!image.startsWith("http")) {
diff --git a/utils/misc.js b/utils/misc.js
index 0956650..a98b745 100644
--- a/utils/misc.js
+++ b/utils/misc.js
@@ -24,7 +24,7 @@ export async function clean(text) {
     .replaceAll("@", `@${String.fromCharCode(8203)}`);
 
   const { parsed } = config();
-  const imageServers = JSON.parse(fs.readFileSync("./servers.json", { encoding: "utf8" })).image;
+  const imageServers = JSON.parse(fs.readFileSync(new URL("../servers.json", import.meta.url), { encoding: "utf8" })).image;
 
   if (imageServers && imageServers.length !== 0) {
     for (const { server, auth } of imageServers) {
diff --git a/utils/services/image.js b/utils/services/image.js
index 67437a3..6b79a61 100644
--- a/utils/services/image.js
+++ b/utils/services/image.js
@@ -21,7 +21,7 @@ class ImageWorker extends BaseServiceWorker {
     if (process.env.API === "true") {
       this.jobs = {};
       this.connections = new Map();
-      this.servers = JSON.parse(fs.readFileSync("./servers.json", { encoding: "utf8" })).image;
+      this.servers = JSON.parse(fs.readFileSync(new URL("../../servers.json", import.meta.url), { encoding: "utf8" })).image;
       this.nextID = 0;
     }
 
@@ -42,7 +42,7 @@
   }
 
   async repopulate() {
-    const data = await fs.promises.readFile("./servers.json", { encoding: "utf8" });
+    const data = await fs.promises.readFile(new URL("../../servers.json", import.meta.url), { encoding: "utf8" });
     this.servers = JSON.parse(data).image;
     return;
   }
diff --git a/utils/soundplayer.js b/utils/soundplayer.js
index 1281443..4aad4e2 100644
--- a/utils/soundplayer.js
+++ b/utils/soundplayer.js
@@ -15,7 +15,7 @@ export let status = false;
 export let connected = false;
 
 export async function checkStatus() {
-  const json = await fs.promises.readFile("./servers.json", { encoding: "utf8" });
+  const json = await fs.promises.readFile(new URL("../servers.json", import.meta.url), { encoding: "utf8" });
   nodes = JSON.parse(json).lava;
   const newNodes = [];
   for (const node of nodes) {