refactor(api): ♻️ Use Web Workers instead of spawning the same process once for each thread

Jesse Wierzbinski 2024-06-26 14:44:08 -10:00
parent bc8220c8f9
commit d29603275a
No known key found for this signature in database
7 changed files with 213 additions and 164 deletions
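In short: the start command no longer forks one full `bun index.ts` process per CPU; it now runs the shared setup once and then starts one module Worker per CPU on the same entry file. A minimal sketch of the pattern, condensed from the diff below (the setup path is shortened here for illustration):

import os from "node:os";

// Shared startup work (loggers, database, config checks) is imported once up front
await import("./setup");

// One worker thread per CPU, each executing the same entry file as a module
const entry = "index.ts";
for (let i = 0; i < os.cpus().length; i++) {
    new Worker(entry, { type: "module" });
}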

app.ts (new file)

@@ -0,0 +1,97 @@
import { errorResponse, response } from "@/response";
import { getLogger } from "@logtape/logtape";
import { config } from "config-manager";
import { Hono } from "hono";
import { agentBans } from "./middlewares/agent-bans";
import { bait } from "./middlewares/bait";
import { boundaryCheck } from "./middlewares/boundary-check";
import { ipBans } from "./middlewares/ip-bans";
import { logger } from "./middlewares/logger";
import { handleGlitchRequest } from "./packages/glitch-server/main";
import { routes } from "./routes";
import type { ApiRouteExports } from "./types/api";

export const appFactory = async () => {
    const serverLogger = getLogger("server");

    const app = new Hono({
        strict: false,
    });

    app.use(ipBans);
    app.use(agentBans);
    app.use(bait);
    app.use(logger);
    app.use(boundaryCheck);
    // Disabled as federation now checks for this
    // app.use(urlCheck);

    // Inject own filesystem router
    for (const [, path] of Object.entries(routes)) {
        // use app.get(path, handler) to add routes
        const route: ApiRouteExports = await import(path);

        if (!(route.meta && route.default)) {
            throw new Error(`Route ${path} does not have the correct exports.`);
        }

        route.default(app);
    }

    app.options("*", () => {
        return response(null);
    });

    app.all("*", async (context) => {
        if (config.frontend.glitch.enabled) {
            const glitch = await handleGlitchRequest(context.req.raw);

            if (glitch) {
                return glitch;
            }
        }

        const replacedUrl = new URL(
            new URL(context.req.url).pathname,
            config.frontend.url,
        ).toString();

        serverLogger.debug`Proxying ${replacedUrl}`;

        const proxy = await fetch(replacedUrl, {
            headers: {
                // Include for SSR
                "X-Forwarded-Host": `${config.http.bind}:${config.http.bind_port}`,
                "Accept-Encoding": "identity",
            },
            redirect: "manual",
        }).catch((e) => {
            serverLogger.error`${e}`;
            serverLogger.error`The Frontend is not running or the route is not found: ${replacedUrl}`;
            return null;
        });

        proxy?.headers.set("Cache-Control", "max-age=31536000");

        if (!proxy || proxy.status === 404) {
            return errorResponse(
                "Route not found on proxy or API route. Are you using the correct HTTP method?",
                404,
            );
        }

        // Disable CSP upgrade-insecure-requests if an .onion domain is used
        if (new URL(context.req.url).hostname.endsWith(".onion")) {
            proxy.headers.set(
                "Content-Security-Policy",
                proxy.headers
                    .get("Content-Security-Policy")
                    ?.replace("upgrade-insecure-requests;", "") ?? "",
            );
        }

        return proxy;
    });

    return app;
};
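With the Hono app wrapped in appFactory, each consumer builds its own instance instead of importing a module-level singleton: index.ts hands it to createServer, and the test helper calls .fetch() on it directly. A small usage sketch along those lines (the request URL is only an example):

import { createServer } from "@/server";
import { config } from "config-manager";
import { appFactory } from "~/app";

const app = await appFactory();

// Serve it, as index.ts now does
createServer(config, app);

// ...or exercise a route in-process, as the test helper does
const res = await app.fetch(new Request("http://localhost/some/route"));
console.log(res.status);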


@@ -33,21 +33,18 @@ export default class Start extends BaseCommand<typeof Start> {
     public async run(): Promise<void> {
         const { flags } = await this.parse(Start);
-        const numCpUs = flags["all-threads"] ? os.cpus().length : flags.threads;
+        const numCpus = flags["all-threads"] ? os.cpus().length : flags.threads;
         // Check if index is a JS or TS file (depending on the environment)
         const index = (await Bun.file("index.ts").exists())
             ? "index.ts"
             : "index.js";
-        for (let i = 0; i < numCpUs; i++) {
-            const args = ["bun", index];
-            if (i !== 0 || flags.silent) {
-                args.push("--silent");
-            }
-            Bun.spawn(args, {
-                stdio: ["inherit", "inherit", "inherit"],
-                env: { ...process.env },
+        await import("../../setup");
+
+        for (let i = 0; i < numCpus; i++) {
+            new Worker(index, {
+                type: "module",
             });
         }
     }

index.ts

@@ -1,152 +1,12 @@
-import { checkConfig } from "@/init";
-import { configureLoggers } from "@/loggers";
-import { connectMeili } from "@/meilisearch";
-import { errorResponse, response } from "@/response";
-import { getLogger } from "@logtape/logtape";
+import { createServer } from "@/server";
 import { config } from "config-manager";
-import { Hono } from "hono";
-import { setupDatabase } from "~/drizzle/db";
-import { agentBans } from "~/middlewares/agent-bans";
-import { bait } from "~/middlewares/bait";
-import { boundaryCheck } from "~/middlewares/boundary-check";
-import { ipBans } from "~/middlewares/ip-bans";
-import { logger } from "~/middlewares/logger";
-import { Note } from "~/packages/database-interface/note";
-import { handleGlitchRequest } from "~/packages/glitch-server/main";
-import { routes } from "~/routes";
-import { createServer } from "~/server";
-import type { ApiRouteExports } from "~/types/api";
+import { appFactory } from "~/app";
+import { setupDatabase } from "./drizzle/db";
-const timeAtStart = performance.now();
-const isEntry =
-    import.meta.path === Bun.main && !process.argv.includes("--silent");
-await configureLoggers(isEntry);
-const serverLogger = getLogger("server");
-serverLogger.info`Starting Lysand...`;
+if (import.meta.main) {
+    await import("./setup");
+}
 await setupDatabase();
+createServer(config, await appFactory());
-if (config.meilisearch.enabled) {
-    await connectMeili();
-}
-process.on("SIGINT", () => {
-    process.exit();
-});
-// Check if database is reachable
-const postCount = await Note.getCount();
-if (isEntry) {
-    await checkConfig(config);
-}
-const app = new Hono({
-    strict: false,
-});
-app.use(ipBans);
-app.use(agentBans);
-app.use(bait);
-app.use(logger);
-app.use(boundaryCheck);
-// Disabled as federation now checks for this
-// app.use(urlCheck);
-// Inject own filesystem router
-for (const [, path] of Object.entries(routes)) {
-    // use app.get(path, handler) to add routes
-    const route: ApiRouteExports = await import(path);
-    if (!(route.meta && route.default)) {
-        throw new Error(`Route ${path} does not have the correct exports.`);
-    }
-    route.default(app);
-}
-app.options("*", () => {
-    return response(null);
-});
-app.all("*", async (context) => {
-    if (config.frontend.glitch.enabled) {
-        const glitch = await handleGlitchRequest(context.req.raw);
-        if (glitch) {
-            return glitch;
-        }
-    }
-    const replacedUrl = new URL(
-        new URL(context.req.url).pathname,
-        config.frontend.url,
-    ).toString();
-    serverLogger.debug`Proxying ${replacedUrl}`;
-    const proxy = await fetch(replacedUrl, {
-        headers: {
-            // Include for SSR
-            "X-Forwarded-Host": `${config.http.bind}:${config.http.bind_port}`,
-            "Accept-Encoding": "identity",
-        },
-        redirect: "manual",
-    }).catch((e) => {
-        serverLogger.error`${e}`;
-        serverLogger.error`The Frontend is not running or the route is not found: ${replacedUrl}`;
-        return null;
-    });
-    proxy?.headers.set("Cache-Control", "max-age=31536000");
-    if (!proxy || proxy.status === 404) {
-        return errorResponse(
-            "Route not found on proxy or API route. Are you using the correct HTTP method?",
-            404,
-        );
-    }
-    // Disable CSP upgrade-insecure-requests if an .onion domain is used
-    if (new URL(context.req.url).hostname.endsWith(".onion")) {
-        proxy.headers.set(
-            "Content-Security-Policy",
-            proxy.headers
-                .get("Content-Security-Policy")
-                ?.replace("upgrade-insecure-requests;", "") ?? "",
-        );
-    }
-    return proxy;
-});
-createServer(config, app);
-serverLogger.info`Lysand started at ${config.http.bind}:${config.http.bind_port} in ${(performance.now() - timeAtStart).toFixed(0)}ms`;
-serverLogger.info`Database is online, now serving ${postCount} posts`;
-if (config.frontend.enabled) {
-    if (!URL.canParse(config.frontend.url)) {
-        serverLogger.error`Frontend URL is not a valid URL: ${config.frontend.url}`;
-        // Hang until Ctrl+C is pressed
-        await Bun.sleep(Number.POSITIVE_INFINITY);
-    }
-    // Check if frontend is reachable
-    const response = await fetch(new URL("/", config.frontend.url))
-        .then((res) => res.ok)
-        .catch(() => false);
-    if (!response) {
-        serverLogger.error`Frontend is unreachable at ${config.frontend.url}`;
-        serverLogger.error`Please ensure the frontend is online and reachable`;
-    }
-} else {
-    serverLogger.warn`Frontend is disabled, skipping check`;
-}
-export { app };

setup.ts (new file)

@@ -0,0 +1,54 @@
import { checkConfig } from "@/init";
import { configureLoggers } from "@/loggers";
import { connectMeili } from "@/meilisearch";
import { getLogger } from "@logtape/logtape";
import { config } from "config-manager";
import { setupDatabase } from "~/drizzle/db";
import { Note } from "~/packages/database-interface/note";

const timeAtStart = performance.now();

await configureLoggers();

const serverLogger = getLogger("server");

serverLogger.info`Starting Lysand...`;

await setupDatabase();

if (config.meilisearch.enabled) {
    await connectMeili();
}

process.on("SIGINT", () => {
    process.exit();
});

// Check if database is reachable
const postCount = await Note.getCount();

await checkConfig(config);

serverLogger.info`Lysand started at ${config.http.bind}:${config.http.bind_port} in ${(performance.now() - timeAtStart).toFixed(0)}ms`;
serverLogger.info`Database is online, now serving ${postCount} posts`;

if (config.frontend.enabled) {
    if (!URL.canParse(config.frontend.url)) {
        serverLogger.error`Frontend URL is not a valid URL: ${config.frontend.url}`;
        // Hang until Ctrl+C is pressed
        await Bun.sleep(Number.POSITIVE_INFINITY);
    }

    // Check if frontend is reachable
    const response = await fetch(new URL("/", config.frontend.url))
        .then((res) => res.ok)
        .catch(() => false);

    if (!response) {
        serverLogger.error`Frontend is unreachable at ${config.frontend.url}`;
        serverLogger.error`Please ensure the frontend is online and reachable`;
    }
} else {
    serverLogger.warn`Frontend is disabled, skipping check`;
}
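setup.ts has no exports: all of its work happens at module top level, so a single `await import(...)` (as in index.ts and the start command) performs the logger, database, Meilisearch, and config initialisation, and any later import of the same module is served from the module cache. A tiny sketch of that behaviour with hypothetical file names:

// startup-work.ts (hypothetical stand-in for setup.ts)
console.log("running one-time startup work");

// caller.ts
await import("./startup-work"); // runs the module body, logs once
await import("./startup-work"); // already cached: nothing is re-run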


@@ -2,11 +2,11 @@ import { generateChallenge } from "@/challenges";
 import { randomString } from "@/math";
 import { solveChallenge } from "altcha-lib";
 import { asc, inArray, like } from "drizzle-orm";
+import { appFactory } from "~/app";
 import type { Status } from "~/database/entities/status";
 import { db } from "~/drizzle/db";
 import { setupDatabase } from "~/drizzle/db";
 import { Notes, Tokens, Users } from "~/drizzle/schema";
-import { app } from "~/index";
 import { Note } from "~/packages/database-interface/note";
 import { User } from "~/packages/database-interface/user";

@@ -17,9 +17,9 @@ await setupDatabase();
  * @param req Request to send
  * @returns Response from the server
  */
-export function sendTestRequest(req: Request): Promise<Response> {
+export async function sendTestRequest(req: Request): Promise<Response> {
     // return fetch(req);
-    return Promise.resolve(app.fetch(req));
+    return Promise.resolve((await appFactory()).fetch(req));
 }

 export function wrapRelativeUrl(url: string, baseUrl: string) {


@@ -10,6 +10,8 @@ import {
 import {
     type LogLevel,
     type LogRecord,
+    type RotatingFileSinkOptions,
+    type Sink,
     configure,
     getConsoleSink,
     getLevelFilter,
@@ -21,9 +23,48 @@ import { config } from "~/packages/config-manager";
 // HACK: This is a workaround for the lack of type exports in the Logtape package.
 type RotatingFileSinkDriver<T> =
     import("../node_modules/@logtape/logtape/logtape/sink").RotatingFileSinkDriver<T>;
-const getBaseRotatingFileSink = (
-    await import("../node_modules/@logtape/logtape/logtape/sink")
-).getRotatingFileSink;
+
+// HACK: Stolen
+export function getBaseRotatingFileSink<TFile>(
+    path: string,
+    options: RotatingFileSinkOptions & RotatingFileSinkDriver<TFile>,
+): Sink & Disposable {
+    const formatter = options.formatter ?? defaultTextFormatter;
+    const encoder = options.encoder ?? new TextEncoder();
+    const maxSize = options.maxSize ?? 1024 * 1024;
+    const maxFiles = options.maxFiles ?? 5;
+    let { size: offset } = options.statSync(path);
+    let fd = options.openSync(path);
+
+    function shouldRollover(bytes: Uint8Array): boolean {
+        return offset + bytes.length > maxSize;
+    }
+
+    function performRollover(): void {
+        options.closeSync(fd);
+        for (let i = maxFiles - 1; i > 0; i--) {
+            const oldPath = `${path}.${i}`;
+            const newPath = `${path}.${i + 1}`;
+            try {
+                options.renameSync(oldPath, newPath);
+            } catch (_) {
+                // Continue if the file does not exist.
+            }
+        }
+        options.renameSync(path, `${path}.1`);
+        offset = 0;
+        fd = options.openSync(path);
+    }
+
+    const sink: Sink & Disposable = (record: LogRecord) => {
+        const bytes = encoder.encode(formatter(record));
+        if (shouldRollover(bytes)) {
+            performRollover();
+        }
+        options.writeSync(fd, bytes);
+        options.flushSync(fd);
+        offset += bytes.length;
+    };
+    sink[Symbol.dispose] = () => options.closeSync(fd);
+    return sink;
+}

 const levelAbbreviations: Record<LogLevel, string> = {
     debug: "DBG",
@@ -149,8 +190,8 @@ export const configureLoggers = (silent = false) =>
        },
        filters: {
            configFilter: silent
-                ? getLevelFilter(config.logging.log_level)
-                : getLevelFilter(null),
+                ? getLevelFilter(null)
+                : getLevelFilter(config.logging.log_level),
        },
        loggers: [
            {
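The reimplemented getBaseRotatingFileSink only talks to an abstract RotatingFileSinkDriver, so callers have to supply the statSync/openSync/writeSync/flushSync/closeSync/renameSync functions it calls. A hedged sketch of what a node:fs-backed driver could look like; the import path and the driver shape here are assumptions for illustration, not part of this commit:

import fs from "node:fs";
import { getBaseRotatingFileSink } from "./loggers"; // assumed path to the module shown above

// Assumed node:fs-backed driver covering exactly the calls the sink makes;
// the log file is expected to exist already, since statSync runs before openSync.
const nodeDriver = {
    statSync: (path: string) => fs.statSync(path),
    openSync: (path: string) => fs.openSync(path, "a"), // append mode keeps the stat offset valid
    writeSync: (fd: number, chunk: Uint8Array) => fs.writeSync(fd, chunk),
    flushSync: (fd: number) => fs.fsyncSync(fd),
    closeSync: (fd: number) => fs.closeSync(fd),
    renameSync: (from: string, to: string) => fs.renameSync(from, to),
};

const sink = getBaseRotatingFileSink("lysand.log", {
    maxSize: 1024 * 1024, // rotate after roughly 1 MiB
    maxFiles: 5, // keep lysand.log.1 through lysand.log.5
    ...nodeDriver,
});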