Replace eslint and prettier with Biome

This commit is contained in:
Jesse Wierzbinski 2024-04-06 19:30:49 -10:00
parent 4a5a2ea590
commit af0d627f19
No known key found for this signature in database
199 changed files with 16493 additions and 16361 deletions

View file

@ -3,20 +3,20 @@ import type { RouteHandler } from "~server/api/routes.type";
import type { APIRouteMeta } from "~types/api";
/**
 * Applies instance-wide ratelimit configuration to a route's metadata.
 * @param routeMeta Route metadata to adjust (mutated in place and returned)
 * @returns The same metadata object with ratelimits scaled or overridden
 */
export const applyConfig = (routeMeta: APIRouteMeta) => {
    // NOTE: newMeta aliases the argument — the input object is mutated
    const newMeta = routeMeta;

    // Apply ratelimits from config
    newMeta.ratelimits.duration *= config.ratelimits.duration_coeff;
    newMeta.ratelimits.max *= config.ratelimits.max_coeff;

    // A per-route override in config replaces the scaled values entirely
    if (config.custom_ratelimits[routeMeta.route]) {
        newMeta.ratelimits = config.custom_ratelimits[routeMeta.route];
    }

    return newMeta;
};
/**
 * Identity wrapper that gives a route handler an explicit RouteHandler type.
 * @param routeFunction The route handler
 * @returns The handler, unchanged
 */
export const apiRoute = <T>(routeFunction: RouteHandler<T>) => {
    return routeFunction;
};

View file

@ -1,4 +1,4 @@
import { config } from "config-manager";
/**
 * Builds the OAuth callback URL for a given identity provider.
 * @param issuer Issuer identifier, interpolated into the callback path
 * @returns Absolute redirect URI under this instance's base URL
 */
export const oauthRedirectUri = (issuer: string) =>
    `${config.http.base_url}/oauth/callback/${issuer}`;

View file

@ -1,19 +1,21 @@
import type { ContentFormat } from "~types/lysand/Object";
/**
 * Picks the best ContentFormat from a list, by content-type preference.
 * Preference order: MFM, HTML, Markdown, plain text; otherwise the first
 * entry. Returns null when the list is empty.
 * @param contents Candidate content formats
 * @returns The preferred entry, or null
 */
export const getBestContentType = (contents: ContentFormat[]) => {
    // Most-preferred content types first
    const preferenceOrder = [
        "text/x.misskeymarkdown",
        "text/html",
        "text/markdown",
        "text/plain",
    ];

    // Single scan per candidate type instead of a repeated find() per branch
    for (const type of preferenceOrder) {
        const match = contents.find((c) => c.content_type === type);
        if (match) {
            return match;
        }
    }

    // Fall back to whatever came first, or null for an empty list
    return contents[0] || null;
};

View file

@ -10,20 +10,20 @@ import { parse } from "marked";
* @returns HTML
*/
export const convertTextToHtml = async (
text: string,
content_type?: string
text: string,
content_type?: string,
) => {
if (content_type === "text/markdown") {
return linkifyHtml(await sanitizeHtml(await parse(text)));
} else if (content_type === "text/x.misskeymarkdown") {
// Parse as MFM
// TODO: Implement MFM
return text;
} else {
// Parse as plaintext
return linkifyStr(text)
.split("\n")
.map(line => `<p>${line}</p>`)
.join("\n");
}
if (content_type === "text/markdown") {
return linkifyHtml(await sanitizeHtml(await parse(text)));
}
if (content_type === "text/x.misskeymarkdown") {
// Parse as MFM
// TODO: Implement MFM
return text;
}
// Parse as plaintext
return linkifyStr(text)
.split("\n")
.map((line) => `<p>${line}</p>`)
.join("\n");
};

View file

@ -1,174 +1,174 @@
import chalk from "chalk";
import { client } from "~database/datasource";
import { Meilisearch } from "meilisearch";
import type { Status, User } from "@prisma/client";
import chalk from "chalk";
import { config } from "config-manager";
import { LogLevel, type LogManager, type MultiLogManager } from "log-manager";
import { Meilisearch } from "meilisearch";
import { client } from "~database/datasource";
/** Shared Meilisearch client, configured from the instance config. */
export const meilisearch = new Meilisearch({
    host: `${config.meilisearch.host}:${config.meilisearch.port}`,
    apiKey: config.meilisearch.api_key,
});
/**
 * Connects to Meilisearch and configures index attributes.
 * No-op when search is disabled in config; exits the process when the
 * server is unreachable.
 * @param logger Logger used for success/failure messages
 */
export const connectMeili = async (logger: MultiLogManager | LogManager) => {
    if (!config.meilisearch.enabled) return;

    if (await meilisearch.isHealthy()) {
        await meilisearch
            .index(MeiliIndexType.Accounts)
            .updateSortableAttributes(["createdAt"]);

        await meilisearch
            .index(MeiliIndexType.Accounts)
            .updateSearchableAttributes(["username", "displayName", "note"]);

        await meilisearch
            .index(MeiliIndexType.Statuses)
            .updateSortableAttributes(["createdAt"]);

        await meilisearch
            .index(MeiliIndexType.Statuses)
            .updateSearchableAttributes(["content"]);

        await logger.log(
            LogLevel.INFO,
            "Meilisearch",
            "Connected to Meilisearch",
        );
    } else {
        await logger.log(
            LogLevel.CRITICAL,
            "Meilisearch",
            "Error while connecting to Meilisearch",
        );
        // Search is considered essential once enabled — bail out hard
        process.exit(1);
    }
};
/** Names of the Meilisearch indexes used by this application. */
export enum MeiliIndexType {
    Accounts = "accounts",
    Statuses = "statuses",
}
/**
 * Indexes a status in Meilisearch. No-op when search is disabled.
 * NOTE(review): the name has a typo ("Staus") but is kept unchanged for
 * caller compatibility.
 * @param status Status row to index
 */
export const addStausToMeilisearch = async (status: Status) => {
    if (!config.meilisearch.enabled) return;

    await meilisearch.index(MeiliIndexType.Statuses).addDocuments([
        {
            id: status.id,
            content: status.content,
            createdAt: status.createdAt,
        },
    ]);
};
/**
 * Indexes a user account in Meilisearch. No-op when search is disabled.
 * @param user User row to index
 */
export const addUserToMeilisearch = async (user: User) => {
    if (!config.meilisearch.enabled) return;

    await meilisearch.index(MeiliIndexType.Accounts).addDocuments([
        {
            id: user.id,
            username: user.username,
            displayName: user.displayName,
            note: user.note,
            createdAt: user.createdAt,
        },
    ]);
};
export const getNthDatabaseAccountBatch = (
n: number,
batchSize = 1000
n: number,
batchSize = 1000,
): Promise<Record<string, string | Date>[]> => {
return client.user.findMany({
skip: n * batchSize,
take: batchSize,
select: {
id: true,
username: true,
displayName: true,
note: true,
createdAt: true,
},
orderBy: {
createdAt: "asc",
},
});
return client.user.findMany({
skip: n * batchSize,
take: batchSize,
select: {
id: true,
username: true,
displayName: true,
note: true,
createdAt: true,
},
orderBy: {
createdAt: "asc",
},
});
};
export const getNthDatabaseStatusBatch = (
n: number,
batchSize = 1000
n: number,
batchSize = 1000,
): Promise<Record<string, string | Date>[]> => {
return client.status.findMany({
skip: n * batchSize,
take: batchSize,
select: {
id: true,
content: true,
createdAt: true,
},
orderBy: {
createdAt: "asc",
},
});
return client.status.findMany({
skip: n * batchSize,
take: batchSize,
select: {
id: true,
content: true,
createdAt: true,
},
orderBy: {
createdAt: "asc",
},
});
};
export const rebuildSearchIndexes = async (
indexes: MeiliIndexType[],
batchSize = 100
indexes: MeiliIndexType[],
batchSize = 100,
) => {
if (indexes.includes(MeiliIndexType.Accounts)) {
const accountCount = await client.user.count();
if (indexes.includes(MeiliIndexType.Accounts)) {
const accountCount = await client.user.count();
for (let i = 0; i < accountCount / batchSize; i++) {
const accounts = await getNthDatabaseAccountBatch(i, batchSize);
for (let i = 0; i < accountCount / batchSize; i++) {
const accounts = await getNthDatabaseAccountBatch(i, batchSize);
const progress = Math.round((i / (accountCount / batchSize)) * 100);
const progress = Math.round((i / (accountCount / batchSize)) * 100);
console.log(`${chalk.green(``)} ${progress}%`);
console.log(`${chalk.green("✓")} ${progress}%`);
// Sync with Meilisearch
await meilisearch
.index(MeiliIndexType.Accounts)
.addDocuments(accounts);
}
// Sync with Meilisearch
await meilisearch
.index(MeiliIndexType.Accounts)
.addDocuments(accounts);
}
const meiliAccountCount = (
await meilisearch.index(MeiliIndexType.Accounts).getStats()
).numberOfDocuments;
const meiliAccountCount = (
await meilisearch.index(MeiliIndexType.Accounts).getStats()
).numberOfDocuments;
console.log(
`${chalk.green(``)} ${chalk.bold(
`Done! ${meiliAccountCount} accounts indexed`
)}`
);
}
console.log(
`${chalk.green("✓")} ${chalk.bold(
`Done! ${meiliAccountCount} accounts indexed`,
)}`,
);
}
if (indexes.includes(MeiliIndexType.Statuses)) {
const statusCount = await client.status.count();
if (indexes.includes(MeiliIndexType.Statuses)) {
const statusCount = await client.status.count();
for (let i = 0; i < statusCount / batchSize; i++) {
const statuses = await getNthDatabaseStatusBatch(i, batchSize);
for (let i = 0; i < statusCount / batchSize; i++) {
const statuses = await getNthDatabaseStatusBatch(i, batchSize);
const progress = Math.round((i / (statusCount / batchSize)) * 100);
const progress = Math.round((i / (statusCount / batchSize)) * 100);
console.log(`${chalk.green(``)} ${progress}%`);
console.log(`${chalk.green("✓")} ${progress}%`);
// Sync with Meilisearch
await meilisearch
.index(MeiliIndexType.Statuses)
.addDocuments(statuses);
}
// Sync with Meilisearch
await meilisearch
.index(MeiliIndexType.Statuses)
.addDocuments(statuses);
}
const meiliStatusCount = (
await meilisearch.index(MeiliIndexType.Statuses).getStats()
).numberOfDocuments;
const meiliStatusCount = (
await meilisearch.index(MeiliIndexType.Statuses).getStats()
).numberOfDocuments;
console.log(
`${chalk.green(``)} ${chalk.bold(
`Done! ${meiliStatusCount} statuses indexed`
)}`
);
}
console.log(
`${chalk.green("✓")} ${chalk.bold(
`Done! ${meiliStatusCount} statuses indexed`,
)}`,
);
}
};

View file

@ -1,16 +1,17 @@
export const deepMerge = (
target: Record<string, any>,
source: Record<string, any>
target: Record<string, unknown>,
source: Record<string, unknown>,
) => {
const result = { ...target, ...source };
for (const key of Object.keys(result)) {
result[key] =
typeof target[key] == "object" && typeof source[key] == "object"
? deepMerge(target[key], source[key])
: structuredClone(result[key]);
}
return result;
const result = { ...target, ...source };
for (const key of Object.keys(result)) {
result[key] =
typeof target[key] === "object" && typeof source[key] === "object"
? // @ts-expect-error deepMerge is recursive
deepMerge(target[key], source[key])
: structuredClone(result[key]);
}
return result;
};
/**
 * Left-folds deepMerge over an array of objects; later entries win.
 * @param array Objects to merge, in increasing priority
 * @returns The merged result (empty object for an empty array)
 */
export const deepMergeArray = (array: Record<string, unknown>[]) =>
    array.reduce((ci, ni) => deepMerge(ci, ni), {});

View file

@ -1,4 +1,4 @@
import { fileURLToPath } from "url";
import { fileURLToPath } from "node:url";
/**
* Determines whether a module is the entry point for the running node process.
@ -19,13 +19,13 @@ import { fileURLToPath } from "url";
* ```
*/
/**
 * Determines whether a module is the entry point of the running process.
 * Accepts either an ESM `import.meta.url` string or a CJS module object.
 * @param moduleOrImportMetaUrl `import.meta.url` or a NodeModule
 * @returns true when the module started this process
 */
export const moduleIsEntry = (moduleOrImportMetaUrl: NodeModule | string) => {
    // ESM case: compare the file path behind import.meta.url with argv[1]
    if (typeof moduleOrImportMetaUrl === "string") {
        return process.argv[1] === fileURLToPath(moduleOrImportMetaUrl);
    }

    // CJS case: the entry module is require.main
    if (typeof require !== "undefined" && "exports" in moduleOrImportMetaUrl) {
        return require.main === moduleOrImportMetaUrl;
    }

    return false;
};

View file

@ -7,57 +7,57 @@ import type { Application } from "@prisma/client";
* @returns Whether the OAuth application is valid for the route
*/
export const checkIfOauthIsValid = (
application: Application,
routeScopes: string[]
application: Application,
routeScopes: string[],
) => {
if (routeScopes.length === 0) {
return true;
}
if (routeScopes.length === 0) {
return true;
}
const hasAllWriteScopes =
application.scopes.split(" ").includes("write:*") ||
application.scopes.split(" ").includes("write");
const hasAllWriteScopes =
application.scopes.split(" ").includes("write:*") ||
application.scopes.split(" ").includes("write");
const hasAllReadScopes =
application.scopes.split(" ").includes("read:*") ||
application.scopes.split(" ").includes("read");
const hasAllReadScopes =
application.scopes.split(" ").includes("read:*") ||
application.scopes.split(" ").includes("read");
if (hasAllWriteScopes && hasAllReadScopes) {
return true;
}
if (hasAllWriteScopes && hasAllReadScopes) {
return true;
}
let nonMatchedScopes = routeScopes;
let nonMatchedScopes = routeScopes;
if (hasAllWriteScopes) {
// Filter out all write scopes as valid
nonMatchedScopes = routeScopes.filter(
scope => !scope.startsWith("write:")
);
}
if (hasAllWriteScopes) {
// Filter out all write scopes as valid
nonMatchedScopes = routeScopes.filter(
(scope) => !scope.startsWith("write:"),
);
}
if (hasAllReadScopes) {
// Filter out all read scopes as valid
nonMatchedScopes = routeScopes.filter(
scope => !scope.startsWith("read:")
);
}
if (hasAllReadScopes) {
// Filter out all read scopes as valid
nonMatchedScopes = routeScopes.filter(
(scope) => !scope.startsWith("read:"),
);
}
// If there are still scopes left, check if they match
// If there are no scopes left, return true
if (nonMatchedScopes.length === 0) {
return true;
}
// If there are still scopes left, check if they match
// If there are no scopes left, return true
if (nonMatchedScopes.length === 0) {
return true;
}
// If there are scopes left, check if they match
if (
nonMatchedScopes.every(scope =>
application.scopes.split(" ").includes(scope)
)
) {
return true;
}
// If there are scopes left, check if they match
if (
nonMatchedScopes.every((scope) =>
application.scopes.split(" ").includes(scope),
)
) {
return true;
}
return false;
return false;
};
// In-memory store of PKCE code verifiers, keyed by an identifier —
// presumably the OAuth flow/state id; TODO confirm against callers.
// NOTE(review): module-level mutable state; not shared across processes.
export const oauthCodeVerifiers: Record<string, string> = {};

View file

@ -5,54 +5,54 @@ import Redis from "ioredis";
import { createPrismaRedisCache } from "prisma-redis-middleware";
const cacheRedis = config.redis.cache.enabled
? new Redis({
host: config.redis.cache.host,
port: Number(config.redis.cache.port),
password: config.redis.cache.password,
db: Number(config.redis.cache.database),
})
: null;
? new Redis({
host: config.redis.cache.host,
port: Number(config.redis.cache.port),
password: config.redis.cache.password,
db: Number(config.redis.cache.database),
})
: null;
cacheRedis?.on("error", e => {
console.log(e);
cacheRedis?.on("error", (e) => {
console.log(e);
});
export { cacheRedis };
/**
 * Verifies the Redis connection and builds the Prisma caching middleware.
 * @returns The middleware, or null when Redis caching is disabled
 * @throws Rethrows the ping error when Redis is unreachable
 */
export const initializeRedisCache = async () => {
    if (cacheRedis) {
        // Test connection
        try {
            await cacheRedis.ping();
        } catch (e) {
            console.error(
                `${chalk.red("✗")} ${chalk.bold(
                    "Error while connecting to Redis",
                )}`,
            );
            throw e;
        }

        console.log(`${chalk.green("✓")} ${chalk.bold("Connected to Redis")}`);

        const cacheMiddleware: Prisma.Middleware = createPrismaRedisCache({
            storage: {
                type: "redis",
                options: {
                    client: cacheRedis,
                    invalidation: {
                        // Seconds before cached references expire
                        referencesTTL: 300,
                    },
                },
            },
            // Seconds before cached query results expire
            cacheTime: 300,
            onError: (e) => {
                console.error(e);
            },
        });

        return cacheMiddleware;
    }

    return null;
};

View file

@ -2,54 +2,54 @@ import type { APActivity, APObject } from "activitypub-types";
import type { NodeObject } from "jsonld";
export const jsonResponse = (
data: object,
status = 200,
headers: Record<string, string> = {}
data: object,
status = 200,
headers: Record<string, string> = {},
) => {
return new Response(JSON.stringify(data), {
headers: {
"Content-Type": "application/json",
"X-Frame-Options": "DENY",
"X-Permitted-Cross-Domain-Policies": "none",
"Access-Control-Allow-Credentials": "true",
"Access-Control-Allow-Headers":
"Authorization,Content-Type,Idempotency-Key",
"Access-Control-Allow-Methods": "POST,PUT,DELETE,GET,PATCH,OPTIONS",
"Access-Control-Allow-Origin": "*",
"Access-Control-Expose-Headers":
"Link,X-RateLimit-Reset,X-RateLimit-Limit,X-RateLimit-Remaining,X-Request-Id,Idempotency-Key",
...headers,
},
status,
});
return new Response(JSON.stringify(data), {
headers: {
"Content-Type": "application/json",
"X-Frame-Options": "DENY",
"X-Permitted-Cross-Domain-Policies": "none",
"Access-Control-Allow-Credentials": "true",
"Access-Control-Allow-Headers":
"Authorization,Content-Type,Idempotency-Key",
"Access-Control-Allow-Methods": "POST,PUT,DELETE,GET,PATCH,OPTIONS",
"Access-Control-Allow-Origin": "*",
"Access-Control-Expose-Headers":
"Link,X-RateLimit-Reset,X-RateLimit-Limit,X-RateLimit-Remaining,X-Request-Id,Idempotency-Key",
...headers,
},
status,
});
};
/**
 * Builds an XML Response.
 * @param data Pre-serialized XML body
 * @param status HTTP status (default 200)
 * @returns The Response
 */
export const xmlResponse = (data: string, status = 200) => {
    return new Response(data, {
        headers: {
            "Content-Type": "application/xml",
        },
        status,
    });
};
export const jsonLdResponse = (
data: NodeObject | APActivity | APObject,
status = 200
data: NodeObject | APActivity | APObject,
status = 200,
) => {
return new Response(JSON.stringify(data), {
headers: {
"Content-Type": "application/activity+json",
},
status,
});
return new Response(JSON.stringify(data), {
headers: {
"Content-Type": "application/activity+json",
},
status,
});
};
/**
 * Builds a standard JSON error Response: `{ "error": <message> }`.
 * @param error Error message for the client
 * @param status HTTP status (default 500)
 * @returns The Response
 */
export const errorResponse = (error: string, status = 500) => {
    return jsonResponse(
        {
            error: error,
        },
        status,
    );
};

View file

@ -2,73 +2,73 @@ import { config } from "config-manager";
import { sanitize } from "isomorphic-dompurify";
/**
 * Sanitizes untrusted HTML: restricts tags/attributes via DOMPurify, limits
 * URI schemes to the configured whitelist, then strips any `class`
 * attribute containing a non-whitelisted class name.
 * @param html Untrusted HTML
 * @returns Sanitized HTML string
 */
export const sanitizeHtml = async (html: string) => {
    const sanitizedHtml = sanitize(html, {
        ALLOWED_TAGS: [
            "a",
            "p",
            "br",
            "b",
            "i",
            "em",
            "strong",
            "del",
            "code",
            "u",
            "pre",
            "ul",
            "ol",
            "li",
            "blockquote",
        ],
        ALLOWED_ATTR: [
            "href",
            "target",
            "title",
            "rel",
            "class",
            "start",
            "reversed",
            "value",
        ],
        // BUG FIX: the previous pattern embedded the literal characters
        // "/^.../i" inside the string, so the RegExp required a leading "/"
        // and the "i" flag was never applied — the whitelist could not
        // match real URIs. The flag now goes in the second argument.
        ALLOWED_URI_REGEXP: new RegExp(
            `^(?:(?:${config.validation.url_scheme_whitelist.join(
                "|",
            )}):|[^a-z]|[a-z+.-]+(?:[^a-z+.-:]|$))`,
            "i",
        ),
        USE_PROFILES: {
            mathMl: true,
        },
    });

    // Check text to only allow h-*, p-*, u-*, dt-*, e-*, mention, hashtag,
    // ellipsis, invisible classes (microformats + fediverse conventions)
    const allowedClasses = [
        "h-",
        "p-",
        "u-",
        "dt-",
        "e-",
        "mention",
        "hashtag",
        "ellipsis",
        "invisible",
    ];

    return await new HTMLRewriter()
        .on("*[class]", {
            element(element) {
                const classes = element.getAttribute("class")?.split(" ") ?? [];

                for (const className of classes) {
                    if (
                        !allowedClasses.some((allowedClass) =>
                            className.startsWith(allowedClass),
                        )
                    ) {
                        // One bad class drops the whole attribute
                        element.removeAttribute("class");
                    }
                }
            },
        })
        .transform(new Response(sanitizedHtml))
        .text();
};

View file

@ -1,20 +1,20 @@
import { join } from "path";
import { exists, mkdir, writeFile, readFile } from "fs/promises";
import { exists, mkdir, readFile, writeFile } from "node:fs/promises";
import { join } from "node:path";
/**
 * Writes a file into the app's temp directory (/tmp/lysand), creating the
 * directory if needed.
 * @param filename Name of the file inside the temp directory
 * @param data File contents
 * @returns Absolute path of the written file
 */
export const writeToTempDirectory = async (filename: string, data: string) => {
    const tempDir = join("/tmp/", "lysand");
    // recursive mkdir is a no-op when the directory exists, avoiding the
    // check-then-create race of the previous exists() + mkdir() pair
    await mkdir(tempDir, { recursive: true });

    const tempFile = join(tempDir, filename);
    await writeFile(tempFile, data);

    return tempFile;
};
/**
 * Reads a UTF-8 file from the app's temp directory (/tmp/lysand), creating
 * the directory if needed.
 * @param filename Name of the file inside the temp directory
 * @returns The file contents as a string
 */
export const readFromTempDirectory = async (filename: string) => {
    const tempDir = join("/tmp/", "lysand");
    // recursive mkdir is a no-op when the directory exists, avoiding the
    // check-then-create race of the previous exists() + mkdir() pair
    await mkdir(tempDir, { recursive: true });

    const tempFile = join(tempDir, filename);
    return readFile(tempFile, "utf-8");
};

File diff suppressed because it is too large Load diff