refactor: 🚚 Organize code into sub-packages, instead of a single large package

This commit is contained in:
Jesse Wierzbinski 2025-06-15 04:38:20 +02:00
parent 79742f47dc
commit a6d3ebbeef
No known key found for this signature in database
366 changed files with 942 additions and 833 deletions

View file

@ -1,785 +0,0 @@
import { type BunFile, env, file } from "bun";
import ISO6391 from "iso-639-1";
import { types as mimeTypes } from "mime-types";
import { generateVAPIDKeys } from "web-push";
import { z } from "zod";
import { ProxiableUrl } from "~/classes/media/url.ts";
import { RolePermission } from "~/packages/client/schemas/permissions.ts";
/**
 * Permissions granted by default to every user, and used as the fallback
 * for the `permissions.anonymous` and `permissions.default` config options.
 */
export const DEFAULT_ROLES = [
RolePermission.ManageOwnNotes,
RolePermission.ViewNotes,
RolePermission.ViewNoteLikes,
RolePermission.ViewNoteBoosts,
RolePermission.ManageOwnAccount,
RolePermission.ViewAccountFollows,
RolePermission.ManageOwnLikes,
RolePermission.ManageOwnBoosts,
RolePermission.ViewAccounts,
RolePermission.ManageOwnEmojis,
RolePermission.ViewReactions,
RolePermission.ManageOwnReactions,
RolePermission.ViewEmojis,
RolePermission.ManageOwnMedia,
RolePermission.ManageOwnBlocks,
RolePermission.ManageOwnFilters,
RolePermission.ManageOwnMutes,
RolePermission.ManageOwnReports,
RolePermission.ManageOwnSettings,
RolePermission.ManageOwnNotifications,
RolePermission.ManageOwnFollows,
RolePermission.ManageOwnApps,
RolePermission.Search,
RolePermission.UsePushNotifications,
RolePermission.ViewPublicTimelines,
RolePermission.ViewPrivateTimelines,
RolePermission.OAuth,
];
/**
 * Permissions granted to administrators: everything in DEFAULT_ROLES plus
 * the moderation/instance-management permissions. Used as the fallback for
 * the `permissions.admin` config option.
 */
export const ADMIN_ROLES = [
...DEFAULT_ROLES,
RolePermission.ManageNotes,
RolePermission.ManageAccounts,
RolePermission.ManageLikes,
RolePermission.ManageBoosts,
RolePermission.ManageEmojis,
RolePermission.ManageReactions,
RolePermission.ManageMedia,
RolePermission.ManageBlocks,
RolePermission.ManageFilters,
RolePermission.ManageMutes,
RolePermission.ManageReports,
RolePermission.ManageSettings,
RolePermission.ManageRoles,
RolePermission.ManageNotifications,
RolePermission.ManageFollows,
RolePermission.Impersonate,
RolePermission.IgnoreRateLimits,
RolePermission.ManageInstance,
RolePermission.ManageInstanceFederation,
RolePermission.ManageInstanceSettings,
];
/** Supported storage backends for uploaded media (see `media.backend`). */
export enum MediaBackendType {
Local = "local",
S3 = "s3",
}
// Need to declare this here instead of importing it otherwise we get cyclical import errors
/** Zod enum accepting any ISO 639-1 two-letter language code. */
export const iso631 = z.enum(ISO6391.getAllCodes() as [string, ...string[]]);
/**
 * A non-empty URL path. A single trailing slash is stripped, except for the
 * root path "/", which is preserved as-is.
 */
export const urlPath = z
    .string()
    .trim()
    .min(1)
    .transform((path) => {
        // Keep the root slash; otherwise drop one trailing slash.
        if (path === "/") {
            return path;
        }
        return path.endsWith("/") ? path.slice(0, -1) : path;
    });
/**
 * A non-empty, parseable absolute URL, transformed into a ProxiableUrl.
 * Validation uses URL.canParse so malformed values fail with "Invalid url".
 */
export const url = z
.string()
.trim()
.min(1)
.refine((arg) => URL.canParse(arg), "Invalid url")
.transform((arg) => new ProxiableUrl(arg));
/** A valid TCP/UDP port number: an integer in the range 1-65535. */
export const unixPort = z
    .number()
    .int()
    .min(1)
    .max(65_535); // 2 ** 16 - 1
const fileFromPathString = (text: string): BunFile => file(text.slice(5));
// Not using .ip() because we allow CIDR ranges and wildcards and such
// NOTE: no format validation is performed here — any string is accepted.
const ip = z
.string()
.describe("An IPv6/v4 address or CIDR range. Wildcards are also allowed");
/**
 * A string compiled into a JavaScript RegExp.
 *
 * Invalid patterns are reported as a Zod validation issue instead of
 * letting `new RegExp` throw a raw SyntaxError out of the parser.
 */
const regex = z
    .string()
    .transform((arg, ctx) => {
        try {
            return new RegExp(arg);
        } catch {
            ctx.addIssue({
                code: z.ZodIssueCode.custom,
                message: `Invalid regular expression: ${arg}`,
            });
            return z.NEVER;
        }
    })
    .describe("JavaScript regular expression");
/**
 * A string that may be given inline or loaded from a file by prefixing the
 * value with "PATH:". When a path is given, the file's existence is checked
 * before its contents are read. Async checks — requires parseAsync.
 */
export const sensitiveString = z
.string()
.refine(
(text) =>
// Only PATH:-prefixed values need the file-existence check.
text.startsWith("PATH:") ? fileFromPathString(text).exists() : true,
(text) => ({
message: `Path ${
fileFromPathString(text).name
} does not exist, is a directory or is not accessible`,
}),
)
.transform((text) =>
// Replace a PATH: reference with the referenced file's contents.
text.startsWith("PATH:") ? fileFromPathString(text).text() : text,
)
.describe("You can use PATH:/path/to/file to load this value from a file");
/**
 * A file path that must exist and be readable. Resolves to an object with
 * both the file handle and its full text content. Async — requires parseAsync.
 */
export const filePathString = z
.string()
.transform((s) => file(s))
.refine(
(file) => file.exists(),
(file) => ({
message: `Path ${file.name} does not exist, is a directory or is not accessible`,
}),
)
.transform(async (file) => ({
content: await file.text(),
file,
}))
.describe("This value must be a file path");
/**
 * Ed25519 key pair used by the instance (base64-encoded SPKI public /
 * PKCS#8 private). When either half is missing, a fresh pair is generated
 * and surfaced through a validation issue so the operator can copy it into
 * the config — parsing then fails on purpose. Both keys are imported to
 * validate them, yielding CryptoKey objects. Async — requires parseAsync.
 */
export const keyPair = z
.strictObject({
public: sensitiveString.optional(),
private: sensitiveString.optional(),
})
.optional()
.transform(async (k, ctx) => {
if (!(k?.public && k?.private)) {
// Generate a pair for the operator to copy, then fail validation.
const keys = await crypto.subtle.generateKey("Ed25519", true, [
"sign",
"verify",
]);
const privateKey = Buffer.from(
await crypto.subtle.exportKey("pkcs8", keys.privateKey),
).toString("base64");
const publicKey = Buffer.from(
await crypto.subtle.exportKey("spki", keys.publicKey),
).toString("base64");
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: `Public and private keys are not set. Here are generated keys for you to copy.\n\nPublic: ${publicKey}\nPrivate: ${privateKey}`,
});
return z.NEVER;
}
let publicKey: CryptoKey;
let privateKey: CryptoKey;
// Validate each key by importing it; report a Zod issue on failure.
try {
publicKey = await crypto.subtle.importKey(
"spki",
Buffer.from(k.public, "base64"),
"Ed25519",
true,
["verify"],
);
} catch {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: "Public key is invalid",
});
return z.NEVER;
}
try {
privateKey = await crypto.subtle.importKey(
"pkcs8",
Buffer.from(k.private, "base64"),
"Ed25519",
true,
["sign"],
);
} catch {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: "Private key is invalid",
});
return z.NEVER;
}
return {
public: publicKey,
private: privateKey,
};
});
/**
 * VAPID key pair for Web Push. When either half is missing, a fresh pair is
 * generated and surfaced through a validation issue so the operator can copy
 * it into the configuration; parsing then fails on purpose.
 */
export const vapidKeyPair = z
    .strictObject({
        public: sensitiveString.optional(),
        private: sensitiveString.optional(),
    })
    .optional()
    .transform((keys, ctx) => {
        if (keys?.public && keys?.private) {
            return keys;
        }

        // Generate a pair for the operator to copy, then fail validation.
        const generated = generateVAPIDKeys();

        ctx.addIssue({
            code: z.ZodIssueCode.custom,
            message: `VAPID keys are not set. Here are generated keys for you to copy.\n\nPublic: ${generated.publicKey}\nPrivate: ${generated.privateKey}`,
        });

        return z.NEVER;
    });
/**
 * HMAC-SHA256 signing key, base64-encoded (may be loaded via PATH:).
 * An empty value triggers generation of a fresh key, surfaced through a
 * validation issue for the operator to copy; parsing then fails on purpose.
 * The key is test-imported to check it decodes. Async — requires parseAsync.
 */
export const hmacKey = sensitiveString.transform(async (text, ctx) => {
if (!text) {
// Generate a key for the operator to copy, then fail validation.
const key = await crypto.subtle.generateKey(
{
name: "HMAC",
hash: "SHA-256",
},
true,
["sign"],
);
const exported = await crypto.subtle.exportKey("raw", key);
const base64 = Buffer.from(exported).toString("base64");
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: `HMAC key is not set. Here is a generated key for you to copy: ${base64}`,
});
return z.NEVER;
}
// Validate the provided key by attempting to import it.
try {
await crypto.subtle.importKey(
"raw",
Buffer.from(text, "base64"),
{
name: "HMAC",
hash: "SHA-256",
},
true,
["sign"],
);
} catch {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: "HMAC key is invalid",
});
return z.NEVER;
}
return text;
});
/**
 * The full Versia Server configuration schema.
 *
 * Several sub-schemas perform async work during validation (file reads,
 * key imports, a live proxy check), so this must be parsed with parseAsync.
 * Uses strictObject throughout so unknown config keys are rejected.
 */
export const ConfigSchema = z
.strictObject({
postgres: z
.strictObject({
host: z.string().min(1).default("localhost"),
port: unixPort.default(5432),
username: z.string().min(1),
password: sensitiveString.default(""),
database: z.string().min(1).default("versia"),
replicas: z
.array(
z.strictObject({
host: z.string().min(1),
port: unixPort.default(5432),
username: z.string().min(1),
password: sensitiveString.default(""),
database: z.string().min(1).default("versia"),
}),
)
.describe("Additional read-only replicas")
.default([]),
})
.describe("PostgreSQL database configuration"),
redis: z
.strictObject({
queue: z
.strictObject({
host: z.string().min(1).default("localhost"),
port: unixPort.default(6379),
password: sensitiveString.default(""),
database: z.number().int().default(0),
})
.describe("A Redis database used for managing queues."),
cache: z
.strictObject({
host: z.string().min(1).default("localhost"),
port: unixPort.default(6379),
password: sensitiveString.default(""),
database: z.number().int().default(1),
})
.optional()
.describe(
"A Redis database used for caching SQL queries. Optional.",
),
})
.describe("Redis configuration. Used for queues and caching."),
search: z
.strictObject({
enabled: z
.boolean()
.default(false)
.describe("Enable indexing and searching?"),
sonic: z
.strictObject({
host: z.string().min(1).default("localhost"),
port: unixPort.default(7700),
password: sensitiveString,
})
.describe("Sonic database configuration")
.optional(),
})
.refine(
(o) => !o.enabled || o.sonic,
"When search is enabled, Sonic configuration must be set",
)
.describe("Search and indexing configuration"),
registration: z.strictObject({
allow: z
.boolean()
.default(true)
.describe("Can users sign up freely?"),
require_approval: z.boolean().default(false),
message: z
.string()
.optional()
.describe(
"Message to show to users when registration is disabled",
),
}),
http: z.strictObject({
base_url: url.describe(
"URL that the instance will be accessible at",
),
bind: z.string().min(1).default("0.0.0.0"),
bind_port: unixPort.default(8080),
banned_ips: z.array(ip).default([]),
banned_user_agents: z.array(regex).default([]),
proxy_address: url
.optional()
.describe("URL to an eventual HTTP proxy")
// NOTE: this refine performs a live network request through the
// proxy at config-parse time to verify the proxy is reachable.
.refine(async (url) => {
if (!url) {
return true;
}
// Test the proxy
const response = await fetch(
"https://api.ipify.org?format=json",
{
proxy: url.origin,
},
);
return response.ok;
}, "The HTTP proxy address is not reachable"),
tls: z
.strictObject({
key: filePathString,
cert: filePathString,
passphrase: sensitiveString.optional(),
ca: filePathString.optional(),
})
.describe(
"TLS configuration. You should probably be using a reverse proxy instead of this",
)
.optional(),
}),
frontend: z.strictObject({
enabled: z.boolean().default(true),
path: z.string().default(env.VERSIA_FRONTEND_PATH || "frontend"),
routes: z.strictObject({
home: urlPath.default("/"),
login: urlPath.default("/oauth/authorize"),
consent: urlPath.default("/oauth/consent"),
register: urlPath.default("/register"),
password_reset: urlPath.default("/oauth/reset"),
}),
settings: z.record(z.string(), z.any()).default({}),
}),
email: z
.strictObject({
send_emails: z.boolean().default(false),
smtp: z
.strictObject({
server: z.string().min(1),
port: unixPort.default(465),
username: z.string().min(1),
password: sensitiveString.optional(),
tls: z.boolean().default(true),
})
.optional(),
})
.refine(
(o) => o.send_emails || !o.smtp,
"When send_emails is enabled, SMTP configuration must be set",
),
media: z.strictObject({
backend: z
.nativeEnum(MediaBackendType)
.default(MediaBackendType.Local),
uploads_path: z.string().min(1).default("uploads"),
conversion: z.strictObject({
convert_images: z.boolean().default(false),
convert_to: z.string().default("image/webp"),
convert_vectors: z.boolean().default(false),
}),
}),
s3: z
.strictObject({
endpoint: url,
access_key: sensitiveString,
secret_access_key: sensitiveString,
region: z.string().optional(),
bucket_name: z.string().optional(),
public_url: url.describe(
"Public URL that uploaded media will be accessible at",
),
path: z.string().optional(),
path_style: z.boolean().default(true),
})
.optional(),
validation: z.strictObject({
accounts: z.strictObject({
max_displayname_characters: z
.number()
.int()
.nonnegative()
.default(50),
max_username_characters: z
.number()
.int()
.nonnegative()
.default(30),
max_bio_characters: z
.number()
.int()
.nonnegative()
.default(5000),
max_avatar_bytes: z
.number()
.int()
.nonnegative()
.default(5_000_000),
max_header_bytes: z
.number()
.int()
.nonnegative()
.default(5_000_000),
disallowed_usernames: z
.array(regex)
.default([
"well-known",
"about",
"activities",
"api",
"auth",
"dev",
"inbox",
"internal",
"main",
"media",
"nodeinfo",
"notice",
"oauth",
"objects",
"proxy",
"push",
"registration",
"relay",
"settings",
"status",
"tag",
"users",
"web",
"search",
"mfa",
]),
max_field_count: z.number().int().default(10),
max_field_name_characters: z.number().int().default(1000),
max_field_value_characters: z.number().int().default(1000),
max_pinned_notes: z.number().int().default(20),
}),
notes: z.strictObject({
max_characters: z.number().int().nonnegative().default(5000),
allowed_url_schemes: z
.array(z.string())
.default([
"http",
"https",
"ftp",
"dat",
"dweb",
"gopher",
"hyper",
"ipfs",
"ipns",
"irc",
"xmpp",
"ircs",
"magnet",
"mailto",
"mumble",
"ssb",
"gemini",
]),
max_attachments: z.number().int().default(16),
}),
media: z.strictObject({
max_bytes: z.number().int().nonnegative().default(40_000_000),
max_description_characters: z
.number()
.int()
.nonnegative()
.default(1000),
allowed_mime_types: z
.array(z.string())
.default(Object.values(mimeTypes)),
}),
emojis: z.strictObject({
max_bytes: z.number().int().nonnegative().default(1_000_000),
max_shortcode_characters: z
.number()
.int()
.nonnegative()
.default(100),
max_description_characters: z
.number()
.int()
.nonnegative()
.default(1_000),
}),
polls: z.strictObject({
max_options: z.number().int().nonnegative().default(20),
max_option_characters: z
.number()
.int()
.nonnegative()
.default(500),
min_duration_seconds: z
.number()
.int()
.nonnegative()
.default(60),
max_duration_seconds: z
.number()
.int()
.nonnegative()
.default(100 * 24 * 60 * 60),
}),
emails: z.strictObject({
disallow_tempmail: z
.boolean()
.default(false)
.describe("Blocks over 10,000 common tempmail domains"),
disallowed_domains: z.array(regex).default([]),
}),
challenges: z
.strictObject({
difficulty: z.number().int().positive().default(50000),
expiration: z.number().int().positive().default(300),
key: hmacKey,
})
.optional()
.describe(
"CAPTCHA challenge configuration. Challenges are disabled if not provided.",
),
filters: z
.strictObject({
note_content: z.array(regex).default([]),
emoji_shortcode: z.array(regex).default([]),
username: z.array(regex).default([]),
displayname: z.array(regex).default([]),
bio: z.array(regex).default([]),
})
.describe(
"Block content that matches these regular expressions",
),
}),
notifications: z.strictObject({
push: z
.strictObject({
vapid_keys: vapidKeyPair,
subject: z
.string()
.optional()
.describe(
"Subject field embedded in the push notification. Example: 'mailto:contact@example.com'",
),
})
.describe(
"Web Push Notifications configuration. Leave out to disable.",
)
.optional(),
}),
defaults: z.strictObject({
visibility: z
.enum(["public", "unlisted", "private", "direct"])
.default("public"),
language: z.string().default("en"),
avatar: url.optional(),
header: url.optional(),
placeholder_style: z
.string()
.default("thumbs")
.describe("A style name from https://www.dicebear.com/styles"),
}),
federation: z.strictObject({
blocked: z.array(z.string()).default([]),
followers_only: z.array(z.string()).default([]),
discard: z.strictObject({
reports: z.array(z.string()).default([]),
deletes: z.array(z.string()).default([]),
updates: z.array(z.string()).default([]),
media: z.array(z.string()).default([]),
follows: z.array(z.string()).default([]),
likes: z.array(z.string()).default([]),
reactions: z.array(z.string()).default([]),
banners: z.array(z.string()).default([]),
avatars: z.array(z.string()).default([]),
}),
bridge: z
.strictObject({
software: z.enum(["versia-ap"]).or(z.string()),
allowed_ips: z.array(ip).default([]),
token: sensitiveString,
url,
})
.optional(),
}),
// Per-queue retention settings for completed and failed jobs.
queues: z.record(
z.enum(["delivery", "inbox", "fetch", "push", "media"]),
z.strictObject({
remove_after_complete_seconds: z
.number()
.int()
.nonnegative()
// 1 year
.default(60 * 60 * 24 * 365),
remove_after_failure_seconds: z
.number()
.int()
.nonnegative()
// 1 year
.default(60 * 60 * 24 * 365),
}),
),
instance: z.strictObject({
name: z.string().min(1).default("Versia Server"),
description: z.string().min(1).default("A Versia instance"),
extended_description_path: filePathString.optional(),
tos_path: filePathString.optional(),
privacy_policy_path: filePathString.optional(),
branding: z.strictObject({
logo: url.optional(),
banner: url.optional(),
}),
languages: z
.array(iso631)
.describe("Primary instance languages. ISO 639-1 codes."),
contact: z.strictObject({
email: z
.string()
.email()
.describe("Email to contact the instance administration"),
}),
rules: z
.array(
z.strictObject({
text: z
.string()
.min(1)
.max(255)
.describe("Short description of the rule"),
hint: z
.string()
.min(1)
.max(4096)
.optional()
.describe(
"Longer version of the rule with additional information",
),
}),
)
.default([]),
keys: keyPair,
}),
permissions: z.strictObject({
anonymous: z
.array(z.nativeEnum(RolePermission))
.default(DEFAULT_ROLES),
default: z
.array(z.nativeEnum(RolePermission))
.default(DEFAULT_ROLES),
admin: z.array(z.nativeEnum(RolePermission)).default(ADMIN_ROLES),
}),
logging: z.strictObject({
types: z.record(
z.enum([
"requests",
"responses",
"requests_content",
"filters",
]),
z
.boolean()
.default(false)
.or(
z.strictObject({
level: z
.enum([
"debug",
"info",
"warning",
"error",
"fatal",
])
.default("info"),
log_file_path: z.string().optional(),
}),
),
),
log_level: z
.enum(["debug", "info", "warning", "error", "fatal"])
.default("info"),
sentry: z
.strictObject({
dsn: url,
debug: z.boolean().default(false),
sample_rate: z.number().min(0).max(1.0).default(1.0),
traces_sample_rate: z.number().min(0).max(1.0).default(1.0),
trace_propagation_targets: z.array(z.string()).default([]),
max_breadcrumbs: z.number().default(100),
environment: z.string().optional(),
})
.optional(),
log_file_path: z.string().default("logs/versia.log"),
}),
debug: z
.strictObject({
federation: z.boolean().default(false),
})
.optional(),
plugins: z.strictObject({
autoload: z.boolean().default(true),
overrides: z
.strictObject({
enabled: z.array(z.string()).default([]),
disabled: z.array(z.string()).default([]),
})
.refine(
// Only one of enabled or disabled can be set
(arg) =>
arg.enabled.length === 0 || arg.disabled.length === 0,
"Only one of enabled or disabled can be set",
),
config: z.record(z.string(), z.any()).optional(),
}),
})
.refine(
// If media backend is S3, s3 config must be set
(arg) => arg.media.backend === MediaBackendType.Local || !!arg.s3,
"When media backend is S3, S3 configuration must be set",
);

View file

@ -1,12 +0,0 @@
import { zodToJsonSchema } from "zod-to-json-schema";

// Evaluate the config module first so the schema module can be imported
// afterwards without triggering an import cycle.
await import("~/config.ts");

const { ConfigSchema } = await import("./schema.ts");

// Emit the configuration file's JSON Schema on stdout.
const jsonSchema = zodToJsonSchema(ConfigSchema, {});

console.write(`${JSON.stringify(jsonSchema, null, 4)}\n`);

View file

@ -1,164 +0,0 @@
import type {
Application as ApplicationSchema,
CredentialApplication,
} from "@versia/client/schemas";
import { db, Token } from "@versia/kit/db";
import { Applications } from "@versia/kit/tables";
import {
desc,
eq,
type InferInsertModel,
type InferSelectModel,
inArray,
type SQL,
} from "drizzle-orm";
import type { z } from "zod";
import { BaseInterface } from "./base.ts";
type ApplicationType = InferSelectModel<typeof Applications>;

/**
 * Database wrapper around a row of the Applications table, representing an
 * OAuth client application registered with this instance.
 */
export class Application extends BaseInterface<typeof Applications> {
    public static $type: ApplicationType;

    /**
     * Re-fetches this application's row from the database.
     *
     * @throws Error if the row no longer exists.
     */
    public async reload(): Promise<void> {
        const fresh = await Application.fromId(this.data.id);

        if (!fresh) {
            throw new Error("Failed to reload application");
        }

        this.data = fresh.data;
    }

    /** Looks an application up by primary key; null for a null/unknown id. */
    public static async fromId(
        id: string | null,
    ): Promise<Application | null> {
        return id ? await Application.fromSql(eq(Applications.id, id)) : null;
    }

    /** Fetches every application whose id appears in `ids`. */
    public static async fromIds(ids: string[]): Promise<Application[]> {
        return await Application.manyFromSql(inArray(Applications.id, ids));
    }

    /**
     * Fetches the first application matching the given SQL filter.
     *
     * @param sql - Drizzle filter expression.
     * @param orderBy - Sort order; defaults to newest id first.
     */
    public static async fromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Applications.id),
    ): Promise<Application | null> {
        const row = await db.query.Applications.findFirst({
            orderBy,
            where: sql,
        });

        return row ? new Application(row) : null;
    }

    /**
     * Fetches all applications matching the given SQL filter.
     *
     * @param sql - Drizzle filter expression.
     * @param orderBy - Sort order; defaults to newest id first.
     * @param limit - Maximum number of rows to return.
     * @param offset - Number of rows to skip.
     * @param extra - Extra findMany options; only `with` is forwarded.
     */
    public static async manyFromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Applications.id),
        limit?: number,
        offset?: number,
        extra?: Parameters<typeof db.query.Applications.findMany>[0],
    ): Promise<Application[]> {
        const rows = await db.query.Applications.findMany({
            limit,
            offset,
            orderBy,
            where: sql,
            with: extra?.with,
        });

        return rows.map((row) => new Application(row));
    }

    /** Resolves the application that issued the given OAuth access token. */
    public static async getFromToken(
        token: string,
    ): Promise<Application | null> {
        const found = await Token.fromAccessToken(token);
        const application = found?.data.application;

        return application ? new Application(application) : null;
    }

    /** Looks an application up by its OAuth client_id. */
    public static fromClientId(clientId: string): Promise<Application | null> {
        return Application.fromSql(eq(Applications.clientId, clientId));
    }

    /**
     * Applies a partial update to this application's row, then re-reads it
     * so `this.data` reflects the state actually stored in the database.
     *
     * @throws Error if the row cannot be read back.
     */
    public async update(
        newApplication: Partial<ApplicationType>,
    ): Promise<ApplicationType> {
        await db
            .update(Applications)
            .set(newApplication)
            .where(eq(Applications.id, this.id));

        const refreshed = await Application.fromId(this.data.id);

        if (!refreshed) {
            throw new Error("Failed to update application");
        }

        this.data = refreshed.data;
        return refreshed.data;
    }

    /** Persists the current in-memory state to the database. */
    public save(): Promise<ApplicationType> {
        return this.update(this.data);
    }

    /**
     * Deletes applications from the database.
     *
     * @param ids - When given, deletes those rows; otherwise deletes this one.
     */
    public async delete(ids?: string[]): Promise<void> {
        await (Array.isArray(ids)
            ? db.delete(Applications).where(inArray(Applications.id, ids))
            : db.delete(Applications).where(eq(Applications.id, this.id)));
    }

    /**
     * Inserts a new application row and returns it as a wrapped Application.
     *
     * @throws Error if the freshly inserted row cannot be read back.
     */
    public static async insert(
        data: InferInsertModel<typeof Applications>,
    ): Promise<Application> {
        const [inserted] = await db
            .insert(Applications)
            .values(data)
            .returning();

        const application = await Application.fromId(inserted.id);

        if (!application) {
            throw new Error("Failed to insert application");
        }

        return application;
    }

    /** Primary key of the underlying row. */
    public get id(): string {
        return this.data.id;
    }

    /** Serializes this application into its public API representation. */
    public toApi(): z.infer<typeof ApplicationSchema> {
        return {
            name: this.data.name,
            website: this.data.website,
            scopes: this.data.scopes.split(" "),
            redirect_uri: this.data.redirectUri,
            redirect_uris: this.data.redirectUri.split("\n"),
        };
    }

    /** Like toApi, but includes the client credentials (owner-only view). */
    public toApiCredential(): z.infer<typeof CredentialApplication> {
        return {
            name: this.data.name,
            website: this.data.website,
            client_id: this.data.clientId,
            client_secret: this.data.secret,
            client_secret_expires_at: "0",
            scopes: this.data.scopes.split(" "),
            redirect_uri: this.data.redirectUri,
            redirect_uris: this.data.redirectUri.split("\n"),
        };
    }
}

View file

@ -1,54 +0,0 @@
import type { InferModelFromColumns, InferSelectModel } from "drizzle-orm";
import type { PgTableWithColumns } from "drizzle-orm/pg-core";
/**
 * BaseInterface is an abstract class that provides a common interface for all models.
 * It includes methods for saving, deleting, updating, and reloading data.
 *
 * Concrete subclasses wrap a single database row (held in `data`) and
 * implement the CRUD operations declared here against their own table.
 *
 * @template Table - The type of the table with columns.
 * @template Columns - The type of the columns inferred from the table.
 */
export abstract class BaseInterface<
// biome-ignore lint/suspicious/noExplicitAny: This is just an extended interface
Table extends PgTableWithColumns<any>,
Columns = InferModelFromColumns<Table["_"]["columns"]>,
> {
/**
 * Constructs a new instance of the BaseInterface.
 *
 * @param data - The data for the model.
 */
public constructor(public data: Columns) {}
/**
 * Saves the current state of the model to the database.
 *
 * @returns A promise that resolves with the saved model.
 */
public abstract save(): Promise<Columns>;
/**
 * Deletes the model from the database.
 *
 * @param ids - The ids of the models to delete. If not provided, the current model will be deleted.
 * @returns A promise that resolves when the deletion is complete.
 */
public abstract delete(ids?: string[]): Promise<void>;
/**
 * Updates the model with new data.
 *
 * @param newData - The new data for the model.
 * @returns A promise that resolves with the updated model.
 */
public abstract update(
newData: Partial<InferSelectModel<Table>>,
): Promise<Columns>;
/**
 * Reloads the model from the database.
 *
 * @returns A promise that resolves when the reloading is complete.
 */
public abstract reload(): Promise<void>;
}

View file

@ -1,238 +0,0 @@
import {
type CustomEmoji,
emojiWithColonsRegex,
emojiWithIdentifiersRegex,
} from "@versia/client/schemas";
import { db, type Instance, Media } from "@versia/kit/db";
import { Emojis, type Instances, type Medias } from "@versia/kit/tables";
import { randomUUIDv7 } from "bun";
import {
and,
desc,
eq,
type InferInsertModel,
type InferSelectModel,
inArray,
isNull,
type SQL,
} from "drizzle-orm";
import type { z } from "zod";
import * as VersiaEntities from "~/packages/sdk/entities/index.ts";
import type { ImageContentFormatSchema } from "~/packages/sdk/schemas/index.ts";
import { BaseInterface } from "./base.ts";
type EmojiType = InferSelectModel<typeof Emojis> & {
    media: InferSelectModel<typeof Medias>;
    instance: InferSelectModel<typeof Instances> | null;
};

/**
 * Database wrapper around a row of the Emojis table (joined with its media
 * and, for remote emojis, the instance it came from).
 */
export class Emoji extends BaseInterface<typeof Emojis, EmojiType> {
    public static $type: EmojiType;

    /** The media entry storing this emoji's image. */
    public media: Media;

    public constructor(data: EmojiType) {
        super(data);
        this.media = new Media(data.media);
    }

    /**
     * Re-fetches this emoji's row from the database.
     *
     * @throws Error if the row no longer exists.
     */
    public async reload(): Promise<void> {
        const reloaded = await Emoji.fromId(this.data.id);
        if (!reloaded) {
            throw new Error("Failed to reload emoji");
        }
        this.data = reloaded.data;
    }

    /** Looks an emoji up by primary key; resolves null for a null/unknown id. */
    public static async fromId(id: string | null): Promise<Emoji | null> {
        if (!id) {
            return null;
        }
        return await Emoji.fromSql(eq(Emojis.id, id));
    }

    /** Fetches every emoji whose id appears in `ids`. */
    public static async fromIds(ids: string[]): Promise<Emoji[]> {
        return await Emoji.manyFromSql(inArray(Emojis.id, ids));
    }

    /**
     * Fetches the first emoji matching the given SQL filter, with its media
     * and instance relations loaded.
     *
     * @param sql - Drizzle filter expression.
     * @param orderBy - Sort order; defaults to newest id first.
     */
    public static async fromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Emojis.id),
    ): Promise<Emoji | null> {
        const found = await db.query.Emojis.findFirst({
            where: sql,
            orderBy,
            with: {
                instance: true,
                media: true,
            },
        });
        if (!found) {
            return null;
        }
        return new Emoji(found);
    }

    /**
     * Fetches all emojis matching the given SQL filter.
     *
     * @param sql - Drizzle filter expression.
     * @param orderBy - Sort order; defaults to newest id first.
     * @param limit - Maximum number of rows to return.
     * @param offset - Number of rows to skip.
     * @param extra - Extra findMany options; `with` is merged with the
     *   always-required instance/media relations.
     */
    public static async manyFromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Emojis.id),
        limit?: number,
        offset?: number,
        extra?: Parameters<typeof db.query.Emojis.findMany>[0],
    ): Promise<Emoji[]> {
        const found = await db.query.Emojis.findMany({
            where: sql,
            orderBy,
            limit,
            offset,
            with: { ...extra?.with, instance: true, media: true },
        });
        return found.map((s) => new Emoji(s));
    }

    /**
     * Applies a partial update to this emoji's row, then re-reads it so
     * `this.data` reflects the state actually stored in the database.
     *
     * @throws Error if the row cannot be read back.
     */
    public async update(newEmoji: Partial<EmojiType>): Promise<EmojiType> {
        await db.update(Emojis).set(newEmoji).where(eq(Emojis.id, this.id));
        const updated = await Emoji.fromId(this.data.id);
        if (!updated) {
            throw new Error("Failed to update emoji");
        }
        this.data = updated.data;
        return updated.data;
    }

    /** Persists the current in-memory state to the database. */
    public save(): Promise<EmojiType> {
        return this.update(this.data);
    }

    /**
     * Deletes emojis from the database.
     *
     * @param ids - When given, deletes those rows; otherwise deletes this one.
     */
    public async delete(ids?: string[]): Promise<void> {
        if (Array.isArray(ids)) {
            await db.delete(Emojis).where(inArray(Emojis.id, ids));
        } else {
            await db.delete(Emojis).where(eq(Emojis.id, this.id));
        }
    }

    /**
     * Inserts a new emoji row and returns it as a wrapped Emoji.
     *
     * @throws Error if the freshly inserted row cannot be read back.
     */
    public static async insert(
        data: InferInsertModel<typeof Emojis>,
    ): Promise<Emoji> {
        const inserted = (await db.insert(Emojis).values(data).returning())[0];
        const emoji = await Emoji.fromId(inserted.id);
        if (!emoji) {
            throw new Error("Failed to insert emoji");
        }
        return emoji;
    }

    /**
     * Returns the local copy of a remote emoji, fetching and storing it via
     * fromVersia when it has not been seen before.
     */
    public static async fetchFromRemote(
        emojiToFetch: {
            name: string;
            url: z.infer<typeof ImageContentFormatSchema>;
        },
        instance: Instance,
    ): Promise<Emoji> {
        const existingEmoji = await Emoji.fromSql(
            and(
                eq(Emojis.shortcode, emojiToFetch.name),
                eq(Emojis.instanceId, instance.id),
            ),
        );
        if (existingEmoji) {
            return existingEmoji;
        }
        return await Emoji.fromVersia(emojiToFetch, instance);
    }

    /** Primary key of the underlying row. */
    public get id(): string {
        return this.data.id;
    }

    /**
     * Parse emojis from text
     *
     * Matches `:shortcode:` occurrences and resolves them against local
     * (instance-less) emojis only.
     *
     * @param text The text to parse
     * @returns An array of emojis
     */
    public static parseFromText(text: string): Promise<Emoji[]> {
        const matches = text.match(emojiWithColonsRegex);
        if (!matches || matches.length === 0) {
            return Promise.resolve([]);
        }
        return Emoji.manyFromSql(
            and(
                inArray(
                    Emojis.shortcode,
                    matches.map((match) => match.replace(/:/g, "")),
                ),
                isNull(Emojis.instanceId),
            ),
        );
    }

    /** Serializes this emoji into its public API representation. */
    public toApi(): z.infer<typeof CustomEmoji> {
        return {
            id: this.id,
            shortcode: this.data.shortcode,
            static_url: this.media.getUrl().proxied,
            url: this.media.getUrl().proxied,
            visible_in_picker: this.data.visibleInPicker,
            category: this.data.category,
            global: this.data.ownerId === null,
            // Guarded with ?. — the preferred MIME type may have no content
            // entry, in which case the description is simply null.
            description:
                this.media.data.content[this.media.getPreferredMimeType()]
                    ?.description ?? null,
        };
    }

    /** Serializes this emoji into its federation (Versia) representation. */
    public toVersia(): {
        name: string;
        url: z.infer<typeof ImageContentFormatSchema>;
    } {
        return {
            name: `:${this.data.shortcode}:`,
            url: this.media.toVersia().data as z.infer<
                typeof ImageContentFormatSchema
            >,
        };
    }

    /**
     * Stores a remote emoji locally: downloads its media and inserts a new
     * row linked to the given instance.
     *
     * @throws Error if no shortcode can be extracted from the emoji name.
     */
    public static async fromVersia(
        emoji: {
            name: string;
            url: z.infer<typeof ImageContentFormatSchema>;
        },
        instance: Instance,
    ): Promise<Emoji> {
        // Extracts the shortcode from the emoji name (e.g. :shortcode: -> shortcode).
        // Guarded with optional chaining: without it, a non-matching name
        // threw a TypeError on `[0].groups` and the intended error below
        // was unreachable.
        const match = [...emoji.name.matchAll(emojiWithIdentifiersRegex)][0];
        const shortcode = match?.groups?.shortcode;
        if (!shortcode) {
            throw new Error("Could not extract shortcode from emoji name");
        }
        const media = await Media.fromVersia(
            new VersiaEntities.ImageContentFormat(emoji.url),
        );
        return Emoji.insert({
            id: randomUUIDv7(),
            shortcode,
            mediaId: media.id,
            visibleInPicker: true,
            instanceId: instance.id,
        });
    }
}

View file

@ -1,372 +0,0 @@
import { getLogger } from "@logtape/logtape";
import { db } from "@versia/kit/db";
import { Instances } from "@versia/kit/tables";
import { randomUUIDv7 } from "bun";
import chalk from "chalk";
import {
desc,
eq,
type InferInsertModel,
type InferSelectModel,
inArray,
type SQL,
} from "drizzle-orm";
import { config } from "~/config.ts";
import * as VersiaEntities from "~/packages/sdk/entities/index.ts";
import { ApiError } from "../errors/api-error.ts";
import { BaseInterface } from "./base.ts";
import { User } from "./user.ts";
type InstanceType = InferSelectModel<typeof Instances>;
export class Instance extends BaseInterface<typeof Instances> {
public static $type: InstanceType;
/**
 * Re-fetches this instance's row from the database.
 *
 * @throws Error if the row no longer exists.
 */
public async reload(): Promise<void> {
    const fresh = await Instance.fromId(this.data.id);

    if (!fresh) {
        throw new Error("Failed to reload instance");
    }

    this.data = fresh.data;
}
/** Looks an instance up by primary key; resolves null for a null/unknown id. */
public static async fromId(id: string | null): Promise<Instance | null> {
    return id ? await Instance.fromSql(eq(Instances.id, id)) : null;
}
/** Fetches every instance whose id appears in `ids`. */
public static async fromIds(ids: string[]): Promise<Instance[]> {
return await Instance.manyFromSql(inArray(Instances.id, ids));
}
/**
 * Fetches the first instance matching the given SQL filter.
 *
 * @param sql - Drizzle filter expression.
 * @param orderBy - Sort order; defaults to newest id first.
 */
public static async fromSql(
    sql: SQL<unknown> | undefined,
    orderBy: SQL<unknown> | undefined = desc(Instances.id),
): Promise<Instance | null> {
    const row = await db.query.Instances.findFirst({
        orderBy,
        where: sql,
    });

    return row ? new Instance(row) : null;
}
/**
 * Fetches all instances matching the given SQL filter.
 *
 * @param sql - Drizzle filter expression.
 * @param orderBy - Sort order; defaults to newest id first.
 * @param limit - Maximum number of rows to return.
 * @param offset - Number of rows to skip.
 * @param extra - Extra findMany options; only `with` is forwarded.
 */
public static async manyFromSql(
    sql: SQL<unknown> | undefined,
    orderBy: SQL<unknown> | undefined = desc(Instances.id),
    limit?: number,
    offset?: number,
    extra?: Parameters<typeof db.query.Instances.findMany>[0],
): Promise<Instance[]> {
    const rows = await db.query.Instances.findMany({
        limit,
        offset,
        orderBy,
        where: sql,
        with: extra?.with,
    });

    return rows.map((row) => new Instance(row));
}
/**
 * Applies a partial update to this instance's row, then re-reads it so
 * `this.data` reflects the state actually stored in the database.
 *
 * @throws Error if the row cannot be read back.
 */
public async update(
    newInstance: Partial<InstanceType>,
): Promise<InstanceType> {
    await db
        .update(Instances)
        .set(newInstance)
        .where(eq(Instances.id, this.id));

    const refreshed = await Instance.fromId(this.data.id);

    if (!refreshed) {
        throw new Error("Failed to update instance");
    }

    this.data = refreshed.data;
    return refreshed.data;
}
/** Persists the current in-memory state to the database. */
public save(): Promise<InstanceType> {
return this.update(this.data);
}
/**
 * Deletes instances from the database.
 *
 * @param ids - When given, deletes those rows; otherwise deletes this one.
 */
public async delete(ids?: string[]): Promise<void> {
    await (Array.isArray(ids)
        ? db.delete(Instances).where(inArray(Instances.id, ids))
        : db.delete(Instances).where(eq(Instances.id, this.id)));
}
public static async fromUser(user: User): Promise<Instance | null> {
if (!user.data.instanceId) {
return null;
}
return await Instance.fromId(user.data.instanceId);
}
/**
 * Inserts a new instance row and returns it as a wrapped Instance.
 *
 * @throws Error if the freshly inserted row cannot be read back.
 */
public static async insert(
    data: InferInsertModel<typeof Instances>,
): Promise<Instance> {
    const [inserted] = await db.insert(Instances).values(data).returning();

    const instance = await Instance.fromId(inserted.id);

    if (!instance) {
        throw new Error("Failed to insert instance");
    }

    return instance;
}
/** Primary key of the underlying row. */
public get id(): string {
return this.data.id;
}
/**
 * Resolves the metadata of a remote instance, trying the Versia well-known
 * endpoint first and falling back to ActivityPub nodeinfo.
 *
 * @param url - Any URL on the remote instance; only its origin is used.
 * @returns The instance metadata plus which protocol the instance speaks.
 * @throws ApiError (404) when the instance is unreachable via either protocol.
 */
public static async fetchMetadata(url: URL): Promise<{
metadata: VersiaEntities.InstanceMetadata;
protocol: "versia" | "activitypub";
}> {
const origin = new URL(url).origin;
const wellKnownUrl = new URL("/.well-known/versia", origin);
try {
const metadata = await User.federationRequester.fetchEntity(
wellKnownUrl,
VersiaEntities.InstanceMetadata,
);
return { metadata, protocol: "versia" };
} catch {
// If the server doesn't have a Versia well-known endpoint, it's not a Versia instance
// Try to resolve ActivityPub metadata instead
const data = await Instance.fetchActivityPubMetadata(url);
if (!data) {
throw new ApiError(
404,
`Instance at ${origin} is not reachable or does not exist`,
);
}
return {
metadata: data,
protocol: "activitypub",
};
}
}
private static async fetchActivityPubMetadata(
url: URL,
): Promise<VersiaEntities.InstanceMetadata | null> {
const origin = new URL(url).origin;
const wellKnownUrl = new URL("/.well-known/nodeinfo", origin);
// Go to endpoint, then follow the links to the actual metadata
const logger = getLogger(["federation", "resolvers"]);
try {
const { json, ok, status } = await fetch(wellKnownUrl, {
// @ts-expect-error Bun extension
proxy: config.http.proxy_address,
});
if (!ok) {
logger.error`Failed to fetch ActivityPub metadata for instance ${chalk.bold(
origin,
)} - HTTP ${status}`;
return null;
}
const wellKnown = (await json()) as {
links: { rel: string; href: string }[];
};
if (!wellKnown.links) {
logger.error`Failed to fetch ActivityPub metadata for instance ${chalk.bold(
origin,
)} - No links found`;
return null;
}
const metadataUrl = wellKnown.links.find(
(link: { rel: string }) =>
link.rel ===
"http://nodeinfo.diaspora.software/ns/schema/2.0",
);
if (!metadataUrl) {
logger.error`Failed to fetch ActivityPub metadata for instance ${chalk.bold(
origin,
)} - No metadata URL found`;
return null;
}
const {
json: json2,
ok: ok2,
status: status2,
} = await fetch(metadataUrl.href, {
// @ts-expect-error Bun extension
proxy: config.http.proxy_address,
});
if (!ok2) {
logger.error`Failed to fetch ActivityPub metadata for instance ${chalk.bold(
origin,
)} - HTTP ${status2}`;
return null;
}
const metadata = (await json2()) as {
metadata: {
nodeName?: string;
title?: string;
nodeDescription?: string;
description?: string;
};
software: { version: string };
};
return new VersiaEntities.InstanceMetadata({
name:
metadata.metadata.nodeName || metadata.metadata.title || "",
description:
metadata.metadata.nodeDescription ||
metadata.metadata.description,
type: "InstanceMetadata",
software: {
name: "Unknown ActivityPub software",
version: metadata.software.version,
},
created_at: new Date().toISOString(),
public_key: {
key: "",
algorithm: "ed25519",
},
host: new URL(url).host,
compatibility: {
extensions: [],
versions: [],
},
});
} catch (error) {
logger.error`Failed to fetch ActivityPub metadata for instance ${chalk.bold(
origin,
)} - Error! ${error}`;
return null;
}
}
public static resolveFromHost(host: string): Promise<Instance> {
if (host.startsWith("http")) {
const url = new URL(host);
return Instance.resolve(url);
}
const url = new URL(`https://${host}`);
return Instance.resolve(url);
}
public static async resolve(url: URL): Promise<Instance> {
const host = url.host;
const existingInstance = await Instance.fromSql(
eq(Instances.baseUrl, host),
);
if (existingInstance) {
return existingInstance;
}
const output = await Instance.fetchMetadata(url);
const { metadata, protocol } = output;
return Instance.insert({
id: randomUUIDv7(),
baseUrl: host,
name: metadata.data.name,
version: metadata.data.software.version,
logo: metadata.data.logo,
protocol,
publicKey: metadata.data.public_key,
inbox: metadata.data.shared_inbox ?? null,
extensions: metadata.data.extensions ?? null,
});
}
public async updateFromRemote(): Promise<Instance> {
const logger = getLogger(["federation", "resolvers"]);
const output = await Instance.fetchMetadata(
new URL(`https://${this.data.baseUrl}`),
);
if (!output) {
logger.error`Failed to update instance ${chalk.bold(
this.data.baseUrl,
)}`;
throw new Error("Failed to update instance");
}
const { metadata, protocol } = output;
await this.update({
name: metadata.data.name,
version: metadata.data.software.version,
logo: metadata.data.logo,
protocol,
publicKey: metadata.data.public_key,
inbox: metadata.data.shared_inbox ?? null,
extensions: metadata.data.extensions ?? null,
});
return this;
}
public async sendMessage(content: string): Promise<void> {
const logger = getLogger(["federation", "messaging"]);
if (
!this.data.extensions?.["pub.versia:instance_messaging"]?.endpoint
) {
logger.info`Instance ${chalk.gray(
this.data.baseUrl,
)} does not support Instance Messaging, skipping message`;
return;
}
const endpoint = new URL(
this.data.extensions["pub.versia:instance_messaging"].endpoint,
);
await fetch(endpoint.href, {
method: "POST",
headers: {
"Content-Type": "text/plain",
},
body: content,
});
}
    /** Total number of known instances (row count of the Instances table). */
    public static getCount(): Promise<number> {
        return db.$count(Instances);
    }
}

View file

@ -1,182 +0,0 @@
import { db } from "@versia/kit/db";
import {
Likes,
type Notes,
Notifications,
type Users,
} from "@versia/kit/tables";
import {
and,
desc,
eq,
type InferInsertModel,
type InferSelectModel,
inArray,
type SQL,
} from "drizzle-orm";
import { config } from "~/config.ts";
import * as VersiaEntities from "~/packages/sdk/entities/index.ts";
import { BaseInterface } from "./base.ts";
import { User } from "./user.ts";
type LikeType = InferSelectModel<typeof Likes> & {
liker: InferSelectModel<typeof Users>;
liked: InferSelectModel<typeof Notes>;
};
/**
 * Database model for a "like" (favourite) of a note, always loaded together
 * with its `liker` (user) and `liked` (note) relations.
 */
export class Like extends BaseInterface<typeof Likes, LikeType> {
    public static $type: LikeType;

    /** Re-fetches this like (with relations) from the database. */
    public async reload(): Promise<void> {
        const reloaded = await Like.fromId(this.data.id);
        if (!reloaded) {
            throw new Error("Failed to reload like");
        }
        this.data = reloaded.data;
    }

    /** Loads a like by primary key; null for a null or unknown id. */
    public static async fromId(id: string | null): Promise<Like | null> {
        if (!id) {
            return null;
        }
        return await Like.fromSql(eq(Likes.id, id));
    }

    /** Loads every like whose id appears in `ids`. */
    public static async fromIds(ids: string[]): Promise<Like[]> {
        return await Like.manyFromSql(inArray(Likes.id, ids));
    }

    /** Returns the first like matching `sql` (relations included), or null. */
    public static async fromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Likes.id),
    ): Promise<Like | null> {
        const found = await db.query.Likes.findFirst({
            where: sql,
            orderBy,
            with: {
                liked: true,
                liker: true,
            },
        });
        if (!found) {
            return null;
        }
        return new Like(found);
    }

    /** Returns every like matching `sql`, with optional paging. */
    public static async manyFromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Likes.id),
        limit?: number,
        offset?: number,
        extra?: Parameters<typeof db.query.Likes.findMany>[0],
    ): Promise<Like[]> {
        const found = await db.query.Likes.findMany({
            where: sql,
            orderBy,
            limit,
            offset,
            with: {
                liked: true,
                liker: true,
                ...extra?.with,
            },
        });
        return found.map((s) => new Like(s));
    }

    /** Persists a partial update and returns the refreshed row. */
    public async update(newRole: Partial<LikeType>): Promise<LikeType> {
        await db.update(Likes).set(newRole).where(eq(Likes.id, this.id));
        const updated = await Like.fromId(this.data.id);
        if (!updated) {
            throw new Error("Failed to update like");
        }
        // Keep the in-memory copy in sync with the database, matching the
        // update() behaviour of the other models (Instance, Media, …).
        this.data = updated.data;
        return updated.data;
    }

    /** Persists the whole in-memory row. */
    public save(): Promise<LikeType> {
        return this.update(this.data);
    }

    /** Removes this like's row from the database. */
    public async delete(): Promise<void> {
        await db.delete(Likes).where(eq(Likes.id, this.id));
    }

    /** Inserts a new like row and returns it as a fully-loaded model. */
    public static async insert(
        data: InferInsertModel<typeof Likes>,
    ): Promise<Like> {
        const inserted = (await db.insert(Likes).values(data).returning())[0];
        const like = await Like.fromId(inserted.id);
        if (!like) {
            throw new Error("Failed to insert like");
        }
        return like;
    }

    /** Primary key (UUID) of the underlying database row. */
    public get id(): string {
        return this.data.id;
    }

    /**
     * Removes the "favourite" notification generated by this like.
     *
     * FIX: the account filter previously compared Notifications.accountId
     * against the Like row's own id (`this.id`), which can never equal a
     * user id — so the notification was never actually deleted. It now uses
     * the liker's account id.
     */
    public async clearRelatedNotifications(): Promise<void> {
        await db
            .delete(Notifications)
            .where(
                and(
                    eq(Notifications.accountId, this.data.liker.id),
                    eq(Notifications.type, "favourite"),
                    eq(Notifications.notifiedId, this.data.liked.authorId),
                    eq(Notifications.noteId, this.data.liked.id),
                ),
            );
    }

    /** Local URI of this like on this instance. */
    public getUri(): URL {
        return new URL(`/likes/${this.data.id}`, config.http.base_url);
    }

    /** Converts this like to its Versia federation entity. */
    public toVersia(): VersiaEntities.Like {
        return new VersiaEntities.Like({
            id: this.data.id,
            author: User.getUri(
                this.data.liker.id,
                this.data.liker.uri ? new URL(this.data.liker.uri) : null,
            ).href,
            type: "pub.versia:likes/Like",
            created_at: new Date(this.data.createdAt).toISOString(),
            liked: this.data.liked.uri
                ? new URL(this.data.liked.uri).href
                : new URL(`/notes/${this.data.liked.id}`, config.http.base_url)
                      .href,
            uri: this.getUri().href,
        });
    }

    /**
     * Builds the Versia Delete entity that retracts this like.
     *
     * @param unliker User performing the unlike; defaults to the original
     * liker when omitted.
     */
    public unlikeToVersia(unliker?: User): VersiaEntities.Delete {
        return new VersiaEntities.Delete({
            type: "Delete",
            id: crypto.randomUUID(),
            created_at: new Date().toISOString(),
            author: User.getUri(
                unliker?.id ?? this.data.liker.id,
                unliker?.data.uri
                    ? new URL(unliker.data.uri)
                    : this.data.liker.uri
                      ? new URL(this.data.liker.uri)
                      : null,
            ).href,
            deleted_type: "pub.versia:likes/Like",
            deleted: this.getUri().href,
        });
    }
}

View file

@ -1,556 +0,0 @@
import { join } from "node:path";
import type { Attachment as AttachmentSchema } from "@versia/client/schemas";
import { db } from "@versia/kit/db";
import { Medias } from "@versia/kit/tables";
import { randomUUIDv7, S3Client, SHA256, write } from "bun";
import {
desc,
eq,
type InferInsertModel,
type InferSelectModel,
inArray,
type SQL,
} from "drizzle-orm";
import sharp from "sharp";
import type { z } from "zod";
import { mimeLookup } from "@/content_types.ts";
import { MediaBackendType } from "~/classes/config/schema.ts";
import { config } from "~/config.ts";
import * as VersiaEntities from "~/packages/sdk/entities/index.ts";
import type {
ContentFormatSchema,
ImageContentFormatSchema,
} from "~/packages/sdk/schemas/index.ts";
import { ApiError } from "../errors/api-error.ts";
import { getMediaHash } from "../media/media-hasher.ts";
import { ProxiableUrl } from "../media/url.ts";
import { MediaJobType, mediaQueue } from "../queues/media.ts";
import { BaseInterface } from "./base.ts";
type MediaType = InferSelectModel<typeof Medias>;
/**
 * Database model for a media attachment. Content is stored as a Versia
 * ContentFormat map (MIME type -> { content URL, hash, dimensions, … });
 * the binary itself lives in the configured storage backend (local disk or
 * S3). Heavy work (metadata extraction, image conversion) is deferred to
 * the media queue.
 */
export class Media extends BaseInterface<typeof Medias> {
    public static $type: MediaType;

    /** Re-fetches this attachment's row from the database. */
    public async reload(): Promise<void> {
        const reloaded = await Media.fromId(this.data.id);
        if (!reloaded) {
            throw new Error("Failed to reload attachment");
        }
        this.data = reloaded.data;
    }

    /** Loads an attachment by primary key; null for a null or unknown id. */
    public static async fromId(id: string | null): Promise<Media | null> {
        if (!id) {
            return null;
        }
        return await Media.fromSql(eq(Medias.id, id));
    }

    /** Loads every attachment whose id appears in `ids`. */
    public static async fromIds(ids: string[]): Promise<Media[]> {
        return await Media.manyFromSql(inArray(Medias.id, ids));
    }

    /** Returns the first attachment matching `sql`, or null. */
    public static async fromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Medias.id),
    ): Promise<Media | null> {
        const found = await db.query.Medias.findFirst({
            where: sql,
            orderBy,
        });
        if (!found) {
            return null;
        }
        return new Media(found);
    }

    /** Returns every attachment matching `sql`, with optional paging. */
    public static async manyFromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Medias.id),
        limit?: number,
        offset?: number,
        extra?: Parameters<typeof db.query.Medias.findMany>[0],
    ): Promise<Media[]> {
        const found = await db.query.Medias.findMany({
            where: sql,
            orderBy,
            limit,
            offset,
            with: extra?.with,
        });
        return found.map((s) => new Media(s));
    }

    /** Persists a partial update and refreshes the in-memory copy. */
    public async update(newAttachment: Partial<MediaType>): Promise<MediaType> {
        await db
            .update(Medias)
            .set(newAttachment)
            .where(eq(Medias.id, this.id));
        const updated = await Media.fromId(this.data.id);
        if (!updated) {
            throw new Error("Failed to update attachment");
        }
        this.data = updated.data;
        return updated.data;
    }

    /** Persists the whole in-memory row. */
    public save(): Promise<MediaType> {
        return this.update(this.data);
    }

    /** Deletes this row, or a batch of rows when `ids` is given. */
    public async delete(ids?: string[]): Promise<void> {
        if (Array.isArray(ids)) {
            await db.delete(Medias).where(inArray(Medias.id, ids));
        } else {
            await db.delete(Medias).where(eq(Medias.id, this.id));
        }
        // TODO: Also delete the file from the media manager
    }

    /** Inserts a new attachment row and returns it as a loaded model. */
    public static async insert(
        data: InferInsertModel<typeof Medias>,
    ): Promise<Media> {
        const inserted = (await db.insert(Medias).values(data).returning())[0];
        const attachment = await Media.fromId(inserted.id);
        if (!attachment) {
            throw new Error("Failed to insert attachment");
        }
        return attachment;
    }

    /**
     * Writes `file` to the configured storage backend under a
     * content-hash-derived directory.
     *
     * @returns The storage-relative path of the stored file.
     * @throws ApiError (500) on missing S3 config or an unknown backend.
     */
    private static async upload(file: File): Promise<{
        path: string;
    }> {
        const fileName = file.name ?? randomUUIDv7();
        const hash = await getMediaHash(file);
        switch (config.media.backend) {
            case MediaBackendType.Local: {
                const path = join(config.media.uploads_path, hash, fileName);
                await write(path, file);
                return { path: join(hash, fileName) };
            }
            case MediaBackendType.S3: {
                const path = join(hash, fileName);
                if (!config.s3) {
                    throw new ApiError(500, "S3 configuration missing");
                }
                const client = new S3Client({
                    endpoint: config.s3.endpoint.origin,
                    region: config.s3.region,
                    bucket: config.s3.bucket_name,
                    accessKeyId: config.s3.access_key,
                    secretAccessKey: config.s3.secret_access_key,
                    virtualHostedStyle: !config.s3.path_style,
                });
                await client.write(path, file);
                const finalPath = config.s3.path
                    ? join(config.s3.path, path)
                    : path;
                return { path: finalPath };
            }
            default: {
                // FIX: config is parsed at runtime, so guard against values
                // outside the enum instead of implicitly returning undefined.
                throw new ApiError(500, "Unknown media backend");
            }
        }
    }

    /**
     * Creates and stores a new media attachment from an uploaded file,
     * optionally with a thumbnail. Queues conversion (if enabled) and
     * metadata extraction jobs.
     *
     * @throws ApiError (413/415) when the file fails validation.
     */
    public static async fromFile(
        file: File,
        options?: {
            description?: string;
            thumbnail?: File;
        },
    ): Promise<Media> {
        Media.checkFile(file);
        const { path } = await Media.upload(file);
        const url = Media.getUrl(path);
        let thumbnailUrl: URL | null = null;
        if (options?.thumbnail) {
            const { path } = await Media.upload(options.thumbnail);
            thumbnailUrl = Media.getUrl(path);
        }
        const content = await Media.fileToContentFormat(file, url, {
            description: options?.description,
        });
        const thumbnailContent =
            thumbnailUrl && options?.thumbnail
                ? await Media.fileToContentFormat(
                      options.thumbnail,
                      thumbnailUrl,
                      {
                          description: options?.description,
                      },
                  )
                : undefined;
        const newAttachment = await Media.insert({
            id: randomUUIDv7(),
            content,
            thumbnail: thumbnailContent as z.infer<
                typeof ImageContentFormatSchema
            >,
        });
        if (config.media.conversion.convert_images) {
            await mediaQueue.add(MediaJobType.ConvertMedia, {
                attachmentId: newAttachment.id,
                filename: file.name,
            });
        }
        await mediaQueue.add(MediaJobType.CalculateMetadata, {
            attachmentId: newAttachment.id,
            filename: file.name,
        });
        return newAttachment;
    }

    /**
     * Creates and adds a new media attachment from a URL
     * @param uri
     * @param options
     * @returns
     */
    public static async fromUrl(
        uri: URL,
        options?: {
            description?: string;
        },
    ): Promise<Media> {
        const mimeType = await mimeLookup(uri);
        const content: z.infer<typeof ContentFormatSchema> = {
            [mimeType]: {
                content: uri.toString(),
                remote: true,
                description: options?.description,
            },
        };
        const newAttachment = await Media.insert({
            id: randomUUIDv7(),
            content,
        });
        await mediaQueue.add(MediaJobType.CalculateMetadata, {
            attachmentId: newAttachment.id,
            // CalculateMetadata doesn't use the filename, but the type is annoying
            // and requires it anyway
            filename: "blank",
        });
        return newAttachment;
    }

    /**
     * Validates an uploaded file against the configured size and MIME-type
     * limits.
     *
     * @throws ApiError (413) for oversized files, (415) for disallowed types.
     */
    private static checkFile(file: File): void {
        if (file.size > config.validation.media.max_bytes) {
            throw new ApiError(
                413,
                `File too large, max size is ${config.validation.media.max_bytes} bytes`,
            );
        }
        if (
            config.validation.media.allowed_mime_types.length > 0 &&
            !config.validation.media.allowed_mime_types.includes(file.type)
        ) {
            throw new ApiError(
                415,
                `File type ${file.type} is not allowed`,
                `Allowed types: ${config.validation.media.allowed_mime_types.join(
                    ", ",
                )}`,
            );
        }
    }

    /**
     * Replaces this attachment's content with a newly-uploaded file,
     * preserving the existing description, and re-queues metadata extraction.
     */
    public async updateFromFile(file: File): Promise<void> {
        Media.checkFile(file);
        const { path } = await Media.upload(file);
        const url = Media.getUrl(path);
        const content = await Media.fileToContentFormat(file, url, {
            description:
                this.data.content[Object.keys(this.data.content)[0]]
                    .description || undefined,
        });
        await this.update({
            content,
        });
        await mediaQueue.add(MediaJobType.CalculateMetadata, {
            attachmentId: this.id,
            filename: file.name,
        });
    }

    /**
     * Replaces this attachment's content with a remote URL, preserving the
     * existing description, and re-queues metadata extraction.
     */
    public async updateFromUrl(uri: URL): Promise<void> {
        const mimeType = await mimeLookup(uri);
        const content: z.infer<typeof ContentFormatSchema> = {
            [mimeType]: {
                content: uri.toString(),
                remote: true,
                description:
                    this.data.content[Object.keys(this.data.content)[0]]
                        .description || undefined,
            },
        };
        await this.update({
            content,
        });
        await mediaQueue.add(MediaJobType.CalculateMetadata, {
            attachmentId: this.id,
            filename: "blank",
        });
    }

    /** Uploads `file` and stores it as this attachment's thumbnail. */
    public async updateThumbnail(file: File): Promise<void> {
        Media.checkFile(file);
        const { path } = await Media.upload(file);
        const url = Media.getUrl(path);
        const content = await Media.fileToContentFormat(file, url);
        await this.update({
            thumbnail: content as z.infer<typeof ImageContentFormatSchema>,
        });
    }

    /**
     * Merges `metadata` (everything except the content URL itself) into
     * every MIME-type variant of this attachment's content.
     */
    public async updateMetadata(
        metadata: Partial<
            Omit<
                z.infer<typeof ContentFormatSchema>[keyof z.infer<
                    typeof ContentFormatSchema
                >],
                "content"
            >
        >,
    ): Promise<void> {
        const content = this.data.content;
        for (const type of Object.keys(content)) {
            content[type] = {
                ...content[type],
                ...metadata,
            };
        }
        await this.update({
            content,
        });
    }

    /** Primary key (UUID) of the underlying database row. */
    public get id(): string {
        return this.data.id;
    }

    /** Builds the public URL for a storage-relative file path. */
    public static getUrl(name: string): URL {
        if (config.media.backend === MediaBackendType.Local) {
            return new URL(`/media/${name}`, config.http.base_url);
        }
        if (config.media.backend === MediaBackendType.S3) {
            return new URL(`/${name}`, config.s3?.public_url);
        }
        throw new Error("Unknown media backend");
    }

    /** URL of this attachment's preferred content variant. */
    public getUrl(): ProxiableUrl {
        const type = this.getPreferredMimeType();
        return new ProxiableUrl(this.data.content[type]?.content ?? "");
    }

    /**
     * Gets favourite MIME type for the attachment
     * Uses a hardcoded list of preferred types, for images
     *
     * @returns {string} Preferred MIME type
     */
    public getPreferredMimeType(): string {
        return Media.getPreferredMimeType(Object.keys(this.data.content));
    }

    /**
     * Gets favourite MIME type from a list
     * Uses a hardcoded list of preferred types, for images
     *
     * @returns {string} Preferred MIME type
     */
    public static getPreferredMimeType(types: string[]): string {
        const ranking = [
            "image/svg+xml",
            "image/avif",
            "image/jxl",
            "image/webp",
            "image/heif",
            "image/heif-sequence",
            "image/heic",
            "image/heic-sequence",
            "image/apng",
            "image/gif",
            "image/png",
            "image/jpeg",
            "image/bmp",
        ];
        return ranking.find((type) => types.includes(type)) ?? types[0];
    }

    /**
     * Maps MIME type to Mastodon attachment type
     *
     * @returns
     */
    public getMastodonType(): z.infer<typeof AttachmentSchema.shape.type> {
        const type = this.getPreferredMimeType();
        if (type.startsWith("image/")) {
            return "image";
        }
        if (type.startsWith("video/")) {
            return "video";
        }
        if (type.startsWith("audio/")) {
            return "audio";
        }
        return "unknown";
    }

    /**
     * Extracts metadata from a file and outputs as ContentFormat
     *
     * Does not calculate thumbhash (do this in a worker)
     * @param file
     * @param uri Uploaded file URI
     * @param options Extra metadata, such as description
     * @returns
     */
    public static async fileToContentFormat(
        file: File,
        uri: URL,
        options?: Partial<{
            description: string;
        }>,
    ): Promise<z.infer<typeof ContentFormatSchema>> {
        const buffer = await file.arrayBuffer();
        const isImage = file.type.startsWith("image/");
        const { width, height } = isImage ? await sharp(buffer).metadata() : {};
        const hash = new SHA256().update(file).digest("hex");
        // Missing: fps, duration
        // Thumbhash should be added in a worker after the file is uploaded
        return {
            [file.type]: {
                content: uri.toString(),
                // NOTE(review): `remote: true` is set even for locally-stored
                // uploads; presumably it means "content is a URL rather than
                // inline bytes" — confirm against the ContentFormat schema.
                remote: true,
                hash: {
                    sha256: hash,
                },
                width,
                height,
                description: options?.description,
                size: file.size,
            },
        };
    }

    /** Builds the Mastodon-API `meta` object for this attachment. */
    public toApiMeta(): z.infer<typeof AttachmentSchema.shape.meta> {
        const type = this.getPreferredMimeType();
        const data = this.data.content[type];
        const size =
            data.width && data.height
                ? `${data.width}x${data.height}`
                : undefined;
        const aspect =
            data.width && data.height ? data.width / data.height : undefined;
        return {
            width: data.width || undefined,
            height: data.height || undefined,
            fps: data.fps || undefined,
            size,
            // Idk whether size or length is the right value
            duration: data.duration || undefined,
            // Versia doesn't have a concept of length in ContentFormat
            length: undefined,
            aspect,
            original: {
                width: data.width || undefined,
                height: data.height || undefined,
                size,
                aspect,
            },
        };
    }

    /** Serializes this attachment into the Mastodon API Attachment shape. */
    public toApi(): z.infer<typeof AttachmentSchema> {
        const type = this.getPreferredMimeType();
        const data = this.data.content[type];
        // Thumbnail should only have a single MIME type
        const thumbnailData =
            this.data.thumbnail?.[Object.keys(this.data.thumbnail)[0]];
        return {
            id: this.data.id,
            type: this.getMastodonType(),
            url: this.getUrl().proxied,
            remote_url: null,
            preview_url: thumbnailData?.content
                ? new ProxiableUrl(thumbnailData.content).proxied
                : null,
            meta: this.toApiMeta(),
            description: data.description || null,
            blurhash: this.data.blurhash,
        };
    }

    /** Serializes this attachment as a Versia ContentFormat entity. */
    public toVersia(): VersiaEntities.ContentFormat {
        return new VersiaEntities.ContentFormat(this.data.content);
    }

    /** Creates an attachment from a received Versia ContentFormat entity. */
    public static fromVersia(
        contentFormat: VersiaEntities.ContentFormat,
    ): Promise<Media> {
        return Media.insert({
            id: randomUUIDv7(),
            content: contentFormat.data,
            originalContent: contentFormat.data,
        });
    }
}

File diff suppressed because it is too large Load diff

View file

@ -1,196 +0,0 @@
import type { Notification as NotificationSchema } from "@versia/client/schemas";
import { db, Note, User } from "@versia/kit/db";
import { Notifications } from "@versia/kit/tables";
import {
desc,
eq,
type InferInsertModel,
type InferSelectModel,
inArray,
type SQL,
} from "drizzle-orm";
import type { z } from "zod";
import {
transformOutputToUserWithRelations,
userRelations,
} from "../functions/user.ts";
import { BaseInterface } from "./base.ts";
export type NotificationType = InferSelectModel<typeof Notifications> & {
status: typeof Note.$type | null;
account: typeof User.$type;
};
/**
 * Database model for a user notification. Each row is loaded together with
 * the triggering account (with full user relations) and the related note
 * (when any), whose visibility is scoped to the optional `userId`.
 */
export class Notification extends BaseInterface<
    typeof Notifications,
    NotificationType
> {
    /** Re-fetches this notification from the database. */
    public async reload(): Promise<void> {
        const reloaded = await Notification.fromId(this.data.id);
        if (!reloaded) {
            throw new Error("Failed to reload notification");
        }
        this.data = reloaded.data;
    }

    /**
     * Loads a notification by primary key; null for a null or unknown id.
     * @param userId Viewer id used to scope the related note's visibility.
     */
    public static async fromId(
        id: string | null,
        userId?: string,
    ): Promise<Notification | null> {
        if (!id) {
            return null;
        }
        return await Notification.fromSql(
            eq(Notifications.id, id),
            undefined,
            userId,
        );
    }

    /** Loads every notification whose id appears in `ids`. */
    public static async fromIds(
        ids: string[],
        userId?: string,
    ): Promise<Notification[]> {
        return await Notification.manyFromSql(
            inArray(Notifications.id, ids),
            undefined,
            undefined,
            undefined,
            undefined,
            userId,
        );
    }

    /**
     * Returns the first notification matching `sql`, or null.
     * The account relation is normalized and the related note (if any) is
     * loaded with `userId`-scoped visibility.
     */
    public static async fromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Notifications.id),
        userId?: string,
    ): Promise<Notification | null> {
        const found = await db.query.Notifications.findFirst({
            where: sql,
            orderBy,
            with: {
                account: {
                    with: {
                        ...userRelations,
                    },
                },
            },
        });
        if (!found) {
            return null;
        }
        return new Notification({
            ...found,
            account: transformOutputToUserWithRelations(found.account),
            status: (await Note.fromId(found.noteId, userId))?.data ?? null,
        });
    }

    /**
     * Returns every notification matching `sql`, with optional paging.
     * Each row's related note is resolved separately (one query per row).
     */
    public static async manyFromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Notifications.id),
        limit?: number,
        offset?: number,
        extra?: Parameters<typeof db.query.Notifications.findMany>[0],
        userId?: string,
    ): Promise<Notification[]> {
        const found = await db.query.Notifications.findMany({
            where: sql,
            orderBy,
            limit,
            offset,
            with: {
                ...extra?.with,
                account: {
                    with: {
                        ...userRelations,
                    },
                },
            },
            extras: extra?.extras,
        });
        return (
            await Promise.all(
                found.map(async (notif) => ({
                    ...notif,
                    account: transformOutputToUserWithRelations(notif.account),
                    status:
                        (await Note.fromId(notif.noteId, userId))?.data ?? null,
                })),
            )
        ).map((s) => new Notification(s));
    }

    /** Persists a partial update and refreshes the in-memory copy. */
    public async update(
        newAttachment: Partial<NotificationType>,
    ): Promise<NotificationType> {
        await db
            .update(Notifications)
            .set(newAttachment)
            .where(eq(Notifications.id, this.id));
        const updated = await Notification.fromId(this.data.id);
        if (!updated) {
            throw new Error("Failed to update notification");
        }
        this.data = updated.data;
        return updated.data;
    }

    /** Persists the whole in-memory row. */
    public save(): Promise<NotificationType> {
        return this.update(this.data);
    }

    /** Deletes this row, or a batch of rows when `ids` is given. */
    public async delete(ids?: string[]): Promise<void> {
        if (Array.isArray(ids)) {
            await db
                .delete(Notifications)
                .where(inArray(Notifications.id, ids));
        } else {
            await db.delete(Notifications).where(eq(Notifications.id, this.id));
        }
    }

    /** Inserts a new notification row and returns it as a loaded model. */
    public static async insert(
        data: InferInsertModel<typeof Notifications>,
    ): Promise<Notification> {
        const inserted = (
            await db.insert(Notifications).values(data).returning()
        )[0];
        const notification = await Notification.fromId(inserted.id);
        if (!notification) {
            throw new Error("Failed to insert notification");
        }
        return notification;
    }

    /** Primary key (UUID) of the underlying database row. */
    public get id(): string {
        return this.data.id;
    }

    /**
     * Serializes this notification into the Mastodon API shape.
     * The related note (if any) is rendered from the triggering account's
     * perspective. Grouping is not implemented, so each notification gets
     * its own unique group key.
     */
    public async toApi(): Promise<z.infer<typeof NotificationSchema>> {
        const account = new User(this.data.account);
        return {
            account: account.toApi(),
            created_at: new Date(this.data.createdAt).toISOString(),
            id: this.data.id,
            type: this.data.type,
            status: this.data.status
                ? await new Note(this.data.status).toApi(account)
                : undefined,
            group_key: `ungrouped-${this.data.id}`,
        };
    }
}

View file

@ -1,190 +0,0 @@
import type { WebPushSubscription as WebPushSubscriptionSchema } from "@versia/client/schemas";
import { db, type Token, type User } from "@versia/kit/db";
import { PushSubscriptions, Tokens } from "@versia/kit/tables";
import {
desc,
eq,
type InferInsertModel,
type InferSelectModel,
inArray,
type SQL,
} from "drizzle-orm";
import type { z } from "zod";
import { BaseInterface } from "./base.ts";
type PushSubscriptionType = InferSelectModel<typeof PushSubscriptions>;
/**
 * Database model for a Web Push subscription. Subscriptions are keyed on
 * the OAuth token that registered them, so a user may hold one subscription
 * per client/token.
 */
export class PushSubscription extends BaseInterface<
    typeof PushSubscriptions,
    PushSubscriptionType
> {
    public static $type: PushSubscriptionType;

    /** Re-fetches this subscription from the database. */
    public async reload(): Promise<void> {
        const reloaded = await PushSubscription.fromId(this.data.id);
        if (!reloaded) {
            throw new Error("Failed to reload subscription");
        }
        this.data = reloaded.data;
    }

    /** Loads a subscription by primary key; null for a null/unknown id. */
    public static async fromId(
        id: string | null,
    ): Promise<PushSubscription | null> {
        if (!id) {
            return null;
        }
        return await PushSubscription.fromSql(eq(PushSubscriptions.id, id));
    }

    /** Loads every subscription whose id appears in `ids`. */
    public static async fromIds(ids: string[]): Promise<PushSubscription[]> {
        return await PushSubscription.manyFromSql(
            inArray(PushSubscriptions.id, ids),
        );
    }

    /** Finds the subscription registered by the given OAuth token, if any. */
    public static async fromToken(
        token: Token,
    ): Promise<PushSubscription | null> {
        return await PushSubscription.fromSql(
            eq(PushSubscriptions.tokenId, token.id),
        );
    }

    /**
     * Lists every subscription belonging to a user (across all their
     * tokens), with optional paging.
     */
    public static async manyFromUser(
        user: User,
        limit?: number,
        offset?: number,
    ): Promise<PushSubscription[]> {
        const found = await db
            .select()
            .from(PushSubscriptions)
            .leftJoin(Tokens, eq(Tokens.id, PushSubscriptions.tokenId))
            .where(eq(Tokens.userId, user.id))
            // 9e10 acts as "no limit": drizzle's builder requires a number.
            .limit(limit ?? 9e10)
            .offset(offset ?? 0);
        return found.map((s) => new PushSubscription(s.PushSubscriptions));
    }

    /** Returns the first subscription matching `sql`, or null. */
    public static async fromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(PushSubscriptions.id),
    ): Promise<PushSubscription | null> {
        const found = await db.query.PushSubscriptions.findFirst({
            where: sql,
            orderBy,
        });
        if (!found) {
            return null;
        }
        return new PushSubscription(found);
    }

    /** Returns every subscription matching `sql`, with optional paging. */
    public static async manyFromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(PushSubscriptions.id),
        limit?: number,
        offset?: number,
        extra?: Parameters<typeof db.query.PushSubscriptions.findMany>[0],
    ): Promise<PushSubscription[]> {
        const found = await db.query.PushSubscriptions.findMany({
            where: sql,
            orderBy,
            limit,
            offset,
            with: extra?.with,
        });
        return found.map((s) => new PushSubscription(s));
    }

    /** Persists a partial update and refreshes the in-memory copy. */
    public async update(
        newSubscription: Partial<PushSubscriptionType>,
    ): Promise<PushSubscriptionType> {
        await db
            .update(PushSubscriptions)
            .set(newSubscription)
            .where(eq(PushSubscriptions.id, this.id));
        const updated = await PushSubscription.fromId(this.data.id);
        if (!updated) {
            throw new Error("Failed to update subscription");
        }
        this.data = updated.data;
        return updated.data;
    }

    /** Persists the whole in-memory row. */
    public save(): Promise<PushSubscriptionType> {
        return this.update(this.data);
    }

    /** Removes every subscription registered by the given token. */
    public static async clearAllOfToken(token: Token): Promise<void> {
        await db
            .delete(PushSubscriptions)
            .where(eq(PushSubscriptions.tokenId, token.id));
    }

    /** Deletes this row, or a batch of rows when `ids` is given. */
    public async delete(ids?: string[]): Promise<void> {
        if (Array.isArray(ids)) {
            await db
                .delete(PushSubscriptions)
                .where(inArray(PushSubscriptions.id, ids));
        } else {
            await db
                .delete(PushSubscriptions)
                .where(eq(PushSubscriptions.id, this.id));
        }
    }

    /** Inserts a new subscription row and returns it as a loaded model. */
    public static async insert(
        data: InferInsertModel<typeof PushSubscriptions>,
    ): Promise<PushSubscription> {
        const inserted = (
            await db.insert(PushSubscriptions).values(data).returning()
        )[0];
        const subscription = await PushSubscription.fromId(inserted.id);
        if (!subscription) {
            throw new Error("Failed to insert subscription");
        }
        return subscription;
    }

    /** Primary key (UUID) of the underlying database row. */
    public get id(): string {
        return this.data.id;
    }

    /**
     * Returns the alert preferences with every flag defaulted to false,
     * matching the shape the Mastodon API expects.
     */
    public getAlerts(): z.infer<typeof WebPushSubscriptionSchema.shape.alerts> {
        return {
            mention: this.data.alerts.mention ?? false,
            favourite: this.data.alerts.favourite ?? false,
            reblog: this.data.alerts.reblog ?? false,
            follow: this.data.alerts.follow ?? false,
            poll: this.data.alerts.poll ?? false,
            follow_request: this.data.alerts.follow_request ?? false,
            status: this.data.alerts.status ?? false,
            update: this.data.alerts.update ?? false,
            "admin.sign_up": this.data.alerts["admin.sign_up"] ?? false,
            "admin.report": this.data.alerts["admin.report"] ?? false,
        };
    }

    /** Serializes this subscription into the Mastodon API shape. */
    public toApi(): z.infer<typeof WebPushSubscriptionSchema> {
        return {
            id: this.data.id,
            alerts: this.getAlerts(),
            endpoint: this.data.endpoint,
            // FIXME: Add real key
            server_key: "",
        };
    }
}

View file

@ -1,285 +0,0 @@
import { db, Emoji, Instance, type Note, User } from "@versia/kit/db";
import { type Notes, Reactions, type Users } from "@versia/kit/tables";
import { randomUUIDv7 } from "bun";
import {
and,
desc,
eq,
type InferInsertModel,
type InferSelectModel,
inArray,
isNull,
type SQL,
} from "drizzle-orm";
import { config } from "~/config.ts";
import * as VersiaEntities from "~/packages/sdk/entities/index.ts";
import { BaseInterface } from "./base.ts";
type ReactionType = InferSelectModel<typeof Reactions> & {
emoji: typeof Emoji.$type | null;
author: InferSelectModel<typeof Users>;
note: InferSelectModel<typeof Notes>;
};
export class Reaction extends BaseInterface<typeof Reactions, ReactionType> {
public static $type: ReactionType;
public async reload(): Promise<void> {
const reloaded = await Reaction.fromId(this.data.id);
if (!reloaded) {
throw new Error("Failed to reload reaction");
}
this.data = reloaded.data;
}
public static async fromId(id: string | null): Promise<Reaction | null> {
if (!id) {
return null;
}
return await Reaction.fromSql(eq(Reactions.id, id));
}
public static async fromIds(ids: string[]): Promise<Reaction[]> {
return await Reaction.manyFromSql(inArray(Reactions.id, ids));
}
    /**
     * Returns the first reaction matching `sql`, or null.
     * The emoji (with its instance and media), author and note relations
     * are always loaded.
     */
    public static async fromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Reactions.id),
    ): Promise<Reaction | null> {
        const found = await db.query.Reactions.findFirst({
            where: sql,
            with: {
                emoji: {
                    with: {
                        instance: true,
                        media: true,
                    },
                },
                author: true,
                note: true,
            },
            orderBy,
        });
        if (!found) {
            return null;
        }
        return new Reaction(found);
    }
    /**
     * Returns every reaction matching `sql`, with optional paging.
     * The fixed emoji/author/note relations always win over `extra.with`
     * (spread order puts them last).
     */
    public static async manyFromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Reactions.id),
        limit?: number,
        offset?: number,
        extra?: Parameters<typeof db.query.Reactions.findMany>[0],
    ): Promise<Reaction[]> {
        const found = await db.query.Reactions.findMany({
            where: sql,
            orderBy,
            limit,
            offset,
            with: {
                ...extra?.with,
                emoji: {
                    with: {
                        instance: true,
                        media: true,
                    },
                },
                author: true,
                note: true,
            },
        });
        return found.map((s) => new Reaction(s));
    }
public async update(
newReaction: Partial<ReactionType>,
): Promise<ReactionType> {
await db
.update(Reactions)
.set(newReaction)
.where(eq(Reactions.id, this.id));
const updated = await Reaction.fromId(this.data.id);
if (!updated) {
throw new Error("Failed to update reaction");
}
this.data = updated.data;
return updated.data;
}
public save(): Promise<ReactionType> {
return this.update(this.data);
}
public async delete(ids?: string[]): Promise<void> {
if (Array.isArray(ids)) {
await db.delete(Reactions).where(inArray(Reactions.id, ids));
} else {
await db.delete(Reactions).where(eq(Reactions.id, this.id));
}
}
public static async insert(
data: InferInsertModel<typeof Reactions>,
): Promise<Reaction> {
// Needs one of emojiId or emojiText, but not both
if (!(data.emojiId || data.emojiText)) {
throw new Error("EmojiID or emojiText is required");
}
if (data.emojiId && data.emojiText) {
throw new Error("Cannot have both emojiId and emojiText");
}
const inserted = (
await db.insert(Reactions).values(data).returning()
)[0];
const reaction = await Reaction.fromId(inserted.id);
if (!reaction) {
throw new Error("Failed to insert reaction");
}
return reaction;
}
/** Primary key of the underlying reaction row. */
public get id(): string {
    return this.data.id;
}
/**
 * Looks up the reaction a given user made on a given note with a
 * given emoji.
 *
 * Accepts either a custom Emoji instance (matched by emoji id) or a
 * plain unicode string (matched by emoji text); the unused column is
 * required to be NULL so the two kinds never match each other.
 */
public static fromEmoji(
    emoji: Emoji | string,
    author: User,
    note: Note,
): Promise<Reaction | null> {
    const emojiFilter =
        emoji instanceof Emoji
            ? [isNull(Reactions.emojiText), eq(Reactions.emojiId, emoji.id)]
            : [eq(Reactions.emojiText, emoji), isNull(Reactions.emojiId)];
    return Reaction.fromSql(
        and(
            eq(Reactions.authorId, author.id),
            eq(Reactions.noteId, note.id),
            ...emojiFilter,
        ),
    );
}
/**
 * Canonical URI of this reaction: the stored remote URI when present,
 * otherwise a local URL built under the given base.
 */
public getUri(baseUrl: URL): URL {
    if (this.data.uri) {
        return new URL(this.data.uri);
    }
    return new URL(
        `/notes/${this.data.noteId}/reactions/${this.id}`,
        baseUrl,
    );
}
/** True when the reacting user belongs to this instance (no remote instance id). */
public get local(): boolean {
    return this.data.author.instanceId === null;
}
/**
 * Whether this reaction uses a custom emoji (loaded emoji relation),
 * as opposed to a plain unicode emoji stored in emojiText.
 */
public hasCustomEmoji(): boolean {
    // Fixed: the previous `!!this.data.emoji || !this.data.emojiText`
    // also returned true when BOTH fields were unset, which made
    // toVersia() emit ":undefined:" as content. insert() guarantees
    // exactly one field is set, so the emoji relation alone decides.
    return !!this.data.emoji;
}
/**
 * Serializes this reaction as a Versia federation entity.
 *
 * Only local reactions may be serialized; remote reactions already
 * have a canonical representation on their home instance.
 *
 * @throws Error when called on a non-local reaction.
 */
public toVersia(): VersiaEntities.Reaction {
    if (!this.local) {
        throw new Error("Cannot convert a non-local reaction to Versia");
    }
    return new VersiaEntities.Reaction({
        uri: this.getUri(config.http.base_url).href,
        type: "pub.versia:reactions/Reaction",
        author: User.getUri(
            this.data.authorId,
            this.data.author.uri ? new URL(this.data.author.uri) : null,
        ).href,
        created_at: new Date(this.data.createdAt).toISOString(),
        id: this.id,
        // Prefer the note's own remote URI; otherwise build a local one.
        object: this.data.note.uri
            ? new URL(this.data.note.uri).href
            : new URL(`/notes/${this.data.noteId}`, config.http.base_url)
                .href,
        // Custom emojis federate as ":shortcode:" plus an extension
        // entry; unicode reactions federate as the raw emoji text.
        content: this.hasCustomEmoji()
            ? `:${this.data.emoji?.shortcode}:`
            : this.data.emojiText || "",
        extensions: this.hasCustomEmoji()
            ? {
                "pub.versia:custom_emojis": {
                    emojis: [
                        new Emoji(
                            this.data.emoji as typeof Emoji.$type,
                        ).toVersia(),
                    ],
                },
            }
            : undefined,
    });
}
/**
 * Builds the Versia Delete entity that retracts this reaction
 * ("unreact"), for federation to remote instances.
 */
public toVersiaUnreact(): VersiaEntities.Delete {
    return new VersiaEntities.Delete({
        type: "Delete",
        id: crypto.randomUUID(),
        created_at: new Date().toISOString(),
        author: User.getUri(
            this.data.authorId,
            this.data.author.uri ? new URL(this.data.author.uri) : null,
        ).href,
        deleted_type: "pub.versia:reactions/Reaction",
        deleted: this.getUri(config.http.base_url).href,
    });
}
/**
 * Creates and stores a Reaction from an incoming federated entity.
 *
 * If the entity carries a custom-emoji extension, the emoji is
 * fetched/cached from the author's remote instance; otherwise the raw
 * content string is stored as a unicode reaction.
 *
 * @param reactionToConvert - The received Versia reaction entity.
 * @param author - The remote user who reacted.
 * @param note - The note being reacted to.
 * @throws Error when the author is local (local reactions are created
 * directly, not via federation).
 */
public static async fromVersia(
    reactionToConvert: VersiaEntities.Reaction,
    author: User,
    note: Note,
): Promise<Reaction> {
    if (author.local) {
        throw new Error("Cannot process a reaction from a local user");
    }
    // Only the first emoji of the extension is considered.
    const emojiEntity =
        reactionToConvert.data.extensions?.["pub.versia:custom_emojis"]
            ?.emojis[0];
    const emoji = emojiEntity
        ? await Emoji.fetchFromRemote(
              emojiEntity,
              new Instance(
                  author.data.instance as NonNullable<
                      (typeof User.$type)["instance"]
                  >,
              ),
          )
        : null;
    return Reaction.insert({
        id: randomUUIDv7(),
        uri: reactionToConvert.data.uri,
        authorId: author.id,
        noteId: note.id,
        emojiId: emoji ? emoji.id : null,
        emojiText: emoji ? null : reactionToConvert.data.content,
    });
}
}

View file

@ -1,353 +0,0 @@
import type { Relationship as RelationshipSchema } from "@versia/client/schemas";
import { db } from "@versia/kit/db";
import { Relationships, Users } from "@versia/kit/tables";
import { randomUUIDv7 } from "bun";
import {
and,
desc,
eq,
type InferInsertModel,
type InferSelectModel,
inArray,
type SQL,
sql,
} from "drizzle-orm";
import { z } from "zod";
import { BaseInterface } from "./base.ts";
import type { User } from "./user.ts";
/** Raw row shape of the Relationships table. */
type RelationshipType = InferSelectModel<typeof Relationships>;
/**
 * A relationship row augmented with the inverse direction's flags:
 * whether the subject follows / blocks / has requested the owner.
 */
type RelationshipWithOpposite = RelationshipType & {
    followedBy: boolean;
    blockedBy: boolean;
    requestedBy: boolean;
};
/**
 * Wrapper around a directed relationship (owner -> subject) between
 * two users, exposed through the Mastodon-compatible API.
 *
 * Reading a relationship also materializes its opposite direction
 * (see getOpposite), so the two rows always exist as a pair.
 */
export class Relationship extends BaseInterface<
    typeof Relationships,
    RelationshipWithOpposite
> {
    // Zod schema of the API representation produced by toApi().
    // NOTE(review): toApi() also emits a "languages" field that this
    // schema does not declare — confirm whether that is intended.
    public static schema = z.object({
        id: z.string(),
        blocked_by: z.boolean(),
        blocking: z.boolean(),
        domain_blocking: z.boolean(),
        endorsed: z.boolean(),
        followed_by: z.boolean(),
        following: z.boolean(),
        muting_notifications: z.boolean(),
        muting: z.boolean(),
        note: z.string().nullable(),
        notifying: z.boolean(),
        requested_by: z.boolean(),
        requested: z.boolean(),
        showing_reblogs: z.boolean(),
    });
    public static $type: RelationshipWithOpposite;
    /** Re-reads this row from the database, replacing cached data. */
    public async reload(): Promise<void> {
        const reloaded = await Relationship.fromId(this.data.id);
        if (!reloaded) {
            throw new Error("Failed to reload relationship");
        }
        this.data = reloaded.data;
    }
    /** Fetches a relationship by primary key; null id yields null. */
    public static async fromId(
        id: string | null,
    ): Promise<Relationship | null> {
        if (!id) {
            return null;
        }
        return await Relationship.fromSql(eq(Relationships.id, id));
    }
    /** Fetches all relationships whose ids are in the given list. */
    public static async fromIds(ids: string[]): Promise<Relationship[]> {
        return await Relationship.manyFromSql(inArray(Relationships.id, ids));
    }
    /**
     * Fetches the owner -> subject relationship, creating a blank one
     * (all flags false) if it does not exist yet.
     */
    public static async fromOwnerAndSubject(
        owner: User,
        subject: User,
    ): Promise<Relationship> {
        const found = await Relationship.fromSql(
            and(
                eq(Relationships.ownerId, owner.id),
                eq(Relationships.subjectId, subject.id),
            ),
        );
        if (!found) {
            // Create a new relationship if one doesn't exist
            return await Relationship.insert({
                id: randomUUIDv7(),
                ownerId: owner.id,
                subjectId: subject.id,
                languages: [],
                following: false,
                showingReblogs: false,
                notifying: false,
                blocking: false,
                muting: false,
                mutingNotifications: false,
                requested: false,
                domainBlocking: false,
                endorsed: false,
                note: "",
            });
        }
        return found;
    }
    /**
     * Bulk variant of fromOwnerAndSubject: fetches the owner's
     * relationships to all given subjects, creating blank rows for
     * subjects that have none, then re-queries the full set.
     */
    public static async fromOwnerAndSubjects(
        owner: User,
        subjectIds: string[],
    ): Promise<Relationship[]> {
        const found = await Relationship.manyFromSql(
            and(
                eq(Relationships.ownerId, owner.id),
                inArray(Relationships.subjectId, subjectIds),
            ),
        );
        const missingSubjectsIds = subjectIds.filter(
            (id) => !found.find((rel) => rel.data.subjectId === id),
        );
        for (const subjectId of missingSubjectsIds) {
            await Relationship.insert({
                id: randomUUIDv7(),
                ownerId: owner.id,
                subjectId,
                languages: [],
                following: false,
                showingReblogs: false,
                notifying: false,
                blocking: false,
                muting: false,
                mutingNotifications: false,
                requested: false,
                domainBlocking: false,
                endorsed: false,
                note: "",
            });
        }
        // Re-query so newly inserted rows are included in the result.
        return await Relationship.manyFromSql(
            and(
                eq(Relationships.ownerId, owner.id),
                inArray(Relationships.subjectId, subjectIds),
            ),
        );
    }
    /**
     * Fetches the first relationship matching the filter and merges in
     * the opposite direction's flags (followedBy/blockedBy/requestedBy).
     */
    public static async fromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Relationships.id),
    ): Promise<Relationship | null> {
        const found = await db.query.Relationships.findFirst({
            where: sql,
            orderBy,
        });
        if (!found) {
            return null;
        }
        const opposite = await Relationship.getOpposite(found);
        return new Relationship({
            ...found,
            followedBy: opposite.following,
            blockedBy: opposite.blocking,
            requestedBy: opposite.requested,
        });
    }
    /**
     * Fetches all relationships matching the filter, each merged with
     * its opposite direction's flags. Opposites are resolved in
     * parallel and matched back by index.
     */
    public static async manyFromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Relationships.id),
        limit?: number,
        offset?: number,
        extra?: Parameters<typeof db.query.Relationships.findMany>[0],
    ): Promise<Relationship[]> {
        const found = await db.query.Relationships.findMany({
            where: sql,
            orderBy,
            limit,
            offset,
            with: extra?.with,
        });
        const opposites = await Promise.all(
            found.map((rel) => Relationship.getOpposite(rel)),
        );
        return found.map((s, i) => {
            return new Relationship({
                ...s,
                followedBy: opposites[i].following,
                blockedBy: opposites[i].blocking,
                requestedBy: opposites[i].requested,
            });
        });
    }
    /**
     * Returns the subject -> owner row mirroring the given pair,
     * creating a blank one if it does not exist. Guarantees that
     * relationships always exist in both directions.
     */
    public static async getOpposite(oppositeTo: {
        subjectId: string;
        ownerId: string;
    }): Promise<RelationshipType> {
        let output = await db.query.Relationships.findFirst({
            where: (rel): SQL | undefined =>
                and(
                    eq(rel.ownerId, oppositeTo.subjectId),
                    eq(rel.subjectId, oppositeTo.ownerId),
                ),
        });
        // If the opposite relationship doesn't exist, create it
        if (!output) {
            output = (
                await db
                    .insert(Relationships)
                    .values({
                        id: randomUUIDv7(),
                        ownerId: oppositeTo.subjectId,
                        subjectId: oppositeTo.ownerId,
                        languages: [],
                        following: false,
                        showingReblogs: false,
                        notifying: false,
                        blocking: false,
                        domainBlocking: false,
                        endorsed: false,
                        note: "",
                        muting: false,
                        mutingNotifications: false,
                        requested: false,
                    })
                    .returning()
            )[0];
        }
        return output;
    }
    /**
     * Applies a partial update to this row and refreshes cached data.
     *
     * When the `following` flag transitions (false->true or
     * true->false), both users' follower/following counters are
     * adjusted accordingly — the comparison is against the flag value
     * cached BEFORE the update, so the order of operations here matters.
     *
     * @throws Error when the row cannot be read back after the update.
     */
    public async update(
        newRelationship: Partial<RelationshipType>,
    ): Promise<RelationshipWithOpposite> {
        await db
            .update(Relationships)
            .set(newRelationship)
            .where(eq(Relationships.id, this.id));
        // If a user follows another user, update followerCount and followingCount
        if (newRelationship.following && !this.data.following) {
            await db
                .update(Users)
                .set({
                    followingCount: sql`${Users.followingCount} + 1`,
                })
                .where(eq(Users.id, this.data.ownerId));
            await db
                .update(Users)
                .set({
                    followerCount: sql`${Users.followerCount} + 1`,
                })
                .where(eq(Users.id, this.data.subjectId));
        }
        // If a user unfollows another user, update followerCount and followingCount
        if (!newRelationship.following && this.data.following) {
            await db
                .update(Users)
                .set({
                    followingCount: sql`${Users.followingCount} - 1`,
                })
                .where(eq(Users.id, this.data.ownerId));
            await db
                .update(Users)
                .set({
                    followerCount: sql`${Users.followerCount} - 1`,
                })
                .where(eq(Users.id, this.data.subjectId));
        }
        const updated = await Relationship.fromId(this.data.id);
        if (!updated) {
            throw new Error("Failed to update relationship");
        }
        this.data = updated.data;
        return updated.data;
    }
    /** Persists the current in-memory state back to the database. */
    public save(): Promise<RelationshipWithOpposite> {
        return this.update(this.data);
    }
    /**
     * Deletes relationship rows: the given ids in bulk, or the row
     * backing this instance when no ids are provided.
     */
    public async delete(ids?: string[]): Promise<void> {
        if (Array.isArray(ids)) {
            await db
                .delete(Relationships)
                .where(inArray(Relationships.id, ids));
        } else {
            await db.delete(Relationships).where(eq(Relationships.id, this.id));
        }
    }
    /**
     * Inserts a new relationship row, ensures the opposite-direction
     * row exists, and returns the wrapped result.
     *
     * @throws Error when the row cannot be read back after insertion.
     */
    public static async insert(
        data: InferInsertModel<typeof Relationships>,
    ): Promise<Relationship> {
        const inserted = (
            await db.insert(Relationships).values(data).returning()
        )[0];
        const relationship = await Relationship.fromId(inserted.id);
        if (!relationship) {
            throw new Error("Failed to insert relationship");
        }
        // Create opposite relationship if necessary
        await Relationship.getOpposite({
            subjectId: relationship.data.subjectId,
            ownerId: relationship.data.ownerId,
        });
        return relationship;
    }
    /** Primary key of the underlying relationship row. */
    public get id(): string {
        return this.data.id;
    }
    /**
     * Mastodon-compatible API representation. Note that `id` is the
     * SUBJECT's user id (Mastodon convention), not this row's id.
     */
    public toApi(): z.infer<typeof RelationshipSchema> {
        return {
            id: this.data.subjectId,
            blocked_by: this.data.blockedBy,
            blocking: this.data.blocking,
            domain_blocking: this.data.domainBlocking,
            endorsed: this.data.endorsed,
            followed_by: this.data.followedBy,
            following: this.data.following,
            muting_notifications: this.data.mutingNotifications,
            muting: this.data.muting,
            note: this.data.note,
            notifying: this.data.notifying,
            requested_by: this.data.requestedBy,
            requested: this.data.requested,
            showing_reblogs: this.data.showingReblogs,
            languages: this.data.languages ?? [],
        };
    }
}

View file

@ -1,226 +0,0 @@
import type {
RolePermission,
Role as RoleSchema,
} from "@versia/client/schemas";
import { db } from "@versia/kit/db";
import { Roles, RoleToUsers } from "@versia/kit/tables";
import {
and,
desc,
eq,
type InferInsertModel,
type InferSelectModel,
inArray,
type SQL,
} from "drizzle-orm";
import type { z } from "zod";
import { config } from "~/config.ts";
import { ProxiableUrl } from "../media/url.ts";
import { BaseInterface } from "./base.ts";
/** Raw row shape of the Roles table. */
type RoleType = InferSelectModel<typeof Roles>;
/**
 * Wrapper around a user role and its permission set.
 *
 * Besides database-backed roles, two synthetic roles exist that are
 * never stored: the default role (held by every user) and the admin
 * role (held by users with isAdmin), both driven by config.permissions.
 */
export class Role extends BaseInterface<typeof Roles> {
    public static $type: RoleType;
    /** Synthetic role implicitly held by every user. */
    public static defaultRole = new Role({
        id: "default",
        name: "Default",
        permissions: config.permissions.default,
        priority: 0,
        description: "Default role for all users",
        visible: false,
        icon: null,
    });
    /** Synthetic role implicitly held by administrators (max priority). */
    public static adminRole = new Role({
        id: "admin",
        name: "Admin",
        permissions: config.permissions.admin,
        priority: 2 ** 31 - 1,
        description: "Default role for all administrators",
        visible: false,
        icon: null,
    });
    /** Re-reads this row from the database, replacing cached data. */
    public async reload(): Promise<void> {
        const reloaded = await Role.fromId(this.data.id);
        if (!reloaded) {
            throw new Error("Failed to reload role");
        }
        this.data = reloaded.data;
    }
    /** Fetches a role by primary key; null id yields null. */
    public static async fromId(id: string | null): Promise<Role | null> {
        if (!id) {
            return null;
        }
        return await Role.fromSql(eq(Roles.id, id));
    }
    /** Fetches all roles whose ids are in the given list. */
    public static async fromIds(ids: string[]): Promise<Role[]> {
        return await Role.manyFromSql(inArray(Roles.id, ids));
    }
    /** Fetches the first role matching the given SQL filter. */
    public static async fromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Roles.id),
    ): Promise<Role | null> {
        const found = await db.query.Roles.findFirst({
            where: sql,
            orderBy,
        });
        if (!found) {
            return null;
        }
        return new Role(found);
    }
    /** All database roles plus the two synthetic roles. */
    public static async getAll(): Promise<Role[]> {
        return (await Role.manyFromSql(undefined)).concat(
            Role.defaultRole,
            Role.adminRole,
        );
    }
    /**
     * All roles held by a user: explicitly assigned roles, the default
     * role, and (for admins) the admin role.
     *
     * Reuses the shared defaultRole/adminRole instances instead of
     * rebuilding them inline from duplicated literals, consistent with
     * getAll().
     *
     * @param userId - The user whose roles to fetch.
     * @param isAdmin - Whether the user is an administrator.
     */
    public static async getUserRoles(
        userId: string,
        isAdmin: boolean,
    ): Promise<Role[]> {
        return (
            await db.query.RoleToUsers.findMany({
                where: (role): SQL | undefined => eq(role.userId, userId),
                with: {
                    role: true,
                    user: {
                        columns: {
                            isAdmin: true,
                        },
                    },
                },
            })
        )
            .map((r) => new Role(r.role))
            .concat(Role.defaultRole)
            .concat(isAdmin ? [Role.adminRole] : []);
    }
    /** Fetches all roles matching the filter, with optional pagination. */
    public static async manyFromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Roles.id),
        limit?: number,
        offset?: number,
        extra?: Parameters<typeof db.query.Roles.findMany>[0],
    ): Promise<Role[]> {
        const found = await db.query.Roles.findMany({
            where: sql,
            orderBy,
            limit,
            offset,
            with: extra?.with,
        });
        return found.map((s) => new Role(s));
    }
    /**
     * Applies a partial update to this row and refreshes cached data.
     *
     * @throws Error when the row cannot be read back after the update.
     */
    public async update(newRole: Partial<RoleType>): Promise<RoleType> {
        await db.update(Roles).set(newRole).where(eq(Roles.id, this.id));
        const updated = await Role.fromId(this.data.id);
        if (!updated) {
            throw new Error("Failed to update role");
        }
        // Keep the in-memory copy in sync; the previous version left
        // this.data stale, unlike the other BaseInterface subclasses.
        this.data = updated.data;
        return updated.data;
    }
    /** Persists the current in-memory state back to the database. */
    public save(): Promise<RoleType> {
        return this.update(this.data);
    }
    /**
     * Deletes role rows: the given ids in bulk, or the row backing
     * this instance when no ids are provided.
     */
    public async delete(ids?: string[]): Promise<void> {
        if (Array.isArray(ids)) {
            await db.delete(Roles).where(inArray(Roles.id, ids));
        } else {
            await db.delete(Roles).where(eq(Roles.id, this.id));
        }
    }
    /**
     * Inserts a new role row and returns it.
     *
     * @throws Error when the row cannot be read back after insertion.
     */
    public static async insert(
        data: InferInsertModel<typeof Roles>,
    ): Promise<Role> {
        const inserted = (await db.insert(Roles).values(data).returning())[0];
        const role = await Role.fromId(inserted.id);
        if (!role) {
            throw new Error("Failed to insert role");
        }
        return role;
    }
    /** Assigns this role to the given user. */
    public async linkUser(userId: string): Promise<void> {
        await db.insert(RoleToUsers).values({
            userId,
            roleId: this.id,
        });
    }
    /** Removes this role from the given user. */
    public async unlinkUser(userId: string): Promise<void> {
        await db
            .delete(RoleToUsers)
            .where(
                and(
                    eq(RoleToUsers.roleId, this.id),
                    eq(RoleToUsers.userId, userId),
                ),
            );
    }
    /** Primary key of the underlying role row (or "default"/"admin"). */
    public get id(): string {
        return this.data.id;
    }
    /** API representation; the icon is routed through the media proxy. */
    public toApi(): z.infer<typeof RoleSchema> {
        return {
            id: this.id,
            name: this.data.name,
            permissions: this.data.permissions as unknown as RolePermission[],
            priority: this.data.priority,
            description: this.data.description ?? undefined,
            visible: this.data.visible,
            icon: this.data.icon
                ? new ProxiableUrl(this.data.icon).proxied
                : undefined,
        };
    }
}

View file

@ -1,241 +0,0 @@
import { Notes, Notifications, Users } from "@versia/kit/tables";
import { gt, type SQL } from "drizzle-orm";
import { config } from "~/config.ts";
import { Note } from "./note.ts";
import { Notification } from "./notification.ts";
import { User } from "./user.ts";
/** Discriminator for the three kinds of paginated timelines. */
enum TimelineType {
    Note = "Note",
    User = "User",
    Notification = "Notification",
}
/**
 * Generic paginated timeline over notes, users, or notifications.
 *
 * Each fetch returns the matching objects plus an RFC 5988 Link header
 * with "prev"/"next" page URLs built from min_id/max_id cursors.
 */
export class Timeline<Type extends Note | User | Notification> {
    public constructor(private type: TimelineType) {}
    /** Fetches a page of notes plus its pagination Link header. */
    public static getNoteTimeline(
        sql: SQL<unknown> | undefined,
        limit: number,
        url: URL,
        userId?: string,
    ): Promise<{ link: string; objects: Note[] }> {
        return new Timeline<Note>(TimelineType.Note).fetchTimeline(
            sql,
            limit,
            url,
            userId,
        );
    }
    /** Fetches a page of users plus its pagination Link header. */
    public static getUserTimeline(
        sql: SQL<unknown> | undefined,
        limit: number,
        url: URL,
    ): Promise<{ link: string; objects: User[] }> {
        return new Timeline<User>(TimelineType.User).fetchTimeline(
            sql,
            limit,
            url,
        );
    }
    /** Fetches a page of notifications plus its pagination Link header. */
    public static getNotificationTimeline(
        sql: SQL<unknown> | undefined,
        limit: number,
        url: URL,
        userId?: string,
    ): Promise<{ link: string; objects: Notification[] }> {
        return new Timeline<Notification>(
            TimelineType.Notification,
        ).fetchTimeline(sql, limit, url, userId);
    }
    /** Dispatches the page query to the model matching this.type. */
    private async fetchObjects(
        sql: SQL<unknown> | undefined,
        limit: number,
        userId?: string,
    ): Promise<Type[]> {
        switch (this.type) {
            case TimelineType.Note:
                return (await Note.manyFromSql(
                    sql,
                    undefined,
                    limit,
                    undefined,
                    userId,
                )) as Type[];
            case TimelineType.User:
                return (await User.manyFromSql(
                    sql,
                    undefined,
                    limit,
                )) as Type[];
            case TimelineType.Notification:
                return (await Notification.manyFromSql(
                    sql,
                    undefined,
                    limit,
                    undefined,
                    undefined,
                    userId,
                )) as Type[];
        }
    }
    /**
     * Builds the Link header for a page, stripping the query string
     * from the request URL so cursors can be appended cleanly.
     */
    private async fetchLinkHeader(
        objects: Type[],
        url: URL,
        limit: number,
    ): Promise<string> {
        const linkHeader: string[] = [];
        const urlWithoutQuery = new URL(url.pathname, config.http.base_url);
        if (objects.length > 0) {
            switch (this.type) {
                case TimelineType.Note:
                    linkHeader.push(
                        ...(await Timeline.fetchNoteLinkHeader(
                            objects as Note[],
                            urlWithoutQuery,
                            limit,
                        )),
                    );
                    break;
                case TimelineType.User:
                    linkHeader.push(
                        ...(await Timeline.fetchUserLinkHeader(
                            objects as User[],
                            urlWithoutQuery,
                            limit,
                        )),
                    );
                    break;
                case TimelineType.Notification:
                    linkHeader.push(
                        ...(await Timeline.fetchNotificationLinkHeader(
                            objects as Notification[],
                            urlWithoutQuery,
                            limit,
                        )),
                    );
            }
        }
        return linkHeader.join(", ");
    }
    /**
     * Builds prev/next links for a note page. "prev" is emitted when a
     * newer note exists; "next" when the page is full and an older note
     * exists beyond it.
     */
    private static async fetchNoteLinkHeader(
        notes: Note[],
        urlWithoutQuery: URL,
        limit: number,
    ): Promise<string[]> {
        const linkHeader: string[] = [];
        const objectBefore = await Note.fromSql(gt(Notes.id, notes[0].data.id));
        if (objectBefore) {
            linkHeader.push(
                `<${urlWithoutQuery}?limit=${limit ?? 20}&min_id=${notes[0].data.id}>; rel="prev"`,
            );
        }
        if (notes.length >= (limit ?? 20)) {
            const objectAfter = await Note.fromSql(
                gt(Notes.id, notes.at(-1)?.data.id ?? ""),
            );
            if (objectAfter) {
                linkHeader.push(
                    `<${urlWithoutQuery}?limit=${limit ?? 20}&max_id=${notes.at(-1)?.data.id}>; rel="next"`,
                );
            }
        }
        return linkHeader;
    }
    /** Same prev/next logic as fetchNoteLinkHeader, for user pages. */
    private static async fetchUserLinkHeader(
        users: User[],
        urlWithoutQuery: URL,
        limit: number,
    ): Promise<string[]> {
        const linkHeader: string[] = [];
        const objectBefore = await User.fromSql(gt(Users.id, users[0].id));
        if (objectBefore) {
            linkHeader.push(
                `<${urlWithoutQuery}?limit=${limit ?? 20}&min_id=${users[0].id}>; rel="prev"`,
            );
        }
        if (users.length >= (limit ?? 20)) {
            const objectAfter = await User.fromSql(
                gt(Users.id, users.at(-1)?.id ?? ""),
            );
            if (objectAfter) {
                linkHeader.push(
                    `<${urlWithoutQuery}?limit=${limit ?? 20}&max_id=${users.at(-1)?.id}>; rel="next"`,
                );
            }
        }
        return linkHeader;
    }
    /** Same prev/next logic as fetchNoteLinkHeader, for notifications. */
    private static async fetchNotificationLinkHeader(
        notifications: Notification[],
        urlWithoutQuery: URL,
        limit: number,
    ): Promise<string[]> {
        const linkHeader: string[] = [];
        const objectBefore = await Notification.fromSql(
            gt(Notifications.id, notifications[0].data.id),
        );
        if (objectBefore) {
            linkHeader.push(
                `<${urlWithoutQuery}?limit=${limit ?? 20}&min_id=${notifications[0].data.id}>; rel="prev"`,
            );
        }
        if (notifications.length >= (limit ?? 20)) {
            const objectAfter = await Notification.fromSql(
                gt(Notifications.id, notifications.at(-1)?.data.id ?? ""),
            );
            if (objectAfter) {
                linkHeader.push(
                    `<${urlWithoutQuery}?limit=${limit ?? 20}&max_id=${notifications.at(-1)?.data.id}>; rel="next"`,
                );
            }
        }
        return linkHeader;
    }
    /** Fetches one page of objects and its pagination Link header. */
    private async fetchTimeline(
        sql: SQL<unknown> | undefined,
        limit: number,
        url: URL,
        userId?: string,
    ): Promise<{ link: string; objects: Type[] }> {
        const objects = await this.fetchObjects(sql, limit, userId);
        const link = await this.fetchLinkHeader(objects, url, limit);
        // The previous switch returned the identical object from every
        // branch; a single return is equivalent.
        return { link, objects };
    }
}

View file

@ -1,166 +0,0 @@
import type { Token as TokenSchema } from "@versia/client/schemas";
import { type Application, db, User } from "@versia/kit/db";
import { Tokens } from "@versia/kit/tables";
import {
desc,
eq,
type InferInsertModel,
type InferSelectModel,
inArray,
type SQL,
} from "drizzle-orm";
import type { z } from "zod";
import { BaseInterface } from "./base.ts";
/** Token row joined with its (possibly absent) OAuth application. */
type TokenType = InferSelectModel<typeof Tokens> & {
    application: typeof Application.$type | null;
};
/**
 * Wrapper around an OAuth access token row.
 */
export class Token extends BaseInterface<typeof Tokens, TokenType> {
    public static $type: TokenType;
    /** Re-reads this row from the database, replacing cached data. */
    public async reload(): Promise<void> {
        const reloaded = await Token.fromId(this.data.id);
        if (!reloaded) {
            throw new Error("Failed to reload token");
        }
        this.data = reloaded.data;
    }
    /** Fetches a token by primary key; null id yields null. */
    public static async fromId(id: string | null): Promise<Token | null> {
        if (!id) {
            return null;
        }
        return await Token.fromSql(eq(Tokens.id, id));
    }
    /** Fetches all tokens whose ids are in the given list. */
    public static async fromIds(ids: string[]): Promise<Token[]> {
        return await Token.manyFromSql(inArray(Tokens.id, ids));
    }
    /**
     * Fetches the first token matching the filter, eagerly loading its
     * application relation.
     */
    public static async fromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Tokens.id),
    ): Promise<Token | null> {
        const found = await db.query.Tokens.findFirst({
            where: sql,
            orderBy,
            with: {
                application: true,
            },
        });
        if (!found) {
            return null;
        }
        return new Token(found);
    }
    /**
     * Fetches all tokens matching the filter, with optional pagination.
     * The application relation is always loaded; `extra.with` may add
     * further relations.
     */
    public static async manyFromSql(
        sql: SQL<unknown> | undefined,
        orderBy: SQL<unknown> | undefined = desc(Tokens.id),
        limit?: number,
        offset?: number,
        extra?: Parameters<typeof db.query.Tokens.findMany>[0],
    ): Promise<Token[]> {
        const found = await db.query.Tokens.findMany({
            where: sql,
            orderBy,
            limit,
            offset,
            with: {
                application: true,
                ...extra?.with,
            },
        });
        return found.map((s) => new Token(s));
    }
    /**
     * Applies a partial update to this row and refreshes cached data.
     * (Parameter renamed from the copy-pasted "newAttachment" to match
     * the naming used by the other database wrappers.)
     *
     * @throws Error when the row cannot be read back after the update.
     */
    public async update(newToken: Partial<TokenType>): Promise<TokenType> {
        await db
            .update(Tokens)
            .set(newToken)
            .where(eq(Tokens.id, this.id));
        const updated = await Token.fromId(this.data.id);
        if (!updated) {
            throw new Error("Failed to update token");
        }
        this.data = updated.data;
        return updated.data;
    }
    /** Persists the current in-memory state back to the database. */
    public save(): Promise<TokenType> {
        return this.update(this.data);
    }
    /**
     * Deletes token rows: the given ids in bulk, or the row backing
     * this instance when no ids are provided.
     */
    public async delete(ids?: string[]): Promise<void> {
        if (Array.isArray(ids)) {
            await db.delete(Tokens).where(inArray(Tokens.id, ids));
        } else {
            await db.delete(Tokens).where(eq(Tokens.id, this.id));
        }
    }
    /**
     * Inserts a new token row and returns it.
     *
     * @throws Error when the row cannot be read back after insertion.
     */
    public static async insert(
        data: InferInsertModel<typeof Tokens>,
    ): Promise<Token> {
        const inserted = (await db.insert(Tokens).values(data).returning())[0];
        const token = await Token.fromId(inserted.id);
        if (!token) {
            throw new Error("Failed to insert token");
        }
        return token;
    }
    /** Bulk insert; returns the wrapped rows in insertion order. */
    public static async insertMany(
        data: InferInsertModel<typeof Tokens>[],
    ): Promise<Token[]> {
        const inserted = await db.insert(Tokens).values(data).returning();
        return await Token.fromIds(inserted.map((i) => i.id));
    }
    /** Primary key of the underlying token row. */
    public get id(): string {
        return this.data.id;
    }
    /** Looks up a token by its opaque access-token string. */
    public static async fromAccessToken(
        accessToken: string,
    ): Promise<Token | null> {
        return await Token.fromSql(eq(Tokens.accessToken, accessToken));
    }
    /**
     * Retrieves the associated user from this token
     *
     * @returns The user associated with this token
     */
    public async getUser(): Promise<User | null> {
        if (!this.data.userId) {
            return null;
        }
        return await User.fromId(this.data.userId);
    }
    /** OAuth token response shape (created_at in unix seconds). */
    public toApi(): z.infer<typeof TokenSchema> {
        return {
            access_token: this.data.accessToken,
            token_type: "Bearer",
            scope: this.data.scope,
            created_at: Math.floor(
                new Date(this.data.createdAt).getTime() / 1000,
            ),
        };
    }
}

File diff suppressed because it is too large Load diff

View file

@ -1,171 +0,0 @@
import type { ContentfulStatusCode } from "hono/utils/http-status";
import type { JSONObject } from "hono/utils/types";
import type { DescribeRouteOptions } from "hono-openapi";
import { resolver } from "hono-openapi/zod";
import { z } from "zod";
/**
* API Error
*
* Custom error class used to throw errors in the API. Includes a status code, a message and an optional description.
* @extends Error
*/
export class ApiError extends Error {
    /**
     * @param {ContentfulStatusCode} status - The status code of the error
     * @param {string} message - The message of the error
     * @param {string | JSONObject} [details] - The description of the error
     */
    public constructor(
        public status: ContentfulStatusCode,
        public override message: string,
        public details?: string | JSONObject,
    ) {
        super(message);
        this.name = "ApiError";
    }
    /** Wire format of an error response body. */
    public static zodSchema = z.object({
        error: z.string(),
        details: z
            .string()
            .or(z.record(z.string(), z.string().or(z.number())))
            .optional(),
    });
    /** OpenAPI response description for this error. */
    public get schema(): NonNullable<
        DescribeRouteOptions["responses"]
    >[number] {
        return {
            description: this.message,
            content: {
                "application/json": {
                    schema: resolver(ApiError.zodSchema),
                },
            },
        };
    }
    /**
     * Builds the uniform 404 used by the *NotFound factories below:
     * title "<Resource> not found", description "The requested
     * <resource> could not be found."
     */
    private static missing404(resource: string): ApiError {
        const title = `${resource[0].toUpperCase()}${resource.slice(1)} not found`;
        return new ApiError(
            404,
            title,
            `The requested ${resource} could not be found.`,
        );
    }
    public static missingAuthentication(): ApiError {
        return new ApiError(
            401,
            "Missing authentication",
            "The Authorization header is missing or could not be parsed.",
        );
    }
    public static forbidden(): ApiError {
        return new ApiError(
            403,
            "Missing permissions",
            "You do not have permission to access or modify this resource.",
        );
    }
    // Generic 404 — its title ("Not found") does not fit the
    // missing404 pattern, so it is spelled out.
    public static notFound(): ApiError {
        return new ApiError(
            404,
            "Not found",
            "The requested resource could not be found.",
        );
    }
    public static noteNotFound(): ApiError {
        return ApiError.missing404("note");
    }
    public static accountNotFound(): ApiError {
        return ApiError.missing404("account");
    }
    public static roleNotFound(): ApiError {
        return ApiError.missing404("role");
    }
    public static instanceNotFound(): ApiError {
        return ApiError.missing404("instance");
    }
    public static likeNotFound(): ApiError {
        return ApiError.missing404("like");
    }
    // Non-standard description, so not routed through missing404.
    public static pushSubscriptionNotFound(): ApiError {
        return new ApiError(
            404,
            "Push subscription not found",
            "No push subscription associated with this access token",
        );
    }
    public static tokenNotFound(): ApiError {
        return ApiError.missing404("token");
    }
    public static mediaNotFound(): ApiError {
        return ApiError.missing404("media");
    }
    public static applicationNotFound(): ApiError {
        return ApiError.missing404("application");
    }
    public static emojiNotFound(): ApiError {
        return ApiError.missing404("emoji");
    }
    public static notificationNotFound(): ApiError {
        return ApiError.missing404("notification");
    }
    public static validationFailed(): ApiError {
        return new ApiError(422, "Invalid values in request");
    }
    public static internalServerError(): ApiError {
        return new ApiError(
            500,
            "Internal server error. This is likely a bug.",
        );
    }
}

View file

@ -1,7 +1,9 @@
import markdownItTaskLists from "@hackmd/markdown-it-task-lists";
import { db, type Note, User } from "@versia/kit/db";
import { Instances, Users } from "@versia/kit/tables";
import type * as VersiaEntities from "@versia/sdk/entities";
import { FederationRequester } from "@versia/sdk/http";
import { config } from "@versia-server/config";
import { and, eq, inArray, isNull, or, sql } from "drizzle-orm";
import linkifyHtml from "linkify-html";
import {
@ -18,8 +20,6 @@ import markdownItContainer from "markdown-it-container";
import markdownItTocDoneRight from "markdown-it-toc-done-right";
import { mentionValidator } from "@/api";
import { sanitizeHtml, sanitizeHtmlInline } from "@/sanitization";
import { config } from "~/config.ts";
import type * as VersiaEntities from "~/packages/sdk/entities/index.ts";
import { transformOutputToUserWithRelations, userRelations } from "./user.ts";
/**

View file

@ -1,4 +1,5 @@
import { getLogger, type Logger } from "@logtape/logtape";
import { ApiError } from "@versia/kit";
import {
type Instance,
Like,
@ -8,6 +9,10 @@ import {
User,
} from "@versia/kit/db";
import { Likes, Notes } from "@versia/kit/tables";
import { EntitySorter, type JSONObject } from "@versia/sdk";
import { verify } from "@versia/sdk/crypto";
import * as VersiaEntities from "@versia/sdk/entities";
import { config } from "@versia-server/config";
import type { SocketAddress } from "bun";
import { Glob } from "bun";
import chalk from "chalk";
@ -15,12 +20,6 @@ import { and, eq } from "drizzle-orm";
import { matches } from "ip-matching";
import { isValidationError } from "zod-validation-error";
import { sentry } from "@/sentry";
import { config } from "~/config.ts";
import { verify } from "~/packages/sdk/crypto.ts";
import * as VersiaEntities from "~/packages/sdk/entities/index.ts";
import { EntitySorter } from "~/packages/sdk/inbox-processor.ts";
import type { JSONObject } from "~/packages/sdk/types.ts";
import { ApiError } from "../errors/api-error.ts";
/**
* Checks if the hostname is defederated using glob matching.

View file

@ -1,25 +0,0 @@
import { config } from "~/config.ts";
/**
 * A URL that knows how to route itself through the instance's media
 * proxy unless it points at a trusted origin.
 */
export class ProxiableUrl extends URL {
    /**
     * Whether this URL's host is a trusted origin (the instance's own
     * base URL or the configured S3 public URL) or a subdomain of one.
     */
    private isAllowedOrigin(): boolean {
        const allowedOrigins: URL[] = [config.http.base_url].concat(
            config.s3?.public_url ?? [],
        );
        // Match the exact host or a dot-bounded subdomain. The previous
        // bare `hostname.endsWith(origin.hostname)` also matched
        // unrelated suffix domains (e.g. "evil-example.com" for
        // "example.com"), letting untrusted hosts bypass the proxy.
        return allowedOrigins.some(
            (origin) =>
                this.hostname === origin.hostname ||
                this.hostname.endsWith(`.${origin.hostname}`),
        );
    }
    /**
     * The URL to hand to clients: the original href for trusted
     * origins, otherwise a /media/proxy/ URL carrying the target as
     * base64url.
     */
    public get proxied(): string {
        // Don't proxy from CDN and self, since those sources are trusted
        if (this.isAllowedOrigin()) {
            return this.href;
        }
        const urlAsBase64Url = Buffer.from(this.href).toString("base64url");
        return new URL(`/media/proxy/${urlAsBase64Url}`, config.http.base_url)
            .href;
    }
}

View file

@ -1,221 +0,0 @@
/* import {
afterEach,
beforeEach,
describe,
expect,
jest,
mock,
test,
} from "bun:test";
import { ZodError, type ZodTypeAny, z } from "zod";
import { Plugin } from "~/packages/plugin-kit";
import { type Manifest, manifestSchema } from "~/packages/plugin-kit/schema";
import { PluginLoader } from "./loader.ts";
const mockReaddir = jest.fn();
const mockGetLogger = jest.fn(() => ({
fatal: jest.fn(),
}));
const mockParseJSON5 = jest.fn();
const mockParseJSONC = jest.fn();
const mockFromZodError = jest.fn();
mock.module("node:fs/promises", () => ({
readdir: mockReaddir,
}));
mock.module("@logtape/logtape", () => ({
getLogger: mockGetLogger,
}));
mock.module("confbox", () => ({
parseJSON5: mockParseJSON5,
parseJSONC: mockParseJSONC,
}));
mock.module("zod-validation-error", () => ({
fromZodError: mockFromZodError,
}));
describe("PluginLoader", () => {
let pluginLoader: PluginLoader;
beforeEach(() => {
pluginLoader = new PluginLoader();
});
afterEach(() => {
jest.clearAllMocks();
});
test("getDirectories should return directories", async () => {
mockReaddir.mockResolvedValue([
{ name: "dir1", isDirectory: (): true => true },
{ name: "file1", isDirectory: (): false => false },
{ name: "dir2", isDirectory: (): true => true },
]);
// biome-ignore lint/complexity/useLiteralKeys: Private method
const directories = await PluginLoader["getDirectories"]("/some/path");
expect(directories).toEqual(["dir1", "dir2"]);
});
test("findManifestFile should return manifest file if found", async () => {
mockReaddir.mockResolvedValue(["manifest.json", "otherfile.txt"]);
const manifestFile =
// biome-ignore lint/complexity/useLiteralKeys: Private method
await PluginLoader["findManifestFile"]("/some/path");
expect(manifestFile).toBe("manifest.json");
});
test("hasEntrypoint should return true if entrypoint file is found", async () => {
mockReaddir.mockResolvedValue(["index.ts", "otherfile.txt"]);
// biome-ignore lint/complexity/useLiteralKeys: Private method
const hasEntrypoint = await PluginLoader["hasEntrypoint"]("/some/path");
expect(hasEntrypoint).toBe(true);
});
test("parseManifestFile should parse JSON manifest", async () => {
const manifestContent = { name: "test-plugin" };
Bun.file = jest.fn().mockReturnValue({
text: (): Promise<string> =>
Promise.resolve(JSON.stringify(manifestContent)),
});
// biome-ignore lint/complexity/useLiteralKeys: Private method
const manifest = await pluginLoader["parseManifestFile"](
"/some/path/manifest.json",
"manifest.json",
);
expect(manifest).toEqual(manifestContent);
});
test("findPlugins should return plugin directories with valid manifest and entrypoint", async () => {
mockReaddir
.mockResolvedValueOnce([
{ name: "plugin1", isDirectory: (): true => true },
{ name: "plugin2", isDirectory: (): true => true },
])
.mockResolvedValue(["manifest.json", "index.ts"]);
const plugins = await PluginLoader.findPlugins("/some/path");
expect(plugins).toEqual(["plugin1", "plugin2"]);
});
test("parseManifest should parse and validate manifest", async () => {
const manifestContent: Manifest = {
name: "test-plugin",
version: "1.1.0",
description: "Doobaee",
};
mockReaddir.mockResolvedValue(["manifest.json"]);
Bun.file = jest.fn().mockReturnValue({
text: (): Promise<string> =>
Promise.resolve(JSON.stringify(manifestContent)),
});
manifestSchema.safeParseAsync = jest.fn().mockResolvedValue({
success: true,
data: manifestContent,
});
const manifest = await pluginLoader.parseManifest(
"/some/path",
"plugin1",
);
expect(manifest).toEqual(manifestContent);
});
test("parseManifest should throw error if manifest is missing", async () => {
mockReaddir.mockResolvedValue([]);
await expect(
pluginLoader.parseManifest("/some/path", "plugin1"),
).rejects.toThrow("Plugin plugin1 is missing a manifest file");
});
test("parseManifest should throw error if manifest is invalid", async () => {
// @ts-expect-error trying to cause a type error here
const manifestContent: Manifest = {
name: "test-plugin",
version: "1.1.0",
};
mockReaddir.mockResolvedValue(["manifest.json"]);
Bun.file = jest.fn().mockReturnValue({
text: (): Promise<string> =>
Promise.resolve(JSON.stringify(manifestContent)),
});
manifestSchema.safeParseAsync = jest.fn().mockResolvedValue({
success: false,
error: new ZodError([]),
});
await expect(
pluginLoader.parseManifest("/some/path", "plugin1"),
).rejects.toThrow();
});
test("loadPlugin should load and return a Plugin instance", async () => {
const mockPlugin = new Plugin(z.object({}));
mock.module("/some/path/index.ts", () => ({
default: mockPlugin,
}));
const plugin = await pluginLoader.loadPlugin("/some/path", "index.ts");
expect(plugin).toBeInstanceOf(Plugin);
});
test("loadPlugin should throw error if default export is not a Plugin", async () => {
mock.module("/some/path/index.ts", () => ({
default: "cheese",
}));
await expect(
pluginLoader.loadPlugin("/some/path", "index.ts"),
).rejects.toThrow("Entrypoint is not a Plugin");
});
test("loadPlugins should load all plugins in a directory", async () => {
const manifestContent: Manifest = {
name: "test-plugin",
version: "1.1.0",
description: "Doobaee",
};
const mockPlugin = new Plugin(z.object({}));
mockReaddir
.mockResolvedValueOnce([
{ name: "plugin1", isDirectory: (): true => true },
{ name: "plugin2", isDirectory: (): true => true },
])
.mockResolvedValue(["manifest.json", "index.ts"]);
Bun.file = jest.fn().mockReturnValue({
text: (): Promise<string> =>
Promise.resolve(JSON.stringify(manifestContent)),
});
manifestSchema.safeParseAsync = jest.fn().mockResolvedValue({
success: true,
data: manifestContent,
});
mock.module("/some/path/plugin1/index", () => ({
default: mockPlugin,
}));
mock.module("/some/path/plugin2/index", () => ({
default: mockPlugin,
}));
const plugins = await pluginLoader.loadPlugins("/some/path", true);
expect(plugins).toEqual([
{
manifest: manifestContent,
plugin: mockPlugin as unknown as Plugin<ZodTypeAny>,
},
{
manifest: manifestContent,
plugin: mockPlugin as unknown as Plugin<ZodTypeAny>,
},
]);
});
});
*/

View file

@ -1,14 +1,13 @@
import { readdir } from "node:fs/promises";
import { getLogger, type Logger } from "@logtape/logtape";
import { type Manifest, manifestSchema, Plugin } from "@versia/kit";
import { config } from "@versia-server/config";
import { file, sleep } from "bun";
import chalk from "chalk";
import { parseJSON5, parseJSONC } from "confbox";
import type { Hono } from "hono";
import type { ZodTypeAny } from "zod";
import { fromZodError, type ValidationError } from "zod-validation-error";
import { config } from "~/config.ts";
import { Plugin } from "~/packages/plugin-kit/plugin";
import { type Manifest, manifestSchema } from "~/packages/plugin-kit/schema";
import type { HonoEnv } from "~/types/api";
/**

View file

@ -1,9 +1,9 @@
import { User } from "@versia/kit/db";
import type { JSONObject } from "@versia/sdk";
import * as VersiaEntities from "@versia/sdk/entities";
import { config } from "@versia-server/config";
import { Queue, Worker } from "bullmq";
import chalk from "chalk";
import { config } from "~/config.ts";
import * as VersiaEntities from "~/packages/sdk/entities";
import type { JSONObject } from "~/packages/sdk/types";
import { connection } from "~/utils/redis.ts";
export enum DeliveryJobType {

View file

@ -1,8 +1,8 @@
import { Instance } from "@versia/kit/db";
import { Instances } from "@versia/kit/tables";
import { config } from "@versia-server/config";
import { Queue, Worker } from "bullmq";
import { eq } from "drizzle-orm";
import { config } from "~/config.ts";
import { connection } from "~/utils/redis.ts";
export enum FetchJobType {

View file

@ -1,11 +1,11 @@
import { getLogger } from "@logtape/logtape";
import { ApiError } from "@versia/kit";
import { Instance, User } from "@versia/kit/db";
import type { JSONObject } from "@versia/sdk";
import { config } from "@versia-server/config";
import { Queue, Worker } from "bullmq";
import type { SocketAddress } from "bun";
import { config } from "~/config.ts";
import type { JSONObject } from "~/packages/sdk/types.ts";
import { connection } from "~/utils/redis.ts";
import { ApiError } from "../errors/api-error.ts";
import { InboxProcessor } from "../inbox/processor.ts";
export enum InboxJobType {

View file

@ -1,6 +1,6 @@
import { Media } from "@versia/kit/db";
import { config } from "@versia-server/config";
import { Queue, Worker } from "bullmq";
import { config } from "~/config.ts";
import { connection } from "~/utils/redis.ts";
import { calculateBlurhash } from "../media/preprocessors/blurhash.ts";
import { convertImage } from "../media/preprocessors/image-conversion.ts";

View file

@ -1,8 +1,8 @@
import { Note, PushSubscription, Token, User } from "@versia/kit/db";
import { config } from "@versia-server/config";
import { Queue, Worker } from "bullmq";
import { sendNotification } from "web-push";
import { htmlToText } from "@/content_types.ts";
import { config } from "~/config.ts";
import { connection } from "~/utils/redis.ts";
export enum PushJobType {

View file

@ -1,6 +1,6 @@
import { Relationship, User } from "@versia/kit/db";
import { config } from "@versia-server/config";
import { Queue, Worker } from "bullmq";
import { config } from "~/config.ts";
import { connection } from "~/utils/redis.ts";
export enum RelationshipJobType {

View file

@ -5,12 +5,12 @@
import { getLogger } from "@logtape/logtape";
import { db, Note, User } from "@versia/kit/db";
import { config } from "@versia-server/config";
import type { SQL, ValueOrArray } from "drizzle-orm";
import {
Ingest as SonicChannelIngest,
Search as SonicChannelSearch,
} from "sonic-channel";
import { config } from "~/config.ts";
/**
* Enum for Sonic index types