Merge pull request #32 from versia-pub/refactor/media

Refactor and simplify the media pipeline
Gaspard Wierzbinski 2025-02-01 11:02:28 +01:00 committed by GitHub
commit 450058213d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
60 changed files with 10789 additions and 2118 deletions

View file

@ -1,5 +1,5 @@
# Bun doesn't run well on Musl but this seems to work
FROM oven/bun:1.2.0-alpine as base
FROM oven/bun:1.2.1-alpine as base
# Switch to Bash by editing /etc/passwd
RUN apk add --no-cache libstdc++ git bash curl openssh cloc && \

View file

@ -7,7 +7,8 @@
"federation",
"config",
"plugin",
"worker"
"worker",
"media"
],
"languageToolLinter.languageTool.ignoredWordsInWorkspace": ["versia"]
}

View file

@ -10,7 +10,7 @@ Versia Server `0.8.0` is fully backwards compatible with `0.7.0`.
- Added an administration UI for managing the queue.
- Media processing is now also handled by a queue system.
- Added [Push Notifications](https://docs.joinmastodon.org/methods/push) support.
- Upgraded Bun to `1.2.0`.
- Upgraded Bun to `1.2.1`.
- Implemented support for the [**Instance Messaging Extension**](https://versia.pub/extensions/instance-messaging)
- Implemented [**Shared Inboxes**](https://versia.pub/federation#inboxes) support.
- Allowed `<div>` and `<span>` tags in Markdown.
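The queue-based media processing noted above surfaces later in this diff as jobs pushed onto a dedicated media queue. A minimal sketch of that pattern, using the `MediaJobType`/`mediaQueue` names from the new `classes/database/media.ts` (the import paths and the `media`/`file` bindings are assumptions based on that file):

```ts
import { MediaJobType, mediaQueue } from "~/classes/queues/media.ts";
import { config } from "~/packages/config-manager";

// `media` is a freshly inserted Media row and `file` the uploaded File (placeholders).
if (config.media.conversion.convert_images) {
    await mediaQueue.add(MediaJobType.ConvertMedia, {
        attachmentId: media.id,
        filename: file.name,
    });
}

// Metadata extraction is always queued, regardless of conversion settings.
await mediaQueue.add(MediaJobType.CalculateMetadata, {
    attachmentId: media.id,
    filename: file.name,
});
```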

View file

@ -26,7 +26,7 @@ RUN bun run build
WORKDIR /temp/dist
# Copy production dependencies and source code into final image
FROM oven/bun:1.2.0-alpine
FROM oven/bun:1.2.1-alpine
# Install libstdc++ for Bun and create app directory
RUN apk add --no-cache libstdc++ && \

View file

@ -26,7 +26,7 @@ RUN bun run build:worker
WORKDIR /temp/dist
# Copy production dependencies and source code into final image
FROM oven/bun:1.2.0-alpine
FROM oven/bun:1.2.1-alpine
# Install libstdc++ for Bun and create app directory
RUN apk add --no-cache libstdc++ && \

View file

@ -1,16 +1,14 @@
import { apiRoute, auth, jsonOrForm } from "@/api";
import { mimeLookup } from "@/content_types";
import { mergeAndDeduplicate } from "@/lib";
import { sanitizedHtmlStrip } from "@/sanitization";
import { createRoute } from "@hono/zod-openapi";
import { Emoji, Media, User } from "@versia/kit/db";
import { Emoji, User } from "@versia/kit/db";
import { RolePermissions, Users } from "@versia/kit/tables";
import { and, eq, isNull } from "drizzle-orm";
import ISO6391 from "iso-639-1";
import { z } from "zod";
import { ApiError } from "~/classes/errors/api-error";
import { contentToHtml } from "~/classes/functions/status";
import { MediaManager } from "~/classes/media/media-manager";
import { config } from "~/packages/config-manager/index.ts";
import { ErrorSchema } from "~/types/api";
@ -61,6 +59,7 @@ const schemas = {
.min(1)
.max(2000)
.url()
.transform((a) => new URL(a))
.or(
z
.instanceof(File)
@ -76,6 +75,7 @@ const schemas = {
.min(1)
.max(2000)
.url()
.transform((v) => new URL(v))
.or(
z
.instanceof(File)
@ -204,8 +204,6 @@ export default apiRoute((app) =>
display_name ?? "",
);
const mediaManager = new MediaManager(config);
if (display_name) {
self.displayName = sanitizedDisplayName;
}
@ -247,37 +245,17 @@ export default apiRoute((app) =>
if (avatar) {
if (avatar instanceof File) {
const { path, uploadedFile } =
await mediaManager.addFile(avatar);
const contentType = uploadedFile.type;
self.avatar = Media.getUrl(path);
self.source.avatar = {
content_type: contentType,
};
await user.avatar?.updateFromFile(avatar);
} else {
self.avatar = avatar;
self.source.avatar = {
content_type: await mimeLookup(avatar),
};
await user.avatar?.updateFromUrl(avatar);
}
}
if (header) {
if (header instanceof File) {
const { path, uploadedFile } =
await mediaManager.addFile(header);
const contentType = uploadedFile.type;
self.header = Media.getUrl(path);
self.source.header = {
content_type: contentType,
};
await user.header?.updateFromFile(header);
} else {
self.header = header;
self.source.header = {
content_type: await mimeLookup(header),
};
await user.header?.updateFromUrl(header);
}
}
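Read as a whole, the added lines above drop the MediaManager and `source.avatar`/`source.header` bookkeeping: avatar and header updates now go through the user's own Media rows, and the new `.transform((a) => new URL(a))` means remote values arrive as `URL` objects rather than strings. A condensed sketch of the resulting handler logic, using only names from the diff:

```ts
if (avatar) {
    if (avatar instanceof File) {
        await user.avatar?.updateFromFile(avatar);
    } else {
        // avatar is a URL here, thanks to the zod transform above
        await user.avatar?.updateFromUrl(avatar);
    }
}

if (header) {
    if (header instanceof File) {
        await user.header?.updateFromFile(header);
    } else {
        await user.header?.updateFromUrl(header);
    }
}
```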

View file

@ -1,12 +1,10 @@
import { apiRoute, auth, emojiValidator, jsonOrForm } from "@/api";
import { mimeLookup } from "@/content_types";
import { createRoute } from "@hono/zod-openapi";
import { Emoji, Media, db } from "@versia/kit/db";
import { Emojis, RolePermissions } from "@versia/kit/tables";
import { eq } from "drizzle-orm";
import { Emoji } from "@versia/kit/db";
import { RolePermissions } from "@versia/kit/tables";
import { z } from "zod";
import { ApiError } from "~/classes/errors/api-error";
import { MediaManager } from "~/classes/media/media-manager";
import { config } from "~/packages/config-manager";
import { ErrorSchema } from "~/types/api";
@ -31,6 +29,7 @@ const schemas = {
.min(1)
.max(2000)
.url()
.transform((a) => new URL(a))
.or(
z
.instanceof(File)
@ -230,8 +229,6 @@ export default apiRoute((app) => {
);
}
const mediaManager = new MediaManager(config);
const {
global: emojiGlobal,
alt,
@ -248,11 +245,9 @@ export default apiRoute((app) => {
);
}
const modified = structuredClone(emoji.data);
if (element) {
// Check of emoji is an image
let contentType =
const contentType =
element instanceof File
? element.type
: await mimeLookup(element);
@ -265,30 +260,24 @@ export default apiRoute((app) => {
);
}
let url = "";
if (element instanceof File) {
const uploaded = await mediaManager.addFile(element);
url = Media.getUrl(uploaded.path);
contentType = uploaded.uploadedFile.type;
await emoji.media.updateFromFile(element);
} else {
url = element;
await emoji.media.updateFromUrl(element);
}
}
modified.url = url;
modified.contentType = contentType;
if (alt) {
await emoji.media.updateMetadata({
description: alt,
});
}
modified.shortcode = shortcode ?? modified.shortcode;
modified.alt = alt ?? modified.alt;
modified.category = category ?? modified.category;
if (emojiGlobal !== undefined) {
modified.ownerId = emojiGlobal ? null : user.data.id;
}
await emoji.update(modified);
await emoji.update({
shortcode,
ownerId: emojiGlobal ? null : user.data.id,
category,
});
return context.json(emoji.toApi(), 200);
});
@ -315,11 +304,7 @@ export default apiRoute((app) => {
);
}
const mediaManager = new MediaManager(config);
await mediaManager.deleteFileByUrl(emoji.data.url);
await db.delete(Emojis).where(eq(Emojis.id, id));
await emoji.delete();
return context.body(null, 204);
});
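Because additions and removals are interleaved above, the final PATCH handler is easier to see reconstructed from the added lines alone: file and URL handling move onto the emoji's Media row, alt text becomes Media metadata, and only emoji-level columns pass through `emoji.update()`. Deletion likewise collapses to a single `emoji.delete()` instead of `MediaManager.deleteFileByUrl` plus a raw `db.delete(Emojis)`:

```ts
if (element) {
    const contentType =
        element instanceof File ? element.type : await mimeLookup(element);
    // Non-image content types are still rejected here, as before.

    if (element instanceof File) {
        await emoji.media.updateFromFile(element);
    } else {
        await emoji.media.updateFromUrl(element);
    }
}

if (alt) {
    await emoji.media.updateMetadata({ description: alt });
}

await emoji.update({
    shortcode,
    ownerId: emojiGlobal ? null : user.data.id,
    category,
});
```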

View file

@ -6,7 +6,6 @@ import { Emojis, RolePermissions } from "@versia/kit/tables";
import { and, eq, isNull, or } from "drizzle-orm";
import { z } from "zod";
import { ApiError } from "~/classes/errors/api-error";
import { MediaManager } from "~/classes/media/media-manager";
import { config } from "~/packages/config-manager";
import { ErrorSchema } from "~/types/api";
@ -27,6 +26,7 @@ const schemas = {
.min(1)
.max(2000)
.url()
.transform((a) => new URL(a))
.or(
z
.instanceof(File)
@ -130,10 +130,8 @@ export default apiRoute((app) =>
);
}
let url = "";
// Check of emoji is an image
let contentType =
const contentType =
element instanceof File ? element.type : await mimeLookup(element);
if (!contentType.startsWith("image/")) {
@ -144,25 +142,21 @@ export default apiRoute((app) =>
);
}
if (element instanceof File) {
const mediaManager = new MediaManager(config);
const uploaded = await mediaManager.addFile(element);
url = Media.getUrl(uploaded.path);
contentType = uploaded.uploadedFile.type;
} else {
url = element;
}
const media =
element instanceof File
? await Media.fromFile(element, {
description: alt,
})
: await Media.fromUrl(element, {
description: alt,
});
const emoji = await Emoji.insert({
shortcode,
url,
mediaId: media.id,
visibleInPicker: true,
ownerId: global ? null : user.id,
category,
contentType,
alt,
});
return context.json(emoji.toApi(), 201);

View file

@ -4,7 +4,6 @@ import { Media } from "@versia/kit/db";
import { RolePermissions } from "@versia/kit/tables";
import { z } from "zod";
import { ApiError } from "~/classes/errors/api-error";
import { MediaManager } from "~/classes/media/media-manager";
import { config } from "~/packages/config-manager/index.ts";
import { ErrorSchema } from "~/types/api";
@ -101,38 +100,26 @@ export default apiRoute((app) => {
app.openapi(routePut, async (context) => {
const { id } = context.req.valid("param");
const attachment = await Media.fromId(id);
const media = await Media.fromId(id);
if (!attachment) {
if (!media) {
throw new ApiError(404, "Media not found");
}
const { description, thumbnail } = context.req.valid("form");
const { description, thumbnail: thumbnailFile } =
context.req.valid("form");
let thumbnailUrl = attachment.data.thumbnailUrl;
const mediaManager = new MediaManager(config);
if (thumbnail) {
const { path } = await mediaManager.addFile(thumbnail);
thumbnailUrl = Media.getUrl(path);
if (thumbnailFile) {
await media.updateThumbnail(thumbnailFile);
}
const descriptionText = description || attachment.data.description;
if (
descriptionText !== attachment.data.description ||
thumbnailUrl !== attachment.data.thumbnailUrl
) {
await attachment.update({
description: descriptionText,
thumbnailUrl,
if (description) {
await media.updateMetadata({
description,
});
return context.json(attachment.toApi(), 200);
}
return context.json(attachment.toApi(), 200);
return context.json(media.toApi(), 200);
});
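Stripped of the removed lines, the `PUT` handler for a media attachment no longer recomputes thumbnail URLs or diffs descriptions by hand; both concerns are delegated to the Media row. A condensed view of the added logic:

```ts
const media = await Media.fromId(id);
if (!media) {
    throw new ApiError(404, "Media not found");
}

const { description, thumbnail: thumbnailFile } = context.req.valid("form");

if (thumbnailFile) {
    await media.updateThumbnail(thumbnailFile);
}
if (description) {
    await media.updateMetadata({ description });
}

return context.json(media.toApi(), 200);
```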
app.openapi(routeGet, async (context) => {

View file

@ -30,9 +30,7 @@ export default apiRoute((app) =>
app.openapi(route, async (context) => {
const { user } = context.get("auth");
await user.update({
avatar: "",
});
await user.avatar?.delete();
return context.json(user.toApi(true), 200);
}),

View file

@ -30,9 +30,7 @@ export default apiRoute((app) =>
app.openapi(route, async (context) => {
const { user } = context.get("auth");
await user.update({
header: "",
});
await user.header?.delete();
return context.json(user.toApi(true), 200);
}),

View file

@ -1,12 +1,13 @@
import { afterAll, beforeAll, describe, expect, test } from "bun:test";
import type { Status as ApiStatus } from "@versia/client/types";
import { db } from "@versia/kit/db";
import { Media, db } from "@versia/kit/db";
import { Emojis } from "@versia/kit/tables";
import { eq } from "drizzle-orm";
import { config } from "~/packages/config-manager/index.ts";
import { fakeRequest, getTestUsers } from "~/tests/utils";
const { users, tokens, deleteUsers } = await getTestUsers(5);
let media: Media;
afterAll(async () => {
await deleteUsers();
@ -14,10 +15,17 @@ afterAll(async () => {
});
beforeAll(async () => {
media = await Media.insert({
content: {
"image/png": {
content: "https://example.com/test.png",
remote: true,
},
},
});
await db.insert(Emojis).values({
contentType: "image/png",
shortcode: "test",
url: "https://example.com/test.png",
mediaId: media.id,
visibleInPicker: true,
});
});
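The test setup above also illustrates the new storage model: a Media row keeps a Versia ContentFormat object in its `content` column, keyed by MIME type, and emojis reference it through `mediaId`. For the record inserted here, the accessors added in `classes/database/media.ts` would behave roughly as follows (a sketch based on that file):

```ts
media.getPreferredMimeType(); // "image/png" (only one variant is stored)
media.getUrl();               // "https://example.com/test.png"
media.toVersia();             // { "image/png": { content: "https://example.com/test.png", remote: true } }
```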

View file

@ -11,11 +11,6 @@ await Bun.build({
target: "bun",
splitting: true,
minify: false,
}).then((output) => {
if (!output.success) {
console.error(output.logs);
throw new Error("Build failed");
}
});
buildSpinner.text = "Transforming";

View file

@ -26,11 +26,6 @@ await Bun.build({
splitting: true,
minify: false,
external: ["unzipit", "acorn", "@bull-board/ui"],
}).then((output) => {
if (!output.success) {
console.error(output.logs);
throw new Error("Build failed");
}
});
buildSpinner.text = "Transforming";

bun.lock (467 changes)

File diff suppressed because it is too large

View file

@ -1,336 +0,0 @@
import { proxyUrl } from "@/response";
import type { Attachment as ApiAttachment } from "@versia/client/types";
import type { ContentFormat } from "@versia/federation/types";
import { db } from "@versia/kit/db";
import { Medias } from "@versia/kit/tables";
import {
type InferInsertModel,
type InferSelectModel,
type SQL,
desc,
eq,
inArray,
} from "drizzle-orm";
import sharp from "sharp";
import { z } from "zod";
import { MediaBackendType } from "~/packages/config-manager/config.type";
import { config } from "~/packages/config-manager/index.ts";
import { ApiError } from "../errors/api-error.ts";
import { MediaManager } from "../media/media-manager.ts";
import { MediaJobType, mediaQueue } from "../queues/media.ts";
import { BaseInterface } from "./base.ts";
type MediaType = InferSelectModel<typeof Medias>;
export class Media extends BaseInterface<typeof Medias> {
public static schema: z.ZodType<ApiAttachment> = z.object({
id: z.string().uuid(),
type: z.enum(["unknown", "image", "gifv", "video", "audio"]),
url: z.string().url(),
remote_url: z.string().url().nullable(),
preview_url: z.string().url().nullable(),
text_url: z.string().url().nullable(),
meta: z
.object({
width: z.number().optional(),
height: z.number().optional(),
fps: z.number().optional(),
size: z.string().optional(),
duration: z.number().optional(),
length: z.string().optional(),
aspect: z.number().optional(),
original: z.object({
width: z.number().optional(),
height: z.number().optional(),
size: z.string().optional(),
aspect: z.number().optional(),
}),
})
.nullable(),
description: z.string().nullable(),
blurhash: z.string().nullable(),
});
public static $type: MediaType;
public async reload(): Promise<void> {
const reloaded = await Media.fromId(this.data.id);
if (!reloaded) {
throw new Error("Failed to reload attachment");
}
this.data = reloaded.data;
}
public static async fromId(id: string | null): Promise<Media | null> {
if (!id) {
return null;
}
return await Media.fromSql(eq(Medias.id, id));
}
public static async fromIds(ids: string[]): Promise<Media[]> {
return await Media.manyFromSql(inArray(Medias.id, ids));
}
public static async fromSql(
sql: SQL<unknown> | undefined,
orderBy: SQL<unknown> | undefined = desc(Medias.id),
): Promise<Media | null> {
const found = await db.query.Medias.findFirst({
where: sql,
orderBy,
});
if (!found) {
return null;
}
return new Media(found);
}
public static async manyFromSql(
sql: SQL<unknown> | undefined,
orderBy: SQL<unknown> | undefined = desc(Medias.id),
limit?: number,
offset?: number,
extra?: Parameters<typeof db.query.Medias.findMany>[0],
): Promise<Media[]> {
const found = await db.query.Medias.findMany({
where: sql,
orderBy,
limit,
offset,
with: extra?.with,
});
return found.map((s) => new Media(s));
}
public async update(newAttachment: Partial<MediaType>): Promise<MediaType> {
await db
.update(Medias)
.set(newAttachment)
.where(eq(Medias.id, this.id));
const updated = await Media.fromId(this.data.id);
if (!updated) {
throw new Error("Failed to update attachment");
}
this.data = updated.data;
return updated.data;
}
public save(): Promise<MediaType> {
return this.update(this.data);
}
public async delete(ids?: string[]): Promise<void> {
if (Array.isArray(ids)) {
await db.delete(Medias).where(inArray(Medias.id, ids));
} else {
await db.delete(Medias).where(eq(Medias.id, this.id));
}
}
public static async insert(
data: InferInsertModel<typeof Medias>,
): Promise<Media> {
const inserted = (await db.insert(Medias).values(data).returning())[0];
const attachment = await Media.fromId(inserted.id);
if (!attachment) {
throw new Error("Failed to insert attachment");
}
return attachment;
}
public static async fromFile(
file: File,
options?: {
description?: string;
thumbnail?: File;
},
): Promise<Media> {
if (file.size > config.validation.max_media_size) {
throw new ApiError(
413,
`File too large, max size is ${config.validation.max_media_size} bytes`,
);
}
if (
config.validation.enforce_mime_types &&
!config.validation.allowed_mime_types.includes(file.type)
) {
throw new ApiError(
415,
`File type ${file.type} is not allowed`,
`Allowed types: ${config.validation.allowed_mime_types.join(", ")}`,
);
}
const sha256 = new Bun.SHA256();
const isImage = file.type.startsWith("image/");
const metadata = isImage
? await sharp(await file.arrayBuffer()).metadata()
: null;
const mediaManager = new MediaManager(config);
const { path } = await mediaManager.addFile(file);
const url = Media.getUrl(path);
let thumbnailUrl = "";
if (options?.thumbnail) {
const { path } = await mediaManager.addFile(options.thumbnail);
thumbnailUrl = Media.getUrl(path);
}
const newAttachment = await Media.insert({
url,
thumbnailUrl: thumbnailUrl || undefined,
sha256: sha256.update(await file.arrayBuffer()).digest("hex"),
mimeType: file.type,
description: options?.description ?? "",
size: file.size,
width: metadata?.width ?? undefined,
height: metadata?.height ?? undefined,
});
if (config.media.conversion.convert_images) {
await mediaQueue.add(MediaJobType.ConvertMedia, {
attachmentId: newAttachment.id,
filename: file.name,
});
}
return newAttachment;
}
public get id(): string {
return this.data.id;
}
public static getUrl(name: string): string {
if (config.media.backend === MediaBackendType.Local) {
return new URL(`/media/${name}`, config.http.base_url).toString();
}
if (config.media.backend === MediaBackendType.S3) {
return new URL(`/${name}`, config.s3.public_url).toString();
}
return "";
}
public getMastodonType(): ApiAttachment["type"] {
if (this.data.mimeType.startsWith("image/")) {
return "image";
}
if (this.data.mimeType.startsWith("video/")) {
return "video";
}
if (this.data.mimeType.startsWith("audio/")) {
return "audio";
}
return "unknown";
}
public toApiMeta(): ApiAttachment["meta"] {
return {
width: this.data.width || undefined,
height: this.data.height || undefined,
fps: this.data.fps || undefined,
size:
this.data.width && this.data.height
? `${this.data.width}x${this.data.height}`
: undefined,
duration: this.data.duration || undefined,
length: undefined,
aspect:
this.data.width && this.data.height
? this.data.width / this.data.height
: undefined,
original: {
width: this.data.width || undefined,
height: this.data.height || undefined,
size:
this.data.width && this.data.height
? `${this.data.width}x${this.data.height}`
: undefined,
aspect:
this.data.width && this.data.height
? this.data.width / this.data.height
: undefined,
},
// Idk whether size or length is the right value
};
}
public toApi(): ApiAttachment {
return {
id: this.data.id,
type: this.getMastodonType(),
url: proxyUrl(this.data.url) ?? "",
remote_url: proxyUrl(this.data.remoteUrl),
preview_url: proxyUrl(this.data.thumbnailUrl || this.data.url),
text_url: null,
meta: this.toApiMeta(),
description: this.data.description,
blurhash: this.data.blurhash,
};
}
public toVersia(): ContentFormat {
return {
[this.data.mimeType]: {
content: this.data.url,
remote: true,
// TODO: Replace BlurHash with thumbhash
// thumbhash: this.data.blurhash ?? undefined,
description: this.data.description ?? undefined,
duration: this.data.duration ?? undefined,
fps: this.data.fps ?? undefined,
height: this.data.height ?? undefined,
size: this.data.size ?? undefined,
hash: this.data.sha256
? {
sha256: this.data.sha256,
}
: undefined,
width: this.data.width ?? undefined,
},
};
}
public static fromVersia(
attachmentToConvert: ContentFormat,
): Promise<Media> {
const key = Object.keys(attachmentToConvert)[0];
const value = attachmentToConvert[key];
return Media.insert({
mimeType: key,
url: value.content,
description: value.description || undefined,
duration: value.duration || undefined,
fps: value.fps || undefined,
height: value.height || undefined,
// biome-ignore lint/style/useExplicitLengthCheck: Biome thinks we're checking if size is not zero
size: value.size || undefined,
width: value.width || undefined,
sha256: value.hash?.sha256 || undefined,
// blurhash: value.blurhash || undefined,
});
}
}
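The file removed above stored attachments as flat columns (`url`, `mimeType`, `width`, `height`, `sha256`, `description`, `thumbnailUrl`, ...). Its replacement, `classes/database/media.ts` further down, folds all of that into a single ContentFormat-shaped `content` column plus an optional `thumbnail`. A sketch of how the same logical record moves between the two shapes (values are illustrative):

```ts
// Old (classes/database/attachment.ts, removed)
await Media.insert({
    url: "https://cdn.example/abc/cat.png",
    mimeType: "image/png",
    description: "A cat",
    size: 12345,
    width: 640,
    height: 480,
    sha256: "…",
});

// New (classes/database/media.ts, added)
await Media.insert({
    content: {
        "image/png": {
            content: "https://cdn.example/abc/cat.png",
            remote: true,
            description: "A cat",
            size: 12345,
            width: 640,
            height: 480,
            hash: { sha256: "…" },
        },
    },
});
```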

View file

@ -2,8 +2,8 @@ import { emojiValidatorWithColons, emojiValidatorWithIdentifiers } from "@/api";
import { proxyUrl } from "@/response";
import type { Emoji as APIEmoji } from "@versia/client/types";
import type { CustomEmojiExtension } from "@versia/federation/types";
import { type Instance, db } from "@versia/kit/db";
import { Emojis, type Instances } from "@versia/kit/tables";
import { type Instance, Media, db } from "@versia/kit/db";
import { Emojis, type Instances, type Medias } from "@versia/kit/tables";
import {
type InferInsertModel,
type InferSelectModel,
@ -17,11 +17,12 @@ import {
import { z } from "zod";
import { BaseInterface } from "./base.ts";
type EmojiWithInstance = InferSelectModel<typeof Emojis> & {
type EmojiType = InferSelectModel<typeof Emojis> & {
media: InferSelectModel<typeof Medias>;
instance: InferSelectModel<typeof Instances> | null;
};
export class Emoji extends BaseInterface<typeof Emojis, EmojiWithInstance> {
export class Emoji extends BaseInterface<typeof Emojis, EmojiType> {
public static schema = z.object({
id: z.string(),
shortcode: z.string(),
@ -32,7 +33,13 @@ export class Emoji extends BaseInterface<typeof Emojis, EmojiWithInstance> {
global: z.boolean(),
});
public static $type: EmojiWithInstance;
public static $type: EmojiType;
public media: Media;
public constructor(data: EmojiType) {
super(data);
this.media = new Media(data.media);
}
public async reload(): Promise<void> {
const reloaded = await Emoji.fromId(this.data.id);
@ -65,6 +72,7 @@ export class Emoji extends BaseInterface<typeof Emojis, EmojiWithInstance> {
orderBy,
with: {
instance: true,
media: true,
},
});
@ -86,15 +94,13 @@ export class Emoji extends BaseInterface<typeof Emojis, EmojiWithInstance> {
orderBy,
limit,
offset,
with: { ...extra?.with, instance: true },
with: { ...extra?.with, instance: true, media: true },
});
return found.map((s) => new Emoji(s));
}
public async update(
newEmoji: Partial<EmojiWithInstance>,
): Promise<EmojiWithInstance> {
public async update(newEmoji: Partial<EmojiType>): Promise<EmojiType> {
await db.update(Emojis).set(newEmoji).where(eq(Emojis.id, this.id));
const updated = await Emoji.fromId(this.data.id);
@ -107,7 +113,7 @@ export class Emoji extends BaseInterface<typeof Emojis, EmojiWithInstance> {
return updated.data;
}
public save(): Promise<EmojiWithInstance> {
public save(): Promise<EmojiType> {
return this.update(this.data);
}
@ -182,29 +188,25 @@ export class Emoji extends BaseInterface<typeof Emojis, EmojiWithInstance> {
return {
id: this.id,
shortcode: this.data.shortcode,
static_url: proxyUrl(this.data.url) ?? "", // TODO: Add static version
url: proxyUrl(this.data.url) ?? "",
static_url: proxyUrl(this.media.getUrl()) ?? "", // TODO: Add static version
url: proxyUrl(this.media.getUrl()) ?? "",
visible_in_picker: this.data.visibleInPicker,
category: this.data.category ?? undefined,
global: this.data.ownerId === null,
description: this.data.alt ?? undefined,
description:
this.media.data.content[this.media.getPreferredMimeType()]
.description ?? undefined,
};
}
public toVersia(): CustomEmojiExtension["emojis"][0] {
return {
name: `:${this.data.shortcode}:`,
url: {
[this.data.contentType]: {
content: this.data.url,
description: this.data.alt || undefined,
remote: true,
},
},
url: this.media.toVersia(),
};
}
public static fromVersia(
public static async fromVersia(
emoji: CustomEmojiExtension["emojis"][0],
instance: Instance,
): Promise<Emoji> {
@ -217,11 +219,11 @@ export class Emoji extends BaseInterface<typeof Emojis, EmojiWithInstance> {
throw new Error("Could not extract shortcode from emoji name");
}
const media = await Media.fromVersia(emoji.url);
return Emoji.insert({
shortcode,
url: Object.entries(emoji.url)[0][1].content,
alt: Object.entries(emoji.url)[0][1].description || undefined,
contentType: Object.keys(emoji.url)[0],
mediaId: media.id,
visibleInPicker: true,
instanceId: instance.id,
});
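The Emoji class now wraps its Media row directly (`this.media = new Media(data.media)`), so URL and description lookups go through the media object rather than emoji columns, and remote emojis create a Media row from the Versia payload first. A sketch of what callers see, using only members visible in the diff:

```ts
const emoji = await Emoji.fromId(id);

if (emoji) {
    emoji.media.getUrl(); // resolved from the preferred MIME type of media.content
    emoji.toApi().url;    // proxied media URL, as in toApi() above
    emoji.toVersia().url; // now simply emoji.media.toVersia()
}

// Federation ingest builds the Media row first:
// Emoji.fromVersia() awaits Media.fromVersia(emoji.url) and stores its mediaId.
```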

classes/database/media.ts (new file, 551 lines)
View file

@ -0,0 +1,551 @@
import { join } from "node:path";
import { mimeLookup } from "@/content_types.ts";
import { proxyUrl } from "@/response";
import type { Attachment as ApiAttachment } from "@versia/client/types";
import type { ContentFormat } from "@versia/federation/types";
import { db } from "@versia/kit/db";
import { Medias } from "@versia/kit/tables";
import { S3Client, SHA256, randomUUIDv7, write } from "bun";
import {
type InferInsertModel,
type InferSelectModel,
type SQL,
desc,
eq,
inArray,
} from "drizzle-orm";
import sharp from "sharp";
import { z } from "zod";
import { MediaBackendType } from "~/packages/config-manager/config.type";
import { config } from "~/packages/config-manager/index.ts";
import { ApiError } from "../errors/api-error.ts";
import { getMediaHash } from "../media/media-hasher.ts";
import { MediaJobType, mediaQueue } from "../queues/media.ts";
import { BaseInterface } from "./base.ts";
type MediaType = InferSelectModel<typeof Medias>;
export class Media extends BaseInterface<typeof Medias> {
public static schema: z.ZodType<ApiAttachment> = z.object({
id: z.string().uuid(),
type: z.enum(["unknown", "image", "gifv", "video", "audio"]),
url: z.string().url(),
remote_url: z.string().url().nullable(),
preview_url: z.string().url().nullable(),
text_url: z.string().url().nullable(),
meta: z
.object({
width: z.number().optional(),
height: z.number().optional(),
fps: z.number().optional(),
size: z.string().optional(),
duration: z.number().optional(),
length: z.string().optional(),
aspect: z.number().optional(),
original: z.object({
width: z.number().optional(),
height: z.number().optional(),
size: z.string().optional(),
aspect: z.number().optional(),
}),
})
.nullable(),
description: z.string().nullable(),
blurhash: z.string().nullable(),
});
public static $type: MediaType;
public async reload(): Promise<void> {
const reloaded = await Media.fromId(this.data.id);
if (!reloaded) {
throw new Error("Failed to reload attachment");
}
this.data = reloaded.data;
}
public static async fromId(id: string | null): Promise<Media | null> {
if (!id) {
return null;
}
return await Media.fromSql(eq(Medias.id, id));
}
public static async fromIds(ids: string[]): Promise<Media[]> {
return await Media.manyFromSql(inArray(Medias.id, ids));
}
public static async fromSql(
sql: SQL<unknown> | undefined,
orderBy: SQL<unknown> | undefined = desc(Medias.id),
): Promise<Media | null> {
const found = await db.query.Medias.findFirst({
where: sql,
orderBy,
});
if (!found) {
return null;
}
return new Media(found);
}
public static async manyFromSql(
sql: SQL<unknown> | undefined,
orderBy: SQL<unknown> | undefined = desc(Medias.id),
limit?: number,
offset?: number,
extra?: Parameters<typeof db.query.Medias.findMany>[0],
): Promise<Media[]> {
const found = await db.query.Medias.findMany({
where: sql,
orderBy,
limit,
offset,
with: extra?.with,
});
return found.map((s) => new Media(s));
}
public async update(newAttachment: Partial<MediaType>): Promise<MediaType> {
await db
.update(Medias)
.set(newAttachment)
.where(eq(Medias.id, this.id));
const updated = await Media.fromId(this.data.id);
if (!updated) {
throw new Error("Failed to update attachment");
}
this.data = updated.data;
return updated.data;
}
public save(): Promise<MediaType> {
return this.update(this.data);
}
public async delete(ids?: string[]): Promise<void> {
if (Array.isArray(ids)) {
await db.delete(Medias).where(inArray(Medias.id, ids));
} else {
await db.delete(Medias).where(eq(Medias.id, this.id));
}
// TODO: Also delete the file from the media manager
}
public static async insert(
data: InferInsertModel<typeof Medias>,
): Promise<Media> {
const inserted = (await db.insert(Medias).values(data).returning())[0];
const attachment = await Media.fromId(inserted.id);
if (!attachment) {
throw new Error("Failed to insert attachment");
}
return attachment;
}
private static async upload(file: File): Promise<{
path: string;
}> {
const fileName = file.name ?? randomUUIDv7();
const hash = await getMediaHash(file);
switch (config.media.backend) {
case MediaBackendType.Local: {
const path = join(
config.media.local_uploads_folder,
hash,
fileName,
);
await write(path, file);
return { path: join(hash, fileName) };
}
case MediaBackendType.S3: {
const path = join(hash, fileName);
if (!config.s3) {
throw new ApiError(500, "S3 configuration missing");
}
const client = new S3Client({
endpoint: config.s3.endpoint,
region: config.s3.region,
bucket: config.s3.bucket_name,
accessKeyId: config.s3.access_key,
secretAccessKey: config.s3.secret_access_key,
});
await client.write(path, file);
return { path };
}
}
}
public static async fromFile(
file: File,
options?: {
description?: string;
thumbnail?: File;
},
): Promise<Media> {
Media.checkFile(file);
const { path } = await Media.upload(file);
const url = Media.getUrl(path);
let thumbnailUrl = "";
if (options?.thumbnail) {
const { path } = await Media.upload(options.thumbnail);
thumbnailUrl = Media.getUrl(path);
}
const content = await Media.fileToContentFormat(file, url, {
description: options?.description,
});
const thumbnailContent = options?.thumbnail
? await Media.fileToContentFormat(options.thumbnail, thumbnailUrl, {
description: options?.description,
})
: undefined;
const newAttachment = await Media.insert({
content,
thumbnail: thumbnailContent,
});
if (config.media.conversion.convert_images) {
await mediaQueue.add(MediaJobType.ConvertMedia, {
attachmentId: newAttachment.id,
filename: file.name,
});
}
await mediaQueue.add(MediaJobType.CalculateMetadata, {
attachmentId: newAttachment.id,
filename: file.name,
});
return newAttachment;
}
public static async fromUrl(
uri: URL,
options?: {
description?: string;
},
): Promise<Media> {
const mimeType = await mimeLookup(uri);
const content: ContentFormat = {
[mimeType]: {
content: uri.toString(),
remote: true,
description: options?.description,
},
};
const newAttachment = await Media.insert({
content,
});
await mediaQueue.add(MediaJobType.CalculateMetadata, {
attachmentId: newAttachment.id,
// CalculateMetadata doesn't use the filename, but the type is annoying
// and requires it anyway
filename: "blank",
});
return newAttachment;
}
private static checkFile(file: File): void {
if (file.size > config.validation.max_media_size) {
throw new ApiError(
413,
`File too large, max size is ${config.validation.max_media_size} bytes`,
);
}
if (
config.validation.enforce_mime_types &&
!config.validation.allowed_mime_types.includes(file.type)
) {
throw new ApiError(
415,
`File type ${file.type} is not allowed`,
`Allowed types: ${config.validation.allowed_mime_types.join(", ")}`,
);
}
}
public async updateFromFile(file: File): Promise<void> {
Media.checkFile(file);
const { path } = await Media.upload(file);
const url = Media.getUrl(path);
const content = await Media.fileToContentFormat(file, url, {
description:
this.data.content[Object.keys(this.data.content)[0]]
.description || undefined,
});
await this.update({
content,
});
await mediaQueue.add(MediaJobType.CalculateMetadata, {
attachmentId: this.id,
filename: file.name,
});
}
public async updateFromUrl(uri: URL): Promise<void> {
const mimeType = await mimeLookup(uri);
const content: ContentFormat = {
[mimeType]: {
content: uri.toString(),
remote: true,
description:
this.data.content[Object.keys(this.data.content)[0]]
.description || undefined,
},
};
await this.update({
content,
});
await mediaQueue.add(MediaJobType.CalculateMetadata, {
attachmentId: this.id,
filename: "blank",
});
}
public async updateThumbnail(file: File): Promise<void> {
Media.checkFile(file);
const { path } = await Media.upload(file);
const url = Media.getUrl(path);
const content = await Media.fileToContentFormat(file, url);
await this.update({
thumbnail: content,
});
}
public async updateMetadata(
metadata: Partial<Omit<ContentFormat[keyof ContentFormat], "content">>,
): Promise<void> {
const content = this.data.content;
for (const type of Object.keys(content)) {
content[type] = {
...content[type],
...metadata,
};
}
await this.update({
content,
});
}
public get id(): string {
return this.data.id;
}
public static getUrl(name: string): string {
if (config.media.backend === MediaBackendType.Local) {
return new URL(`/media/${name}`, config.http.base_url).toString();
}
if (config.media.backend === MediaBackendType.S3) {
return new URL(`/${name}`, config.s3?.public_url).toString();
}
return "";
}
public getUrl(): string {
const type = this.getPreferredMimeType();
return this.data.content[type]?.content;
}
/**
* Gets favourite MIME type for the attachment
* Uses a hardcoded list of preferred types, for images
*
* @returns {string} Preferred MIME type
*/
public getPreferredMimeType(): string {
return Media.getPreferredMimeType(Object.keys(this.data.content));
}
/**
* Gets favourite MIME type from a list
* Uses a hardcoded list of preferred types, for images
*
* @returns {string} Preferred MIME type
*/
public static getPreferredMimeType(types: string[]): string {
const ranking = [
"image/svg+xml",
"image/avif",
"image/jxl",
"image/webp",
"image/heif",
"image/heif-sequence",
"image/heic",
"image/heic-sequence",
"image/apng",
"image/gif",
"image/png",
"image/jpeg",
"image/bmp",
];
return ranking.find((type) => types.includes(type)) ?? types[0];
}
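// Illustration (not part of this file): with several stored encodings, the ranking
// above picks the most modern entry and otherwise falls back to the first key, e.g.
//   Media.getPreferredMimeType(["image/jpeg", "image/webp"]) // => "image/webp"
//   Media.getPreferredMimeType(["video/mp4"])                // => "video/mp4"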
/**
* Maps MIME type to Mastodon attachment type
*
* @returns
*/
public getMastodonType(): ApiAttachment["type"] {
const type = this.getPreferredMimeType();
if (type.startsWith("image/")) {
return "image";
}
if (type.startsWith("video/")) {
return "video";
}
if (type.startsWith("audio/")) {
return "audio";
}
return "unknown";
}
/**
* Extracts metadata from a file and outputs as ContentFormat
*
* Does not calculate thumbhash (do this in a worker)
* @param file
* @param uri Uploaded file URI
* @param options Extra metadata, such as description
* @returns
*/
public static async fileToContentFormat(
file: File,
uri: string,
options?: Partial<{
description: string;
}>,
): Promise<ContentFormat> {
const buffer = await file.arrayBuffer();
const isImage = file.type.startsWith("image/");
const { width, height } = isImage ? await sharp(buffer).metadata() : {};
const hash = new SHA256().update(file).digest("hex");
// Missing: fps, duration
// Thumbhash should be added in a worker after the file is uploaded
return {
[file.type]: {
content: uri,
remote: true,
hash: {
sha256: hash,
},
width,
height,
description: options?.description,
size: file.size,
},
};
}
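// Illustration (not part of this file): for a 640x480 PNG upload served at `uri`,
// fileToContentFormat() above yields roughly:
//   {
//     "image/png": {
//       content: uri,
//       remote: true,
//       hash: { sha256: "<hex digest>" },
//       width: 640,
//       height: 480,
//       description: options?.description,
//       size: file.size,
//     },
//   }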
public toApiMeta(): ApiAttachment["meta"] {
const type = this.getPreferredMimeType();
const data = this.data.content[type];
const size =
data.width && data.height
? `${data.width}x${data.height}`
: undefined;
const aspect =
data.width && data.height ? data.width / data.height : undefined;
return {
width: data.width || undefined,
height: data.height || undefined,
fps: data.fps || undefined,
size,
// Idk whether size or length is the right value
duration: data.duration || undefined,
// Versia doesn't have a concept of length in ContentFormat
length: undefined,
aspect,
original: {
width: data.width || undefined,
height: data.height || undefined,
size,
aspect,
},
};
}
public toApi(): ApiAttachment {
const type = this.getPreferredMimeType();
const data = this.data.content[type];
// Thumbnail should only have a single MIME type
const thumbnailData =
this.data.thumbnail?.[Object.keys(this.data.thumbnail)[0]];
return {
id: this.data.id,
type: this.getMastodonType(),
url: proxyUrl(data.content) ?? "",
remote_url: null,
preview_url: proxyUrl(thumbnailData?.content),
text_url: null,
meta: this.toApiMeta(),
description: data.description || null,
blurhash: this.data.blurhash,
};
}
public toVersia(): ContentFormat {
return this.data.content;
}
public static fromVersia(contentFormat: ContentFormat): Promise<Media> {
return Media.insert({
content: contentFormat,
originalContent: contentFormat,
});
}
}
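Taken as a whole, the new class keeps the entire pipeline in one place: validation, upload to disk or S3 via Bun's built-in `S3Client`, a ContentFormat row insert, and background jobs for conversion and metadata. A compact usage sketch based on the methods above (`file`, `alt`, `thumbnail`, and `thumbnailFile` are placeholder bindings):

```ts
import { Media } from "@versia/kit/db";

// Local upload or remote ingestion — both return a Media row whose `content`
// column is a Versia ContentFormat keyed by MIME type.
const uploaded = await Media.fromFile(file, { description: alt, thumbnail });
const remote = await Media.fromUrl(new URL("https://example.com/pic.webp"), {
    description: alt,
});

// Later edits reuse the same row instead of re-creating attachments.
await uploaded.updateMetadata({ description: "new alt text" });
await uploaded.updateThumbnail(thumbnailFile);

// API and federation serialisation come from the same data.
uploaded.toApi();    // Mastodon-style Attachment
uploaded.toVersia(); // the stored ContentFormat, as-is
```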

View file

@ -17,7 +17,7 @@ import type {
import { Instance, db } from "@versia/kit/db";
import {
EmojiToNote,
Medias,
MediasToNotes,
NoteToMentions,
Notes,
Users,
@ -44,9 +44,9 @@ import {
import { config } from "~/packages/config-manager";
import { DeliveryJobType, deliveryQueue } from "../queues/delivery.ts";
import { Application } from "./application.ts";
import { Media } from "./attachment.ts";
import { BaseInterface } from "./base.ts";
import { Emoji } from "./emoji.ts";
import { Media } from "./media.ts";
import { User } from "./user.ts";
type NoteType = InferSelectModel<typeof Notes>;
@ -630,21 +630,14 @@ export class Note extends BaseInterface<typeof Notes, NoteTypeWithRelations> {
// Remove old attachments
await db
.update(Medias)
.set({
noteId: null,
})
.where(eq(Medias.noteId, this.data.id));
await db
.update(Medias)
.set({
.delete(MediasToNotes)
.where(eq(MediasToNotes.noteId, this.data.id));
await db.insert(MediasToNotes).values(
mediaAttachments.map((media) => ({
noteId: this.data.id,
})
.where(
inArray(
Medias.id,
mediaAttachments.map((i) => i.id),
),
mediaId: media.id,
})),
);
}
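Reconstructed from the added lines, note attachments now go through a `MediasToNotes` join table instead of a `noteId` column on `Medias`:

```ts
// Remove old attachment links, then re-link the current set
await db
    .delete(MediasToNotes)
    .where(eq(MediasToNotes.noteId, this.data.id));

await db.insert(MediasToNotes).values(
    mediaAttachments.map((media) => ({
        noteId: this.data.id,
        mediaId: media.id,
    })),
);
```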

View file

@ -10,8 +10,6 @@ import {
inArray,
} from "drizzle-orm";
import { z } from "zod";
import { MediaBackendType } from "~/packages/config-manager/config.type";
import { config } from "~/packages/config-manager/index.ts";
import {
transformOutputToUserWithRelations,
userExtrasTemplate,
@ -215,16 +213,6 @@ export class Notification extends BaseInterface<
return this.data.id;
}
public static getUrl(name: string): string {
if (config.media.backend === MediaBackendType.Local) {
return new URL(`/media/${name}`, config.http.base_url).toString();
}
if (config.media.backend === MediaBackendType.S3) {
return new URL(`/${name}`, config.s3.public_url).toString();
}
return "";
}
public async toApi(): Promise<APINotification> {
const account = new User(this.data.account);

View file

@ -67,6 +67,7 @@ export class Reaction extends BaseInterface<typeof Reactions, ReactionType> {
emoji: {
with: {
instance: true,
media: true,
},
},
author: true,
@ -98,6 +99,7 @@ export class Reaction extends BaseInterface<typeof Reactions, ReactionType> {
emoji: {
with: {
instance: true,
media: true,
},
},
author: true,

View file

@ -21,7 +21,7 @@ import type {
FollowReject as VersiaFollowReject,
User as VersiaUser,
} from "@versia/federation/types";
import { Notification, PushSubscription, db } from "@versia/kit/db";
import { Media, Notification, PushSubscription, db } from "@versia/kit/db";
import {
EmojiToUser,
Likes,
@ -69,6 +69,8 @@ type UserWithInstance = InferSelectModel<typeof Users> & {
type UserWithRelations = UserWithInstance & {
emojis: (typeof Emoji.$type)[];
avatar: typeof Media.$type | null;
header: typeof Media.$type | null;
followerCount: number;
followingCount: number;
statusCount: number;
@ -149,6 +151,16 @@ export class User extends BaseInterface<typeof Users, UserWithRelations> {
public static $type: UserWithRelations;
public avatar: Media | null;
public header: Media | null;
public constructor(data: UserWithRelations) {
super(data);
this.avatar = data.avatar ? new Media(data.avatar) : null;
this.header = data.header ? new Media(data.header) : null;
}
public async reload(): Promise<void> {
const reloaded = await User.fromId(this.data.id);
@ -728,9 +740,6 @@ export class User extends BaseInterface<typeof Users, UserWithRelations> {
user: VersiaUser,
instance: Instance,
): Promise<User> {
const avatar = user.avatar ? Object.entries(user.avatar)[0] : null;
const header = user.header ? Object.entries(user.header)[0] : null;
const data = {
username: user.username,
uri: user.uri,
@ -748,8 +757,6 @@ export class User extends BaseInterface<typeof Users, UserWithRelations> {
fields: user.fields ?? [],
updatedAt: new Date(user.created_at).toISOString(),
instanceId: instance.id,
avatar: avatar?.[1].content || "",
header: header?.[1].content || "",
displayName: user.display_name ?? "",
note: getBestContentType(user.bio).content,
publicKey: user.public_key.key,
@ -759,16 +766,6 @@ export class User extends BaseInterface<typeof Users, UserWithRelations> {
privacy: "public",
sensitive: false,
fields: [],
avatar: avatar
? {
content_type: avatar[0],
}
: undefined,
header: header
? {
content_type: header[0],
}
: undefined,
},
};
@ -784,14 +781,65 @@ export class User extends BaseInterface<typeof Users, UserWithRelations> {
// If it exists, simply update it
if (foundUser) {
await foundUser.update(data);
let avatar: Media | null = null;
let header: Media | null = null;
if (user.avatar) {
if (foundUser.avatar) {
avatar = new Media(
await foundUser.avatar.update({
content: user.avatar,
}),
);
} else {
avatar = await Media.insert({
content: user.avatar,
});
}
}
if (user.header) {
if (foundUser.header) {
header = new Media(
await foundUser.header.update({
content: user.header,
}),
);
} else {
header = await Media.insert({
content: user.header,
});
}
}
await foundUser.update({
...data,
avatarId: avatar?.id,
headerId: header?.id,
});
await foundUser.updateEmojis(emojis);
return foundUser;
}
// Else, create a new user
const newUser = await User.insert(data);
const avatar = user.avatar
? await Media.insert({
content: user.avatar,
})
: null;
const header = user.header
? await Media.insert({
content: user.header,
})
: null;
const newUser = await User.insert({
...data,
avatarId: avatar?.id,
headerId: header?.id,
});
await newUser.updateEmojis(emojis);
return newUser;
@ -846,13 +894,13 @@ export class User extends BaseInterface<typeof Users, UserWithRelations> {
* @returns The raw URL for the user's avatar
*/
public getAvatarUrl(config: Config): string {
if (!this.data.avatar) {
if (!this.avatar) {
return (
config.defaults.avatar ||
`https://api.dicebear.com/8.x/${config.defaults.placeholder_style}/svg?seed=${this.data.username}`
);
}
return this.data.avatar;
return this.avatar?.getUrl();
}
public static async generateKeys(): Promise<{
@ -886,14 +934,8 @@ export class User extends BaseInterface<typeof Users, UserWithRelations> {
password: string | undefined;
email: string | undefined;
bio?: string;
avatar?: {
url: string;
content_type: string;
};
header?: {
url: string;
content_type: string;
};
avatar?: Media;
header?: Media;
admin?: boolean;
skipPasswordHash?: boolean;
}): Promise<User> {
@ -911,8 +953,8 @@ export class User extends BaseInterface<typeof Users, UserWithRelations> {
: await Bun.password.hash(data.password),
email: data.email,
note: data.bio ?? "",
avatar: data.avatar?.url ?? config.defaults.avatar ?? "",
header: data.header?.url ?? config.defaults.avatar ?? "",
avatarId: data.avatar?.id,
headerId: data.header?.id,
isAdmin: data.admin ?? false,
publicKey: keys.public_key,
fields: [],
@ -924,16 +966,6 @@ export class User extends BaseInterface<typeof Users, UserWithRelations> {
privacy: "public",
sensitive: false,
fields: [],
avatar: data.avatar
? {
content_type: data.avatar.content_type,
}
: undefined,
header: data.header
? {
content_type: data.header.content_type,
}
: undefined,
},
})
.returning()
@ -957,10 +989,10 @@ export class User extends BaseInterface<typeof Users, UserWithRelations> {
* @returns The raw URL for the user's header
*/
public getHeaderUrl(config: Config): string {
if (!this.data.header) {
if (!this.header) {
return config.defaults.header || "";
}
return this.data.header;
return this.header.getUrl();
}
public getAcct(): string {
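
The net effect of the User changes is that `avatar` and `header` stop being URL strings on the users table and become optional Media relations (`avatarId`/`headerId`), hydrated into `user.avatar`/`user.header` by the constructor; federation ingest creates or updates those Media rows from the incoming ContentFormat, and local user creation accepts Media rows instead of `{ url, content_type }` objects. A sketch of the resulting accessors, using only members shown in the diff:

```ts
const user = await User.fromId(id);

if (user) {
    // Raw URLs now come from the Media rows, with the old config fallbacks preserved
    user.getAvatarUrl(config); // user.avatar?.getUrl(), or a default/dicebear URL
    user.getHeaderUrl(config); // user.header.getUrl(), or config.defaults.header

    // The underlying Media rows are available directly
    await user.avatar?.updateMetadata({ description: "profile picture" });
}
```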

View file

@ -38,12 +38,17 @@ export const findManyNotes = async (
...query,
with: {
...query?.with,
attachments: true,
attachments: {
with: {
media: true,
},
},
emojis: {
with: {
emoji: {
with: {
instance: true,
media: true,
},
},
},
@ -65,12 +70,17 @@ export const findManyNotes = async (
},
reblog: {
with: {
attachments: true,
attachments: {
with: {
media: true,
},
},
emojis: {
with: {
emoji: {
with: {
instance: true,
media: true,
},
},
},
@ -176,6 +186,7 @@ export const findManyNotes = async (
...mention.user,
endpoints: mention.user.endpoints,
})),
attachments: post.attachments.map((attachment) => attachment.media),
emojis: (post.emojis ?? []).map((emoji) => emoji.emoji),
reblog: post.reblog && {
...post.reblog,
@ -184,6 +195,9 @@ export const findManyNotes = async (
...mention.user,
endpoints: mention.user.endpoints,
})),
attachments: post.reblog.attachments.map(
(attachment) => attachment.media,
),
emojis: (post.reblog.emojis ?? []).map((emoji) => emoji.emoji),
reblogCount: Number(post.reblog.reblogCount),
likeCount: Number(post.reblog.likeCount),

View file

@ -2,6 +2,7 @@ import {
type Application,
type Emoji,
type Instance,
type Media,
type Role,
type Token,
type User,
@ -17,10 +18,13 @@ export const userRelations = {
emoji: {
with: {
instance: true,
media: true,
},
},
},
},
avatar: true,
header: true,
roles: {
with: {
role: true,
@ -75,6 +79,8 @@ export const transformOutputToUserWithRelations = (
followerCount: unknown;
followingCount: unknown;
statusCount: unknown;
avatar: typeof Media.$type | null;
header: typeof Media.$type | null;
emojis: {
userId: string;
emojiId: string;

View file

@ -1,136 +0,0 @@
/**
* @packageDocumentation
* @module Tests/DiskMediaDriver
*/
import {
type Mock,
beforeEach,
describe,
expect,
it,
mock,
spyOn,
} from "bun:test";
import { rm } from "node:fs/promises";
import { join } from "node:path";
import type { Config } from "~/packages/config-manager/config.type";
import type { getMediaHash } from "../media-hasher.ts";
import { DiskMediaDriver } from "./disk.ts";
describe("DiskMediaDriver", () => {
let diskDriver: DiskMediaDriver;
let mockConfig: Config;
let mockMediaHasher: Mock<typeof getMediaHash>;
let bunWriteSpy: Mock<typeof Bun.write>;
beforeEach(() => {
mockConfig = {
media: {
local_uploads_folder: "/test/uploads",
},
http: {
base_url: "http://localhost:3000",
},
} as Config;
mockMediaHasher = mock(() => Promise.resolve("testhash"));
mock.module("../media-hasher", () => ({
getMediaHash: mockMediaHasher,
}));
diskDriver = new DiskMediaDriver(mockConfig);
// @ts-expect-error: Replacing private property for testing
diskDriver.mediaHasher = mockMediaHasher;
// Mock fs.promises methods
mock.module("node:fs/promises", () => ({
writeFile: mock(() => Promise.resolve()),
rm: mock(() => {
return Promise.resolve();
}),
}));
spyOn(Bun, "file").mockImplementation(
mock(() => ({
exists: mock(() => Promise.resolve(true)),
arrayBuffer: mock(() => Promise.resolve(new ArrayBuffer(8))),
type: "image/webp",
lastModified: Date.now(),
})) as unknown as typeof Bun.file,
);
bunWriteSpy = spyOn(Bun, "write").mockImplementation(
mock(() => Promise.resolve(0)),
);
});
it("should add a file", async () => {
const file = new File(["test"], "test.webp", { type: "image/webp" });
const result = await diskDriver.addFile(file);
expect(mockMediaHasher).toHaveBeenCalledWith(file);
expect(bunWriteSpy).toHaveBeenCalledWith(
join("/test/uploads", "testhash", "test.webp"),
expect.any(ArrayBuffer),
);
expect(result).toEqual({
uploadedFile: file,
path: join("testhash", "test.webp"),
hash: "testhash",
});
});
it("should properly handle a Blob instead of a File", async () => {
const file = new Blob(["test"], { type: "image/webp" });
const result = await diskDriver.addFile(file as File);
expect(mockMediaHasher).toHaveBeenCalledWith(file);
expect(bunWriteSpy).toHaveBeenCalledWith(
expect.stringContaining("testhash"),
expect.any(ArrayBuffer),
);
expect(result).toEqual({
uploadedFile: expect.any(Blob),
path: expect.stringContaining("testhash"),
hash: "testhash",
});
});
it("should get a file by hash", async () => {
const hash = "testhash";
const databaseHashFetcher = mock(() => Promise.resolve("test.webp"));
const result = await diskDriver.getFileByHash(
hash,
databaseHashFetcher,
);
expect(databaseHashFetcher).toHaveBeenCalledWith(hash);
expect(Bun.file).toHaveBeenCalledWith(
join("/test/uploads", "test.webp"),
);
expect(result).toBeInstanceOf(File);
expect(result?.name).toBe("test.webp");
expect(result?.type).toBe("image/webp");
});
it("should get a file by filename", async () => {
const filename = "test.webp";
const result = await diskDriver.getFile(filename);
expect(Bun.file).toHaveBeenCalledWith(join("/test/uploads", filename));
expect(result).toBeInstanceOf(File);
expect(result?.name).toBe(filename);
expect(result?.type).toBe("image/webp");
});
it("should delete a file by URL", async () => {
const url = "http://localhost:3000/uploads/testhash/test.webp";
await diskDriver.deleteFileByUrl(url);
expect(rm).toHaveBeenCalledWith(join("/test/uploads", "testhash"), {
recursive: true,
});
});
});

View file

@ -1,96 +0,0 @@
/**
* @packageDocumentation
* @module MediaManager/Drivers
*/
import { rm } from "node:fs/promises";
import { join } from "node:path";
import type { Config } from "~/packages/config-manager/config.type";
import { getMediaHash } from "../media-hasher.ts";
import type { UploadedFileMetadata } from "../media-manager.ts";
import type { MediaDriver } from "./media-driver.ts";
/**
* Implements the MediaDriver interface for disk storage.
*/
export class DiskMediaDriver implements MediaDriver {
/**
* Creates a new DiskMediaDriver instance.
* @param config - The configuration object.
*/
public constructor(private config: Config) {}
/**
* @inheritdoc
*/
public async addFile(
file: File,
): Promise<Omit<UploadedFileMetadata, "blurhash">> {
// Sometimes the file name is not available, so we generate a random name
const fileName = file.name ?? crypto.randomUUID();
const hash = await getMediaHash(file);
const path = join(hash, fileName);
const fullPath = join(this.config.media.local_uploads_folder, path);
await Bun.write(fullPath, await file.arrayBuffer());
return {
uploadedFile: file,
path,
hash,
};
}
/**
* @inheritdoc
*/
public async getFileByHash(
hash: string,
databaseHashFetcher: (sha256: string) => Promise<string | null>,
): Promise<File | null> {
const filename = await databaseHashFetcher(hash);
if (!filename) {
return null;
}
return this.getFile(filename);
}
/**
* @inheritdoc
*/
public async getFile(filename: string): Promise<File | null> {
const fullPath = join(this.config.media.local_uploads_folder, filename);
try {
const file = Bun.file(fullPath);
if (await file.exists()) {
return new File([await file.arrayBuffer()], filename, {
type: file.type,
lastModified: file.lastModified,
});
}
} catch {
// File doesn't exist or can't be read
}
return null;
}
/**
* @inheritdoc
*/
public async deleteFileByUrl(url: string): Promise<void> {
const urlObj = new URL(url);
// Check if URL is from the local uploads folder
if (urlObj.host !== new URL(this.config.http.base_url).host) {
return Promise.resolve();
}
const hash = urlObj.pathname.split("/").at(-2);
if (!hash) {
throw new Error("Invalid URL");
}
const dirPath = join(this.config.media.local_uploads_folder, hash);
await rm(dirPath, { recursive: true });
}
}

View file

@ -1,43 +0,0 @@
/**
* @packageDocumentation
* @module MediaManager/Drivers
*/
import type { UploadedFileMetadata } from "../media-manager.ts";
/**
* Represents a media storage driver.
*/
export interface MediaDriver {
/**
* Adds a file to the media storage.
* @param file - The file to add.
* @returns A promise that resolves to the metadata of the uploaded file.
*/
addFile(file: File): Promise<Omit<UploadedFileMetadata, "blurhash">>;
/**
* Retrieves a file from the media storage by its hash.
* @param hash - The hash of the file to retrieve.
* @param databaseHashFetcher - A function to fetch the filename from the database.
* @returns A promise that resolves to the file or null if not found.
*/
getFileByHash(
hash: string,
databaseHashFetcher: (sha256: string) => Promise<string | null>,
): Promise<File | null>;
/**
* Retrieves a file from the media storage by its filename.
* @param filename - The name of the file to retrieve.
* @returns A promise that resolves to the file or null if not found.
*/
getFile(filename: string): Promise<File | null>;
/**
* Deletes a file from the media storage by its URL.
* @param url - The URL of the file to delete.
* @returns A promise that resolves when the file is deleted.
*/
deleteFileByUrl(url: string): Promise<void>;
}

View file

@ -1,126 +0,0 @@
/**
* @packageDocumentation
* @module Tests/S3MediaDriver
*/
import { type Mock, beforeEach, describe, expect, it, mock } from "bun:test";
import type { S3Client } from "@bradenmacdonald/s3-lite-client";
import type { Config } from "~/packages/config-manager/config.type";
import type { getMediaHash } from "../media-hasher.ts";
import { S3MediaDriver } from "./s3.ts";
describe("S3MediaDriver", () => {
let s3Driver: S3MediaDriver;
let mockConfig: Config;
let mockS3Client: S3Client;
let mockMediaHasher: Mock<typeof getMediaHash>;
beforeEach(() => {
mockConfig = {
s3: {
endpoint: "s3.amazonaws.com",
region: "us-west-2",
bucket_name: "test-bucket",
access_key: "test-key",
secret_access_key: "test-secret",
},
} as Config;
mockS3Client = mock(() => ({
putObject: mock(() => Promise.resolve()),
getObject: mock(() =>
Promise.resolve({
arrayBuffer: (): Promise<ArrayBuffer> =>
Promise.resolve(new ArrayBuffer(8)),
headers: new Headers({ "Content-Type": "image/webp" }),
}),
),
statObject: mock(() => Promise.resolve()),
deleteObject: mock(() => Promise.resolve()),
}))() as unknown as S3Client;
mockMediaHasher = mock(() => Promise.resolve("testhash"));
mock.module("../media-hasher", () => ({
getMediaHash: mockMediaHasher,
}));
s3Driver = new S3MediaDriver(mockConfig);
// @ts-expect-error: Replacing private property for testing
s3Driver.s3Client = mockS3Client;
// @ts-expect-error: Replacing private property for testing
s3Driver.mediaHasher = mockMediaHasher;
});
it("should add a file", async () => {
const file = new File(["test"], "test.webp", { type: "image/webp" });
const result = await s3Driver.addFile(file);
expect(mockMediaHasher).toHaveBeenCalledWith(file);
expect(mockS3Client.putObject).toHaveBeenCalledWith(
"testhash/test.webp",
expect.any(ReadableStream),
{ size: file.size, metadata: { "Content-Type": file.type } },
);
expect(result).toEqual({
uploadedFile: file,
path: "testhash/test.webp",
hash: "testhash",
});
});
it("should handle a Blob instead of a File", async () => {
const file = new Blob(["test"], { type: "image/webp" });
const result = await s3Driver.addFile(file as File);
expect(mockMediaHasher).toHaveBeenCalledWith(file);
expect(mockS3Client.putObject).toHaveBeenCalledWith(
expect.stringContaining("testhash"),
expect.any(ReadableStream),
{
size: file.size,
metadata: {
"Content-Type": file.type,
},
},
);
expect(result).toEqual({
uploadedFile: expect.any(Blob),
path: expect.stringContaining("testhash"),
hash: "testhash",
});
});
it("should get a file by hash", async () => {
const hash = "testhash";
const databaseHashFetcher = mock(() => Promise.resolve("test.webp"));
const result = await s3Driver.getFileByHash(hash, databaseHashFetcher);
expect(databaseHashFetcher).toHaveBeenCalledWith(hash);
expect(mockS3Client.statObject).toHaveBeenCalledWith("test.webp");
expect(mockS3Client.getObject).toHaveBeenCalledWith("test.webp");
expect(result).toBeInstanceOf(File);
expect(result?.name).toBe("test.webp");
expect(result?.type).toBe("image/webp");
});
it("should get a file by filename", async () => {
const filename = "test.webp";
const result = await s3Driver.getFile(filename);
expect(mockS3Client.statObject).toHaveBeenCalledWith(filename);
expect(mockS3Client.getObject).toHaveBeenCalledWith(filename);
expect(result).toBeInstanceOf(File);
expect(result?.name).toBe(filename);
expect(result?.type).toBe("image/webp");
});
it("should delete a file by URL", async () => {
const url = "https://test-bucket.s3.amazonaws.com/test/test.webp";
await s3Driver.deleteFileByUrl(url);
expect(mockS3Client.deleteObject).toHaveBeenCalledWith(
"test/test.webp",
);
});
});

View file

@ -1,97 +0,0 @@
/**
* @packageDocumentation
* @module MediaManager/Drivers
*/
import { S3Client } from "@bradenmacdonald/s3-lite-client";
import type { Config } from "~/packages/config-manager/config.type";
import { getMediaHash } from "../media-hasher.ts";
import type { UploadedFileMetadata } from "../media-manager.ts";
import type { MediaDriver } from "./media-driver.ts";
/**
* Implements the MediaDriver interface for S3 storage.
*/
export class S3MediaDriver implements MediaDriver {
private s3Client: S3Client;
/**
* Creates a new S3MediaDriver instance.
* @param config - The configuration object.
*/
public constructor(config: Config) {
this.s3Client = new S3Client({
endPoint: config.s3.endpoint,
useSSL: true,
region: config.s3.region || "auto",
bucket: config.s3.bucket_name,
accessKey: config.s3.access_key,
secretKey: config.s3.secret_access_key,
});
}
/**
* @inheritdoc
*/
public async addFile(
file: File,
): Promise<Omit<UploadedFileMetadata, "blurhash">> {
// Sometimes the file name is not available, so we generate a random name
const fileName = file.name ?? crypto.randomUUID();
const hash = await getMediaHash(file);
const path = `${hash}/${fileName}`;
await this.s3Client.putObject(path, file.stream(), {
size: file.size,
metadata: {
"Content-Type": file.type,
},
});
return {
uploadedFile: file,
path,
hash,
};
}
/**
* @inheritdoc
*/
public async getFileByHash(
hash: string,
databaseHashFetcher: (sha256: string) => Promise<string | null>,
): Promise<File | null> {
const filename = await databaseHashFetcher(hash);
if (!filename) {
return null;
}
return this.getFile(filename);
}
/**
* @inheritdoc
*/
public async getFile(filename: string): Promise<File | null> {
try {
await this.s3Client.statObject(filename);
const file = await this.s3Client.getObject(filename);
const arrayBuffer = await file.arrayBuffer();
return new File([arrayBuffer], filename, {
type: file.headers.get("Content-Type") || undefined,
});
} catch {
return null;
}
}
/**
* @inheritdoc
*/
public async deleteFileByUrl(url: string): Promise<void> {
const urlObj = new URL(url);
const path = urlObj.pathname.slice(1); // Remove leading slash
await this.s3Client.deleteObject(path);
}
}

View file

@ -1,123 +0,0 @@
/**
* @packageDocumentation
* @module Tests/MediaManager
*/
import { beforeEach, describe, expect, it, mock } from "bun:test";
import type { Config } from "~/packages/config-manager/config.type";
import { MediaBackendType } from "~/packages/config-manager/config.type";
import { DiskMediaDriver } from "./drivers/disk.ts";
import { S3MediaDriver } from "./drivers/s3.ts";
import { MediaManager } from "./media-manager.ts";
import type { ImageConversionPreprocessor } from "./preprocessors/image-conversion.ts";
describe("MediaManager", () => {
let mediaManager: MediaManager;
let mockConfig: Config;
let mockS3Driver: S3MediaDriver;
let mockImagePreprocessor: ImageConversionPreprocessor;
beforeEach(() => {
mockConfig = {
media: {
backend: "s3",
conversion: {
convert_images: true,
convert_to: "image/webp",
},
},
s3: {
endpoint: "s3.amazonaws.com",
region: "us-west-2",
bucket_name: "test-bucket",
access_key: "test-key",
secret_access_key: "test-secret",
},
} as Config;
mockS3Driver = mock(() => ({
addFile: mock(() =>
Promise.resolve({
uploadedFile: new File(["hey"], "test.webp"),
path: "test/test.webp",
hash: "testhash",
}),
),
getFileByHash: mock(() => {
return Promise.resolve(new File(["hey"], "test.webp"));
}),
getFile: mock(() =>
Promise.resolve(new File(["hey"], "test.webp")),
),
deleteFileByUrl: mock(() => Promise.resolve()),
}))() as unknown as S3MediaDriver;
mockImagePreprocessor = mock(() => ({
process: mock((_: File) =>
Promise.resolve(new File(["hey"], "test.webp")),
),
}))() as unknown as ImageConversionPreprocessor;
mediaManager = new MediaManager(mockConfig);
// @ts-expect-error: Accessing private property for testing
mediaManager.driver = mockS3Driver;
// @ts-expect-error: Accessing private property for testing
mediaManager.preprocessors = [mockImagePreprocessor];
});
it("should initialize with the correct driver based on config", () => {
const s3Manager = new MediaManager(mockConfig);
// @ts-expect-error: Accessing private property for testing
expect(s3Manager.driver).toBeInstanceOf(S3MediaDriver);
mockConfig.media.backend = MediaBackendType.Local;
const diskManager = new MediaManager(mockConfig);
// @ts-expect-error: Accessing private property for testing
expect(diskManager.driver).toBeInstanceOf(DiskMediaDriver);
});
it("should add a file with preprocessing", async () => {
const file = new File(["test"], "test.jpg", { type: "image/jpeg" });
const result = await mediaManager.addFile(file);
expect(mockImagePreprocessor.process).toHaveBeenCalledWith(file);
expect(mockS3Driver.addFile).toHaveBeenCalled();
expect(result).toEqual({
uploadedFile: new File(["hey"], "test.webp"),
path: "test/test.webp",
hash: "testhash",
});
});
it("should get a file by hash", async () => {
const hash = "testhash";
const databaseHashFetcher = mock(() => Promise.resolve("test.webp"));
const result = await mediaManager.getFileByHash(
hash,
databaseHashFetcher,
);
expect(mockS3Driver.getFileByHash).toHaveBeenCalledWith(
hash,
databaseHashFetcher,
);
expect(result).toBeInstanceOf(File);
expect(result?.name).toBe("test.webp");
});
it("should get a file by filename", async () => {
const filename = "test.webp";
const result = await mediaManager.getFile(filename);
expect(mockS3Driver.getFile).toHaveBeenCalledWith(filename);
expect(result).toBeInstanceOf(File);
expect(result?.name).toBe("test.webp");
});
it("should delete a file by URL", async () => {
const url = "https://test-bucket.s3.amazonaws.com/test/test.webp";
await mediaManager.deleteFileByUrl(url);
expect(mockS3Driver.deleteFileByUrl).toHaveBeenCalledWith(url);
});
});

View file

@ -1,111 +0,0 @@
/**
* @packageDocumentation
* @module MediaManager
*/
import type { Config } from "~/packages/config-manager/config.type";
import { DiskMediaDriver } from "./drivers/disk.ts";
import type { MediaDriver } from "./drivers/media-driver.ts";
import { S3MediaDriver } from "./drivers/s3.ts";
import type { MediaPreprocessor } from "./preprocessors/media-preprocessor.ts";
/**
* Manages media operations with support for different storage drivers and preprocessing plugins.
* @example
* const mediaManager = new MediaManager(config);
*
* const file = new File(["hello"], "hello.txt");
*
* const { path, hash, blurhash } = await mediaManager.addFile(file);
*
* const retrievedFile = await mediaManager.getFileByHash(hash, fetchHashFromDatabase);
*
* await mediaManager.deleteFileByUrl(path);
*/
export class MediaManager {
private driver: MediaDriver;
private preprocessors: MediaPreprocessor[] = [];
/**
* Creates a new MediaManager instance.
* @param config - The configuration object.
*/
public constructor(private config: Config) {
this.driver = this.initializeDriver();
}
/**
* Initializes the appropriate media driver based on the configuration.
* @returns An instance of MediaDriver.
*/
private initializeDriver(): MediaDriver {
switch (this.config.media.backend) {
case "s3":
return new S3MediaDriver(this.config);
case "local":
return new DiskMediaDriver(this.config);
default:
throw new Error(
`Unsupported media backend: ${this.config.media.backend}`,
);
}
}
/**
* Adds a file to the media storage.
* @param file - The file to add.
* @returns A promise that resolves to the metadata of the uploaded file.
*/
public async addFile(file: File): Promise<UploadedFileMetadata> {
let processedFile = file;
for (const preprocessor of this.preprocessors) {
const result = await preprocessor.process(processedFile);
processedFile = result.file;
}
const uploadResult = await this.driver.addFile(processedFile);
return uploadResult;
}
/**
* Retrieves a file from the media storage by its hash.
* @param hash - The hash of the file to retrieve.
* @param databaseHashFetcher - A function to fetch the filename from the database.
* @returns A promise that resolves to the file or null if not found.
*/
public getFileByHash(
hash: string,
databaseHashFetcher: (sha256: string) => Promise<string | null>,
): Promise<File | null> {
return this.driver.getFileByHash(hash, databaseHashFetcher);
}
/**
* Retrieves a file from the media storage by its filename.
* @param filename - The name of the file to retrieve.
* @returns A promise that resolves to the file or null if not found.
*/
public getFile(filename: string): Promise<File | null> {
return this.driver.getFile(filename);
}
/**
* Deletes a file from the media storage by its URL.
* @param url - The URL of the file to delete.
* @returns A promise that resolves when the file is deleted.
*/
public deleteFileByUrl(url: string): Promise<void> {
return this.driver.deleteFileByUrl(url);
}
}
/**
* Represents the metadata of an uploaded file.
*/
export interface UploadedFileMetadata {
uploadedFile: File;
path: string;
hash: string;
}
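Note: the MediaManager class removed above is not replaced one-to-one; based on the call sites updated later in this diff (the emoji CLI commands and the media worker), uploads now go through the Media model directly. A minimal sketch of the new flow, assuming only the Media.fromFile, media.id and media.getUrl() calls visible in those diffs; everything else here is illustrative:

import { Media } from "@versia/kit/db";

// Upload a file through the new Media model and read back its public URL.
// The sample file is illustrative.
const file = new File(["hello"], "hello.txt", { type: "text/plain" });
const media = await Media.fromFile(file);

console.log(media.id, media.getUrl());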

View file

@ -1,14 +1,8 @@
import { beforeEach, describe, expect, it, mock } from "bun:test";
import { describe, expect, it, mock } from "bun:test";
import sharp from "sharp";
import { BlurhashPreprocessor } from "./blurhash.ts";
import { calculateBlurhash } from "./blurhash.ts";
describe("BlurhashPreprocessor", () => {
let preprocessor: BlurhashPreprocessor;
beforeEach(() => {
preprocessor = new BlurhashPreprocessor();
});
it("should calculate blurhash for a valid image", async () => {
const inputBuffer = await sharp({
create: {
@ -24,21 +18,19 @@ describe("BlurhashPreprocessor", () => {
const inputFile = new File([inputBuffer], "test.png", {
type: "image/png",
});
const result = await preprocessor.process(inputFile);
const result = await calculateBlurhash(inputFile);
expect(result.file).toBe(inputFile);
expect(result.blurhash).toBeTypeOf("string");
expect(result.blurhash).not.toBe("");
expect(result).toBeTypeOf("string");
expect(result).not.toBe("");
});
it("should return null blurhash for an invalid image", async () => {
const invalidFile = new File(["invalid image data"], "invalid.png", {
type: "image/png",
});
const result = await preprocessor.process(invalidFile);
const result = await calculateBlurhash(invalidFile);
expect(result.file).toBe(invalidFile);
expect(result.blurhash).toBeNull();
expect(result).toBeNull();
});
it("should handle errors during blurhash calculation", async () => {
@ -63,9 +55,8 @@ describe("BlurhashPreprocessor", () => {
},
}));
const result = await preprocessor.process(inputFile);
const result = await calculateBlurhash(inputFile);
expect(result.file).toBe(inputFile);
expect(result.blurhash).toBeNull();
expect(result).toBeNull();
});
});

View file

@ -1,16 +1,12 @@
import { encode } from "blurhash";
import sharp from "sharp";
import type { MediaPreprocessor } from "./media-preprocessor.ts";
export class BlurhashPreprocessor implements MediaPreprocessor {
public async process(
file: File,
): Promise<{ file: File; blurhash: string | null }> {
export const calculateBlurhash = async (file: File): Promise<string | null> => {
try {
const arrayBuffer = await file.arrayBuffer();
const metadata = await sharp(arrayBuffer).metadata();
const blurhash = await new Promise<string | null>((resolve) => {
return new Promise<string | null>((resolve) => {
sharp(arrayBuffer)
.raw()
.ensureAlpha()
@ -35,10 +31,7 @@ export class BlurhashPreprocessor implements MediaPreprocessor {
}
});
});
return { file, blurhash };
} catch {
return { file, blurhash: null };
}
}
return null;
}
};
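A short usage sketch of the new calculateBlurhash helper, mirroring how the worker below calls it on an already-downloaded File; the in-memory test image is illustrative:

import sharp from "sharp";
import { calculateBlurhash } from "./blurhash.ts";

// Build a tiny in-memory PNG and compute its blurhash.
const buffer = await sharp({
    create: { width: 32, height: 32, channels: 3, background: "#ff0000" },
})
    .png()
    .toBuffer();

const blurhash = await calculateBlurhash(
    new File([buffer], "red.png", { type: "image/png" }),
);

// A blurhash string, or null if the image could not be decoded.
console.log(blurhash);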

View file

@ -1,10 +1,9 @@
import { beforeEach, describe, expect, it } from "bun:test";
import { beforeEach, describe, expect, it, mock } from "bun:test";
import sharp from "sharp";
import type { Config } from "~/packages/config-manager/config.type";
import { ImageConversionPreprocessor } from "./image-conversion.ts";
import { convertImage } from "./image-conversion.ts";
describe("ImageConversionPreprocessor", () => {
let preprocessor: ImageConversionPreprocessor;
let mockConfig: Config;
beforeEach(() => {
@ -18,7 +17,9 @@ describe("ImageConversionPreprocessor", () => {
},
} as Config;
preprocessor = new ImageConversionPreprocessor(mockConfig);
mock.module("~/packages/config-manager/index.ts", () => ({
config: mockConfig,
}));
});
it("should convert a JPEG image to WebP", async () => {
@ -36,12 +37,12 @@ describe("ImageConversionPreprocessor", () => {
const inputFile = new File([inputBuffer], "test.jpg", {
type: "image/jpeg",
});
const result = await preprocessor.process(inputFile);
const result = await convertImage(inputFile);
expect(result.file.type).toBe("image/webp");
expect(result.file.name).toBe("test.webp");
expect(result.type).toBe("image/webp");
expect(result.name).toBe("test.webp");
const resultBuffer = await result.file.arrayBuffer();
const resultBuffer = await result.arrayBuffer();
const metadata = await sharp(resultBuffer).metadata();
expect(metadata.format).toBe("webp");
});
@ -52,38 +53,36 @@ describe("ImageConversionPreprocessor", () => {
const inputFile = new File([svgContent], "test.svg", {
type: "image/svg+xml",
});
const result = await preprocessor.process(inputFile);
const result = await convertImage(inputFile);
expect(result.file).toBe(inputFile);
expect(result).toBe(inputFile);
});
it("should convert SVG when convert_vector is true", async () => {
mockConfig.media.conversion.convert_vector = true;
preprocessor = new ImageConversionPreprocessor(mockConfig);
const svgContent =
'<svg xmlns="http://www.w3.org/2000/svg"><rect width="100" height="100" fill="red"/></svg>';
const inputFile = new File([svgContent], "test.svg", {
type: "image/svg+xml",
});
const result = await preprocessor.process(inputFile);
const result = await convertImage(inputFile);
expect(result.file.type).toBe("image/webp");
expect(result.file.name).toBe("test.webp");
expect(result.type).toBe("image/webp");
expect(result.name).toBe("test.webp");
});
it("should not convert unsupported file types", async () => {
const inputFile = new File(["test content"], "test.txt", {
type: "text/plain",
});
const result = await preprocessor.process(inputFile);
const result = await convertImage(inputFile);
expect(result.file).toBe(inputFile);
expect(result).toBe(inputFile);
});
it("should throw an error for unsupported output format", async () => {
mockConfig.media.conversion.convert_to = "image/bmp";
preprocessor = new ImageConversionPreprocessor(mockConfig);
const inputBuffer = await sharp({
create: {
@ -100,7 +99,7 @@ describe("ImageConversionPreprocessor", () => {
type: "image/png",
});
await expect(preprocessor.process(inputFile)).rejects.toThrow(
await expect(convertImage(inputFile)).rejects.toThrow(
"Unsupported output format: image/bmp",
);
});
@ -121,12 +120,12 @@ describe("ImageConversionPreprocessor", () => {
const inputFile = new File([inputBuffer], "animated.gif", {
type: "image/gif",
});
const result = await preprocessor.process(inputFile);
const result = await convertImage(inputFile);
expect(result.file.type).toBe("image/webp");
expect(result.file.name).toBe("animated.webp");
expect(result.type).toBe("image/webp");
expect(result.name).toBe("animated.webp");
const resultBuffer = await result.file.arrayBuffer();
const resultBuffer = await result.arrayBuffer();
const metadata = await sharp(resultBuffer).metadata();
expect(metadata.format).toBe("webp");
});
@ -148,9 +147,9 @@ describe("ImageConversionPreprocessor", () => {
"test image with spaces.png",
{ type: "image/png" },
);
const result = await preprocessor.process(inputFile);
const result = await convertImage(inputFile);
expect(result.file.type).toBe("image/webp");
expect(result.file.name).toBe("test image with spaces.webp");
expect(result.type).toBe("image/webp");
expect(result.name).toBe("test image with spaces.webp");
});
});

View file

@ -4,8 +4,7 @@
*/
import sharp from "sharp";
import type { Config } from "~/packages/config-manager/config.type";
import type { MediaPreprocessor } from "./media-preprocessor.ts";
import { config } from "~/packages/config-manager/index.ts";
/**
* Supported input media formats.
@ -33,24 +32,51 @@ const supportedOutputFormats = [
];
/**
* Implements the MediaPreprocessor interface for image conversion.
* Checks if a file is convertible.
* @param file - The file to check.
* @returns True if the file is convertible, false otherwise.
*/
export class ImageConversionPreprocessor implements MediaPreprocessor {
/**
* Creates a new ImageConversionPreprocessor instance.
* @param config - The configuration object.
*/
public constructor(private config: Config) {}
const isConvertible = (file: File): boolean => {
if (
file.type === "image/svg+xml" &&
!config.media.conversion.convert_vector
) {
return false;
}
return supportedInputFormats.includes(file.type);
};
/**
* @inheritdoc
* Extracts the filename from a path.
* @param path - The path to extract the filename from.
* @returns The extracted filename.
*/
public async process(file: File): Promise<{ file: File }> {
if (!this.isConvertible(file)) {
return { file };
const extractFilenameFromPath = (path: string): string => {
const pathParts = path.split(/(?<!\\)\//);
return pathParts[pathParts.length - 1];
};
/**
* Replaces the file extension in the filename.
* @param fileName - The original filename.
* @param newExtension - The new extension.
* @returns The filename with the new extension.
*/
const getReplacedFileName = (fileName: string, newExtension: string): string =>
extractFilenameFromPath(fileName).replace(/\.[^/.]+$/, `.${newExtension}`);
/**
* Converts an image file to the format specified in the configuration.
*
* @param file - The image file to convert.
* @returns The converted image file.
*/
export const convertImage = async (file: File): Promise<File> => {
if (!isConvertible(file)) {
return file;
}
const targetFormat = this.config.media.conversion.convert_to;
const targetFormat = config.media.conversion.convert_to;
if (!supportedOutputFormats.includes(targetFormat)) {
throw new Error(`Unsupported output format: ${targetFormat}`);
}
@ -67,58 +93,12 @@ export class ImageConversionPreprocessor implements MediaPreprocessor {
| "tiff";
const convertedBuffer = await sharpCommand[commandName]().toBuffer();
return {
file: new File(
return new File(
[convertedBuffer],
ImageConversionPreprocessor.getReplacedFileName(
file.name,
commandName,
),
getReplacedFileName(file.name, commandName),
{
type: targetFormat,
lastModified: Date.now(),
},
),
);
};
}
/**
* Checks if a file is convertible.
* @param file - The file to check.
* @returns True if the file is convertible, false otherwise.
*/
private isConvertible(file: File): boolean {
if (
file.type === "image/svg+xml" &&
!this.config.media.conversion.convert_vector
) {
return false;
}
return supportedInputFormats.includes(file.type);
}
/**
* Replaces the file extension in the filename.
* @param fileName - The original filename.
* @param newExtension - The new extension.
* @returns The filename with the new extension.
*/
private static getReplacedFileName(
fileName: string,
newExtension: string,
): string {
return ImageConversionPreprocessor.extractFilenameFromPath(
fileName,
).replace(/\.[^/.]+$/, `.${newExtension}`);
}
/**
* Extracts the filename from a path.
* @param path - The path to extract the filename from.
* @returns The extracted filename.
*/
private static extractFilenameFromPath(path: string): string {
const pathParts = path.split(/(?<!\\)\//);
return pathParts[pathParts.length - 1];
}
}
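Usage sketch for the new convertImage helper, mirroring the tests above; the input image is illustrative and the output format follows the instance's [media.conversion] settings (image/webp in the default config):

import sharp from "sharp";
import { convertImage } from "./image-conversion.ts";

// Create a small JPEG in memory and convert it.
const jpeg = await sharp({
    create: { width: 64, height: 64, channels: 3, background: "#00ff00" },
})
    .jpeg()
    .toBuffer();

const input = new File([jpeg], "sample.jpg", { type: "image/jpeg" });
const output = await convertImage(input);

// Unsupported inputs (e.g. text/plain, or SVG with convert_vector = false)
// are returned unchanged; an unsupported convert_to value makes the call throw.
console.log(output.name, output.type);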

View file

@ -1,16 +0,0 @@
/**
* @packageDocumentation
* @module MediaManager/Preprocessors
*/
/**
* Represents a media preprocessor.
*/
export interface MediaPreprocessor {
/**
* Processes a file before it's stored.
* @param file - The file to process.
* @returns A promise that resolves to the processed file.
*/
process(file: File): Promise<{ file: File } & Record<string, unknown>>;
}

View file

@ -3,6 +3,7 @@ import { connection } from "~/utils/redis.ts";
export enum MediaJobType {
ConvertMedia = "convertMedia",
CalculateMetadata = "calculateMetadata",
}
export type MediaJobData = {

View file

@ -2,9 +2,8 @@ import { Media } from "@versia/kit/db";
import { Worker } from "bullmq";
import { config } from "~/packages/config-manager";
import { connection } from "~/utils/redis.ts";
import { MediaManager } from "../media/media-manager.ts";
import { BlurhashPreprocessor } from "../media/preprocessors/blurhash.ts";
import { ImageConversionPreprocessor } from "../media/preprocessors/image-conversion.ts";
import { calculateBlurhash } from "../media/preprocessors/blurhash.ts";
import { convertImage } from "../media/preprocessors/image-conversion.ts";
import {
type MediaJobData,
MediaJobType,
@ -29,62 +28,72 @@ export const getMediaWorker = (): Worker<MediaJobData, void, MediaJobType> =>
);
}
const processor = new ImageConversionPreprocessor(config);
const blurhashProcessor = new BlurhashPreprocessor();
const hash = attachment?.data.sha256;
if (!hash) {
throw new Error(
`Attachment [${attachmentId}] has no hash, cannot process.`,
);
}
await job.log(`Processing attachment [${attachmentId}]`);
await job.log(
`Fetching file from [${attachment.data.url}]`,
`Fetching file from [${attachment.getUrl()}]`,
);
// Download the file and process it.
const blob = await (
await fetch(attachment.data.url)
await fetch(attachment.getUrl())
).blob();
const file = new File([blob], filename);
await job.log(`Converting attachment [${attachmentId}]`);
const { file: processedFile } =
await processor.process(file);
await job.log(`Generating blurhash for [${attachmentId}]`);
const { blurhash } = await blurhashProcessor.process(file);
const mediaManager = new MediaManager(config);
const processedFile = await convertImage(file);
await job.log(`Uploading attachment [${attachmentId}]`);
const { path, uploadedFile } =
await mediaManager.addFile(processedFile);
await attachment.updateFromFile(processedFile);
const url = Media.getUrl(path);
await job.log(
`✔ Finished processing attachment [${attachmentId}]`,
);
const sha256 = new Bun.SHA256();
break;
}
case MediaJobType.CalculateMetadata: {
// Calculate blurhash
const { attachmentId } = job.data;
await job.log(`Fetching attachment ID [${attachmentId}]`);
const attachment = await Media.fromId(attachmentId);
if (!attachment) {
throw new Error(
`Attachment not found: [${attachmentId}]`,
);
}
await job.log(`Processing attachment [${attachmentId}]`);
await job.log(
`Fetching file from [${attachment.getUrl()}]`,
);
// Download the file and process it.
const blob = await (
await fetch(attachment.getUrl())
).blob();
// Filename is not important for blurhash
const file = new File([blob], "");
await job.log(`Generating blurhash for [${attachmentId}]`);
const blurhash = await calculateBlurhash(file);
await attachment.update({
url,
sha256: sha256
.update(await uploadedFile.arrayBuffer())
.digest("hex"),
mimeType: uploadedFile.type,
size: uploadedFile.size,
blurhash,
});
await job.log(
`✔ Finished processing attachment [${attachmentId}]`,
);
break;
}
}
},
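For context, a sketch of how jobs for this worker could be enqueued with BullMQ. Only the job type strings, the payload fields and the redis connection import are taken from this diff; the queue name "media" is an assumption and must match whatever getMediaWorker() registers:

import { Queue } from "bullmq";
import { connection } from "~/utils/redis.ts";

// Hypothetical producer side of the media pipeline.
const mediaQueue = new Queue("media", { connection });

// Convert the attachment first, then calculate its blurhash metadata.
await mediaQueue.add("convertMedia", {
    attachmentId: "00000000-0000-0000-0000-000000000000",
    filename: "photo.png",
});
await mediaQueue.add("calculateMetadata", {
    attachmentId: "00000000-0000-0000-0000-000000000000",
});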

View file

@ -1,9 +1,9 @@
import { parseUserAddress, userAddressValidator } from "@/api";
import { Args, type Command, Flags, type Interfaces } from "@oclif/core";
import { type Emoji, Instance, User, db } from "@versia/kit/db";
import { Emoji, Instance, User } from "@versia/kit/db";
import { Emojis, Instances, Users } from "@versia/kit/tables";
import chalk from "chalk";
import { and, eq, getTableColumns, like } from "drizzle-orm";
import { and, eq, inArray, like } from "drizzle-orm";
import { BaseCommand } from "./base.ts";
export type FlagsType<T extends typeof Command> = Interfaces.InferredFlags<
@ -203,14 +203,7 @@ export abstract class EmojiFinderCommand<
this.args = args as ArgsType<T>;
}
public async findEmojis(): Promise<
Omit<
typeof Emoji.$type & {
instanceUrl: string | null;
},
"instance"
>[]
> {
public async findEmojis(): Promise<Emoji[]> {
// Check if there are asterisks in the identifier but no pattern flag, warn the user if so
if (this.args.identifier.includes("*") && !this.flags.pattern) {
this.log(
@ -228,22 +221,26 @@ export abstract class EmojiFinderCommand<
? this.args.identifier.replace(/\*/g, "%")
: this.args.identifier;
return await db
.select({
...getTableColumns(Emojis),
instanceUrl: Instances.baseUrl,
})
.from(Emojis)
.leftJoin(Instances, eq(Emojis.instanceId, Instances.id))
.where(
const instanceIds =
this.flags.type === "instance"
? (
await Instance.manyFromSql(
operator(Instances.baseUrl, identifier),
)
).map((instance) => instance.id)
: undefined;
return await Emoji.manyFromSql(
and(
this.flags.type === "shortcode"
? operator(Emojis.shortcode, identifier)
: undefined,
this.flags.type === "instance"
? operator(Instances.baseUrl, identifier)
instanceIds && instanceIds.length > 0
? inArray(Emojis.instanceId, instanceIds)
: undefined,
),
undefined,
this.flags.limit,
);
}
}
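findEmojis() now returns Emoji model instances through Emoji.manyFromSql. A hedged sketch of the same call pattern outside the command, keeping the argument order used above (filter, an unused second argument, limit):

import { Emoji } from "@versia/kit/db";
import { Emojis } from "@versia/kit/tables";
import { like } from "drizzle-orm";

// Fetch up to 10 emojis whose shortcode starts with "cat".
// The second argument is passed as undefined, exactly as in findEmojis();
// what it controls is not shown in this diff.
const emojis = await Emoji.manyFromSql(
    like(Emojis.shortcode, "cat%"),
    undefined,
    10,
);

for (const emoji of emojis) {
    console.log(emoji.data.shortcode);
}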

View file

@ -4,7 +4,6 @@ import { Emojis } from "@versia/kit/tables";
import chalk from "chalk";
import { and, eq, isNull } from "drizzle-orm";
import ora from "ora";
import { MediaManager } from "~/classes/media/media-manager";
import { BaseCommand } from "~/cli/base";
import { config } from "~/packages/config-manager";
@ -97,35 +96,22 @@ export default class EmojiAdd extends BaseCommand<typeof EmojiAdd> {
);
}
const mediaManager = new MediaManager(config);
const spinner = ora("Uploading emoji").start();
const uploaded = await mediaManager.addFile(file).catch((e: Error) => {
spinner.fail();
this.log(`${chalk.red("✗")} Error: ${chalk.red(e.message)}`);
return null;
});
if (!uploaded) {
return this.exit(1);
}
const media = await Media.fromFile(file);
spinner.succeed();
await Emoji.insert({
shortcode: args.shortcode,
url: Media.getUrl(uploaded.path),
mediaId: media.id,
visibleInPicker: true,
contentType: uploaded.uploadedFile.type,
});
this.log(
`${chalk.green("✓")} Created emoji ${chalk.green(
args.shortcode,
)} with url ${chalk.blue(
chalk.underline(Media.getUrl(uploaded.path)),
)}`,
)} with url ${chalk.blue(chalk.underline(media.getUrl()))}`,
);
this.exit(0);

View file

@ -1,14 +1,9 @@
import confirm from "@inquirer/confirm";
import { Flags } from "@oclif/core";
import { db } from "@versia/kit/db";
import { Emojis } from "@versia/kit/tables";
import chalk from "chalk";
import { eq } from "drizzle-orm";
import ora from "ora";
import { MediaManager } from "~/classes/media/media-manager";
import { EmojiFinderCommand } from "~/cli/classes";
import { formatArray } from "~/cli/utils/format";
import { config } from "~/packages/config-manager";
export default class EmojiDelete extends EmojiFinderCommand<
typeof EmojiDelete
@ -55,13 +50,10 @@ export default class EmojiDelete extends EmojiFinderCommand<
flags.print &&
this.log(
formatArray(emojis, [
"id",
"shortcode",
"alt",
"contentType",
"instanceUrl",
]),
formatArray(
emojis.map((e) => e.data),
["id", "shortcode", "alt", "contentType", "instanceUrl"],
),
);
if (flags.confirm) {
@ -80,15 +72,11 @@ export default class EmojiDelete extends EmojiFinderCommand<
const spinner = ora("Deleting emoji(s)").start();
for (const emoji of emojis) {
spinner.text = `Deleting emoji ${chalk.gray(emoji.shortcode)} (${
spinner.text = `Deleting emoji ${chalk.gray(emoji.data.shortcode)} (${
emojis.findIndex((e) => e.id === emoji.id) + 1
}/${emojis.length})`;
const mediaManager = new MediaManager(config);
await mediaManager.deleteFileByUrl(emoji.url);
await db.delete(Emojis).where(eq(Emojis.id, emoji.id));
await emoji.delete();
}
spinner.succeed("Emoji(s) deleted");

View file

@ -6,7 +6,6 @@ import { and, inArray, isNull } from "drizzle-orm";
import { lookup } from "mime-types";
import ora from "ora";
import { unzip } from "unzipit";
import { MediaManager } from "~/classes/media/media-manager";
import { BaseCommand } from "~/cli/base";
import { config } from "~/packages/config-manager";
@ -169,8 +168,6 @@ export default class EmojiImport extends BaseCommand<typeof EmojiImport> {
const importSpinner = ora("Importing emojis").start();
const mediaManager = new MediaManager(config);
const successfullyImported: MetaType["emojis"] = [];
for (const emoji of newEmojis) {
@ -197,26 +194,12 @@ export default class EmojiImport extends BaseCommand<typeof EmojiImport> {
type: contentType,
});
const uploaded = await mediaManager
.addFile(newFile)
.catch((e: Error) => {
this.log(
`${chalk.red("✗")} Error uploading ${chalk.red(
emoji.emoji.name,
)}: ${chalk.red(e.message)}`,
);
return null;
});
if (!uploaded) {
continue;
}
const media = await Media.fromFile(newFile);
await Emoji.insert({
shortcode: emoji.emoji.name,
url: Media.getUrl(uploaded.path),
mediaId: media.id,
visibleInPicker: true,
contentType: uploaded.uploadedFile.type,
});
successfullyImported.push(emoji);

View file

@ -125,9 +125,8 @@ enabled = false
[media]
# Can be "s3" or "local", where "local" uploads the file to the local filesystem
# If you need to change this value after setting up your instance, you must move all the files
# from one backend to the other manually (the CLI will have an option to do this later)
# TODO: Add CLI command to move files
# Changing this value will not retroactively apply to existing data
# Don't forget to fill in the s3 config :3
backend = "s3"
# Whether to check the hash of media when uploading to avoid duplication
deduplicate_media = true
@ -145,7 +144,7 @@ convert_to = "image/webp"
convert_vector = false
# [s3]
# Can be left blank if you don't use the S3 media backend
# Can be left commented if you don't use the S3 media backend
# endpoint = ""
# access_key = "XXXXX"
# secret_access_key = "XXX"

View file

@ -486,14 +486,7 @@
"secret_access_key",
"public_url"
],
"additionalProperties": false,
"default": {
"endpoint": "",
"access_key": "",
"secret_access_key": "",
"bucket_name": "versia",
"public_url": "https://cdn.example.com"
}
"additionalProperties": false
},
"validation": {
"type": "object",

View file

@ -1,6 +1,10 @@
import type { Config } from "drizzle-kit";
import { config } from "~/packages/config-manager/index.ts";
import { config } from "./packages/config-manager/index.ts";
/**
* Drizzle can't properly resolve imports with top-level await, so uncomment
* this line when generating migrations.
*/
export default {
dialect: "postgresql",
out: "./drizzle/migrations",

View file

@ -0,0 +1,26 @@
CREATE TABLE "MediasToNote" (
"mediaId" uuid NOT NULL,
"noteId" uuid NOT NULL
);
--> statement-breakpoint
ALTER TABLE "Medias" DROP CONSTRAINT "Medias_noteId_Notes_id_fk";
--> statement-breakpoint
ALTER TABLE "Medias" ADD COLUMN "content" jsonb NOT NULL;--> statement-breakpoint
ALTER TABLE "Medias" ADD COLUMN "original_content" jsonb;--> statement-breakpoint
ALTER TABLE "Medias" ADD COLUMN "thumbnail" jsonb;--> statement-breakpoint
ALTER TABLE "MediasToNote" ADD CONSTRAINT "MediasToNote_mediaId_Medias_id_fk" FOREIGN KEY ("mediaId") REFERENCES "public"."Medias"("id") ON DELETE cascade ON UPDATE cascade;--> statement-breakpoint
ALTER TABLE "MediasToNote" ADD CONSTRAINT "MediasToNote_noteId_Notes_id_fk" FOREIGN KEY ("noteId") REFERENCES "public"."Notes"("id") ON DELETE cascade ON UPDATE cascade;--> statement-breakpoint
CREATE INDEX "MediasToNote_mediaId_index" ON "MediasToNote" USING btree ("mediaId");--> statement-breakpoint
CREATE INDEX "MediasToNote_noteId_index" ON "MediasToNote" USING btree ("noteId");--> statement-breakpoint
ALTER TABLE "Medias" DROP COLUMN "url";--> statement-breakpoint
ALTER TABLE "Medias" DROP COLUMN "remote_url";--> statement-breakpoint
ALTER TABLE "Medias" DROP COLUMN "thumbnail_url";--> statement-breakpoint
ALTER TABLE "Medias" DROP COLUMN "mime_type";--> statement-breakpoint
ALTER TABLE "Medias" DROP COLUMN "description";--> statement-breakpoint
ALTER TABLE "Medias" DROP COLUMN "sha256";--> statement-breakpoint
ALTER TABLE "Medias" DROP COLUMN "fps";--> statement-breakpoint
ALTER TABLE "Medias" DROP COLUMN "duration";--> statement-breakpoint
ALTER TABLE "Medias" DROP COLUMN "width";--> statement-breakpoint
ALTER TABLE "Medias" DROP COLUMN "height";--> statement-breakpoint
ALTER TABLE "Medias" DROP COLUMN "size";--> statement-breakpoint
ALTER TABLE "Medias" DROP COLUMN "noteId";

View file

@ -0,0 +1,5 @@
ALTER TABLE "Emojis" ADD COLUMN "mediaId" uuid;--> statement-breakpoint
ALTER TABLE "Emojis" ADD CONSTRAINT "Emojis_mediaId_Medias_id_fk" FOREIGN KEY ("mediaId") REFERENCES "public"."Medias"("id") ON DELETE cascade ON UPDATE cascade;--> statement-breakpoint
ALTER TABLE "Emojis" DROP COLUMN "url";--> statement-breakpoint
ALTER TABLE "Emojis" DROP COLUMN "alt";--> statement-breakpoint
ALTER TABLE "Emojis" DROP COLUMN "content_type";

View file

@ -0,0 +1 @@
ALTER TABLE "Emojis" ALTER COLUMN "mediaId" SET NOT NULL;

View file

@ -0,0 +1,6 @@
ALTER TABLE "Users" ADD COLUMN "avatarId" uuid;--> statement-breakpoint
ALTER TABLE "Users" ADD COLUMN "headerId" uuid;--> statement-breakpoint
ALTER TABLE "Users" ADD CONSTRAINT "Users_avatarId_Medias_id_fk" FOREIGN KEY ("avatarId") REFERENCES "public"."Medias"("id") ON DELETE set null ON UPDATE cascade;--> statement-breakpoint
ALTER TABLE "Users" ADD CONSTRAINT "Users_headerId_Medias_id_fk" FOREIGN KEY ("headerId") REFERENCES "public"."Medias"("id") ON DELETE set null ON UPDATE cascade;--> statement-breakpoint
ALTER TABLE "Users" DROP COLUMN "avatar";--> statement-breakpoint
ALTER TABLE "Users" DROP COLUMN "header";

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -295,6 +295,34 @@
"when": 1737644734501,
"tag": "0041_bright_doctor_spectrum",
"breakpoints": true
},
{
"idx": 42,
"version": "7",
"when": 1737660317024,
"tag": "0042_swift_the_phantom",
"breakpoints": true
},
{
"idx": 43,
"version": "7",
"when": 1738080562679,
"tag": "0043_mute_jigsaw",
"breakpoints": true
},
{
"idx": 44,
"version": "7",
"when": 1738082427051,
"tag": "0044_quiet_jasper_sitwell",
"breakpoints": true
},
{
"idx": 45,
"version": "7",
"when": 1738087527661,
"tag": "0045_polite_mikhail_rasputin",
"breakpoints": true
}
]
}

View file

@ -52,10 +52,13 @@ export const Challenges = pgTable("Challenges", {
export const Emojis = pgTable("Emojis", {
id: id(),
shortcode: text("shortcode").notNull(),
url: text("url").notNull(),
mediaId: uuid("mediaId")
.references(() => Medias.id, {
onDelete: "cascade",
onUpdate: "cascade",
})
.notNull(),
visibleInPicker: boolean("visible_in_picker").notNull(),
alt: text("alt"),
contentType: text("content_type").notNull(),
instanceId: uuid("instanceId").references(() => Instances.id, {
onDelete: "cascade",
onUpdate: "cascade",
@ -67,6 +70,19 @@ export const Emojis = pgTable("Emojis", {
category: text("category"),
});
export const EmojisRelations = relations(Emojis, ({ one, many }) => ({
media: one(Medias, {
fields: [Emojis.mediaId],
references: [Medias.id],
}),
instance: one(Instances, {
fields: [Emojis.instanceId],
references: [Instances.id],
}),
users: many(EmojiToUser),
notes: many(EmojiToNote),
}));
export const PushSubscriptions = pgTable("PushSubscriptions", {
id: id(),
endpoint: text("endpoint").notNull(),
@ -231,6 +247,17 @@ export const Likes = pgTable("Likes", {
createdAt: createdAt(),
});
export const LikesRelations = relations(Likes, ({ one }) => ({
liker: one(Users, {
fields: [Likes.likerId],
references: [Users.id],
}),
liked: one(Notes, {
fields: [Likes.likedId],
references: [Notes.id],
}),
}));
export const Relationships = pgTable("Relationships", {
id: id(),
ownerId: uuid("ownerId")
@ -260,6 +287,19 @@ export const Relationships = pgTable("Relationships", {
updatedAt: updatedAt(),
});
export const RelationshipsRelations = relations(Relationships, ({ one }) => ({
owner: one(Users, {
fields: [Relationships.ownerId],
references: [Users.id],
relationName: "RelationshipToOwner",
}),
subject: one(Users, {
fields: [Relationships.subjectId],
references: [Users.id],
relationName: "RelationshipToSubject",
}),
}));
export const Applications = pgTable(
"Applications",
{
@ -303,26 +343,36 @@ export const Tokens = pgTable("Tokens", {
}),
});
export const TokensRelations = relations(Tokens, ({ one }) => ({
user: one(Users, {
fields: [Tokens.userId],
references: [Users.id],
}),
application: one(Applications, {
fields: [Tokens.applicationId],
references: [Applications.id],
}),
}));
export const Medias = pgTable("Medias", {
id: id(),
url: text("url").notNull(),
remoteUrl: text("remote_url"),
thumbnailUrl: text("thumbnail_url"),
mimeType: text("mime_type").notNull(),
description: text("description"),
content: jsonb("content").notNull().$type<ContentFormat>(),
originalContent: jsonb("original_content").$type<ContentFormat>(),
thumbnail: jsonb("thumbnail").$type<ContentFormat>(),
blurhash: text("blurhash"),
sha256: text("sha256"),
fps: integer("fps"),
duration: integer("duration"),
width: integer("width"),
height: integer("height"),
size: integer("size"),
noteId: uuid("noteId").references(() => Notes.id, {
onDelete: "cascade",
onUpdate: "cascade",
}),
});
export const MediasRelations = relations(Medias, ({ many }) => ({
notes: many(Notes),
emojis: many(Emojis),
avatars: many(Users, {
relationName: "UserToAvatar",
}),
headers: many(Users, {
relationName: "UserToHeader",
}),
}));
export const Notifications = pgTable("Notifications", {
id: id(),
type: text("type").notNull(),
@ -346,6 +396,23 @@ export const Notifications = pgTable("Notifications", {
dismissed: boolean("dismissed").default(false).notNull(),
});
export const NotificationsRelations = relations(Notifications, ({ one }) => ({
account: one(Users, {
fields: [Notifications.accountId],
references: [Users.id],
relationName: "NotificationToAccount",
}),
notified: one(Users, {
fields: [Notifications.notifiedId],
references: [Users.id],
relationName: "NotificationToNotified",
}),
note: one(Notes, {
fields: [Notifications.noteId],
references: [Notes.id],
}),
}));
export const Notes = pgTable("Notes", {
id: id(),
uri: uri(),
@ -381,6 +448,50 @@ export const Notes = pgTable("Notes", {
contentSource: text("content_source").default("").notNull(),
});
export const NotesRelations = relations(Notes, ({ many, one }) => ({
emojis: many(EmojiToNote),
author: one(Users, {
fields: [Notes.authorId],
references: [Users.id],
relationName: "NoteToAuthor",
}),
attachments: many(MediasToNotes, {
relationName: "AttachmentToNote",
}),
mentions: many(NoteToMentions),
reblog: one(Notes, {
fields: [Notes.reblogId],
references: [Notes.id],
relationName: "NoteToReblogs",
}),
usersThatHavePinned: many(UserToPinnedNotes),
reply: one(Notes, {
fields: [Notes.replyId],
references: [Notes.id],
relationName: "NoteToReplies",
}),
quote: one(Notes, {
fields: [Notes.quotingId],
references: [Notes.id],
relationName: "NoteToQuotes",
}),
application: one(Applications, {
fields: [Notes.applicationId],
references: [Applications.id],
}),
quotes: many(Notes, {
relationName: "NoteToQuotes",
}),
replies: many(Notes, {
relationName: "NoteToReplies",
}),
likes: many(Likes),
reblogs: many(Notes, {
relationName: "NoteToReblogs",
}),
notifications: many(Notifications),
}));
export const Instances = pgTable("Instances", {
id: id(),
baseUrl: text("base_url").notNull(),
@ -399,6 +510,11 @@ export const Instances = pgTable("Instances", {
extensions: jsonb("extensions").$type<InstanceMetadata["extensions"]>(),
});
export const InstancesRelations = relations(Instances, ({ many }) => ({
users: many(Users),
emojis: many(Emojis),
}));
export const OpenIdAccounts = pgTable("OpenIdAccounts", {
id: id(),
userId: uuid("userId").references(() => Users.id, {
@ -447,8 +563,14 @@ export const Users = pgTable(
};
}
>(),
avatar: text("avatar").notNull(),
header: text("header").notNull(),
avatarId: uuid("avatarId").references(() => Medias.id, {
onDelete: "set null",
onUpdate: "cascade",
}),
headerId: uuid("headerId").references(() => Medias.id, {
onDelete: "set null",
onUpdate: "cascade",
}),
createdAt: createdAt(),
updatedAt: updatedAt(),
isBot: boolean("is_bot").default(false).notNull(),
@ -472,6 +594,48 @@ export const Users = pgTable(
],
);
export const UsersRelations = relations(Users, ({ many, one }) => ({
emojis: many(EmojiToUser),
pinnedNotes: many(UserToPinnedNotes),
notes: many(Notes, {
relationName: "NoteToAuthor",
}),
avatar: one(Medias, {
fields: [Users.avatarId],
references: [Medias.id],
relationName: "UserToAvatar",
}),
header: one(Medias, {
fields: [Users.headerId],
references: [Medias.id],
relationName: "UserToHeader",
}),
likes: many(Likes),
relationships: many(Relationships, {
relationName: "RelationshipToOwner",
}),
relationshipSubjects: many(Relationships, {
relationName: "RelationshipToSubject",
}),
notificationsMade: many(Notifications, {
relationName: "NotificationToAccount",
}),
notificationsReceived: many(Notifications, {
relationName: "NotificationToNotified",
}),
openIdAccounts: many(OpenIdAccounts),
flags: many(Flags),
modNotes: many(ModNotes),
modTags: many(ModTags),
tokens: many(Tokens),
instance: one(Instances, {
fields: [Users.instanceId],
references: [Instances.id],
}),
mentionedIn: many(NoteToMentions),
roles: many(RoleToUsers),
}));
export const OpenIdLoginFlows = pgTable("OpenIdLoginFlows", {
id: id(),
codeVerifier: text("code_verifier").notNull(),
@ -755,6 +919,17 @@ export const EmojiToNote = pgTable(
],
);
export const EmojisToNotesRelations = relations(EmojiToNote, ({ one }) => ({
emoji: one(Emojis, {
fields: [EmojiToNote.emojiId],
references: [Emojis.id],
}),
note: one(Notes, {
fields: [EmojiToNote.noteId],
references: [Notes.id],
}),
}));
export const NoteToMentions = pgTable(
"NoteToMentions",
{
@ -777,6 +952,20 @@ export const NoteToMentions = pgTable(
],
);
export const NotesToMentionsRelations = relations(
NoteToMentions,
({ one }) => ({
note: one(Notes, {
fields: [NoteToMentions.noteId],
references: [Notes.id],
}),
user: one(Users, {
fields: [NoteToMentions.userId],
references: [Users.id],
}),
}),
);
export const UserToPinnedNotes = pgTable(
"UserToPinnedNotes",
{
@ -799,80 +988,6 @@ export const UserToPinnedNotes = pgTable(
],
);
export const AttachmentsRelations = relations(Medias, ({ one }) => ({
notes: one(Notes, {
fields: [Medias.noteId],
references: [Notes.id],
}),
}));
export const UsersRelations = relations(Users, ({ many, one }) => ({
emojis: many(EmojiToUser),
pinnedNotes: many(UserToPinnedNotes),
notes: many(Notes, {
relationName: "NoteToAuthor",
}),
likes: many(Likes),
relationships: many(Relationships, {
relationName: "RelationshipToOwner",
}),
relationshipSubjects: many(Relationships, {
relationName: "RelationshipToSubject",
}),
notificationsMade: many(Notifications, {
relationName: "NotificationToAccount",
}),
notificationsReceived: many(Notifications, {
relationName: "NotificationToNotified",
}),
openIdAccounts: many(OpenIdAccounts),
flags: many(Flags),
modNotes: many(ModNotes),
modTags: many(ModTags),
tokens: many(Tokens),
instance: one(Instances, {
fields: [Users.instanceId],
references: [Instances.id],
}),
mentionedIn: many(NoteToMentions),
roles: many(RoleToUsers),
}));
export const RelationshipsRelations = relations(Relationships, ({ one }) => ({
owner: one(Users, {
fields: [Relationships.ownerId],
references: [Users.id],
relationName: "RelationshipToOwner",
}),
subject: one(Users, {
fields: [Relationships.subjectId],
references: [Users.id],
relationName: "RelationshipToSubject",
}),
}));
export const TokensRelations = relations(Tokens, ({ one }) => ({
user: one(Users, {
fields: [Tokens.userId],
references: [Users.id],
}),
application: one(Applications, {
fields: [Tokens.applicationId],
references: [Applications.id],
}),
}));
export const NotesToUsersRelations = relations(NoteToMentions, ({ one }) => ({
note: one(Notes, {
fields: [NoteToMentions.noteId],
references: [Notes.id],
}),
user: one(Users, {
fields: [NoteToMentions.userId],
references: [Users.id],
}),
}));
export const UserToPinnedNotesRelations = relations(
UserToPinnedNotes,
({ one }) => ({
@ -887,97 +1002,33 @@ export const UserToPinnedNotesRelations = relations(
}),
);
export const NotesRelations = relations(Notes, ({ many, one }) => ({
emojis: many(EmojiToNote),
author: one(Users, {
fields: [Notes.authorId],
references: [Users.id],
relationName: "NoteToAuthor",
export const MediasToNotes = pgTable(
"MediasToNote",
{
mediaId: uuid("mediaId")
.notNull()
.references(() => Medias.id, {
onDelete: "cascade",
onUpdate: "cascade",
}),
attachments: many(Medias),
mentions: many(NoteToMentions),
reblog: one(Notes, {
fields: [Notes.reblogId],
references: [Notes.id],
relationName: "NoteToReblogs",
noteId: uuid("noteId")
.notNull()
.references(() => Notes.id, {
onDelete: "cascade",
onUpdate: "cascade",
}),
usersThatHavePinned: many(UserToPinnedNotes),
reply: one(Notes, {
fields: [Notes.replyId],
references: [Notes.id],
relationName: "NoteToReplies",
}),
quote: one(Notes, {
fields: [Notes.quotingId],
references: [Notes.id],
relationName: "NoteToQuotes",
}),
application: one(Applications, {
fields: [Notes.applicationId],
references: [Applications.id],
}),
quotes: many(Notes, {
relationName: "NoteToQuotes",
}),
replies: many(Notes, {
relationName: "NoteToReplies",
}),
likes: many(Likes),
reblogs: many(Notes, {
relationName: "NoteToReblogs",
}),
notifications: many(Notifications),
}));
},
(table) => [index().on(table.mediaId), index().on(table.noteId)],
);
export const NotificationsRelations = relations(Notifications, ({ one }) => ({
account: one(Users, {
fields: [Notifications.accountId],
references: [Users.id],
relationName: "NotificationToAccount",
}),
notified: one(Users, {
fields: [Notifications.notifiedId],
references: [Users.id],
relationName: "NotificationToNotified",
export const MediasToNotesRelations = relations(MediasToNotes, ({ one }) => ({
media: one(Medias, {
fields: [MediasToNotes.mediaId],
references: [Medias.id],
}),
note: one(Notes, {
fields: [Notifications.noteId],
references: [Notes.id],
}),
}));
export const LikesRelations = relations(Likes, ({ one }) => ({
liker: one(Users, {
fields: [Likes.likerId],
references: [Users.id],
}),
liked: one(Notes, {
fields: [Likes.likedId],
references: [Notes.id],
}),
}));
export const EmojisRelations = relations(Emojis, ({ one, many }) => ({
instance: one(Instances, {
fields: [Emojis.instanceId],
references: [Instances.id],
}),
users: many(EmojiToUser),
notes: many(EmojiToNote),
}));
export const InstancesRelations = relations(Instances, ({ many }) => ({
users: many(Users),
emojis: many(Emojis),
}));
export const EmojisToNotesRelations = relations(EmojiToNote, ({ one }) => ({
emoji: one(Emojis, {
fields: [EmojiToNote.emojiId],
references: [Emojis.id],
}),
note: one(Notes, {
fields: [EmojiToNote.noteId],
fields: [MediasToNotes.noteId],
references: [Notes.id],
relationName: "AttachmentToNote",
}),
}));
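With the relations now declared next to their tables, a note's attachments (and a user's avatar or header media) can be loaded through Drizzle's relational query API. A sketch, assuming db is initialised with this schema so that db.query is available; the relation names follow the declarations above:

import { db } from "@versia/kit/db";

// Load a note together with its attached media rows (via MediasToNote)
// and the author's avatar.
const note = await db.query.Notes.findFirst({
    with: {
        attachments: {
            with: { media: true },
        },
        author: {
            with: { avatar: true },
        },
    },
});

console.log(note?.attachments.map((a) => a.media.content));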

View file

@ -81,20 +81,20 @@
},
"devDependencies": {
"@biomejs/biome": "^1.9.4",
"@types/bun": "^1.2.0",
"@types/bun": "^1.2.1",
"@types/cli-progress": "^3.11.6",
"@types/cli-table": "^0.3.4",
"@types/html-to-text": "^9.0.4",
"@types/jsonld": "^1.5.15",
"@types/markdown-it-container": "^2.0.10",
"@types/mime-types": "^2.1.4",
"@types/pg": "^8.11.10",
"@types/pg": "^8.11.11",
"@types/qs": "^6.9.18",
"@types/web-push": "^3.6.4",
"drizzle-kit": "^0.30.2",
"drizzle-kit": "^0.30.3",
"markdown-it-image-figures": "^2.1.1",
"markdown-it-mathjax3": "^4.3.2",
"oclif": "^4.17.17",
"oclif": "^4.17.20",
"ts-prune": "^0.10.3",
"typescript": "^5.7.3",
"vitepress": "^1.6.3",
@ -107,7 +107,6 @@
"typescript": "^5.7.2"
},
"dependencies": {
"@bradenmacdonald/s3-lite-client": "npm:@jsr/bradenmacdonald__s3-lite-client@0.8.0",
"@bull-board/api": "^6.7.1",
"@bull-board/hono": "^6.7.1",
"@hackmd/markdown-it-task-lists": "^2.1.4",
@ -120,22 +119,22 @@
"@json2csv/plainjs": "^7.0.6",
"@logtape/logtape": "npm:@jsr/logtape__logtape@0.9.0-dev.114+327c9473",
"@oclif/core": "^4.2.4",
"@sentry/bun": "^8.51.0",
"@sentry/bun": "^8.52.0",
"@tufjs/canonical-json": "^2.0.0",
"@versia/client": "^0.1.5",
"@versia/federation": "^0.1.4",
"@versia/kit": "workspace:*",
"altcha-lib": "^1.2.0",
"blurhash": "^2.0.5",
"bullmq": "^5.35.1",
"bullmq": "^5.38.0",
"c12": "^2.0.1",
"chalk": "^5.4.1",
"cli-progress": "^3.12.0",
"cli-table": "^0.3.11",
"confbox": "^0.1.8",
"drizzle-orm": "^0.38.4",
"drizzle-orm": "^0.39.0",
"extract-zip": "^2.0.1",
"hono": "^4.6.18",
"hono": "^4.6.19",
"html-to-text": "^9.0.5",
"ioredis": "^5.4.2",
"ip-matching": "^2.1.2",

View file

@ -276,14 +276,7 @@ export const configValidator = z
public_url: zUrl,
})
.strict()
.default({
endpoint: "",
access_key: "",
secret_access_key: "",
region: undefined,
bucket_name: "versia",
public_url: "https://cdn.example.com",
}),
.optional(),
validation: z
.object({
max_displayname_size: z.number().int().default(50),
@ -854,6 +847,11 @@ export const configValidator = z
.strict()
.optional(),
})
.strict();
.strict()
.refine(
// If media backend is S3, s3 config must be set
(arg) => arg.media.backend === MediaBackendType.Local || !!arg.s3,
"S3 config must be set when using S3 media backend",
);
export type Config = z.infer<typeof configValidator>;
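The new refine makes the [s3] block mandatory whenever the media backend is S3. A minimal standalone sketch of the same cross-field check, not the actual configValidator:

import { z } from "zod";

// Simplified stand-in: only the fields relevant to the check.
const sketchValidator = z
    .object({
        media: z.object({ backend: z.enum(["local", "s3"]) }),
        s3: z.object({ endpoint: z.string() }).optional(),
    })
    .refine(
        (arg) => arg.media.backend === "local" || !!arg.s3,
        "S3 config must be set when using S3 media backend",
    );

// Throws: the backend is "s3" but no [s3] section was provided.
sketchValidator.parse({ media: { backend: "s3" } });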

View file

@ -1,7 +1,7 @@
// biome-ignore lint/performance/noBarrelFile: <explanation>
export { User } from "~/classes/database/user.ts";
export { Role } from "~/classes/database/role.ts";
export { Media } from "~/classes/database/attachment.ts";
export { Media } from "~/classes/database/media";
export { Emoji } from "~/classes/database/emoji.ts";
export { Instance } from "~/classes/database/instance.ts";
export { Note } from "~/classes/database/note.ts";

View file

@ -1,7 +1,6 @@
import { mimeLookup } from "@/content_types.ts";
import { randomString } from "@/math.ts";
import { createRoute, z } from "@hono/zod-openapi";
import { Token, User, db } from "@versia/kit/db";
import { Media, Token, User, db } from "@versia/kit/db";
import { type SQL, and, eq, isNull } from "@versia/kit/drizzle";
import { OpenIdAccounts, RolePermissions, Users } from "@versia/kit/tables";
import { setCookie } from "hono/cookie";
@ -243,16 +242,15 @@ export default (plugin: PluginType): void => {
? !!(await User.fromSql(eq(Users.email, email)))
: false;
const avatar = picture
? await Media.fromUrl(new URL(picture))
: null;
// Create new user
const user = await User.fromDataLocal({
email: doesEmailExist ? undefined : email,
username,
avatar: picture
? {
url: picture,
content_type: await mimeLookup(picture),
}
: undefined,
avatar: avatar ?? undefined,
password: undefined,
});

View file

@ -57,8 +57,11 @@ export const urlToContentFormat = (
};
};
export const mimeLookup = (url: string): Promise<string> => {
const naiveLookup = lookup(url.replace(new URL(url).search, ""));
export const mimeLookup = (url: URL): Promise<string> => {
const urlWithoutSearch = url.toString().replace(url.search, "");
// Strip query params from URL to get the proper file extension
const naiveLookup = lookup(urlWithoutSearch);
if (naiveLookup) {
return Promise.resolve(naiveLookup);