diff --git a/.dockerignore b/.dockerignore index c627bb6c..5c8d6f5b 100644 --- a/.dockerignore +++ b/.dockerignore @@ -14,4 +14,6 @@ helm-charts .idea coverage* uploads -logs \ No newline at end of file +logs +dist +pages/dist \ No newline at end of file diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index fc8026cc..21377659 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -71,7 +71,7 @@ jobs: # https://github.com/docker/build-push-action - name: Build and push Docker image id: build-and-push - uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0 + uses: docker/build-push-action@v5.1.0 with: context: . push: ${{ github.event_name != 'pull_request' }} diff --git a/.gitignore b/.gitignore index db1bd149..38109eac 100644 --- a/.gitignore +++ b/.gitignore @@ -168,4 +168,8 @@ dist .yarn/install-state.gz .pnp.\* config/config.toml -uploads/ \ No newline at end of file +config/config.internal.toml +uploads/ +pages/dist +log.txt +*.log \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..980184cd --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,18 @@ +{ + "configurations": [ + { + "type": "node", + "name": "vscode-jest-tests.v2.lysand", + "request": "launch", + "args": [ + "test", + "${jest.testFile}" + ], + "cwd": "/home/jessew/Dev/lysand", + "console": "integratedTerminal", + "internalConsoleOptions": "neverOpen", + "disableOptimisticBPs": true, + "program": "/home/jessew/.bun/bin/bun" + } + ] +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..6286bcf3 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,5 @@ +{ + "typescript.tsdk": "node_modules/typescript/lib", + "jest.jestCommandLine": "/home/jessew/.bun/bin/bun test", + "jest.rootPath": "." +} diff --git a/API.md b/API.md new file mode 100644 index 00000000..7d44dec9 --- /dev/null +++ b/API.md @@ -0,0 +1,268 @@ +# API + +The Lysand project uses the Mastodon API to interact with clients. However, the moderation API is custom-made for Lysand Server, as it allows for more fine-grained control over the server's behavior. + +## Flags, ModTags and ModNotes + +Flags are used by Lysand Server to automatically attribute tags to a status or account based on rules. ModTags and ModNotes are used by moderators to manually tag and take notes on statuses and accounts. + +The difference between flags and modtags is that flags are automatically attributed by the server, while modtags are manually attributed by moderators. + +### Flag Types + +- `content_filter`: (Statuses only) The status contains content that was filtered by the server's content filter. +- `bio_filter`: (Accounts only) The account's bio contains content that was filtered by the server's content filter. +- `emoji_filter`: The status or account contains an emoji that was filtered by the server's content filter. +- `reported`: The status or account was previously reported by a user. +- `suspended`: The status or account was previously suspended by a moderator. +- `silenced`: The status or account was previously silenced by a moderator. + +### ModTag Types + +ModTag do not have set types and can be anything. Lysand Server autosuggest previously used tags when a moderator is adding a new tag to avoid duplicates. 
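
As a rough illustration (every value below is made up), a server-assigned flag and a moderator-assigned modtag on the same status might look like this, following the data format described in the next section:

```ts
// Hypothetical example data; see the "Data Format" section below for the exact types.
// `someStatus` and `someModerator` stand in for ordinary Mastodon API Status and User objects.
declare const someStatus: unknown;
declare const someModerator: unknown;

// Attributed automatically by the server (a Flag):
const exampleFlag = {
    id: "00000000-0000-7000-8000-000000000001",
    flaggedStatus: someStatus,
    flagType: "content_filter",
    createdAt: "2024-03-01T12:00:00.000Z",
};

// Attributed manually by a moderator (a ModTag):
const exampleModTag = {
    id: "00000000-0000-7000-8000-000000000002",
    taggedStatus: someStatus,
    mod: someModerator,
    tag: "spam", // free-form text; there is no fixed set of values
    createdAt: "2024-03-01T12:05:00.000Z",
};
```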
+ +### Data Format + +```ts +type Flag = { + id: string, + // One of the following two fields will be present + flaggedStatus?: Status, + flaggedUser?: User, + flagType: string, + createdAt: string, +} + +type ModTag = { + id: string, + // One of the following two fields will be present + taggedStatus?: Status, + taggedUser?: User, + mod: User, + tag: string, + createdAt: string, +} + +type ModNote = { + id: string, + // One of the following two fields will be present + notedStatus?: Status, + notedUser?: User, + mod: User, + note: string, + createdAt: string, +} +``` + +The `User` and `Status` types are the same as the ones in the Mastodon API. + +## Moderation API Routes + +### `GET /api/v1/moderation/accounts/:id` + +Returns full moderation data and flags for the account with the given ID. + +Output format: + +```ts +{ + id: string, // Same ID as in account field + flags: Flag[], + modtags: ModTag[], + modnotes: ModNote[], + account: User, +} +``` + +### `GET /api/v1/moderation/statuses/:id` + +Returns full moderation data and flags for the status with the given ID. + +Output format: + +```ts +{ + id: string, // Same ID as in status field + flags: Flag[], + modtags: ModTag[], + modnotes: ModNote[], + status: Status, +} +``` + +### `POST /api/v1/moderation/accounts/:id/modtags` + +Params: +- `tag`: string + +Adds a modtag to the account with the given ID + +### `POST /api/v1/moderation/statuses/:id/modtags` + +Params: +- `tag`: string + +Adds a modtag to the status with the given ID + +### `POST /api/v1/moderation/accounts/:id/modnotes` + +Params: +- `note`: string + +Adds a modnote to the account with the given ID + +### `POST /api/v1/moderation/statuses/:id/modnotes` + +Params: +- `note`: string + +Adds a modnote to the status with the given ID + +### `DELETE /api/v1/moderation/accounts/:id/modtags/:modtag_id` + +Deletes the modtag with the given ID from the account with the given ID + +### `DELETE /api/v1/moderation/statuses/:id/modtags/:modtag_id` + +Deletes the modtag with the given ID from the status with the given ID + +### `DELETE /api/v1/moderation/accounts/:id/modnotes/:modnote_id` + +Deletes the modnote with the given ID from the account with the given ID + +### `DELETE /api/v1/moderation/statuses/:id/modnotes/:modnote_id` + +Deletes the modnote with the given ID from the status with the given ID + +### `GET /api/v1/moderation/modtags` + +Returns a list of all modtags previously used by moderators + +Output format: + +```ts +{ + tags: string[], +} +``` + +### `GET /api/v1/moderation/accounts/flags/search` + +Allows moderators to search for accounts based on their flags, this can also include status flags + +Params: +- `limit`: Number +- `min_id`: String. Returns results immediately newer than this ID. In effect, sets a cursor at this ID and paginates forward. +- `max_id`: String. All results returned will be lesser than this ID. In effect, sets an upper bound on results. +- `since_id`: String. All results returned will be greater than this ID. In effect, sets a lower bound on results. +- `flags`: String (optional). Comma-separated list of flag types to filter by. Can be left out to return accounts with at least one flag +- `flag_count`: Number (optional). Minimum number of flags to filter by +- `include_statuses`: Boolean (optional). If true, includes status flags in the search results +- `account_id`: Array of strings (optional). Filters accounts by account ID + +This method returns a `Link` header the same way Mastodon does, to allow for pagination. 
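
For example, a moderation client could page through every flagged account by following the `rel="next"` URL from the `Link` header. The sketch below is illustrative only: `baseUrl` and a moderator-scoped `token` are assumed to be supplied by the caller, and the chosen `limit` and `flags` values are arbitrary.

```ts
// Client-side pagination sketch for this endpoint (not part of the server).
async function fetchAllFlaggedAccounts(baseUrl: string, token: string) {
    const accounts: unknown[] = [];
    let url: string | null =
        `${baseUrl}/api/v1/moderation/accounts/flags/search?limit=40&flags=reported,bio_filter`;

    while (url) {
        const response = await fetch(url, {
            headers: { Authorization: `Bearer ${token}` },
        });
        const body = await response.json();
        accounts.push(...body.accounts);

        // Follow the rel="next" URL from the Link header, if one is present.
        const link = response.headers.get("Link") ?? "";
        url = /<([^>]+)>;\s*rel="next"/.exec(link)?.[1] ?? null;
    }

    return accounts;
}
```

Relying on the header rather than re-deriving `max_id` by hand keeps the client agnostic about which ID the server uses as its cursor.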
+ +Output format: + +```ts +{ + accounts: { + account: User, + modnotes: ModNote[], + flags: Flag[], + statuses?: { + status: Status, + modnotes: ModNote[], + flags: Flag[], + }[], + }[], +} +``` + +### `GET /api/v1/moderation/statuses/flags/search` + +Allows moderators to search for statuses based on their flags + +Params: +- `limit`: Number +- `min_id`: String. Returns results immediately newer than this ID. In effect, sets a cursor at this ID and paginates forward. +- `max_id`: String. All results returned will be lesser than this ID. In effect, sets an upper bound on results. +- `since_id`: String. All results returned will be greater than this ID. In effect, sets a lower bound on results. +- `flags`: String (optional). Comma-separated list of flag types to filter by. Can be left out to return statuses with at least one flag +- `flag_count`: Number (optional). Minimum number of flags to filter by +- `account_id`: Array of strings (optional). Filters statuses by account ID + +This method returns a `Link` header the same way Mastodon does, to allow for pagination. + +Output format: + +```ts +{ + statuses: { + status: Status, + modnotes: ModNote[], + flags: Flag[], + }[], +} +``` + +### `GET /api/v1/moderation/accounts/modtags/search` + +Allows moderators to search for accounts based on their modtags + +Params: +- `limit`: Number +- `min_id`: String. Returns results immediately newer than this ID. In effect, sets a cursor at this ID and paginates forward. +- `max_id`: String. All results returned will be lesser than this ID. In effect, sets an upper bound on results. +- `since_id`: String. All results returned will be greater than this ID. In effect, sets a lower bound on results. +- `tags`: String (optional). Comma-separated list of tags to filter by. Can be left out to return accounts with at least one tag +- `tag_count`: Number (optional). Minimum number of tags to filter by +- `include_statuses`: Boolean (optional). If true, includes status tags in the search results +- `account_id`: Array of strings (optional). Filters accounts by account ID + +This method returns a `Link` header the same way Mastodon does, to allow for pagination. + +Output format: + +```ts +{ + accounts: { + account: User, + modnotes: ModNote[], + modtags: ModTag[], + statuses?: { + status: Status, + modnotes: ModNote[], + modtags: ModTag[], + }[], + }[], +} +``` + +### `GET /api/v1/moderation/statuses/modtags/search` + +Allows moderators to search for statuses based on their modtags + +Params: +- `limit`: Number +- `min_id`: String. Returns results immediately newer than this ID. In effect, sets a cursor at this ID and paginates forward. +- `max_id`: String. All results returned will be lesser than this ID. In effect, sets an upper bound on results. +- `since_id`: String. All results returned will be greater than this ID. In effect, sets a lower bound on results. +- `tags`: String (optional). Comma-separated list of tags to filter by. Can be left out to return statuses with at least one tag +- `tag_count`: Number (optional). Minimum number of tags to filter by +- `account_id`: Array of strings (optional). Filters statuses by account ID +- `include_statuses`: Boolean (optional). If true, includes status tags in the search results + +This method returns a `Link` header the same way Mastodon does, to allow for pagination. 
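
As an illustration, a client searching for statuses that moderators have tagged `spam` or `scam`, restricted to a single author, might build the request as follows. This is a sketch only: `baseUrl` and a moderator-scoped `token` are assumed to be supplied by the caller, and the bracketed `account_id[]` encoding for the array parameter is an assumption that may need adjusting.

```ts
// Hypothetical request construction for this endpoint.
async function searchTaggedStatuses(baseUrl: string, token: string, accountId: string) {
    const params = new URLSearchParams({
        tags: "spam,scam", // comma-separated list of modtags to match
        tag_count: "2",    // only return statuses with at least two tags
        limit: "40",
    });

    // `account_id` is documented as an array of strings; Rails-style bracket
    // syntax is assumed here.
    params.append("account_id[]", accountId);

    const response = await fetch(
        `${baseUrl}/api/v1/moderation/statuses/modtags/search?${params.toString()}`,
        { headers: { Authorization: `Bearer ${token}` } }
    );

    const { statuses } = await response.json();
    return statuses;
}
```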
+ +Output format: + +```ts +{ + statuses: { + status: Status, + modnotes: ModNote[], + modtags: ModTag[], + }[], +} +``` diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 19e9ce47..bfef0cf7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,5 +1,6 @@ # Contributing to Lysand +> [!NOTE] > This document was authored by [@CPlusPatch](https://github.com/CPlusPatch). Thank you for your interest in contributing to Lysand! We welcome contributions from everyone, regardless of their level of experience or expertise. @@ -8,7 +9,7 @@ Thank you for your interest in contributing to Lysand! We welcome contributions Lysand is built using the following technologies: -- [Bun](https://bun.sh) - A JavaScript runtime similar to Node.js, but improved +- [Bun](https://bun.sh) - A JavaScript runtime similar to Node.js, but faster and with more features - [PostgreSQL](https://www.postgresql.org/) - A relational database - [`pg_uuidv7`](https://github.com/fboulnois/pg_uuidv7) - A PostgreSQL extension that provides a UUIDv7 data type - [UnoCSS](https://unocss.dev) - A utility-first CSS framework, used for the login page @@ -67,7 +68,10 @@ RUN chmod +x /docker-entrypoint-initdb.d/init.sh ``` 4. Copy the `config.toml.example` file to `config.toml` and fill in the values (you can leave most things to the default, but you will need to configure things such as the database connection) - + +> [!WARNING] +> You should disable Prisma Redis caching while developing, as it can mess up tests + 5. Generate the Prisma client: ```bash @@ -89,6 +93,15 @@ bun dev If your port number is lower than 1024, you may need to run the command as root. +### Running the Vite server + +To start the Vite server, run: +```sh +bun vite:dev +``` + +This should be run in a separate terminal window. The Vite server is used to serve the frontend assets and to provide hot module reloading. + ## Running tests To run the tests, run: @@ -96,7 +109,7 @@ To run the tests, run: bun test ``` -The tests are located in the `tests/` directory and follow a Jest-like syntax. The server must be started with `bun dev` before running the tests. +The tests are located in the `tests/` directory and follow a Jest-like syntax. The server does not need to be started before running the tests, as the tests will spawn their own Lysand server instance. ## Code style diff --git a/Dockerfile b/Dockerfile index edb38027..51b9eb58 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,31 +1,36 @@ # use the official Bun image # see all versions at https://hub.docker.com/r/oven/bun/tags -FROM oven/bun:1.0.14-alpine as base +FROM oven/bun:1.0.30-alpine as base WORKDIR /usr/src/app -RUN apk add vips-dev # Required for Prisma to work -COPY --from=node:18-alpine /usr/local/bin/node /usr/local/bin/node +# COPY --from=node:18-alpine /usr/local/bin/node /usr/local/bin/node # install dependencies into temp directory # this will cache them and speed up future builds FROM base AS install -RUN mkdir -p /temp/dev -COPY package.json bun.lockb /temp/dev/ -RUN cd /temp/dev && bun install --frozen-lockfile # install with --production (exclude devDependencies) -RUN mkdir -p /temp/prod -COPY package.json bun.lockb /temp/prod/ -RUN cd /temp/prod && bun install --frozen-lockfile --production. +RUN mkdir -p /temp +COPY . 
/temp +WORKDIR /temp +RUN bun install --frozen-lockfile --production + +# Build Vite in pages +RUN bunx --bun vite build pages + +# Build the project +RUN bun run build.ts +WORKDIR /temp/dist # copy production dependencies and source code into final image FROM base AS release # Create app directory RUN mkdir -p /app -COPY --from=install /temp/prod/node_modules /app/node_modules -COPY . /app +COPY --from=install /temp/dist /app/dist +COPY entrypoint.sh /app + LABEL org.opencontainers.image.authors "Gaspard Wierzbinski (https://cpluspatch.dev)" LABEL org.opencontainers.image.source "https://github.com/lysand-org/lysand" @@ -34,11 +39,8 @@ LABEL org.opencontainers.image.licenses "AGPL-3.0" LABEL org.opencontainers.image.title "Lysand Server" LABEL org.opencontainers.image.description "Lysand Server docker image" -# CD to app -WORKDIR /app -RUN bunx prisma generate # CD to app WORKDIR /app ENV NODE_ENV=production # Run migrations and start the server -ENTRYPOINT [ "bun", "migrate", "&&", "bun", "run", "index.ts" ] +ENTRYPOINT [ "./entrypoint.sh" "start" ] diff --git a/README.md b/README.md index b23635cf..83a9e54d 100644 --- a/README.md +++ b/README.md @@ -4,14 +4,14 @@ ![Postgres](https://img.shields.io/badge/postgres-%23316192.svg?style=for-the-badge&logo=postgresql&logoColor=white) ![Bun](https://img.shields.io/badge/Bun-%23000000.svg?style=for-the-badge&logo=bun&logoColor=white) ![VS Code Insiders](https://img.shields.io/badge/VS%20Code%20Insiders-35b393.svg?style=for-the-badge&logo=visual-studio-code&logoColor=white) ![TypeScript](https://img.shields.io/badge/typescript-%23007ACC.svg?style=for-the-badge&logo=typescript&logoColor=white) ![Linux](https://img.shields.io/badge/Linux-FCC624?style=for-the-badge&logo=linux&logoColor=black) ![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white) ![ESLint](https://img.shields.io/badge/ESLint-4B3263?style=for-the-badge&logo=eslint&logoColor=white) [![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa?style=for-the-badge)](code_of_conduct.md) - ## What is this? -This is a project to create a federated social network based on the [Lysand](https://lysand.org) protocol. It is currently in alpha phase, with basic federation and API support. +This is a project to create a federated social network based on the [Lysand](https://lysand.org) protocol. It is currently in alpha phase, with basic federation and almost complete Mastodon API support. -This project aims to be a fully featured social network, with a focus on privacy, security, and performance. It will implement the Mastodon API for support with clients that already support Mastodon or Pleroma. +This project aims to be a fully featured social network, with a focus on privacy, security, and performance. It implements the Mastodon API for support with clients that already support Mastodon or Pleroma. -> **Note:** This project is not affiliated with Mastodon or Pleroma, and is not a fork of either project. It is a new project built from the ground up. +> [!NOTE] +> This project is not affiliated with Mastodon or Pleroma, and is not a fork of either project. It is a new project built from the ground up. ## Features @@ -31,7 +31,8 @@ This project aims to be a fully featured social network, with a focus on privacy ## Benchmarks -> **Note**: These benchmarks are not representative of real-world performance, and are only meant to be used as a rough guide. 
+> [!NOTE] +> These benchmarks are not representative of real-world performance, and are only meant to be used as a rough guide. Load, and therefore performance, will vary depending on the server's hardware and software configuration, as well as user activity. ### Timeline Benchmarks @@ -63,17 +64,21 @@ $ bun run benchmarks/timelines.ts 10000 ✓ 10000 requests fulfilled in 12.44852s ``` -Lysand is extremely fast and can handle tens of thousands of HTTP requests per second on a good server. +Lysand is extremely fast and can handle thousands of HTTP requests per second on a good server. ## How do I run it? ### Requirements -- The [Bun Runtime](https://bun.sh), version 1.0.5 or later (usage of the latest version is recommended) +- The [Bun Runtime](https://bun.sh), version 1.0.30 or later (usage of the latest version is recommended) - A PostgreSQL database - (Optional but recommended) A Linux-based operating system +- (Optional if you want search) A working Meiliseach instance -> **Note**: We will not be offerring support to Windows or MacOS users. If you are using one of these operating systems, please use a virtual machine or container to run Lysand. +> [!WARNING] +> Lysand has not been tested on Windows or MacOS. It is recommended to use a Linux-based operating system to run Lysand. +> +> We will not be offerring support to Windows or MacOS users. If you are using one of these operating systems, please use a virtual machine or container to run Lysand. ### Installation @@ -125,6 +130,18 @@ RUN chmod +x /docker-entrypoint-initdb.d/init.sh bun migrate ``` +6. (If you want search) +Create a Meilisearch instance (using Docker is recommended). For a [`docker-compose`] file, copy the `meilisearch` service from the [`docker-compose.yml`](docker-compose.yml) file. + +Set up Meiliseach's API key by passing the `MEILI_MASTER_KEY` environment variable to the server. Then, enale and configure search in the config file. +7. Build everything: + +```bash +bun prod-build +``` + +You may now start the server with `bun start`. It lives in the `dist/` directory, all the other code can be removed from this point onwards. +In fact, the `bun start` script merely runs `bun run dist/index.js --prod`! ### Running To run the server, simply run the following command: @@ -138,24 +155,31 @@ bun start Lysand includes a built-in CLI for managing the server. To use it, simply run the following command: ```bash -bun cli +bun cli help ``` -You can use the `help` command to see a list of available commands. These include creating users, deleting users and more. +If you are running a production build, you will need to run `bun run dist/cli.js` or `./entrypoint.sh cli` instead. + +You can use the `help` command to see a list of available commands. These include creating users, deleting users and more. Each command also has a `--help,-h` flag that you can use to see more information about the command. #### Scripting with the CLI -Some CLI commands that return data as tables can be used in scripts. To do so, you can use the `--json` flag to output the data as JSON instead of a table, or even `--csv` to output the data as CSV. See `bun cli help` for more information. +Some CLI commands that return data as tables can be used in scripts. To convert them to JSON or CSV, some commands allow you to specify a `--format` flag that can be either `"json"` or `"csv"`. See `bun cli help` or `bun cli -h` for more information. Flags can be used in any order and anywhere in the script (except for the `bun cli` command itself). 
The command arguments themselves must be in the correct order, however. +### Rebuilding the Search Index + +You may use the `bun cli index rebuild` command to automatically push all posts and users to Meilisearch, if it is configured. This is useful if you have just set up Meilisearch, or if you accidentally deleted something. + ### Using Database Commands The `bun prisma` commands allows you to use Prisma commands without needing to add in environment variables for the database config. Just run Prisma commands as you would normally, replacing `bunx prisma` with `bun prisma`. ## With Docker -> **Note**: Docker is currently broken, as Bun with Prisma does not work well with Docker yet for unknown reasons. The following instructions are for when this is fixed. +> [!NOTE] +> Docker is currently broken, as Bun with Prisma does not work well with Docker yet for unknown reasons. The following instructions are for when this is fixed. > > These instructions will probably also work with Podman and other container runtimes. @@ -180,7 +204,7 @@ You may need root privileges to run Docker commands. You can run CLI commands inside Docker using the following command: ```bash -sudo docker exec -it lysand bun cli ... +sudo docker exec -it lysand sh entrypoint.sh cli ... ``` ### Running migrations inside Docker @@ -188,7 +212,7 @@ sudo docker exec -it lysand bun cli ... You can run migrations inside Docker using the following command (if needed): ```bash -sudo docker exec -it lysand bun migrate +sudo docker exec -it lysand sh entrypoint.sh prisma migrate deploy ``` ## Contributing @@ -202,7 +226,8 @@ Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) fil ## Federation -> **Warning**: Federation has not been tested outside of automated tests. It is not recommended to use this software in production. +> [!WARNING] +> Federation has not been tested outside of automated tests. It is not recommended to use this software in production. The following extensions are currently supported or being worked on: - `org.lysand:custom_emojis`: Custom emojis @@ -254,17 +279,21 @@ Working endpoints are: - `/api/v1/blocks` - `/api/v1/mutes` - `/api/v2/media` +- `/api/v1/notifications` Tests needed but completed: - `/api/v1/media/:id` +- `/api/v2/media` - `/api/v1/favourites` +- `/api/v1/accounts/:id/followers` +- `/api/v1/accounts/:id/following` +- `/api/v2/search` Endpoints left: - `/api/v1/reports` - `/api/v1/accounts/:id/lists` -- `/api/v1/accounts/:id/following` - `/api/v1/follow_requests` - `/api/v1/follow_requests/:account_id/authorize` - `/api/v1/follow_requests/:account_id/reject` @@ -308,11 +337,9 @@ Endpoints left: - `/api/v1/lists/:id` (`GET`, `PUT`, `DELETE`) - `/api/v1/markers` (`GET`, `POST`) - `/api/v1/lists/:id/accounts` (`GET`, `POST`, `DELETE`) -- `/api/v1/notifications` - `/api/v1/notifications/:id` - `/api/v1/notifications/clear` - `/api/v1/notifications/:id/dismiss` -- `/api/v2/search` - `/api/v2/instance` - `/api/v1/instance/peers` - `/api/v1/instance/activity` @@ -330,126 +357,6 @@ Endpoints left: WebSocket Streaming API also needed to be added (and push notifications) -## Configuration Values - -Configuration can be found inside the `config.toml` file. The following values are available: - -### Database - -- `host`: The hostname or IP address of the database server. Example: `"localhost"` -- `port`: The port number to use for the database connection. Example: `48654` -- `username`: The username to use for the database connection. 
Example: `"lysand"` -- `password`: The password to use for the database connection. Example: `"mycoolpassword"` -- `database`: The name of the database to use. Example: `"lysand"` - -### HTTP - -- `base_url`: The base URL for the HTTP server. Example: `"https://lysand.social"` -- `bind`: The hostname or IP address to bind the HTTP server to. Example: `"http://localhost"` -- `bind_port`: The port number to bind the HTTP server to. Example: `"8080"` - -#### Security - -- `banned_ips`: An array of strings representing banned IPv4 or IPv6 IPs. Wildcards, networks and ranges are supported. Example: `[ "192.168.0.*" ]` (empty array) - -### Media - -- `backend`: Specifies the backend to use for media storage. Can be "local" or "s3", "local" uploads the file to the local filesystem. -- `deduplicate_media`: When set to true, the hash of media is checked when uploading to avoid duplication. - -#### Conversion - -- `convert_images`: Whether to convert uploaded images to another format. Example: `true` -- `convert_to`: The format to convert uploaded images to. Example: `"webp"`. Can be "jxl", "webp", "avif", "png", "jpg" or "gif". - -### S3 - -- `endpoint`: The endpoint to use for the S3 server. Example: `"https://s3.example.com"` -- `access_key`: Access key to use for S3 -- `secret_access_key`: Secret access key to use for S3 -- `bucket_name`: The bucket to use for S3 (can be left empty) -- `region`: The region to use for S3 (can be left empty) -- `public_url`: The public URL to access uploaded media. Example: `"https://cdn.example.com"` - -### SMTP - -- `server`: The SMTP server to use for sending emails. Example: `"smtp.example.com"` -- `port`: The port number to use for the SMTP server. Example: `465` -- `username`: The username to use for the SMTP server. Example: `"test@example.com"` -- `password`: The password to use for the SMTP server. Example: `"password123"` -- `tls`: Whether to use TLS for the SMTP server. Example: `true` - -### Email - -- `send_on_report`: Whether to send an email to moderators when a report is received. Example: `false` -- `send_on_suspend`: Whether to send an email to moderators when a user is suspended. Example: `true` -- `send_on_unsuspend`: Whether to send an email to moderators when a user is unsuspended. Example: `false` - -### Validation - -- `max_displayname_size`: The maximum size of a user's display name, in characters. Example: `30` -- `max_bio_size`: The maximum size of a user's bio, in characters. Example: `160` -- `max_note_size`: The maximum size of a user's note, in characters. Example: `500` -- `max_avatar_size`: The maximum size of a user's avatar image, in bytes. Example: `1048576` (1 MB) -- `max_header_size`: The maximum size of a user's header image, in bytes. Example: `2097152` (2 MB) -- `max_media_size`: The maximum size of a media attachment, in bytes. Example: `5242880` (5 MB) -- `max_media_attachments`: The maximum number of media attachments allowed per post. Example: `4` -- `max_media_description_size`: The maximum size of a media attachment's description, in characters. Example: `100` -- `max_username_size`: The maximum size of a user's username, in characters. Example: `20` -- `username_blacklist`: An array of strings representing usernames that are not allowed to be used by users. Defaults are from Akkoma. Example: `["admin", "moderator"]` -- `blacklist_tempmail`: Whether to blacklist known temporary email providers. Example: `true` -- `email_blacklist`: Additional email providers to blacklist. 
Example: `["example.com", "test.com"]` -- `url_scheme_whitelist`: An array of strings representing valid URL schemes. URLs that do not use one of these schemes will be parsed as text. Example: `["http", "https"]` -- `allowed_mime_types`: An array of strings representing allowed MIME types for media attachments. Example: `["image/jpeg", "image/png", "video/mp4"]` - -### Defaults - -- `visibility`: The default visibility for new notes. Example: `"public"` -- `language`: The default language for new notes. Example: `"en"` -- `avatar`: The default avatar URL. Example: `""` (empty string) -- `header`: The default header URL. Example: `""` (empty string) - -### ActivityPub - -> **Note**: These options do nothing and date back to when Lysand had ActivityPub support. They will be removed in a future version. - -- `use_tombstones`: Whether to use ActivityPub Tombstones instead of deleting objects. Example: `true` -- `fetch_all_collection_members`: Whether to fetch all members of collections (followers, following, etc) when receiving them. Example: `false` -- `reject_activities`: An array of instance domain names without "https" or glob patterns. Rejects all activities from these instances, simply doesn't save them at all. Example: `[ "mastodon.social" ]` -- `force_followers_only`: An array of instance domain names without "https" or glob patterns. Force posts from this instance to be followers only. Example: `[ "mastodon.social" ]` -- `discard_reports`: An array of instance domain names without "https" or glob patterns. Discard all reports from these instances. Example: `[ "mastodon.social" ]` -- `discard_deletes`: An array of instance domain names without "https" or glob patterns. Discard all deletes from these instances. Example: `[ "mastodon.social" ]` -- `discard_updates`: An array of instance domain names without "https" or glob patterns. Discard all updates (edits) from these instances. Example: `[]` -- `discard_banners`: An array of instance domain names without "https" or glob patterns. Discard all banners from these instances. Example: `[ "mastodon.social" ]` -- `discard_avatars`: An array of instance domain names without "https" or glob patterns. Discard all avatars from these instances. Example: `[ "mastodon.social" ]` -- `discard_follows`: An array of instance domain names without "https" or glob patterns. Discard all follow requests from these instances. Example: `[]` -- `force_sensitive`: An array of instance domain names without "https" or glob patterns. Force set these instances' media as sensitive. Example: `[ "mastodon.social" ]` -- `remove_media`: An array of instance domain names without "https" or glob patterns. Remove these instances' media. Example: `[ "mastodon.social" ]` - -### Filters - -- `note_filters`: An array of regex filters to drop notes from new activities. Example: `["(https?://)?(www\\.)?youtube\\.com/watch\\?v=[a-zA-Z0-9_-]+", "(https?://)?(www\\.)?youtu\\.be/[a-zA-Z0-9_-]+"]` -- `username_filters`: An array of regex filters to drop users from new activities based on their username. Example: `[ "^spammer-[a-z]" ]` -- `displayname_filters`: An array of regex filters to drop users from new activities based on their display name. Example: `[ "^spammer-[a-z]" ]` -- `bio_filters`: An array of regex filters to drop users from new activities based on their bio. Example: `[ "badword" ]` -- `emoji_filters`: An array of regex filters to drop users from new activities based on their emoji usage. 
Example: `[ ":bademoji:" ]` - -### Logging - -- `log_requests`: Whether to log all requests. Example: `true` -- `log_requests_verbose`: Whether to log request and their contents. Example: `false` -- `log_filters`: Whether to log all filtered objects. Example: `true` - -### Ratelimits - -- `duration_coeff`: The amount to multiply every route's duration by. Example: `1.0` -- `max_coeff`: The amount to multiply every route's max by. Example: `1.0` - -### Custom Ratelimits - -- `"/api/v1/timelines/public"`: An object representing a custom ratelimit for the specified API route. Example: `{ duration = 60, max = 200 }` - - ## License -This project is licensed under the [AGPL-3.0](LICENSE). \ No newline at end of file +This project is licensed under the [AGPL-3.0](LICENSE). diff --git a/benchmarks/timelines.ts b/benchmarks/timelines.ts index 2804f691..b5348b69 100644 --- a/benchmarks/timelines.ts +++ b/benchmarks/timelines.ts @@ -2,10 +2,10 @@ * Usage: TOKEN=your_token_here bun benchmark:timeline */ -import { getConfig } from "@config"; import chalk from "chalk"; +import { ConfigManager } from "config-manager"; -const config = getConfig(); +const config = await new ConfigManager({}).getConfig(); const token = process.env.TOKEN; const requestCount = Number(process.argv[2]) || 100; diff --git a/build.ts b/build.ts new file mode 100644 index 00000000..eec9642e --- /dev/null +++ b/build.ts @@ -0,0 +1,43 @@ +// Delete dist directory +import { rm, cp, mkdir, exists } from "fs/promises"; + +if (!(await exists("./pages/dist"))) { + console.log("Please build the Vite server first, or use `bun prod-build`"); + process.exit(1); +} + +console.log(`Building at ${process.cwd()}`); + +await rm("./dist", { recursive: true }); + +await mkdir(process.cwd() + "/dist"); + +//bun build --entrypoints ./index.ts ./prisma.ts ./cli.ts --outdir dist --target bun --splitting --minify --external bullmq,@prisma/client +await Bun.build({ + entrypoints: [ + process.cwd() + "/index.ts", + process.cwd() + "/prisma.ts", + process.cwd() + "/cli.ts", + ], + outdir: process.cwd() + "/dist", + target: "bun", + splitting: true, + minify: true, + external: ["bullmq"], +}).then(output => { + if (!output.success) { + console.log(output.logs); + } +}); + +// Create pages directory +// mkdir ./dist/pages +await mkdir(process.cwd() + "/dist/pages"); + +// Copy Vite build output to dist +// cp -r ./pages/dist ./dist/pages +await cp(process.cwd() + "/pages/dist", process.cwd() + "/dist/pages/", { + recursive: true, +}); + +console.log(`Built!`); diff --git a/bun.lockb b/bun.lockb index ae9680ac..f2a6bae9 100755 Binary files a/bun.lockb and b/bun.lockb differ diff --git a/bunfig.toml b/bunfig.toml new file mode 100644 index 00000000..bea1efe1 --- /dev/null +++ b/bunfig.toml @@ -0,0 +1,2 @@ +[install.scopes] +"@jsr" = "https://npm.jsr.io" diff --git a/classes/media.ts b/classes/media.ts deleted file mode 100644 index 9d40ebe0..00000000 --- a/classes/media.ts +++ /dev/null @@ -1,273 +0,0 @@ -import type { GetObjectCommandOutput } from "@aws-sdk/client-s3"; -import { - GetObjectCommand, - PutObjectCommand, - S3Client, -} from "@aws-sdk/client-s3"; -import type { ConfigType } from "@config"; -import sharp from "sharp"; -import { exists, mkdir } from "fs/promises"; -class MediaBackend { - backend: string; - - constructor(backend: string) { - this.backend = backend; - } - - /** - * Adds media to the media backend - * @param media - * @returns The hash of the file in SHA-256 (hex format) with the file extension added to it - */ - async addMedia(media: 
File) { - const hash = new Bun.SHA256() - .update(await media.arrayBuffer()) - .digest("hex"); - - return `${hash}.${media.name.split(".").pop()}`; - } - - async convertMedia(media: File, config: ConfigType) { - const sharpCommand = sharp(await media.arrayBuffer()); - - // Rename ".jpg" files to ".jpeg" to avoid sharp errors - let name = media.name; - if (media.name.endsWith(".jpg")) { - name = media.name.replace(".jpg", ".jpeg"); - } - - const fileFormatToConvertTo = config.media.conversion.convert_to; - - switch (fileFormatToConvertTo) { - case "png": - return new File( - [(await sharpCommand.png().toBuffer()).buffer], - // Replace the file extension with PNG - name.replace(/\.[^/.]+$/, ".png"), - { - type: "image/png", - } - ); - case "webp": - return new File( - [(await sharpCommand.webp().toBuffer()).buffer], - // Replace the file extension with WebP - name.replace(/\.[^/.]+$/, ".webp"), - { - type: "image/webp", - } - ); - case "jpeg": - return new File( - [(await sharpCommand.jpeg().toBuffer()).buffer], - // Replace the file extension with JPEG - name.replace(/\.[^/.]+$/, ".jpeg"), - { - type: "image/jpeg", - } - ); - case "avif": - return new File( - [(await sharpCommand.avif().toBuffer()).buffer], - // Replace the file extension with AVIF - name.replace(/\.[^/.]+$/, ".avif"), - { - type: "image/avif", - } - ); - // Needs special build of libvips - case "jxl": - return new File( - [(await sharpCommand.jxl().toBuffer()).buffer], - // Replace the file extension with JXL - name.replace(/\.[^/.]+$/, ".jxl"), - { - type: "image/jxl", - } - ); - case "heif": - return new File( - [(await sharpCommand.heif().toBuffer()).buffer], - // Replace the file extension with HEIF - name.replace(/\.[^/.]+$/, ".heif"), - { - type: "image/heif", - } - ); - default: - return media; - } - } - - /** - * Retrieves element from media backend by hash - * @param hash The hash of the element in SHA-256 hex format - * @param extension The extension of the file - * @returns The file as a File object - */ - // eslint-disable-next-line @typescript-eslint/require-await, @typescript-eslint/no-unused-vars - async getMediaByHash( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - hash: string - ): Promise { - return new File([], "test"); - } -} - -/** - * S3 Backend, stores files in S3 - */ -export class S3Backend extends MediaBackend { - client: S3Client; - config: ConfigType; - - constructor(config: ConfigType) { - super("s3"); - - this.config = config; - - this.client = new S3Client({ - endpoint: this.config.s3.endpoint, - region: this.config.s3.region || "auto", - credentials: { - accessKeyId: this.config.s3.access_key, - secretAccessKey: this.config.s3.secret_access_key, - }, - }); - } - - async addMedia(media: File): Promise { - if (this.config.media.conversion.convert_images) { - media = await this.convertMedia(media, this.config); - } - - const hash = await super.addMedia(media); - - if (!hash) { - throw new Error("Failed to hash file"); - } - - // Check if file is already present - const existingFile = await this.getMediaByHash(hash); - - if (existingFile) { - // File already exists, so return the hash without uploading it - return hash; - } - - const command = new PutObjectCommand({ - Bucket: this.config.s3.bucket_name, - Key: hash, - Body: Buffer.from(await media.arrayBuffer()), - ContentType: media.type, - ContentLength: media.size, - Metadata: { - "x-amz-meta-original-name": media.name, - }, - }); - - const response = await this.client.send(command); - - if 
(response.$metadata.httpStatusCode !== 200) { - throw new Error("Failed to upload file"); - } - - return hash; - } - - async getMediaByHash(hash: string): Promise { - const command = new GetObjectCommand({ - Bucket: this.config.s3.bucket_name, - Key: hash, - }); - - let response: GetObjectCommandOutput; - - try { - response = await this.client.send(command); - } catch { - return null; - } - - if (response.$metadata.httpStatusCode !== 200) { - throw new Error("Failed to get file"); - } - - const body = await response.Body?.transformToByteArray(); - - if (!body) { - throw new Error("Failed to get file"); - } - - return new File([body], hash, { - type: response.ContentType, - }); - } -} - -/** - * Local backend, stores files on filesystem - */ -export class LocalBackend extends MediaBackend { - config: ConfigType; - - constructor(config: ConfigType) { - super("local"); - - this.config = config; - } - - async addMedia(media: File): Promise { - if (this.config.media.conversion.convert_images) { - media = await this.convertMedia(media, this.config); - } - - const hash = await super.addMedia(media); - - if (!(await exists(`${process.cwd()}/uploads`))) { - await mkdir(`${process.cwd()}/uploads`); - } - - await Bun.write(Bun.file(`${process.cwd()}/uploads/${hash}`), media); - - return hash; - } - - async getMediaByHash(hash: string): Promise { - const file = Bun.file(`${process.cwd()}/uploads/${hash}`); - - if (!(await file.exists())) { - return null; - } - - return new File([await file.arrayBuffer()], `${hash}`, { - type: file.type, - }); - } -} - -export const uploadFile = (file: File, config: ConfigType) => { - const backend = config.media.backend; - - if (backend === "local") { - return new LocalBackend(config).addMedia(file); - } else if (backend === "s3") { - return new S3Backend(config).addMedia(file); - } -}; - -export const getFile = ( - hash: string, - extension: string, - config: ConfigType -) => { - const backend = config.media.backend; - - if (backend === "local") { - return new LocalBackend(config).getMediaByHash(hash); - } else if (backend === "s3") { - return new S3Backend(config).getMediaByHash(hash); - } - - return null; -}; diff --git a/cli.ts b/cli.ts index a42f4d77..01f0cb19 100644 --- a/cli.ts +++ b/cli.ts @@ -1,513 +1,1751 @@ -import type { Prisma } from "@prisma/client"; import chalk from "chalk"; -import { client } from "~database/datasource"; import { createNewLocalUser } from "~database/entities/User"; import Table from "cli-table"; +import { rebuildSearchIndexes, MeiliIndexType } from "@meilisearch"; +import { getUrl } from "~database/entities/Attachment"; +import extract from "extract-zip"; +import { client } from "~database/datasource"; +import { CliBuilder, CliCommand } from "cli-parser"; +import { CliParameterType } from "~packages/cli-parser/cli-builder.type"; +import { ConfigManager } from "~packages/config-manager"; +import { Parser } from "@json2csv/plainjs"; +import type { Prisma } from "@prisma/client"; +import { MediaBackend } from "media-manager"; +import { mkdtemp } from "fs/promises"; +import { join } from "path"; +import { tmpdir } from "os"; const args = process.argv; -/** - * Make the text have a width of 20 characters, padding with gray dots - * Text can be a Chalk string, in which case formatting codes should not be counted in text length - * @param text The text to align - */ -const alignDots = (text: string, length = 20) => { - // Remove formatting codes - // eslint-disable-next-line no-control-regex - const textLength = text.replace(/\u001b\[\d+m/g, 
"").length; - const dots = ".".repeat(length - textLength); - return `${text}${chalk.gray(dots)}`; -}; +const config = await new ConfigManager({}).getConfig(); -const alignDotsSmall = (text: string, length = 16) => alignDots(text, length); +const cliBuilder = new CliBuilder([ + new CliCommand<{ + username: string; + password: string; + email: string; + admin: boolean; + help: boolean; + }>( + ["user", "create"], + [ + { + name: "username", + type: CliParameterType.STRING, + description: "Username of the user", + needsValue: true, + positioned: false, + }, + { + name: "password", + type: CliParameterType.STRING, + description: "Password of the user", + needsValue: true, + positioned: false, + }, + { + name: "email", + type: CliParameterType.STRING, + description: "Email of the user", + needsValue: true, + positioned: false, + }, + { + name: "admin", + type: CliParameterType.BOOLEAN, + description: "Make the user an admin", + needsValue: false, + positioned: false, + }, + { + name: "help", + shortName: "h", + type: CliParameterType.EMPTY, + description: "Show help message", + needsValue: false, + positioned: false, + }, + ], + async (instance: CliCommand, args) => { + const { username, password, email, admin, help } = args; -const help = ` -${chalk.bold(`Usage: bun cli ${chalk.blue("[...flags]")} [...args]`)} - -${chalk.bold("Commands:")} - ${alignDots(chalk.blue("help"), 24)} Show this help message - ${alignDots(chalk.blue("user"), 24)} Manage users - ${alignDots(chalk.blue("create"))} Create a new user - ${alignDotsSmall(chalk.green("username"))} Username of the user - ${alignDotsSmall(chalk.green("password"))} Password of the user - ${alignDotsSmall(chalk.green("email"))} Email of the user - ${alignDotsSmall( - chalk.yellow("--admin") - )} Make the user an admin (optional) - ${chalk.bold("Example:")} ${chalk.bgGray( - `bun cli user create admin password123 admin@gmail.com --admin` - )} - ${alignDots(chalk.blue("delete"))} Delete a user - ${alignDotsSmall(chalk.green("username"))} Username of the user - ${chalk.bold("Example:")} ${chalk.bgGray( - `bun cli user delete admin` - )} - ${alignDots(chalk.blue("list"))} List all users - ${alignDotsSmall( - chalk.yellow("--admins") - )} List only admins (optional) - ${chalk.bold("Example:")} ${chalk.bgGray(`bun cli user list`)} - ${alignDots(chalk.blue("search"))} Search for a user - ${alignDotsSmall(chalk.green("query"))} Query to search for - ${alignDotsSmall( - chalk.yellow("--displayname") - )} Search by display name (optional) - ${alignDotsSmall(chalk.yellow("--bio"))} Search in bio (optional) - ${alignDotsSmall( - chalk.yellow("--local") - )} Search in local users (optional) - ${alignDotsSmall( - chalk.yellow("--remote") - )} Search in remote users (optional) - ${alignDotsSmall( - chalk.yellow("--email") - )} Search in emails (optional) - ${alignDotsSmall(chalk.yellow("--json"))} Output as JSON (optional) - ${alignDotsSmall(chalk.yellow("--csv"))} Output as CSV (optional) - ${chalk.bold("Example:")} ${chalk.bgGray( - `bun cli user search admin` - )} - ${alignDots(chalk.blue("note"), 24)} Manage notes - ${alignDots(chalk.blue("delete"))} Delete a note - ${alignDotsSmall(chalk.green("id"))} ID of the note - ${chalk.bold("Example:")} ${chalk.bgGray( - `bun cli note delete 018c1838-6e0b-73c4-a157-a91ea4e25d1d` - )} - ${alignDots(chalk.blue("search"))} Search for a status - ${alignDotsSmall(chalk.green("query"))} Query to search for - ${alignDotsSmall( - chalk.yellow("--local") - )} Search in local statuses (optional) - ${alignDotsSmall( - 
chalk.yellow("--remote") - )} Search in remote statuses (optional) - ${alignDotsSmall(chalk.yellow("--json"))} Output as JSON (optional) - ${alignDotsSmall(chalk.yellow("--csv"))} Output as CSV (optional) - ${chalk.bold("Example:")} ${chalk.bgGray( - `bun cli note search hello` - )} - -`; - -if (args.length < 3) { - console.log(help); - process.exit(0); -} - -const command = args[2]; - -switch (command) { - case "help": - console.log(help); - break; - case "user": - switch (args[3]) { - case "create": { - // Check if --admin flag is provided - const argsWithFlags = args.filter(arg => arg.startsWith("--")); - const argsWithoutFlags = args.filter( - arg => !arg.startsWith("--") - ); - - const username = argsWithoutFlags[4]; - const password = argsWithoutFlags[5]; - const email = argsWithoutFlags[6]; - - const admin = argsWithFlags.includes("--admin"); - - // Check if username, password and email are provided - if (!username || !password || !email) { - console.log( - `${chalk.red(`✗`)} Missing username, password or email` - ); - process.exit(1); - } - - // Check if user already exists - const user = await client.user.findFirst({ - where: { - OR: [{ username }, { email }], - }, - }); - - if (user) { - console.log(`${chalk.red(`✗`)} User already exists`); - process.exit(1); - } - - // Create user - const newUser = await createNewLocalUser({ - email: email, - password: password, - username: username, - admin: admin, - }); - - console.log( - `${chalk.green(`✓`)} Created user ${chalk.blue( - newUser.username - )}${admin ? chalk.green(" (admin)") : ""}` - ); - break; + if (help) { + instance.displayHelp(); + return 0; } - case "delete": { - const username = args[4]; - if (!username) { - console.log(`${chalk.red(`✗`)} Missing username`); - process.exit(1); + // Check if username, password and email are provided + if (!username || !password || !email) { + console.log( + `${chalk.red(`✗`)} Missing username, password or email` + ); + return 1; + } + + // Check if user already exists + const user = await client.user.findFirst({ + where: { + OR: [{ username }, { email }], + }, + }); + + if (user) { + if (user.username === username) { + console.log( + `${chalk.red(`✗`)} User with username ${chalk.blue(username)} already exists` + ); + } else { + console.log( + `${chalk.red(`✗`)} User with email ${chalk.blue(email)} already exists` + ); + } + return 1; + } + + // Create user + const newUser = await createNewLocalUser({ + email: email, + password: password, + username: username, + admin: admin, + }); + + console.log( + `${chalk.green(`✓`)} Created user ${chalk.blue( + newUser.username + )}${admin ? 
chalk.green(" (admin)") : ""}` + ); + + return 0; + }, + "Creates a new user", + "bun cli user create --username admin --password password123 --email email@email.com" + ), + new CliCommand<{ + username: string; + help: boolean; + noconfirm: boolean; + }>( + ["user", "delete"], + [ + { + name: "username", + type: CliParameterType.STRING, + description: "Username of the user", + needsValue: true, + positioned: true, + }, + { + name: "help", + shortName: "h", + type: CliParameterType.EMPTY, + description: "Show help message", + needsValue: false, + positioned: false, + }, + { + name: "noconfirm", + shortName: "y", + type: CliParameterType.EMPTY, + description: "Skip confirmation", + needsValue: false, + positioned: false, + }, + ], + async (instance: CliCommand, args) => { + const { username, help } = args; + + if (help) { + instance.displayHelp(); + return 0; + } + + if (!username) { + console.log(`${chalk.red(`✗`)} Missing username`); + return 1; + } + + const user = await client.user.findFirst({ + where: { + username: username, + }, + }); + + if (!user) { + console.log(`${chalk.red(`✗`)} User not found`); + return 1; + } + + if (!args.noconfirm) { + process.stdout.write( + `Are you sure you want to delete user ${chalk.blue( + user.username + )}?\n${chalk.red(chalk.bold("This is a destructive action and cannot be undone!"))} [y/N] ` + ); + + for await (const line of console) { + if (line.trim().toLowerCase() === "y") { + break; + } else { + console.log(`${chalk.red(`✗`)} Deletion cancelled`); + return 0; + } + } + } + + await client.user.delete({ + where: { + id: user.id, + }, + }); + + console.log( + `${chalk.green(`✓`)} Deleted user ${chalk.blue(user.username)}` + ); + + return 0; + }, + "Deletes a user", + "bun cli user delete --username admin" + ), + new CliCommand<{ + admins: boolean; + help: boolean; + format: string; + limit: number; + redact: boolean; + fields: string[]; + }>( + ["user", "list"], + [ + { + name: "admins", + type: CliParameterType.BOOLEAN, + description: "List only admins", + needsValue: false, + positioned: false, + }, + { + name: "help", + shortName: "h", + type: CliParameterType.EMPTY, + description: "Show help message", + needsValue: false, + positioned: false, + }, + { + name: "format", + type: CliParameterType.STRING, + description: "Output format (can be json or csv)", + needsValue: true, + positioned: false, + optional: true, + }, + { + name: "limit", + type: CliParameterType.NUMBER, + description: + "Limit the number of users to list (defaults to 200)", + needsValue: true, + positioned: false, + optional: true, + }, + { + name: "redact", + type: CliParameterType.BOOLEAN, + description: + "Redact sensitive information (such as password hashes, emails or keys)", + needsValue: false, + positioned: false, + optional: true, + }, + { + name: "fields", + type: CliParameterType.ARRAY, + description: + "If provided, restricts output to these fields (comma-separated)", + needsValue: true, + positioned: false, + optional: true, + }, + ], + async (instance: CliCommand, args) => { + const { admins, help } = args; + + if (help) { + instance.displayHelp(); + return 0; + } + + if (args.format && !["json", "csv"].includes(args.format)) { + console.log(`${chalk.red(`✗`)} Invalid format`); + return 1; + } + + const users = await client.user.findMany({ + where: { + isAdmin: admins || undefined, + }, + take: args.limit ?? 
200, + include: { + instance: true, + }, + }); + + if (args.redact) { + for (const user of users) { + user.email = "[REDACTED]"; + user.password = "[REDACTED]"; + user.publicKey = "[REDACTED]"; + user.privateKey = "[REDACTED]"; + } + } + + if (args.fields) { + for (const user of users) { + const keys = Object.keys(user); + for (const key of keys) { + if (!args.fields.includes(key)) { + // @ts-expect-error Shouldn't cause issues in this case + // eslint-disable-next-line @typescript-eslint/no-dynamic-delete + delete user[key]; + } + } + } + } + + if (args.format === "json") { + console.log(JSON.stringify(users, null, 4)); + return 0; + } else if (args.format == "csv") { + const parser = new Parser({}); + console.log(parser.parse(users)); + return 0; + } + + console.log( + `${chalk.green(`✓`)} Found ${chalk.blue(users.length)} users (limit ${args.limit ?? 200})` + ); + + const tableHead = { + username: chalk.white(chalk.bold("Username")), + email: chalk.white(chalk.bold("Email")), + displayName: chalk.white(chalk.bold("Display Name")), + isAdmin: chalk.white(chalk.bold("Admin?")), + instance: chalk.white(chalk.bold("Instance URL")), + createdAt: chalk.white(chalk.bold("Created At")), + id: chalk.white(chalk.bold("Internal UUID")), + }; + + // Only keep the fields specified if --fields is provided + if (args.fields) { + const keys = Object.keys(tableHead); + for (const key of keys) { + if (!args.fields.includes(key)) { + // @ts-expect-error This is fine + // eslint-disable-next-line @typescript-eslint/no-dynamic-delete + delete tableHead[key]; + } + } + } + + const table = new Table({ + head: Object.values(tableHead), + }); + + for (const user of users) { + // Print table of users + const data = { + username: () => chalk.yellow(`@${user.username}`), + email: () => chalk.green(user.email), + displayName: () => chalk.blue(user.displayName), + isAdmin: () => chalk.red(user.isAdmin ? "Yes" : "No"), + instance: () => + chalk.blue( + user.instance ? 
user.instance.base_url : "Local" + ), + createdAt: () => chalk.blue(user.createdAt.toISOString()), + id: () => chalk.blue(user.id), + }; + + // Only keep the fields specified if --fields is provided + if (args.fields) { + const keys = Object.keys(data); + for (const key of keys) { + if (!args.fields.includes(key)) { + // @ts-expect-error This is fine + // eslint-disable-next-line @typescript-eslint/no-dynamic-delete + delete data[key]; + } + } } - const user = await client.user.findFirst({ - where: { - username: username, + table.push(Object.values(data).map(fn => fn())); + } + + console.log(table.toString()); + + return 0; + }, + "Lists all users", + "bun cli user list" + ), + new CliCommand<{ + query: string; + fields: string[]; + format: string; + help: boolean; + "case-sensitive": boolean; + limit: number; + redact: boolean; + }>( + ["user", "search"], + [ + { + name: "query", + type: CliParameterType.STRING, + description: "Query to search for", + needsValue: true, + positioned: true, + }, + { + name: "fields", + type: CliParameterType.ARRAY, + description: "Fields to search in", + needsValue: true, + positioned: false, + }, + { + name: "format", + type: CliParameterType.STRING, + description: "Output format (can be json or csv)", + needsValue: true, + positioned: false, + optional: true, + }, + { + name: "help", + shortName: "h", + type: CliParameterType.EMPTY, + description: "Show help message", + needsValue: false, + positioned: false, + }, + { + name: "case-sensitive", + shortName: "c", + type: CliParameterType.EMPTY, + description: "Case-sensitive search", + needsValue: false, + positioned: false, + optional: true, + }, + { + name: "limit", + type: CliParameterType.NUMBER, + description: "Limit the number of users to list (default 20)", + needsValue: true, + positioned: false, + optional: true, + }, + { + name: "redact", + type: CliParameterType.BOOLEAN, + description: + "Redact sensitive information (such as password hashes, emails or keys)", + needsValue: false, + positioned: false, + optional: true, + }, + ], + async (instance: CliCommand, args) => { + const { + query, + fields = [], + help, + limit = 20, + "case-sensitive": caseSensitive = false, + redact, + } = args; + + if (help) { + instance.displayHelp(); + return 0; + } + + if (!query) { + console.log(`${chalk.red(`✗`)} Missing query parameter`); + return 1; + } + + if (fields.length === 0) { + console.log(`${chalk.red(`✗`)} Missing fields parameter`); + return 1; + } + + const queries: Prisma.UserWhereInput[] = []; + + for (const field of fields) { + queries.push({ + [field]: { + contains: query, + mode: caseSensitive ? 
"default" : "insensitive", }, }); + } - if (!user) { - console.log(`${chalk.red(`✗`)} User not found`); - process.exit(1); + const users = await client.user.findMany({ + where: { + OR: queries, + }, + include: { + instance: true, + }, + take: limit, + }); + + if (redact) { + for (const user of users) { + user.email = "[REDACTED]"; + user.password = "[REDACTED]"; + user.publicKey = "[REDACTED]"; + user.privateKey = "[REDACTED]"; } + } - await client.user.delete({ - where: { - id: user.id, + if (args.format === "json") { + console.log(JSON.stringify(users, null, 4)); + return 0; + } else if (args.format === "csv") { + const parser = new Parser({}); + console.log(parser.parse(users)); + return 0; + } + + console.log( + `${chalk.green(`✓`)} Found ${chalk.blue(users.length)} users (limit ${limit})` + ); + + const table = new Table({ + head: [ + chalk.white(chalk.bold("Username")), + chalk.white(chalk.bold("Email")), + chalk.white(chalk.bold("Display Name")), + chalk.white(chalk.bold("Admin?")), + chalk.white(chalk.bold("Instance URL")), + ], + }); + + for (const user of users) { + table.push([ + chalk.yellow(`@${user.username}`), + chalk.green(user.email), + chalk.blue(user.displayName), + chalk.red(user.isAdmin ? "Yes" : "No"), + chalk.blue( + user.instanceId ? user.instance?.base_url : "Local" + ), + ]); + } + + console.log(table.toString()); + + return 0; + }, + "Searches for a user", + "bun cli user search bob --fields email,username" + ), + + new CliCommand<{ + username: string; + "issuer-id": string; + "server-id": string; + help: boolean; + }>( + ["user", "oidc", "connect"], + [ + { + name: "username", + type: CliParameterType.STRING, + description: "Username of the local account", + needsValue: true, + positioned: true, + }, + { + name: "issuer-id", + type: CliParameterType.STRING, + description: "ID of the OpenID Connect issuer in config", + needsValue: true, + positioned: false, + }, + { + name: "server-id", + type: CliParameterType.STRING, + description: "ID of the user on the OpenID Connect server", + needsValue: true, + positioned: false, + }, + { + name: "help", + shortName: "h", + type: CliParameterType.EMPTY, + description: "Show help message", + needsValue: false, + positioned: false, + }, + ], + async (instance: CliCommand, args) => { + const { + username, + "issuer-id": issuerId, + "server-id": serverId, + help, + } = args; + + if (help) { + instance.displayHelp(); + return 0; + } + + if (!username || !issuerId || !serverId) { + console.log(`${chalk.red(`✗`)} Missing username, issuer or ID`); + return 1; + } + + // Check if issuerId is valid + if (!config.oidc.providers.find(p => p.id === issuerId)) { + console.log(`${chalk.red(`✗`)} Invalid issuer ID`); + return 1; + } + + const user = await client.user.findFirst({ + where: { + username: username, + }, + include: { + linkedOpenIdAccounts: true, + }, + }); + + if (!user) { + console.log(`${chalk.red(`✗`)} User not found`); + return 1; + } + + if (user.linkedOpenIdAccounts.find(a => a.issuerId === issuerId)) { + console.log( + `${chalk.red(`✗`)} User ${chalk.blue( + user.username + )} is already connected to this OpenID Connect issuer with another account` + ); + return 1; + } + + // Connect the OpenID account + await client.user.update({ + where: { + id: user.id, + }, + data: { + linkedOpenIdAccounts: { + create: { + issuerId: issuerId, + serverId: serverId, + }, + }, + }, + }); + + console.log( + `${chalk.green(`✓`)} Connected OpenID Connect account to user ${chalk.blue( + user.username + )}` + ); + + return 0; + }, + 
"Connects an OpenID Connect account to a local account", + "bun cli user oidc connect admin google 123456789" + ), + new CliCommand<{ + "server-id": string; + help: boolean; + }>( + ["user", "oidc", "disconnect"], + [ + { + name: "server-id", + type: CliParameterType.STRING, + description: "Server ID of the OpenID Connect account", + needsValue: true, + positioned: true, + }, + { + name: "help", + shortName: "h", + type: CliParameterType.EMPTY, + description: "Show help message", + needsValue: false, + positioned: false, + }, + ], + async (instance: CliCommand, args) => { + const { "server-id": id, help } = args; + + if (help) { + instance.displayHelp(); + return 0; + } + + if (!id) { + console.log(`${chalk.red(`✗`)} Missing ID`); + return 1; + } + + const account = await client.openIdAccount.findFirst({ + where: { + serverId: id, + }, + include: { + User: true, + }, + }); + + if (!account) { + console.log(`${chalk.red(`✗`)} Account not found`); + return 1; + } + + await client.openIdAccount.delete({ + where: { + id: account.id, + }, + }); + + console.log( + `${chalk.green(`✓`)} Disconnected OpenID account from user ${chalk.blue(account.User?.username)}` + ); + + return 0; + }, + "Disconnects an OpenID Connect account from a local account", + "bun cli user oidc disconnect 123456789" + ), + new CliCommand<{ + id: string; + help: boolean; + noconfirm: boolean; + }>( + ["note", "delete"], + [ + { + name: "id", + type: CliParameterType.STRING, + description: "ID of the note", + needsValue: true, + positioned: true, + }, + { + name: "help", + shortName: "h", + type: CliParameterType.EMPTY, + description: "Show help message", + needsValue: false, + positioned: false, + }, + { + name: "noconfirm", + shortName: "y", + type: CliParameterType.EMPTY, + description: "Skip confirmation", + needsValue: false, + positioned: false, + }, + ], + async (instance: CliCommand, args) => { + const { id, help } = args; + + if (help) { + instance.displayHelp(); + return 0; + } + + if (!id) { + console.log(`${chalk.red(`✗`)} Missing ID`); + return 1; + } + + const note = await client.status.findFirst({ + where: { + id: id, + }, + }); + + if (!note) { + console.log(`${chalk.red(`✗`)} Note not found`); + return 1; + } + + if (!args.noconfirm) { + process.stdout.write( + `Are you sure you want to delete note ${chalk.blue( + note.id + )}?\n${chalk.red(chalk.bold("This is a destructive action and cannot be undone!"))} [y/N] ` + ); + + for await (const line of console) { + if (line.trim().toLowerCase() === "y") { + break; + } else { + console.log(`${chalk.red(`✗`)} Deletion cancelled`); + return 0; + } + } + } + + await client.status.delete({ + where: { + id: note.id, + }, + }); + + console.log( + `${chalk.green(`✓`)} Deleted note ${chalk.blue(note.id)}` + ); + + return 0; + }, + "Deletes a note", + "bun cli note delete 018c1838-6e0b-73c4-a157-a91ea4e25d1d" + ), + new CliCommand<{ + query: string; + fields: string[]; + local: boolean; + remote: boolean; + format: string; + help: boolean; + "case-sensitive": boolean; + limit: number; + redact: boolean; + }>( + ["note", "search"], + [ + { + name: "query", + type: CliParameterType.STRING, + description: "Query to search for", + needsValue: true, + positioned: true, + }, + { + name: "fields", + type: CliParameterType.ARRAY, + description: "Fields to search in", + needsValue: true, + positioned: false, + }, + { + name: "local", + type: CliParameterType.BOOLEAN, + description: "Only search in local statuses", + needsValue: false, + positioned: false, + optional: true, + }, + { 
+ name: "remote", + type: CliParameterType.BOOLEAN, + description: "Only search in remote statuses", + needsValue: false, + positioned: false, + optional: true, + }, + { + name: "format", + type: CliParameterType.STRING, + description: "Output format (can be json or csv)", + needsValue: true, + positioned: false, + optional: true, + }, + { + name: "help", + shortName: "h", + type: CliParameterType.EMPTY, + description: "Show help message", + needsValue: false, + positioned: false, + }, + { + name: "case-sensitive", + shortName: "c", + type: CliParameterType.EMPTY, + description: "Case-sensitive search", + needsValue: false, + positioned: false, + optional: true, + }, + { + name: "limit", + type: CliParameterType.NUMBER, + description: "Limit the number of notes to list (default 20)", + needsValue: true, + positioned: false, + optional: true, + }, + { + name: "redact", + type: CliParameterType.BOOLEAN, + description: + "Redact sensitive information (such as password hashes, emails or keys)", + needsValue: false, + positioned: false, + optional: true, + }, + ], + async (instance: CliCommand, args) => { + const { + query, + local, + remote, + format, + help, + limit = 20, + fields = [], + "case-sensitive": caseSensitive = false, + redact, + } = args; + + if (help) { + instance.displayHelp(); + return 0; + } + + if (!query) { + console.log(`${chalk.red(`✗`)} Missing query parameter`); + return 1; + } + + if (fields.length === 0) { + console.log(`${chalk.red(`✗`)} Missing fields parameter`); + return 1; + } + + const queries: Prisma.StatusWhereInput[] = []; + + for (const field of fields) { + queries.push({ + [field]: { + contains: query, + mode: caseSensitive ? "default" : "insensitive", }, }); + } + + let instanceIdQuery; + + if (local && remote) { + instanceIdQuery = undefined; + } else if (local) { + instanceIdQuery = null; + } else if (remote) { + instanceIdQuery = { + not: null, + }; + } else { + instanceIdQuery = undefined; + } + + const notes = await client.status.findMany({ + where: { + OR: queries, + instanceId: instanceIdQuery, + }, + include: { + author: true, + instance: true, + }, + take: limit, + }); + + if (redact) { + for (const note of notes) { + note.author.email = "[REDACTED]"; + note.author.password = "[REDACTED]"; + note.author.publicKey = "[REDACTED]"; + note.author.privateKey = "[REDACTED]"; + } + } + + if (format === "json") { + console.log(JSON.stringify(notes, null, 4)); + return 0; + } else if (format === "csv") { + const parser = new Parser({}); + console.log(parser.parse(notes)); + return 0; + } + + console.log( + `${chalk.green(`✓`)} Found ${chalk.blue(notes.length)} notes (limit ${limit})` + ); + + const table = new Table({ + head: [ + chalk.white(chalk.bold("ID")), + chalk.white(chalk.bold("Content")), + chalk.white(chalk.bold("Author")), + chalk.white(chalk.bold("Instance")), + chalk.white(chalk.bold("Created At")), + ], + }); + + for (const note of notes) { + table.push([ + chalk.yellow(note.id), + chalk.green(note.content), + chalk.blue(note.author.username), + chalk.red( + note.instanceId ? 
note.instance?.base_url : "Yes" + ), + chalk.blue(note.createdAt.toISOString()), + ]); + } + + console.log(table.toString()); + + return 0; + }, + "Searches for a status", + "bun cli note search hello --fields content --local" + ), + new CliCommand<{ + help: boolean; + type: string[]; + }>( + ["index", "rebuild"], + [ + { + name: "help", + shortName: "h", + type: CliParameterType.EMPTY, + description: "Show help message", + needsValue: false, + positioned: false, + }, + { + name: "type", + type: CliParameterType.ARRAY, + description: + "Type(s) of index(es) to rebuild (can be accounts or statuses)", + needsValue: true, + positioned: false, + optional: true, + }, + ], + async (instance: CliCommand, args) => { + const { help, type = [] } = args; + + if (help) { + instance.displayHelp(); + return 0; + } + + // Check if Meilisearch is enabled + if (!config.meilisearch.enabled) { + console.log(`${chalk.red(`✗`)} Meilisearch is not enabled`); + return 1; + } + + // Check type validity + for (const _type of type) { + if ( + !Object.values(MeiliIndexType).includes( + _type as MeiliIndexType + ) + ) { + console.log( + `${chalk.red(`✗`)} Invalid index type ${chalk.blue(_type)}` + ); + return 1; + } + } + + if (type.length === 0) { + // Rebuild all indexes + await rebuildSearchIndexes(Object.values(MeiliIndexType)); + } else { + await rebuildSearchIndexes(type as MeiliIndexType[]); + } + + console.log(`${chalk.green(`✓`)} Rebuilt search indexes`); + + return 0; + }, + "Rebuilds the Meilisearch indexes", + "bun cli index rebuild" + ), + new CliCommand<{ + help: boolean; + shortcode: string; + url: string; + "keep-url": boolean; + }>( + ["emoji", "add"], + [ + { + name: "help", + shortName: "h", + type: CliParameterType.EMPTY, + description: "Show help message", + needsValue: false, + positioned: false, + optional: true, + }, + { + name: "shortcode", + type: CliParameterType.STRING, + description: "Shortcode of the new emoji", + needsValue: true, + positioned: true, + }, + { + name: "url", + type: CliParameterType.STRING, + description: "URL of the new emoji", + needsValue: true, + positioned: true, + }, + { + name: "keep-url", + type: CliParameterType.BOOLEAN, + description: + "Keep the URL of the emoji instead of uploading the file to object storage", + needsValue: false, + positioned: false, + }, + ], + async (instance: CliCommand, args) => { + const { help, shortcode, url } = args; + + if (help) { + instance.displayHelp(); + return 0; + } + + if (!shortcode) { + console.log(`${chalk.red(`✗`)} Missing shortcode`); + return 1; + } + if (!url) { + console.log(`${chalk.red(`✗`)} Missing URL`); + return 1; + } + + // Check if shortcode is valid + if (!shortcode.match(/^[a-zA-Z0-9-_]+$/)) { + console.log( + `${chalk.red(`✗`)} Invalid shortcode (must be alphanumeric with dashes and underscores allowed)` + ); + return 1; + } + + // Check if URL is valid + if (!URL.canParse(url)) { + console.log( + `${chalk.red(`✗`)} Invalid URL (must be a valid full URL, including protocol)` + ); + return 1; + } + + // Check if emoji already exists + const existingEmoji = await client.emoji.findFirst({ + where: { + shortcode: shortcode, + instanceId: null, + }, + }); + + if (existingEmoji) { + console.log( + `${chalk.red(`✗`)} Emoji with shortcode ${chalk.blue( + shortcode + )} already exists` + ); + return 1; + } + + let newUrl = url; + + if (!args["keep-url"]) { + // Upload the emoji to object storage + const mediaBackend = await MediaBackend.fromBackendType( + config.media.backend, + config + ); console.log( - 
`${chalk.green(`✓`)} Deleted user ${chalk.blue( - user.username + `${chalk.blue(`⏳`)} Downloading emoji from ${chalk.underline(chalk.blue(url))}` + ); + + const downloadedFile = await fetch(url).then( + async r => + new File( + [await r.blob()], + url.split("/").pop() ?? + `${crypto.randomUUID()}-emoji.png` + ) + ); + + const metadata = await mediaBackend + .addFile(downloadedFile) + .catch(() => null); + + if (!metadata) { + console.log( + `${chalk.red(`✗`)} Failed to upload emoji to object storage (is your URL accessible?)` + ); + return 1; + } + + newUrl = getUrl(metadata.uploadedFile.name, config); + + console.log( + `${chalk.green(`✓`)} Uploaded emoji to object storage` + ); + } + + // Add the emoji + const content_type = `image/${url + .split(".") + .pop() + ?.replace("jpg", "jpeg")}}`; + + const emoji = await client.emoji.create({ + data: { + shortcode: shortcode, + url: newUrl, + visible_in_picker: true, + content_type: content_type, + instanceId: null, + }, + }); + + console.log( + `${chalk.green(`✓`)} Created emoji ${chalk.blue( + emoji.shortcode + )}` + ); + + return 0; + }, + "Adds a custom emoji", + "bun cli emoji add bun https://bun.com/bun.png" + ), + new CliCommand<{ + help: boolean; + shortcode: string; + noconfirm: boolean; + }>( + ["emoji", "delete"], + [ + { + name: "help", + shortName: "h", + type: CliParameterType.EMPTY, + description: "Show help message", + needsValue: false, + positioned: false, + optional: true, + }, + { + name: "shortcode", + type: CliParameterType.STRING, + description: + "Shortcode of the emoji to delete (can add up to two wildcards *)", + needsValue: true, + positioned: true, + }, + { + name: "noconfirm", + type: CliParameterType.BOOLEAN, + description: "Skip confirmation", + needsValue: false, + positioned: false, + optional: true, + }, + ], + async (instance: CliCommand, args) => { + const { help, shortcode, noconfirm } = args; + + if (help) { + instance.displayHelp(); + return 0; + } + + if (!shortcode) { + console.log(`${chalk.red(`✗`)} Missing shortcode`); + return 1; + } + + // Check if shortcode is valid + if (!shortcode.match(/^[a-zA-Z0-9-_*]+$/)) { + console.log( + `${chalk.red(`✗`)} Invalid shortcode (must be alphanumeric with dashes and underscores allowed + optional wildcards)` + ); + return 1; + } + + // Validate up to one wildcard + if (shortcode.split("*").length > 3) { + console.log( + `${chalk.red(`✗`)} Invalid shortcode (can only have up to two wildcards)` + ); + return 1; + } + + const hasWildcard = shortcode.includes("*"); + const hasTwoWildcards = shortcode.split("*").length === 3; + + const emojis = await client.emoji.findMany({ + where: { + shortcode: { + startsWith: hasWildcard + ? shortcode.split("*")[0] + : undefined, + endsWith: hasWildcard + ? shortcode.split("*").at(-1) + : undefined, + contains: hasTwoWildcards + ? shortcode.split("*")[1] + : undefined, + equals: hasWildcard ? 
undefined : shortcode, + }, + instanceId: null, + }, + }); + + if (emojis.length === 0) { + console.log( + `${chalk.red(`✗`)} No emoji with shortcode ${chalk.blue( + shortcode + )} found` + ); + return 1; + } + + // List emojis and ask for confirmation + for (const emoji of emojis) { + console.log( + `${chalk.blue(emoji.shortcode)}: ${chalk.underline( + emoji.url )}` ); - - break; } - case "list": { - const admins = args.includes("--admins"); - const users = await client.user.findMany({ - where: { - isAdmin: admins || undefined, + if (!noconfirm) { + process.stdout.write( + `Are you sure you want to delete these emojis?\n${chalk.red(chalk.bold("This is a destructive action and cannot be undone!"))} [y/N] ` + ); + + for await (const line of console) { + if (line.trim().toLowerCase() === "y") { + break; + } else { + console.log(`${chalk.red(`✗`)} Deletion cancelled`); + return 0; + } + } + } + + await client.emoji.deleteMany({ + where: { + id: { + in: emojis.map(e => e.id), }, - take: 200, - }); + }, + }); + console.log( + `${chalk.green(`✓`)} Deleted emojis matching shortcode ${chalk.blue( + shortcode + )}` + ); + + return 0; + }, + "Deletes custom emojis", + "bun cli emoji delete bun" + ), + new CliCommand<{ + help: boolean; + format: string; + limit: number; + }>( + ["emoji", "list"], + [ + { + name: "help", + shortName: "h", + type: CliParameterType.EMPTY, + description: "Show help message", + needsValue: false, + positioned: false, + optional: true, + }, + { + name: "format", + type: CliParameterType.STRING, + description: "Output format (can be json or csv)", + needsValue: true, + positioned: false, + optional: true, + }, + { + name: "limit", + type: CliParameterType.NUMBER, + description: "Limit the number of emojis to list (default 20)", + needsValue: true, + positioned: false, + optional: true, + }, + ], + async (instance: CliCommand, args) => { + const { help, format, limit = 20 } = args; + + if (help) { + instance.displayHelp(); + return 0; + } + + const emojis = await client.emoji.findMany({ + where: { + instanceId: null, + }, + take: limit, + }); + + if (format === "json") { + console.log(JSON.stringify(emojis, null, 4)); + return 0; + } else if (format === "csv") { + const parser = new Parser({}); + console.log(parser.parse(emojis)); + return 0; + } + + console.log( + `${chalk.green(`✓`)} Found ${chalk.blue(emojis.length)} emojis (limit ${limit})` + ); + + const table = new Table({ + head: [ + chalk.white(chalk.bold("Shortcode")), + chalk.white(chalk.bold("URL")), + ], + }); + + for (const emoji of emojis) { + table.push([ + chalk.blue(emoji.shortcode), + chalk.underline(emoji.url), + ]); + } + + console.log(table.toString()); + + return 0; + }, + "Lists all custom emojis", + "bun cli emoji list" + ), + new CliCommand<{ + help: boolean; + url: string; + noconfirm: boolean; + }>( + ["emoji", "import"], + [ + { + name: "help", + shortName: "h", + type: CliParameterType.EMPTY, + description: "Show help message", + needsValue: false, + positioned: false, + optional: true, + }, + { + name: "url", + type: CliParameterType.STRING, + description: "URL of the emoji pack manifest", + needsValue: true, + positioned: true, + }, + { + name: "noconfirm", + type: CliParameterType.BOOLEAN, + description: "Skip confirmation", + needsValue: false, + positioned: false, + optional: true, + }, + ], + async (instance: CliCommand, args) => { + const { help, url, noconfirm } = args; + + if (help) { + instance.displayHelp(); + return 0; + } + + if (!url) { + console.log(`${chalk.red(`✗`)} Missing 
URL`); + return 1; + } + + // Check if URL is valid + if (!URL.canParse(url)) { console.log( - `${chalk.green(`✓`)} Found ${chalk.blue( - users.length - )} users` + `${chalk.red(`✗`)} Invalid URL (must be a valid full URL, including protocol)` ); - - for (const user of users) { - console.log( - `\t${chalk.blue(user.username)} ${chalk.gray( - user.email - )} ${chalk.green(user.isAdmin ? "Admin" : "User")}` - ); - } - break; + return 1; } - case "search": { - const argsWithoutFlags = args.filter( - arg => !arg.startsWith("--") + + // Fetch the emoji pack manifest + const manifest = await fetch(url) + .then( + r => + r.json() as Promise< + Record< + string, + { + files: string; + homepage: string; + src: string; + src_sha256?: string; + } + > + > + ) + .catch(() => null); + + if (!manifest) { + console.log( + `${chalk.red(`✗`)} Failed to fetch emoji pack manifest from ${chalk.underline( + url + )}` ); - const query = argsWithoutFlags[4]; + return 1; + } - if (!query) { - console.log(`${chalk.red(`✗`)} Missing query`); - process.exit(1); - } + const homepage = Object.values(manifest)[0].homepage; + // If URL is not a valid URL, assume it's a relative path to homepage + const srcUrl = URL.canParse(Object.values(manifest)[0].src) + ? Object.values(manifest)[0].src + : new URL(Object.values(manifest)[0].src, homepage).toString(); + const filesUrl = URL.canParse(Object.values(manifest)[0].files) + ? Object.values(manifest)[0].files + : new URL( + Object.values(manifest)[0].files, + homepage + ).toString(); - const displayname = args.includes("--displayname"); - const bio = args.includes("--bio"); - const local = args.includes("--local"); - const remote = args.includes("--remote"); - const email = args.includes("--email"); - const json = args.includes("--json"); - const csv = args.includes("--csv"); + console.log( + `${chalk.blue(`⏳`)} Fetching emoji pack from ${chalk.underline( + srcUrl + )}` + ); - const queries: Prisma.UserWhereInput[] = []; + // Fetch actual pack (should be a zip file) + const pack = await fetch(srcUrl) + .then( + async r => + new File( + [await r.blob()], + srcUrl.split("/").pop() ?? "pack.zip" + ) + ) + .catch(() => null); - if (displayname) { - queries.push({ - displayName: { - contains: query, - mode: "insensitive", - }, - }); - } + // Check if pack is valid + if (!pack) { + console.log( + `${chalk.red(`✗`)} Failed to fetch emoji pack from ${chalk.underline( + srcUrl + )}` + ); + return 1; + } - if (bio) { - queries.push({ - note: { - contains: query, - mode: "insensitive", - }, - }); - } - - if (local) { - queries.push({ - instanceId: null, - }); - } - - if (remote) { - queries.push({ - instanceId: { - not: null, - }, - }); - } - - if (email) { - queries.push({ - email: { - contains: query, - mode: "insensitive", - }, - }); - } - - const users = await client.user.findMany({ - where: { - AND: queries, - }, - include: { - instance: true, - }, - take: 40, - }); - - if (json || csv) { - if (json) { - console.log(JSON.stringify(users, null, 4)); - } - if (csv) { - // Convert the outputted JSON to CSV - - // Remove all object children from each object - const items = users.map(user => { - const item = { - ...user, - instance: undefined, - endpoints: undefined, - source: undefined, - }; - return item; - }); - const replacer = (key: string, value: any): any => - value === null ? 
"" : value; // Null values are returned as empty strings - const header = Object.keys(items[0]); - const csv = [ - header.join(","), // header row first - ...items.map(row => - header - .map(fieldName => - // @ts-expect-error This is fine - JSON.stringify(row[fieldName], replacer) - ) - .join(",") - ), - ].join("\r\n"); - - console.log(csv); - } + // Validate sha256 if available + if (Object.values(manifest)[0].src_sha256) { + const sha256 = new Bun.SHA256() + .update(await pack.arrayBuffer()) + .digest("hex"); + if (sha256 !== Object.values(manifest)[0].src_sha256) { + console.log( + `${chalk.red(`✗`)} SHA256 of pack (${chalk.blue( + sha256 + )}) does not match manifest ${chalk.blue( + Object.values(manifest)[0].src_sha256 + )}` + ); + return 1; } else { console.log( - `${chalk.green(`✓`)} Found ${chalk.blue( - users.length - )} users` + `${chalk.green(`✓`)} SHA256 of pack matches manifest` ); - - const table = new Table({ - head: [ - chalk.white(chalk.bold("Username")), - chalk.white(chalk.bold("Email")), - chalk.white(chalk.bold("Display Name")), - chalk.white(chalk.bold("Admin?")), - chalk.white(chalk.bold("Instance URL")), - ], - }); - - for (const user of users) { - table.push([ - chalk.yellow(`@${user.username}`), - chalk.green(user.email), - chalk.blue(user.displayName), - chalk.red(user.isAdmin ? "Yes" : "No"), - chalk.blue( - user.instanceId - ? user.instance?.base_url - : "Local" - ), - ]); - } - - console.log(table.toString()); } - - break; - } - default: - console.log(`Unknown command ${chalk.blue(command)}`); - break; - } - break; - case "note": { - switch (args[3]) { - case "delete": { - const id = args[4]; - - if (!id) { - console.log(`${chalk.red(`✗`)} Missing ID`); - process.exit(1); - } - - const note = await client.status.findFirst({ - where: { - id: id, - }, - }); - - if (!note) { - console.log(`${chalk.red(`✗`)} Note not found`); - process.exit(1); - } - - await client.status.delete({ - where: { - id: note.id, - }, - }); - + } else { console.log( - `${chalk.green(`✓`)} Deleted note ${chalk.blue(note.id)}` + `${chalk.yellow(`⚠`)} No SHA256 in manifest, skipping validation` ); - - break; } - case "search": { - const argsWithoutFlags = args.filter( - arg => !arg.startsWith("--") + + console.log( + `${chalk.green(`✓`)} Fetched emoji pack from ${chalk.underline(srcUrl)}, unzipping to tempdir` + ); + + // Unzip the pack to temp dir + const tempDir = await mkdtemp(join(tmpdir(), "bun-emoji-import-")); + + console.log(join(tempDir, pack.name)); + + // Put the pack as a file + await Bun.write(join(tempDir, pack.name), pack); + + await extract(join(tempDir, pack.name), { + dir: tempDir, + }); + + console.log( + `${chalk.green(`✓`)} Unzipped emoji pack to ${chalk.blue(tempDir)}` + ); + + console.log( + `${chalk.blue(`⏳`)} Fetching emoji pack file metadata from ${chalk.underline( + filesUrl + )}` + ); + + // Fetch files URL + const packFiles = await fetch(filesUrl) + .then(r => r.json() as Promise>) + .catch(() => null); + + if (!packFiles) { + console.log( + `${chalk.red(`✗`)} Failed to fetch emoji pack file metadata from ${chalk.underline( + filesUrl + )}` ); - const query = argsWithoutFlags[4]; + return 1; + } - if (!query) { - console.log(`${chalk.red(`✗`)} Missing query`); - process.exit(1); + console.log( + `${chalk.green(`✓`)} Fetched emoji pack file metadata from ${chalk.underline( + filesUrl + )}` + ); + + if (Object.keys(packFiles).length === 0) { + console.log(`${chalk.red(`✗`)} Empty emoji pack`); + return 1; + } + + if (!noconfirm) { + process.stdout.write( + `Are 
you sure you want to import ${chalk.blue( + Object.keys(packFiles).length + )} emojis from ${chalk.underline(chalk.blue(url))}? [y/N] ` + ); + + for await (const line of console) { + if (line.trim().toLowerCase() === "y") { + break; + } else { + console.log(`${chalk.red(`✗`)} Import cancelled`); + return 0; + } } + } - const local = args.includes("--local"); - const remote = args.includes("--remote"); - const json = args.includes("--json"); - const csv = args.includes("--csv"); + const successfullyImported: string[] = []; - const queries: Prisma.StatusWhereInput[] = []; + // Add emojis + for (const [shortcode, url] of Object.entries(packFiles)) { + // If emoji URL is not a valid URL, assume it's a relative path to homepage + const fileUrl = Bun.pathToFileURL( + join(tempDir, url) + ).toString(); - if (local) { - queries.push({ - instanceId: null, - }); - } - - if (remote) { - queries.push({ - instanceId: { - not: null, - }, - }); - } - - const statuses = await client.status.findMany({ + // Check if emoji already exists + const existingEmoji = await client.emoji.findFirst({ where: { - AND: queries, - content: { - contains: query, - mode: "insensitive", - }, - }, - take: 40, - include: { - author: true, - instance: true, + shortcode: shortcode, + instanceId: null, }, }); - if (json || csv) { - if (json) { - console.log(JSON.stringify(statuses, null, 4)); - } - if (csv) { - // Convert the outputted JSON to CSV - - // Remove all object children from each object - const items = statuses.map(status => { - const item = { - ...status, - author: undefined, - instance: undefined, - }; - return item; - }); - const replacer = (key: string, value: any): any => - value === null ? "" : value; // Null values are returned as empty strings - const header = Object.keys(items[0]); - const csv = [ - header.join(","), // header row first - ...items.map(row => - header - .map(fieldName => - // @ts-expect-error This is fine - JSON.stringify(row[fieldName], replacer) - ) - .join(",") - ), - ].join("\r\n"); - - console.log(csv); - } - } else { + if (existingEmoji) { console.log( - `${chalk.green(`✓`)} Found ${chalk.blue( - statuses.length - )} statuses` + `${chalk.red(`✗`)} Emoji with shortcode ${chalk.blue( + shortcode + )} already exists` ); - - const table = new Table({ - head: [ - chalk.white(chalk.bold("Username")), - chalk.white(chalk.bold("Instance URL")), - chalk.white(chalk.bold("Content")), - ], - }); - - for (const status of statuses) { - table.push([ - chalk.yellow(`@${status.author.username}`), - chalk.blue( - status.instanceId - ? 
status.instance?.base_url - : "Local" - ), - chalk.green(status.content.slice(0, 50)), - ]); - } - - console.log(table.toString()); + continue; } - break; - } - default: - console.log(`Unknown command ${chalk.blue(command)}`); - break; - } + // Add the emoji by calling the add command + const returnCode = await cliBuilder.processArgs([ + "emoji", + "add", + shortcode, + fileUrl, + "--noconfirm", + ]); - break; - } - default: - console.log(`Unknown command ${chalk.blue(command)}`); - break; -} + if (returnCode === 0) successfullyImported.push(shortcode); + } + + console.log( + `${chalk.green(`✓`)} Imported ${successfullyImported.length} emojis from ${chalk.underline( + url + )}` + ); + + // List imported + if (successfullyImported.length > 0) { + console.log( + `${chalk.green(`✓`)} Successfully imported ${successfullyImported.length} emojis: ${successfullyImported.join( + ", " + )}` + ); + } + + // List unimported + if (successfullyImported.length < Object.keys(packFiles).length) { + const unimported = Object.keys(packFiles).filter( + key => !successfullyImported.includes(key) + ); + console.log( + `${chalk.red(`✗`)} Failed to import ${unimported.length} emojis: ${unimported.join( + ", " + )}` + ); + } + + return 0; + }, + "Imports a Pleroma emoji pack", + "bun cli emoji import https://site.com/neofox/manifest.json" + ), +]); + +// eslint-disable-next-line @typescript-eslint/no-confusing-void-expression +const exitCode = await cliBuilder.processArgs(args); + +// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition +process.exit(Number(exitCode ?? 0)); diff --git a/config/config.example.toml b/config/config.example.toml index 603eb05a..de256901 100644 --- a/config/config.example.toml +++ b/config/config.example.toml @@ -18,6 +18,36 @@ password = "" database = 1 enabled = false +[meilisearch] +host = "localhost" +port = 40007 +api_key = "" +enabled = true + +[signups] +# URL of your Terms of Service +tos_url = "https://example.com/tos" +# Whether to enable registrations or not +registration = true +rules = [ + "Do not harass others", + "Be nice to people", + "Don't spam", + "Don't post illegal content", +] + +# Delete this section if you don't want to use custom OAuth providers +# This is an example configuration +# The provider MUST support OpenID Connect with .well-known discovery +# Most notably, GitHub does not support this +[[oidc.providers]] +name = "CPlusPatch ID" +id = "cpluspatch-id" +url = "https://id.cpluspatch.com/application/o/lysand-testing/" +client_id = "XXXXXXXXXXXXXXXX" +client_secret = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" +icon = "https://cpluspatch.com/images/icons/logo.svg" + [http] base_url = "https://lysand.social" bind = "http://localhost" @@ -41,6 +71,8 @@ tls = true backend = "s3" # Whether to check the hash of media when uploading to avoid duplication deduplicate_media = true +# If media backend is "local", this is the folder where the files will be stored +local_uploads_folder = "uploads" [media.conversion] convert_images = false @@ -240,6 +272,8 @@ emoji_filters = [] # NOT IMPLEMENTED log_requests = true # Log request and their contents (warning: this is a lot of data) log_requests_verbose = false +# For GDPR compliance, you can disable logging of IPs +log_ip = false # Log all filtered objects log_filters = true diff --git a/database/datasource.ts b/database/datasource.ts index b2a1e23a..334d2d58 100644 --- a/database/datasource.ts +++ b/database/datasource.ts @@ -1,8 +1,8 @@ import { Queue } from "bullmq"; -import { getConfig } from 
"../utils/config"; import { PrismaClient } from "@prisma/client"; +import { ConfigManager } from "config-manager"; -const config = getConfig(); +const config = await new ConfigManager({}).getConfig(); const client = new PrismaClient({ datasourceUrl: `postgresql://${config.database.username}:${config.database.password}@${config.database.host}:${config.database.port}/${config.database.database}`, diff --git a/database/entities/Attachment.ts b/database/entities/Attachment.ts index e85bdeac..abb07c9e 100644 --- a/database/entities/Attachment.ts +++ b/database/entities/Attachment.ts @@ -1,5 +1,6 @@ -import type { ConfigType } from "@config"; import type { Attachment } from "@prisma/client"; +import type { ConfigType } from "config-manager"; +import { MediaBackendType } from "media-manager"; import type { APIAsyncAttachment } from "~types/entities/async_attachment"; import type { APIAttachment } from "~types/entities/attachment"; @@ -56,11 +57,13 @@ export const attachmentToAPI = ( }; }; -export const getUrl = (hash: string, config: ConfigType) => { - if (config.media.backend === "local") { - return `${config.http.base_url}/media/${hash}`; - } else if (config.media.backend === "s3") { - return `${config.s3.public_url}/${hash}`; +export const getUrl = (name: string, config: ConfigType) => { + // eslint-disable-next-line @typescript-eslint/no-unsafe-enum-comparison + if (config.media.backend === MediaBackendType.LOCAL) { + return `${config.http.base_url}/media/${name}`; + // eslint-disable-next-line @typescript-eslint/no-unsafe-enum-comparison + } else if (config.media.backend === MediaBackendType.S3) { + return `${config.s3.public_url}/${name}`; } return ""; }; diff --git a/database/entities/Emoji.ts b/database/entities/Emoji.ts index 55511503..5a610fc4 100644 --- a/database/entities/Emoji.ts +++ b/database/entities/Emoji.ts @@ -76,3 +76,22 @@ export const emojiToLysand = (emoji: Emoji): LysandEmoji => { alt: emoji.alt || undefined, }; }; + +/** + * Converts the emoji to an ActivityPub object. + * @returns The ActivityPub object. + */ +export const emojiToActivityPub = (emoji: Emoji): any => { + // replace any with your ActivityPub Emoji type + return { + type: "Emoji", + name: `:${emoji.shortcode}:`, + updated: new Date().toISOString(), + icon: { + type: "Image", + url: emoji.url, + mediaType: emoji.content_type, + alt: emoji.alt || undefined, + }, + }; +}; diff --git a/database/entities/Like.ts b/database/entities/Like.ts index f4dbf482..037fd547 100644 --- a/database/entities/Like.ts +++ b/database/entities/Like.ts @@ -1,10 +1,12 @@ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ import type { Like as LysandLike } from "~types/lysand/Object"; -import { getConfig } from "@config"; import type { Like } from "@prisma/client"; import { client } from "~database/datasource"; import type { UserWithRelations } from "./User"; import type { StatusWithRelations } from "./Status"; +import { ConfigManager } from "config-manager"; + +const config = await new ConfigManager({}).getConfig(); /** * Represents a Like entity in the database. 
@@ -16,7 +18,7 @@ export const toLysand = (like: Like): LysandLike => { type: "Like", created_at: new Date(like.createdAt).toISOString(), object: (like as any).liked?.uri, - uri: `${getConfig().http.base_url}/actions/${like.id}`, + uri: `${config.http.base_url}/actions/${like.id}`, }; }; diff --git a/database/entities/Notification.ts b/database/entities/Notification.ts index f6bd4c00..80399f30 100644 --- a/database/entities/Notification.ts +++ b/database/entities/Notification.ts @@ -13,7 +13,7 @@ export const notificationToAPI = async ( ): Promise => { return { account: userToAPI(notification.account), - created_at: notification.createdAt.toISOString(), + created_at: new Date(notification.createdAt).toISOString(), id: notification.id, type: notification.type, status: notification.status diff --git a/database/entities/Queue.ts b/database/entities/Queue.ts index 5e3e1d01..9364497e 100644 --- a/database/entities/Queue.ts +++ b/database/entities/Queue.ts @@ -1,4 +1,3 @@ -import { getConfig } from "@config"; import { Worker } from "bullmq"; import { client, federationQueue } from "~database/datasource"; import { @@ -7,8 +6,9 @@ import { type StatusWithRelations, } from "./Status"; import type { User } from "@prisma/client"; +import { ConfigManager } from "config-manager"; -const config = getConfig(); +const config = await new ConfigManager({}).getConfig(); export const federationWorker = new Worker( "federation", @@ -44,7 +44,7 @@ export const federationWorker = new Worker( instanceId: { not: null, }, - } + } : {}, // Mentioned users { diff --git a/database/entities/Status.ts b/database/entities/Status.ts index 4b1cd2a8..b3ff0d6c 100644 --- a/database/entities/Status.ts +++ b/database/entities/Status.ts @@ -1,5 +1,4 @@ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ -import { getConfig } from "@config"; import type { UserWithRelations } from "./User"; import { fetchRemoteUser, @@ -24,8 +23,14 @@ import type { APIStatus } from "~types/entities/status"; import { applicationToAPI } from "./Application"; import { attachmentToAPI } from "./Attachment"; import type { APIAttachment } from "~types/entities/attachment"; +import { sanitizeHtml } from "@sanitization"; +import { parse } from "marked"; +import linkifyStr from "linkify-string"; +import linkifyHtml from "linkify-html"; +import { addStausToMeilisearch } from "@meilisearch"; +import { ConfigManager } from "config-manager"; -const config = getConfig(); +const config = await new ConfigManager({}).getConfig(); export const statusAndUserRelations: Prisma.StatusInclude = { author: { @@ -206,7 +211,7 @@ export const fetchFromRemote = async (uri: string): Promise => { ? { status: replyStatus, user: (replyStatus as any).author, - } + } : undefined, quote: quotingStatus || undefined, }); @@ -303,7 +308,7 @@ export const createNewStatus = async (data: { visibility: APIStatus["visibility"]; sensitive: boolean; spoiler_text: string; - emojis: Emoji[]; + emojis?: Emoji[]; content_type?: string; uri?: string; mentions?: User[]; @@ -320,6 +325,11 @@ export const createNewStatus = async (data: { let mentions = data.mentions || []; + // Parse emojis + const emojis = await parseEmojis(data.content); + + data.emojis = data.emojis ? 
[...data.emojis, ...emojis] : emojis; + // Get list of mentioned users if (mentions.length === 0) { mentions = await client.user.findMany({ @@ -335,11 +345,32 @@ export const createNewStatus = async (data: { }); } + let formattedContent; + + // Get HTML version of content + if (data.content_type === "text/markdown") { + formattedContent = linkifyHtml( + await sanitizeHtml(await parse(data.content)) + ); + } else if (data.content_type === "text/x.misskeymarkdown") { + // Parse as MFM + } else { + // Parse as plaintext + formattedContent = linkifyStr(data.content); + + // Split by newline and add
<p> tags +            formattedContent = formattedContent +                .split("\n") +                .map(line => `<p>${line}</p>
`) + .join("\n"); + } + let status = await client.status.create({ data: { authorId: data.account.id, applicationId: data.application?.id, - content: data.content, + content: formattedContent, + contentSource: data.content, contentType: data.content_type, visibility: data.visibility, sensitive: data.sensitive, @@ -358,7 +389,7 @@ export const createNewStatus = async (data: { id: attachment, }; }), - } + } : undefined, inReplyToPostId: data.reply?.status.id, quotingPostId: data.quote?.id, @@ -390,7 +421,6 @@ export const createNewStatus = async (data: { }); // Create notification - if (status.inReplyToPost) { await client.notification.create({ data: { @@ -402,9 +432,113 @@ export const createNewStatus = async (data: { }); } + // Add to search index + await addStausToMeilisearch(status); + return status; }; +export const editStatus = async ( + status: StatusWithRelations, + data: { + content: string; + visibility?: APIStatus["visibility"]; + sensitive: boolean; + spoiler_text: string; + emojis?: Emoji[]; + content_type?: string; + uri?: string; + mentions?: User[]; + media_attachments?: string[]; + } +) => { + // Get people mentioned in the content (match @username or @username@domain.com mentions + const mentionedPeople = + data.content.match(/@[a-zA-Z0-9_]+(@[a-zA-Z0-9_]+)?/g) ?? []; + + let mentions = data.mentions || []; + + // Parse emojis + const emojis = await parseEmojis(data.content); + + data.emojis = data.emojis ? [...data.emojis, ...emojis] : emojis; + + // Get list of mentioned users + if (mentions.length === 0) { + mentions = await client.user.findMany({ + where: { + OR: mentionedPeople.map(person => ({ + username: person.split("@")[1], + instance: { + base_url: person.split("@")[2], + }, + })), + }, + include: userRelations, + }); + } + + let formattedContent; + + // Get HTML version of content + if (data.content_type === "text/markdown") { + formattedContent = linkifyHtml( + await sanitizeHtml(await parse(data.content)) + ); + } else if (data.content_type === "text/x.misskeymarkdown") { + // Parse as MFM + } else { + // Parse as plaintext + formattedContent = linkifyStr(data.content); + + // Split by newline and add
<p> tags +            formattedContent = formattedContent +                .split("\n") +                .map(line => `<p>${line}</p>
`) + .join("\n"); + } + + const newStatus = await client.status.update({ + where: { + id: status.id, + }, + data: { + content: formattedContent, + contentSource: data.content, + contentType: data.content_type, + visibility: data.visibility, + sensitive: data.sensitive, + spoilerText: data.spoiler_text, + emojis: { + connect: data.emojis.map(emoji => { + return { + id: emoji.id, + }; + }), + }, + attachments: data.media_attachments + ? { + connect: data.media_attachments.map(attachment => { + return { + id: attachment, + }; + }), + } + : undefined, + mentions: { + connect: mentions.map(mention => { + return { + id: mention.id, + }; + }), + }, + }, + include: statusAndUserRelations, + }); + + return newStatus; +}; + export const isFavouritedBy = async (status: Status, user: User) => { return !!(await client.like.findFirst({ where: { @@ -476,12 +610,59 @@ export const statusToAPI = async ( quote: status.quotingPost ? await statusToAPI( status.quotingPost as unknown as StatusWithRelations - ) + ) : null, quote_id: status.quotingPost?.id || undefined, }; }; +/* export const statusToActivityPub = async ( + status: StatusWithRelations + // user?: UserWithRelations +): Promise => { + // replace any with your ActivityPub type + return { + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://mastodon.social/schemas/litepub-0.1.jsonld", + ], + id: `${config.http.base_url}/users/${status.authorId}/statuses/${status.id}`, + type: "Note", + summary: status.spoilerText, + content: status.content, + published: new Date(status.createdAt).toISOString(), + url: `${config.http.base_url}/users/${status.authorId}/statuses/${status.id}`, + attributedTo: `${config.http.base_url}/users/${status.authorId}`, + to: ["https://www.w3.org/ns/activitystreams#Public"], + cc: [], // add recipients here + sensitive: status.sensitive, + attachment: (status.attachments ?? []).map( + a => attachmentToActivityPub(a) as ActivityPubAttachment // replace with your function + ), + tag: [], // add tags here + replies: { + id: `${config.http.base_url}/users/${status.authorId}/statuses/${status.id}/replies`, + type: "Collection", + totalItems: status._count.replies, + }, + likes: { + id: `${config.http.base_url}/users/${status.authorId}/statuses/${status.id}/likes`, + type: "Collection", + totalItems: status._count.likes, + }, + shares: { + id: `${config.http.base_url}/users/${status.authorId}/statuses/${status.id}/shares`, + type: "Collection", + totalItems: status._count.reblogs, + }, + inReplyTo: status.inReplyToPostId + ? 
`${config.http.base_url}/users/${status.inReplyToPost?.authorId}/statuses/${status.inReplyToPostId}` + : null, + visibility: "public", // adjust as needed + // add more fields as needed + }; +}; */ + export const statusToLysand = (status: StatusWithRelations): Note => { return { type: "Note", diff --git a/database/entities/User.ts b/database/entities/User.ts index 6c781fb6..744c1767 100644 --- a/database/entities/User.ts +++ b/database/entities/User.ts @@ -1,5 +1,3 @@ -import type { ConfigType } from "@config"; -import { getConfig } from "@config"; import type { APIAccount } from "~types/entities/account"; import type { User as LysandUser } from "~types/lysand/Object"; import { htmlToText } from "html-to-text"; @@ -9,6 +7,11 @@ import { client } from "~database/datasource"; import { addEmojiIfNotExists, emojiToAPI, emojiToLysand } from "./Emoji"; import { addInstanceIfNotExists } from "./Instance"; import type { APISource } from "~types/entities/source"; +import { addUserToMeilisearch } from "@meilisearch"; +import { ConfigManager, type ConfigType } from "config-manager"; + +const configManager = new ConfigManager({}); +const config = await configManager.getConfig(); export interface AuthData { user: UserWithRelations | null; @@ -151,6 +154,9 @@ export const fetchRemoteUser = async (uri: string) => { }, }); + // Add to Meilisearch + await addUserToMeilisearch(user); + const emojis = []; for (const emoji of userEmojis) { @@ -197,7 +203,7 @@ export const createNewLocalUser = async (data: { header?: string; admin?: boolean; }) => { - const config = getConfig(); + const config = await configManager.getConfig(); const keys = await generateUserKeys(); @@ -224,6 +230,9 @@ export const createNewLocalUser = async (data: { }, }); + // Add to Meilisearch + await addUserToMeilisearch(user); + return await client.user.update({ where: { id: user.id, @@ -303,10 +312,10 @@ export const getRelationshipToOtherUser = async ( * Generates keys for the user. */ export const generateUserKeys = async () => { - const keys = (await crypto.subtle.generateKey("Ed25519", true, [ + const keys = await crypto.subtle.generateKey("Ed25519", true, [ "sign", "verify", - ])) as CryptoKeyPair; + ]); const privateKey = btoa( String.fromCharCode.apply(null, [ @@ -337,8 +346,6 @@ export const userToAPI = ( user: UserWithRelations, isOwnAccount = false ): APIAccount => { - const config = getConfig(); - return { id: user.id, username: user.username, @@ -366,7 +373,7 @@ export const userToAPI = ( header_static: "", acct: user.instance === null - ? `${user.username}` + ? 
user.username : `${user.username}@${user.instance.base_url}`, // TODO: Add these fields limited: false, @@ -417,13 +424,13 @@ export const userToLysand = (user: UserWithRelations): LysandUser => { username: user.username, avatar: [ { - content: getAvatarUrl(user, getConfig()) || "", + content: getAvatarUrl(user, config) || "", content_type: `image/${user.avatar.split(".")[1]}`, }, ], header: [ { - content: getHeaderUrl(user, getConfig()) || "", + content: getHeaderUrl(user, config) || "", content_type: `image/${user.header.split(".")[1]}`, }, ], @@ -451,7 +458,7 @@ export const userToLysand = (user: UserWithRelations): LysandUser => { ], })), public_key: { - actor: `${getConfig().http.base_url}/users/${user.id}`, + actor: `${config.http.base_url}/users/${user.id}`, public_key: user.publicKey, }, extensions: { diff --git a/docker-compose.yml b/docker-compose.yml index e8100507..4e66749e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -35,6 +35,18 @@ services: restart: unless-stopped networks: - lysand-net + meilisearch: + stdin_open: true + environment: + - MEILI_MASTER_KEY=add_your_key_here + tty: true + networks: + - lysand-net + volumes: + - ./meili-data:/meili_data + image: getmeili/meilisearch:v1.5 + container_name: lysand-meilisearch + restart: unless-stopped networks: lysand-net: diff --git a/entrypoint.sh b/entrypoint.sh new file mode 100755 index 00000000..04964561 --- /dev/null +++ b/entrypoint.sh @@ -0,0 +1,37 @@ +#!/bin/sh + +# This script is a wrapper for the main server, CLI and Prisma binaries. +# Commands: +# - `start`: Starts the server +# - `cli`: Starts the CLI, sends all arguments to it +# - `prisma`: Execute a Prisma command, sends + +# Exit immediately if a command exits with a non-zero status. +set -e + +# Parse first argument +case "$1" in + "start") + # Start the server + exec bun run ./dist/index.js --prod + ;; + "cli") + # Start the CLI + shift 1 + exec bun run ./dist/cli.js "$@" + ;; + "prisma") + # Proxy all Prisma commands + # Use output of dist/prisma.js to get the env variable + shift 1 + # Set DATABASE_URL env variable to the output of bun run ./dist/prisma.js + export DATABASE_URL=$(bun run ./dist/prisma.js) + # Execute the Prisma binary + exec bunx prisma "$@" + ;; + *) + # Run custom commands + exec "$@" + ;; +esac +``` \ No newline at end of file diff --git a/index.ts b/index.ts index 38a9de04..b3c79055 100644 --- a/index.ts +++ b/index.ts @@ -1,41 +1,38 @@ -import { getConfig } from "@config"; -import { jsonResponse } from "@response"; -import type { MatchedRoute } from "bun"; -import chalk from "chalk"; -import { appendFile } from "fs/promises"; -import { matches } from "ip-matching"; -import type { AuthData } from "~database/entities/User"; -import { getFromRequest } from "~database/entities/User"; -import type { APIRouteMeta } from "~types/api"; -import { mkdir } from "fs/promises"; -import { client } from "~database/datasource"; import type { PrismaClientInitializationError } from "@prisma/client/runtime/library"; -import { HookTypes, Server } from "~plugins/types"; import { initializeRedisCache } from "@redis"; +import { connectMeili } from "@meilisearch"; +import { ConfigManager } from "config-manager"; +import { client } from "~database/datasource"; +import { LogLevel, LogManager, MultiLogManager } from "log-manager"; +import { moduleIsEntry } from "@module"; +import { createServer } from "~server"; const timeAtStart = performance.now(); -const server = new Server(); -const router = new Bun.FileSystemRouter({ - style: "nextjs", - dir: 
process.cwd() + "/server/api", -}); +const configManager = new ConfigManager({}); +const config = await configManager.getConfig(); -console.log(`${chalk.green(`>`)} ${chalk.bold("Starting Lysand...")}`); - -server.emit(HookTypes.PreServe); - -const config = getConfig(); const requests_log = Bun.file(process.cwd() + "/logs/requests.log"); +const isEntry = moduleIsEntry(import.meta.url); +// If imported as a module, redirect logs to /dev/null to not pollute console (e.g. in tests) +const logger = new LogManager(isEntry ? requests_log : Bun.file(`/dev/null`)); +const consoleLogger = new LogManager( + isEntry ? Bun.stdout : Bun.file(`/dev/null`) +); +const dualLogger = new MultiLogManager([logger, consoleLogger]); -if (!(await requests_log.exists())) { - console.log(`${chalk.green(`✓`)} ${chalk.bold("Creating logs folder...")}`); - await mkdir(process.cwd() + "/logs"); - await Bun.write(process.cwd() + "/logs/requests.log", ""); -} +await dualLogger.log(LogLevel.INFO, "Lysand", "Starting Lysand..."); + +// NODE_ENV seems to be broken and output `development` even when set to production, so use the flag instead +const isProd = + process.env.NODE_ENV === "production" || process.argv.includes("--prod"); const redisCache = await initializeRedisCache(); +if (config.meilisearch.enabled) { + await connectMeili(dualLogger); +} + if (redisCache) { client.$use(redisCache); } @@ -46,154 +43,23 @@ try { postCount = await client.status.count(); } catch (e) { const error = e as PrismaClientInitializationError; - console.error( - `${chalk.red(`✗`)} ${chalk.bold( - "Error while connecting to database: " - )} ${error.message}` - ); + await logger.logError(LogLevel.CRITICAL, "Database", error); + await consoleLogger.logError(LogLevel.CRITICAL, "Database", error); process.exit(1); } -Bun.serve({ - port: config.http.bind_port, - hostname: config.http.bind || "0.0.0.0", // defaults to "0.0.0.0" - async fetch(req) { - /* Check for banned IPs */ - const request_ip = this.requestIP(req)?.address ?? ""; +const server = createServer(config, configManager, dualLogger, isProd); - for (const ip of config.http.banned_ips) { - try { - if (matches(ip, request_ip)) { - return new Response(undefined, { - status: 403, - statusText: "Forbidden", - }); - } - } catch (e) { - console.error(`[-] Error while parsing banned IP "${ip}" `); - throw e; - } - } - - await logRequest(req); - - if (req.method === "OPTIONS") { - return jsonResponse({}); - } - - const matchedRoute = router.match(req); - - if (matchedRoute) { - const file: { - meta: APIRouteMeta; - default: ( - req: Request, - matchedRoute: MatchedRoute, - auth: AuthData - ) => Response | Promise; - } = await import(matchedRoute.filePath); - - const meta = file.meta; - - // Check for allowed requests - if (!meta.allowedMethods.includes(req.method as any)) { - return new Response(undefined, { - status: 405, - statusText: `Method not allowed: allowed methods are: ${meta.allowedMethods.join( - ", " - )}`, - }); - } - - // TODO: Check for ratelimits - - const auth = await getFromRequest(req); - - // Check for authentication if required - if (meta.auth.required) { - if (!auth.user) { - return new Response(undefined, { - status: 401, - statusText: "Unauthorized", - }); - } - } else if ( - (meta.auth.requiredOnMethods ?? 
[]).includes(req.method as any) - ) { - if (!auth.user) { - return new Response(undefined, { - status: 401, - statusText: "Unauthorized", - }); - } - } - - return await file.default(req.clone(), matchedRoute, auth); - } else { - return new Response(undefined, { - status: 404, - statusText: "Route not found", - }); - } - }, -}); - -const logRequest = async (req: Request) => { - if (config.logging.log_requests_verbose) { - await appendFile( - `${process.cwd()}/logs/requests.log`, - `[${new Date().toISOString()}] ${req.method} ${ - req.url - }\n\tHeaders:\n` - ); - - // Add headers - - const headers = req.headers.entries(); - - for (const [key, value] of headers) { - await appendFile( - `${process.cwd()}/logs/requests.log`, - `\t\t${key}: ${value}\n` - ); - } - - const body = await req.clone().text(); - - await appendFile( - `${process.cwd()}/logs/requests.log`, - `\tBody:\n\t${body}\n` - ); - } else if (config.logging.log_requests) { - await appendFile( - process.cwd() + "/logs/requests.log", - `[${new Date().toISOString()}] ${req.method} ${req.url}\n` - ); - } -}; - -// Remove previous console.log -// console.clear(); - -console.log( - `${chalk.green(`✓`)} ${chalk.bold( - `Lysand started at ${chalk.blue( - `${config.http.bind}:${config.http.bind_port}` - )} in ${chalk.gray((performance.now() - timeAtStart).toFixed(0))}ms` - )}` +await dualLogger.log( + LogLevel.INFO, + "Server", + `Lysand started at ${config.http.bind}:${config.http.bind_port} in ${(performance.now() - timeAtStart).toFixed(0)}ms` ); -console.log( - `${chalk.green(`✓`)} ${chalk.bold(`Database is ${chalk.blue("online")}`)}` +await dualLogger.log( + LogLevel.INFO, + "Database", + `Database is online, now serving ${postCount} posts` ); -// Print "serving x posts" -console.log( - `${chalk.green(`✓`)} ${chalk.bold( - `Serving ${chalk.blue(postCount)} posts` - )}` -); - -server.emit(HookTypes.PostServe, { - postCount, -}); +export { config, server }; diff --git a/package.json b/package.json index 224545b0..83f8f88f 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "lysand", "module": "index.ts", "type": "module", - "version": "0.1.2", + "version": "0.3.0", "description": "A project to build a federated social network", "author": { "email": "contact@cpluspatch.com", @@ -32,14 +32,18 @@ }, "private": true, "scripts": { - "dev": "bun run index.ts", - "start": "bun run index.ts", + "dev": "bun run --watch index.ts", + "vite:dev": "bunx --bun vite pages", + "vite:build": "bunx --bun vite build pages", + "start": "NODE_ENV=production bun run dist/index.js --prod", "migrate-dev": "bun prisma migrate dev", "migrate": "bun prisma migrate deploy", - "lint": "eslint --config .eslintrc.cjs --ext .ts .", - "prisma": "bun run prisma.ts", + "lint": "bunx --bun eslint --config .eslintrc.cjs --ext .ts .", + "prod-build": "bunx --bun vite build pages && bun run build.ts", + "prisma": "DATABASE_URL=$(bun run prisma.ts) bunx prisma", "generate": "bun prisma generate", "benchmark:timeline": "bun run benchmarks/timelines.ts", + "cloc": "cloc . 
--exclude-dir node_modules,dist", "cli": "bun run cli.ts" }, "trustedDependencies": [ @@ -53,9 +57,9 @@ "@types/html-to-text": "^9.0.4", "@types/ioredis": "^5.0.0", "@types/jsonld": "^1.5.13", - "@typescript-eslint/eslint-plugin": "^6.13.1", - "@typescript-eslint/parser": "^6.13.1", - "@unocss/cli": "^0.57.7", + "@typescript-eslint/eslint-plugin": "latest", + "@typescript-eslint/parser": "latest", + "@unocss/cli": "latest", "activitypub-types": "^1.0.3", "bun-types": "latest", "eslint": "^8.54.0", @@ -64,29 +68,52 @@ "eslint-formatter-summary": "^1.1.0", "eslint-plugin-prettier": "^5.0.1", "prettier": "^3.1.0", - "typescript": "^5.3.2", - "unocss": "^0.57.7" + "typescript": "latest", + "unocss": "latest", + "@vitejs/plugin-vue": "latest", + "@vueuse/head": "^2.0.0", + "vite": "latest", + "vite-ssr": "^0.17.1", + "vue": "^3.3.9", + "vue-router": "^4.2.5", + "vue-tsc": "latest" }, "peerDependencies": { "typescript": "^5.3.2" }, "dependencies": { "@aws-sdk/client-s3": "^3.461.0", + "@iarna/toml": "^2.2.5", + "@json2csv/plainjs": "^7.0.6", "@prisma/client": "^5.6.0", "blurhash": "^2.0.5", - "bullmq": "^4.14.4", + "bullmq": "latest", "chalk": "^5.3.0", + "cli-parser": "file:packages/cli-parser", "cli-table": "^0.3.11", + "config-manager": "file:packages/config-manager", "eventemitter3": "^5.0.1", + "extract-zip": "^2.0.1", "html-to-text": "^9.0.5", "ioredis": "^5.3.2", "ip-matching": "^2.1.2", "iso-639-1": "^3.1.0", - "isomorphic-dompurify": "^1.10.0", + "isomorphic-dompurify": "latest", "jsonld": "^8.3.1", - "marked": "^9.1.2", + "linkify-html": "^4.1.3", + "linkify-string": "^4.1.3", + "linkifyjs": "^4.1.3", + "log-manager": "file:packages/log-manager", + "marked": "latest", + "media-manager": "file:packages/media-manager", + "megalodon": "^9.1.1", + "meilisearch": "latest", + "merge-deep-ts": "^1.2.6", + "next-route-matcher": "^1.0.1", + "oauth4webapi": "^2.4.0", "prisma": "^5.6.0", "prisma-redis-middleware": "^4.8.0", + "request-parser": "file:packages/request-parser", "semver": "^7.5.4", "sharp": "^0.33.0-rc.2" } diff --git a/packages/cli-parser/bun.lockb b/packages/cli-parser/bun.lockb new file mode 100755 index 00000000..249be439 Binary files /dev/null and b/packages/cli-parser/bun.lockb differ diff --git a/packages/cli-parser/cli-builder.type.ts b/packages/cli-parser/cli-builder.type.ts new file mode 100644 index 00000000..89c6dece --- /dev/null +++ b/packages/cli-parser/cli-builder.type.ts @@ -0,0 +1,23 @@ +export interface CliParameter { + name: string; + /* Like -v for --version */ + shortName?: string; + /** + * If not positioned, the argument will need to be called with --name value instead of just value + * @default true + */ + positioned?: boolean; + /* Whether the argument needs a value (requires positioned to be false) */ + needsValue?: boolean; + optional?: true; + type: CliParameterType; + description?: string; +} + +export enum CliParameterType { + STRING = "string", + NUMBER = "number", + BOOLEAN = "boolean", + ARRAY = "array", + EMPTY = "empty", +} diff --git a/packages/cli-parser/index.ts b/packages/cli-parser/index.ts new file mode 100644 index 00000000..4554f096 --- /dev/null +++ b/packages/cli-parser/index.ts @@ -0,0 +1,420 @@ +import { CliParameterType, type CliParameter } from "./cli-builder.type"; +import chalk from "chalk"; +import strip from "strip-ansi"; + +export function startsWithArray(fullArray: any[], startArray: any[]) { + if (startArray.length > fullArray.length) { + return false; + } + return fullArray + .slice(0, startArray.length) + .every((value, 
index) => value === startArray[index]); +} + +interface TreeType { + [key: string]: CliCommand | TreeType; +} + +/** + * Builder for a CLI + * @param commands Array of commands to register + */ +export class CliBuilder { + constructor(public commands: CliCommand[] = []) {} + + /** + * Add command to the CLI + * @throws Error if command already exists + * @param command Command to add + */ + registerCommand(command: CliCommand) { + if (this.checkIfCommandAlreadyExists(command)) { + throw new Error( + `Command category '${command.categories.join(" ")}' already exists` + ); + } + this.commands.push(command); + } + + /** + * Add multiple commands to the CLI + * @throws Error if command already exists + * @param commands Commands to add + */ + registerCommands(commands: CliCommand[]) { + const existingCommand = commands.find(command => + this.checkIfCommandAlreadyExists(command) + ); + if (existingCommand) { + throw new Error( + `Command category '${existingCommand.categories.join(" ")}' already exists` + ); + } + this.commands.push(...commands); + } + + /** + * Remove command from the CLI + * @param command Command to remove + */ + deregisterCommand(command: CliCommand) { + this.commands = this.commands.filter( + registeredCommand => registeredCommand !== command + ); + } + + /** + * Remove multiple commands from the CLI + * @param commands Commands to remove + */ + deregisterCommands(commands: CliCommand[]) { + this.commands = this.commands.filter( + registeredCommand => !commands.includes(registeredCommand) + ); + } + + checkIfCommandAlreadyExists(command: CliCommand) { + return this.commands.some( + registeredCommand => + registeredCommand.categories.length == + command.categories.length && + registeredCommand.categories.every( + (category, index) => category === command.categories[index] + ) + ); + } + + /** + * Get relevant args for the command (without executable or runtime) + * @param args Arguments passed to the CLI + */ + private getRelevantArgs(args: string[]) { + if (args[0].startsWith("./")) { + // Formatted like ./cli.ts [command] + return args.slice(1); + } else if (args[0].includes("bun")) { + // Formatted like bun cli.ts [command] + return args.slice(2); + } else { + return args; + } + } + + /** + * Turn raw system args into a CLI command and run it + * @param args Args directly from process.argv + */ + async processArgs(args: string[]) { + const revelantArgs = this.getRelevantArgs(args); + + // Handle "-h", "--help" and "help" commands as special cases + if (revelantArgs.length === 1) { + if (["-h", "--help", "help"].includes(revelantArgs[0])) { + this.displayHelp(); + return; + } + } + + // Find revelant command + // Search for a command with as many categories matching args as possible + const matchingCommands = this.commands.filter(command => + startsWithArray(revelantArgs, command.categories) + ); + + if (matchingCommands.length === 0) { + console.log( + `Invalid command "${revelantArgs.join(" ")}". Please use the ${chalk.bold("help")} command to see a list of commands` + ); + return 0; + } + + // Get command with largest category size + const command = matchingCommands.reduce((prev, current) => + prev.categories.length > current.categories.length ? 
prev : current + ); + + const argsWithoutCategories = revelantArgs.slice( + command.categories.length + ); + + return await command.run(argsWithoutCategories); + } + + /** + * Recursively urns the commands into a tree where subcategories mark each sub-branch + * @example + * ```txt + * user verify + * user delete + * user new admin + * user new + * -> + * user + * verify + * delete + * new + * admin + * "" + * ``` + */ + getCommandTree(commands: CliCommand[]): TreeType { + const tree: TreeType = {}; + + for (const command of commands) { + let currentLevel = tree; // Start at the root + + // Split the command into parts and iterate over them + for (const part of command.categories) { + // If this part doesn't exist in the current level of the tree, add it (__proto__ check to prevent prototype pollution) + // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition + if (!currentLevel[part] && part !== "__proto__") { + // If this is the last part of the command, add the command itself + if ( + part === + command.categories[command.categories.length - 1] + ) { + currentLevel[part] = command; + break; + } + currentLevel[part] = {}; + } + + // Move down to the next level of the tree + currentLevel = currentLevel[part] as TreeType; + } + } + + return tree; + } + + /** + * Display help for every command in a tree manner + */ + displayHelp() { + /* + user + set + admin: List of admin commands + --prod: Whether to run in production + --dev: Whether to run in development + username: Username of the admin + Example: user set admin --prod --dev --username John + delete + ... + verify + ... + */ + const tree = this.getCommandTree(this.commands); + let writeBuffer = ""; + + const displayTree = (tree: TreeType, depth = 0) => { + for (const [key, value] of Object.entries(tree)) { + if (value instanceof CliCommand) { + writeBuffer += `${" ".repeat(depth)}${chalk.blue(key)}|${chalk.underline(value.description)}\n`; + const positionedArgs = value.argTypes.filter( + arg => arg.positioned ?? true + ); + const unpositionedArgs = value.argTypes.filter( + arg => !(arg.positioned ?? true) + ); + + for (const arg of positionedArgs) { + writeBuffer += `${" ".repeat(depth + 1)}${chalk.green( + arg.name + )}|${ + arg.description ?? "(no description)" + } ${arg.optional ? chalk.gray("(optional)") : ""}\n`; + } + for (const arg of unpositionedArgs) { + writeBuffer += `${" ".repeat(depth + 1)}${chalk.yellow("--" + arg.name)}${arg.shortName ? ", " + chalk.yellow("-" + arg.shortName) : ""}|${ + arg.description ?? "(no description)" + } ${arg.optional ? chalk.gray("(optional)") : ""}\n`; + } + + if (value.example) { + writeBuffer += `${" ".repeat(depth + 1)}${chalk.bold("Example:")} ${chalk.bgGray( + value.example + )}\n`; + } + } else { + writeBuffer += `${" ".repeat(depth)}${chalk.blue(key)}\n`; + displayTree(value, depth + 1); + } + } + }; + + displayTree(tree); + + // Replace all "|" with enough dots so that the text on the left + the dots = the same length + const optimal_length = Number( + // @ts-expect-error Slightly hacky but works + writeBuffer.split("\n").reduce((prev, current) => { + // If previousValue is empty + if (!prev) + return current.includes("|") + ? 
current.split("|")[0].length + : 0; + if (!current.includes("|")) return prev; + const [left] = current.split("|"); + // Strip ANSI color codes or they mess up the length + return Math.max(Number(prev), strip(left).length); + }) + ); + + for (const line of writeBuffer.split("\n")) { + const [left, right] = line.split("|"); + if (!right) { + console.log(left); + continue; + } + // Strip ANSI color codes or they mess up the length + const dots = ".".repeat(optimal_length + 5 - strip(left).length); + console.log(`${left}${dots}${right}`); + } + } +} + +type ExecuteFunction = ( + instance: CliCommand, + args: Partial + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type +) => Promise | Promise | number | void; + +/** + * A command that can be executed from the command line + * @param categories Example: `["user", "create"]` for the command `./cli user create --name John` + */ +export class CliCommand { + constructor( + public categories: string[], + public argTypes: CliParameter[], + private execute: ExecuteFunction, + public description?: string, + public example?: string + ) {} + + /** + * Display help message for the command + * formatted with Chalk and with emojis + */ + displayHelp() { + const positionedArgs = this.argTypes.filter( + arg => arg.positioned ?? true + ); + const unpositionedArgs = this.argTypes.filter( + arg => !(arg.positioned ?? true) + ); + const helpMessage = ` +${chalk.green("📚 Command:")} ${chalk.yellow(this.categories.join(" "))} +${this.description ? `${chalk.cyan(this.description)}\n` : ""} +${chalk.magenta("🔧 Arguments:")} +${positionedArgs + .map( + arg => + `${chalk.bold(arg.name)}: ${chalk.blue(arg.description ?? "(no description)")} ${ + arg.optional ? chalk.gray("(optional)") : "" + }` + ) + .join("\n")} +${unpositionedArgs + .map( + arg => + `--${chalk.bold(arg.name)}${arg.shortName ? `, -${arg.shortName}` : ""}: ${chalk.blue(arg.description ?? "(no description)")} ${ + arg.optional ? chalk.gray("(optional)") : "" + }` + ) + .join( + "\n" + )}${this.example ? 
`\n${chalk.magenta("🚀 Example:")}\n${chalk.bgGray(this.example)}` : ""} +`; + + console.log(helpMessage); + } + + /** + * Parses string array arguments into a full JavaScript object + * @param argsWithoutCategories + * @returns + */ + private parseArgs(argsWithoutCategories: string[]): Record { + const parsedArgs: Record = {}; + let currentParameter: CliParameter | null = null; + + for (let i = 0; i < argsWithoutCategories.length; i++) { + const arg = argsWithoutCategories[i]; + + if (arg.startsWith("--")) { + const argName = arg.substring(2); + currentParameter = + this.argTypes.find(argType => argType.name === argName) || + null; + if (currentParameter && !currentParameter.needsValue) { + parsedArgs[argName] = true; + currentParameter = null; + } else if (currentParameter && currentParameter.needsValue) { + parsedArgs[argName] = this.castArgValue( + argsWithoutCategories[i + 1], + currentParameter.type + ); + i++; + currentParameter = null; + } + } else if (arg.startsWith("-")) { + const shortName = arg.substring(1); + const argType = this.argTypes.find( + argType => argType.shortName === shortName + ); + if (argType && !argType.needsValue) { + parsedArgs[argType.name] = true; + } else if (argType && argType.needsValue) { + parsedArgs[argType.name] = this.castArgValue( + argsWithoutCategories[i + 1], + argType.type + ); + i++; + } + } else if (currentParameter) { + parsedArgs[currentParameter.name] = this.castArgValue( + arg, + currentParameter.type + ); + currentParameter = null; + } else { + const positionedArgType = this.argTypes.find( + argType => argType.positioned && !parsedArgs[argType.name] + ); + if (positionedArgType) { + parsedArgs[positionedArgType.name] = this.castArgValue( + arg, + positionedArgType.type + ); + } + } + } + + return parsedArgs; + } + + private castArgValue(value: string, type: CliParameter["type"]): any { + switch (type) { + case CliParameterType.STRING: + return value; + case CliParameterType.NUMBER: + return Number(value); + case CliParameterType.BOOLEAN: + return value === "true"; + case CliParameterType.ARRAY: + return value.split(","); + default: + return value; + } + } + + /** + * Runs the execute function with the parsed parameters as an argument + */ + async run(argsWithoutCategories: string[]) { + const args = this.parseArgs(argsWithoutCategories); + return await this.execute(this, args as any); + } +} diff --git a/packages/cli-parser/package.json b/packages/cli-parser/package.json new file mode 100644 index 00000000..acc2e3ee --- /dev/null +++ b/packages/cli-parser/package.json @@ -0,0 +1,6 @@ +{ + "name": "arg-parser", + "version": "0.0.0", + "main": "index.ts", + "dependencies": { "strip-ansi": "^7.1.0" } +} \ No newline at end of file diff --git a/packages/cli-parser/tests/cli-builder.test.ts b/packages/cli-parser/tests/cli-builder.test.ts new file mode 100644 index 00000000..96ffdb08 --- /dev/null +++ b/packages/cli-parser/tests/cli-builder.test.ts @@ -0,0 +1,485 @@ +// FILEPATH: /home/jessew/Dev/lysand/packages/cli-parser/index.test.ts +import { CliCommand, CliBuilder, startsWithArray } from ".."; +import { describe, beforeEach, it, expect, jest, spyOn } from "bun:test"; +import stripAnsi from "strip-ansi"; +import { CliParameterType } from "../cli-builder.type"; + +describe("startsWithArray", () => { + it("should return true when fullArray starts with startArray", () => { + const fullArray = ["a", "b", "c", "d", "e"]; + const startArray = ["a", "b", "c"]; + expect(startsWithArray(fullArray, startArray)).toBe(true); + }); + + it("should return 
false when fullArray does not start with startArray", () => { + const fullArray = ["a", "b", "c", "d", "e"]; + const startArray = ["b", "c", "d"]; + expect(startsWithArray(fullArray, startArray)).toBe(false); + }); + + it("should return true when startArray is empty", () => { + const fullArray = ["a", "b", "c", "d", "e"]; + const startArray: any[] = []; + expect(startsWithArray(fullArray, startArray)).toBe(true); + }); + + it("should return false when fullArray is shorter than startArray", () => { + const fullArray = ["a", "b", "c"]; + const startArray = ["a", "b", "c", "d", "e"]; + expect(startsWithArray(fullArray, startArray)).toBe(false); + }); +}); + +describe("CliCommand", () => { + let cliCommand: CliCommand; + + beforeEach(() => { + cliCommand = new CliCommand( + ["category1", "category2"], + [ + { + name: "arg1", + type: CliParameterType.STRING, + needsValue: true, + }, + { + name: "arg2", + shortName: "a", + type: CliParameterType.NUMBER, + needsValue: true, + }, + { + name: "arg3", + type: CliParameterType.BOOLEAN, + needsValue: false, + }, + { + name: "arg4", + type: CliParameterType.ARRAY, + needsValue: true, + }, + ], + () => { + // Do nothing + } + ); + }); + + it("should parse string arguments correctly", () => { + const args = cliCommand["parseArgs"]([ + "--arg1", + "value1", + "--arg2", + "42", + "--arg3", + "--arg4", + "value1,value2", + ]); + expect(args).toEqual({ + arg1: "value1", + arg2: 42, + arg3: true, + arg4: ["value1", "value2"], + }); + }); + + it("should parse short names for arguments too", () => { + const args = cliCommand["parseArgs"]([ + "--arg1", + "value1", + "-a", + "42", + "--arg3", + "--arg4", + "value1,value2", + ]); + expect(args).toEqual({ + arg1: "value1", + arg2: 42, + arg3: true, + arg4: ["value1", "value2"], + }); + }); + + it("should cast argument values correctly", () => { + expect(cliCommand["castArgValue"]("42", CliParameterType.NUMBER)).toBe( + 42 + ); + expect( + cliCommand["castArgValue"]("true", CliParameterType.BOOLEAN) + ).toBe(true); + expect( + cliCommand["castArgValue"]("value1,value2", CliParameterType.ARRAY) + ).toEqual(["value1", "value2"]); + }); + + it("should run the execute function with the parsed parameters", async () => { + const mockExecute = jest.fn(); + cliCommand = new CliCommand( + ["category1", "category2"], + [ + { + name: "arg1", + type: CliParameterType.STRING, + needsValue: true, + }, + { + name: "arg2", + type: CliParameterType.NUMBER, + needsValue: true, + }, + { + name: "arg3", + type: CliParameterType.BOOLEAN, + needsValue: false, + }, + { + name: "arg4", + type: CliParameterType.ARRAY, + needsValue: true, + }, + ], + mockExecute + ); + + await cliCommand.run([ + "--arg1", + "value1", + "--arg2", + "42", + "--arg3", + "--arg4", + "value1,value2", + ]); + expect(mockExecute).toHaveBeenCalledWith(cliCommand, { + arg1: "value1", + arg2: 42, + arg3: true, + arg4: ["value1", "value2"], + }); + }); + + it("should work with a mix of positioned and non-positioned arguments", async () => { + const mockExecute = jest.fn(); + cliCommand = new CliCommand( + ["category1", "category2"], + [ + { + name: "arg1", + type: CliParameterType.STRING, + needsValue: true, + }, + { + name: "arg2", + type: CliParameterType.NUMBER, + needsValue: true, + }, + { + name: "arg3", + type: CliParameterType.BOOLEAN, + needsValue: false, + }, + { + name: "arg4", + type: CliParameterType.ARRAY, + needsValue: true, + }, + { + name: "arg5", + type: CliParameterType.STRING, + needsValue: true, + positioned: true, + }, + ], + mockExecute + ); + + 
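        // parseArgs matches "--name value" / "-x value" pairs against argTypes by
        // name or shortName, while bare values (like "value5" below) fall through to
        // the first positioned parameter that has not been given a value yet.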
await cliCommand.run([ + "--arg1", + "value1", + "--arg2", + "42", + "--arg3", + "--arg4", + "value1,value2", + "value5", + ]); + + expect(mockExecute).toHaveBeenCalledWith(cliCommand, { + arg1: "value1", + arg2: 42, + arg3: true, + arg4: ["value1", "value2"], + arg5: "value5", + }); + }); + + it("should display help message correctly", () => { + const consoleLogSpy = spyOn(console, "log").mockImplementation(() => { + // Do nothing + }); + + cliCommand = new CliCommand( + ["category1", "category2"], + [ + { + name: "arg1", + type: CliParameterType.STRING, + needsValue: true, + description: "Argument 1", + optional: true, + }, + { + name: "arg2", + type: CliParameterType.NUMBER, + needsValue: true, + description: "Argument 2", + }, + { + name: "arg3", + type: CliParameterType.BOOLEAN, + needsValue: false, + description: "Argument 3", + optional: true, + positioned: false, + }, + { + name: "arg4", + type: CliParameterType.ARRAY, + needsValue: true, + description: "Argument 4", + positioned: false, + }, + ], + () => { + // Do nothing + }, + "This is a test command", + "category1 category2 --arg1 value1 --arg2 42 arg3 --arg4 value1,value2" + ); + + cliCommand.displayHelp(); + + const loggedString = consoleLogSpy.mock.calls.map(call => + stripAnsi(call[0]) + )[0]; + + consoleLogSpy.mockRestore(); + + expect(loggedString).toContain("📚 Command: category1 category2"); + expect(loggedString).toContain("🔧 Arguments:"); + expect(loggedString).toContain("arg1: Argument 1 (optional)"); + expect(loggedString).toContain("arg2: Argument 2"); + expect(loggedString).toContain("--arg3: Argument 3 (optional)"); + expect(loggedString).toContain("--arg4: Argument 4"); + expect(loggedString).toContain("🚀 Example:"); + expect(loggedString).toContain( + "category1 category2 --arg1 value1 --arg2 42 arg3 --arg4 value1,value2" + ); + }); +}); + +describe("CliBuilder", () => { + let cliBuilder: CliBuilder; + let mockCommand1: CliCommand; + let mockCommand2: CliCommand; + + beforeEach(() => { + mockCommand1 = new CliCommand(["category1"], [], jest.fn()); + mockCommand2 = new CliCommand(["category2"], [], jest.fn()); + cliBuilder = new CliBuilder([mockCommand1]); + }); + + it("should register a command correctly", () => { + cliBuilder.registerCommand(mockCommand2); + expect(cliBuilder.commands).toContain(mockCommand2); + }); + + it("should register multiple commands correctly", () => { + const mockCommand3 = new CliCommand(["category3"], [], jest.fn()); + cliBuilder.registerCommands([mockCommand2, mockCommand3]); + expect(cliBuilder.commands).toContain(mockCommand2); + expect(cliBuilder.commands).toContain(mockCommand3); + }); + + it("should error when adding duplicates", () => { + expect(() => { + cliBuilder.registerCommand(mockCommand1); + }).toThrow(); + + expect(() => { + cliBuilder.registerCommands([mockCommand1]); + }).toThrow(); + }); + + it("should deregister a command correctly", () => { + cliBuilder.deregisterCommand(mockCommand1); + expect(cliBuilder.commands).not.toContain(mockCommand1); + }); + + it("should deregister multiple commands correctly", () => { + cliBuilder.registerCommand(mockCommand2); + cliBuilder.deregisterCommands([mockCommand1, mockCommand2]); + expect(cliBuilder.commands).not.toContain(mockCommand1); + expect(cliBuilder.commands).not.toContain(mockCommand2); + }); + + it("should process args correctly", async () => { + const mockExecute = jest.fn(); + const mockCommand = new CliCommand( + ["category1", "sub1"], + [ + { + name: "arg1", + type: CliParameterType.STRING, + needsValue: true, + 
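                    // Non-positioned parameter: it has to be passed as "--arg1 <value>"
                    // on the command line instead of as a bare positional value.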
positioned: false, + }, + ], + mockExecute + ); + cliBuilder.registerCommand(mockCommand); + await cliBuilder.processArgs([ + "./cli.ts", + "category1", + "sub1", + "--arg1", + "value1", + ]); + expect(mockExecute).toHaveBeenCalledWith(expect.anything(), { + arg1: "value1", + }); + }); + + describe("should build command tree", () => { + let cliBuilder: CliBuilder; + let mockCommand1: CliCommand; + let mockCommand2: CliCommand; + let mockCommand3: CliCommand; + let mockCommand4: CliCommand; + let mockCommand5: CliCommand; + + beforeEach(() => { + mockCommand1 = new CliCommand(["user", "verify"], [], jest.fn()); + mockCommand2 = new CliCommand(["user", "delete"], [], jest.fn()); + mockCommand3 = new CliCommand( + ["user", "new", "admin"], + [], + jest.fn() + ); + mockCommand4 = new CliCommand(["user", "new"], [], jest.fn()); + mockCommand5 = new CliCommand(["admin", "delete"], [], jest.fn()); + cliBuilder = new CliBuilder([ + mockCommand1, + mockCommand2, + mockCommand3, + mockCommand4, + mockCommand5, + ]); + }); + + it("should build the command tree correctly", () => { + const tree = cliBuilder.getCommandTree(cliBuilder.commands); + expect(tree).toEqual({ + user: { + verify: mockCommand1, + delete: mockCommand2, + new: { + admin: mockCommand3, + }, + }, + admin: { + delete: mockCommand5, + }, + }); + }); + + it("should build the command tree correctly when there are no commands", () => { + cliBuilder = new CliBuilder([]); + const tree = cliBuilder.getCommandTree(cliBuilder.commands); + expect(tree).toEqual({}); + }); + + it("should build the command tree correctly when there is only one command", () => { + cliBuilder = new CliBuilder([mockCommand1]); + const tree = cliBuilder.getCommandTree(cliBuilder.commands); + expect(tree).toEqual({ + user: { + verify: mockCommand1, + }, + }); + }); + }); + + it("should show help menu", () => { + const consoleLogSpy = spyOn(console, "log").mockImplementation(() => { + // Do nothing + }); + + const cliBuilder = new CliBuilder(); + + const cliCommand = new CliCommand( + ["category1", "category2"], + [ + { + name: "name", + type: CliParameterType.STRING, + needsValue: true, + description: "Name of new item", + }, + { + name: "delete-previous", + type: CliParameterType.NUMBER, + needsValue: false, + positioned: false, + optional: true, + description: "Also delete the previous item", + }, + { + name: "arg3", + type: CliParameterType.BOOLEAN, + needsValue: false, + }, + { + name: "arg4", + type: CliParameterType.ARRAY, + needsValue: true, + }, + ], + () => { + // Do nothing + }, + "I love sussy sauces", + "emoji add --url https://site.com/image.png" + ); + + cliBuilder.registerCommand(cliCommand); + cliBuilder.displayHelp(); + + const loggedString = consoleLogSpy.mock.calls + .map(call => stripAnsi(call[0])) + .join("\n"); + + consoleLogSpy.mockRestore(); + + expect(loggedString).toContain("category1"); + expect(loggedString).toContain( + " category2.................I love sussy sauces" + ); + expect(loggedString).toContain( + " name..................Name of new item" + ); + expect(loggedString).toContain( + " arg3..................(no description)" + ); + expect(loggedString).toContain( + " arg4..................(no description)" + ); + expect(loggedString).toContain( + " --delete-previous.....Also delete the previous item (optional)" + ); + expect(loggedString).toContain( + " Example: emoji add --url https://site.com/image.png" + ); + }); +}); diff --git a/utils/config.ts b/packages/config-manager/config-type.type.ts similarity index 87% rename from 
utils/config.ts rename to packages/config-manager/config-type.type.ts index 92553c16..4d4b9570 100644 --- a/utils/config.ts +++ b/packages/config-manager/config-type.type.ts @@ -1,4 +1,4 @@ -import data from "../config/config.toml"; +import { MediaBackendType } from "media-manager"; export interface ConfigType { database: { @@ -25,11 +25,36 @@ export interface ConfigType { }; }; + meilisearch: { + host: string; + port: number; + api_key: string; + enabled: boolean; + }; + + signups: { + tos_url: string; + rules: string[]; + registration: boolean; + }; + + oidc: { + providers: { + name: string; + id: string; + url: string; + client_id: string; + client_secret: string; + icon: string; + }[]; + }; + http: { base_url: string; bind: string; bind_port: string; banned_ips: string[]; + banned_user_agents: string[]; }; instance: { @@ -72,12 +97,13 @@ export interface ConfigType { }; media: { - backend: string; + backend: MediaBackendType; deduplicate_media: boolean; conversion: { convert_images: boolean; convert_to: string; }; + local_uploads_folder: string; }; s3: { @@ -129,6 +155,7 @@ export interface ConfigType { logging: { log_requests: boolean; log_requests_verbose: boolean; + log_ip: boolean; log_filters: boolean; }; @@ -153,6 +180,7 @@ export const configDefaults: ConfigType = { bind_port: "8000", base_url: "http://lysand.localhost:8000", banned_ips: [], + banned_user_agents: [], }, database: { host: "localhost", @@ -176,6 +204,20 @@ export const configDefaults: ConfigType = { enabled: false, }, }, + meilisearch: { + host: "localhost", + port: 1491, + api_key: "", + enabled: false, + }, + signups: { + tos_url: "", + rules: [], + registration: false, + }, + oidc: { + providers: [], + }, instance: { banner: "", description: "", @@ -190,12 +232,13 @@ export const configDefaults: ConfigType = { username: "", }, media: { - backend: "local", + backend: MediaBackendType.LOCAL, deduplicate_media: true, conversion: { convert_images: false, convert_to: "webp", }, + local_uploads_folder: "uploads", }, email: { send_on_report: false, @@ -311,6 +354,7 @@ export const configDefaults: ConfigType = { logging: { log_requests: false, log_requests_verbose: false, + log_ip: false, log_filters: true, }, ratelimits: { @@ -319,16 +363,3 @@ export const configDefaults: ConfigType = { }, custom_ratelimits: {}, }; - -export const getConfig = () => { - return { - ...configDefaults, - ...(data as ConfigType), - }; -}; - -export const getHost = () => { - const url = new URL(getConfig().http.base_url); - - return url.host; -}; diff --git a/packages/config-manager/index.ts b/packages/config-manager/index.ts new file mode 100644 index 00000000..d9cbb52f --- /dev/null +++ b/packages/config-manager/index.ts @@ -0,0 +1,122 @@ +/** + * @file index.ts + * @summary ConfigManager system to retrieve and modify system configuration + * @description Can read from a hand-written file, config.toml, or from a machine-saved file, config.internal.toml + * Fuses both and provides a way to retrieve individual values + */ + +import { parse, stringify, type JsonMap } from "@iarna/toml"; +import type { ConfigType } from "./config-type.type"; +import { configDefaults } from "./config-type.type"; +import merge from "merge-deep-ts"; + +export class ConfigManager { + constructor( + public config: { + configPathOverride?: string; + internalConfigPathOverride?: string; + } + ) {} + + /** + * @summary Reads the config files and returns the merge as a JSON object + * @returns {Promise} The merged config file as a JSON object + */ + async getConfig() 
{ + const config = await this.readConfig(); + const internalConfig = await this.readInternalConfig(); + + return this.mergeConfigs(config, internalConfig); + } + + getConfigPath() { + return ( + this.config.configPathOverride || + process.cwd() + "/config/config.toml" + ); + } + + getInternalConfigPath() { + return ( + this.config.internalConfigPathOverride || + process.cwd() + "/config/config.internal.toml" + ); + } + + /** + * @summary Reads the internal config file and returns it as a JSON object + * @returns {Promise} The internal config file as a JSON object + */ + private async readInternalConfig() { + const config = Bun.file(this.getInternalConfigPath()); + + if (!(await config.exists())) { + await Bun.write(config, ""); + } + + return this.parseConfig(await config.text()); + } + + /** + * @summary Reads the config file and returns it as a JSON object + * @returns {Promise} The config file as a JSON object + */ + private async readConfig() { + const config = Bun.file(this.getConfigPath()); + + if (!(await config.exists())) { + throw new Error( + `Error while reading config at path ${this.getConfigPath()}: Config file not found` + ); + } + + return this.parseConfig(await config.text()); + } + + /** + * @summary Parses a TOML string and returns it as a JSON object + * @param text The TOML string to parse + * @returns {T = ConfigType} The parsed TOML string as a JSON object + * @throws {Error} If the TOML string is invalid + * @private + */ + private parseConfig(text: string) { + try { + // To all [Symbol] keys from the object + return JSON.parse(JSON.stringify(parse(text))) as T; + } catch (e: any) { + throw new Error( + `Error while parsing config at path ${this.getConfigPath()}: ${e}` + ); + } + } + + /** + * Writes changed values to the internal config + * @param config The new config object + */ + async writeConfig(config: T) { + const path = this.getInternalConfigPath(); + const file = Bun.file(path); + + await Bun.write( + file, + `# THIS FILE IS AUTOMATICALLY GENERATED. 
DO NOT EDIT IT MANUALLY, EDIT THE STANDARD CONFIG.TOML INSTEAD.\n${stringify( + config as JsonMap + )}` + ); + } + + /** + * @summary Merges two config objects together, with + * the latter configs' values taking precedence + * @param configs + * @returns + */ + private mergeConfigs(...configs: T[]) { + return merge(configs) as T; + } +} + +export type { ConfigType }; +export const defaultConfig = configDefaults; diff --git a/packages/config-manager/package.json b/packages/config-manager/package.json new file mode 100644 index 00000000..e3c7ad60 --- /dev/null +++ b/packages/config-manager/package.json @@ -0,0 +1,6 @@ +{ + "name": "config-manager", + "version": "0.0.0", + "main": "index.ts", + "dependencies": {} +} \ No newline at end of file diff --git a/packages/config-manager/tests/config-manager.test.ts b/packages/config-manager/tests/config-manager.test.ts new file mode 100644 index 00000000..2635aba2 --- /dev/null +++ b/packages/config-manager/tests/config-manager.test.ts @@ -0,0 +1,96 @@ +// FILEPATH: /home/jessew/Dev/lysand/packages/config-manager/config-manager.test.ts +import { stringify } from "@iarna/toml"; +import { ConfigManager } from ".."; +import { describe, beforeEach, spyOn, it, expect } from "bun:test"; + +describe("ConfigManager", () => { + let configManager: ConfigManager; + + beforeEach(() => { + configManager = new ConfigManager({ + configPathOverride: "./config/config.toml", + internalConfigPathOverride: "./config/config.internal.toml", + }); + }); + + it("should get the correct config path", () => { + expect(configManager.getConfigPath()).toEqual("./config/config.toml"); + }); + + it("should get the correct internal config path", () => { + expect(configManager.getInternalConfigPath()).toEqual( + "./config/config.internal.toml" + ); + }); + + it("should read the config file correctly", async () => { + const mockConfig = { key: "value" }; + + // @ts-expect-error This is a mock + spyOn(Bun, "file").mockImplementationOnce(() => ({ + exists: () => + new Promise(resolve => { + resolve(true); + }), + text: () => + new Promise(resolve => { + resolve(stringify(mockConfig)); + }), + })); + + const config = await configManager.getConfig(); + + expect(config).toEqual(mockConfig); + }); + + it("should read the internal config file correctly", async () => { + const mockConfig = { key: "value" }; + + // @ts-expect-error This is a mock + spyOn(Bun, "file").mockImplementationOnce(() => ({ + exists: () => + new Promise(resolve => { + resolve(true); + }), + text: () => + new Promise(resolve => { + resolve(stringify(mockConfig)); + }), + })); + + const config = + // @ts-expect-error Force call private function for testing + await configManager.readInternalConfig(); + + expect(config).toEqual(mockConfig); + }); + + it("should write to the internal config file correctly", async () => { + const mockConfig = { key: "value" }; + + spyOn(Bun, "write").mockImplementationOnce( + () => + new Promise(resolve => { + resolve(10); + }) + ); + + await configManager.writeConfig(mockConfig); + }); + + it("should merge configs correctly", () => { + const config1 = { key1: "value1", key2: "value2" }; + const config2 = { key2: "newValue2", key3: "value3" }; + // @ts-expect-error Force call private function for testing + const mergedConfig = configManager.mergeConfigs>( + config1, + config2 + ); + + expect(mergedConfig).toEqual({ + key1: "value1", + key2: "newValue2", + key3: "value3", + }); + }); +}); diff --git a/packages/log-manager/index.ts b/packages/log-manager/index.ts new file mode 100644 index 
00000000..37d03e1d --- /dev/null +++ b/packages/log-manager/index.ts @@ -0,0 +1,171 @@ +import type { BunFile } from "bun"; +import { appendFile } from "fs/promises"; + +export enum LogLevel { + DEBUG = "debug", + INFO = "info", + WARNING = "warning", + ERROR = "error", + CRITICAL = "critical", +} + +/** + * Class for handling logging to disk or to stdout + * @param output BunFile of output (can be a normal file or something like Bun.stdout) + */ +export class LogManager { + constructor(private output: BunFile) { + void this.write( + `--- INIT LogManager at ${new Date().toISOString()} ---` + ); + } + + /** + * Logs a message to the output + * @param level Importance of the log + * @param entity Emitter of the log + * @param message Message to log + * @param showTimestamp Whether to show the timestamp in the log + */ + async log( + level: LogLevel, + entity: string, + message: string, + showTimestamp = true + ) { + await this.write( + `${showTimestamp ? new Date().toISOString() + " " : ""}[${level.toUpperCase()}] ${entity}: ${message}` + ); + } + + private async write(text: string) { + if (this.output == Bun.stdout) { + await Bun.write(Bun.stdout, text + "\n"); + } else { + if (!this.output.name) { + throw new Error(`Output file doesnt exist (and isnt stdout)`); + } + await appendFile(this.output.name, text + "\n"); + } + } + + /** + * Logs an error to the output, wrapper for log + * @param level Importance of the log + * @param entity Emitter of the log + * @param error Error to log + */ + async logError(level: LogLevel, entity: string, error: Error) { + await this.log(level, entity, error.message); + } + + /** + * Logs a request to the output + * @param req Request to log + * @param ip IP of the request + * @param logAllDetails Whether to log all details of the request + */ + async logRequest(req: Request, ip?: string, logAllDetails = false) { + let string = ip ? 
`${ip}: ` : ""; + + string += `${req.method} ${req.url}`; + + if (logAllDetails) { + string += `\n`; + string += ` [Headers]\n`; + // Pretty print headers + for (const [key, value] of req.headers.entries()) { + string += ` ${key}: ${value}\n`; + } + + // Pretty print body + string += ` [Body]\n`; + const content_type = req.headers.get("Content-Type"); + + if (content_type && content_type.includes("application/json")) { + const json = await req.json(); + const stringified = JSON.stringify(json, null, 4) + .split("\n") + .map(line => ` ${line}`) + .join("\n"); + + string += `${stringified}\n`; + } else if ( + content_type && + (content_type.includes("application/x-www-form-urlencoded") || + content_type.includes("multipart/form-data")) + ) { + const formData = await req.formData(); + for (const [key, value] of formData.entries()) { + if (value.toString().length < 300) { + string += ` ${key}: ${value.toString()}\n`; + } else { + string += ` ${key}: <${value.toString().length} bytes>\n`; + } + } + } else { + const text = await req.text(); + string += ` ${text}\n`; + } + } + await this.log(LogLevel.INFO, "Request", string); + } +} + +/** + * Outputs to multiple LogManager instances at once + */ +export class MultiLogManager { + constructor(private logManagers: LogManager[]) {} + + /** + * Logs a message to all logManagers + * @param level Importance of the log + * @param entity Emitter of the log + * @param message Message to log + * @param showTimestamp Whether to show the timestamp in the log + */ + async log( + level: LogLevel, + entity: string, + message: string, + showTimestamp = true + ) { + for (const logManager of this.logManagers) { + await logManager.log(level, entity, message, showTimestamp); + } + } + + /** + * Logs an error to all logManagers + * @param level Importance of the log + * @param entity Emitter of the log + * @param error Error to log + */ + async logError(level: LogLevel, entity: string, error: Error) { + for (const logManager of this.logManagers) { + await logManager.logError(level, entity, error); + } + } + + /** + * Logs a request to all logManagers + * @param req Request to log + * @param ip IP of the request + * @param logAllDetails Whether to log all details of the request + */ + async logRequest(req: Request, ip?: string, logAllDetails = false) { + for (const logManager of this.logManagers) { + await logManager.logRequest(req, ip, logAllDetails); + } + } + + /** + * Create a MultiLogManager from multiple LogManager instances + * @param logManagers LogManager instances to use + * @returns + */ + static fromLogManagers(...logManagers: LogManager[]) { + return new MultiLogManager(logManagers); + } +} diff --git a/packages/log-manager/package.json b/packages/log-manager/package.json new file mode 100644 index 00000000..679a8262 --- /dev/null +++ b/packages/log-manager/package.json @@ -0,0 +1,6 @@ +{ + "name": "log-manager", + "version": "0.0.0", + "main": "index.ts", + "dependencies": { } + } \ No newline at end of file diff --git a/packages/log-manager/tests/log-manager.test.ts b/packages/log-manager/tests/log-manager.test.ts new file mode 100644 index 00000000..6b8b7bf5 --- /dev/null +++ b/packages/log-manager/tests/log-manager.test.ts @@ -0,0 +1,231 @@ +// FILEPATH: /home/jessew/Dev/lysand/packages/log-manager/log-manager.test.ts +import { LogManager, LogLevel, MultiLogManager } from "../index"; +import type fs from "fs/promises"; +import { + describe, + it, + beforeEach, + expect, + jest, + mock, + type Mock, + test, +} from "bun:test"; +import type { BunFile } 
from "bun"; + +describe("LogManager", () => { + let logManager: LogManager; + let mockOutput: BunFile; + let mockAppend: Mock; + + beforeEach(async () => { + mockOutput = Bun.file("test.log"); + mockAppend = jest.fn(); + await mock.module("fs/promises", () => ({ + appendFile: mockAppend, + })); + logManager = new LogManager(mockOutput); + }); + + it("should initialize and write init log", () => { + expect(mockAppend).toHaveBeenCalledWith( + mockOutput.name, + expect.stringContaining("--- INIT LogManager at") + ); + }); + + it("should log message with timestamp", async () => { + await logManager.log(LogLevel.INFO, "TestEntity", "Test message"); + expect(mockAppend).toHaveBeenCalledWith( + mockOutput.name, + expect.stringContaining("[INFO] TestEntity: Test message") + ); + }); + + it("should log message without timestamp", async () => { + await logManager.log( + LogLevel.INFO, + "TestEntity", + "Test message", + false + ); + expect(mockAppend).toHaveBeenCalledWith( + mockOutput.name, + "[INFO] TestEntity: Test message\n" + ); + }); + + test.skip("should write to stdout", async () => { + logManager = new LogManager(Bun.stdout); + await logManager.log(LogLevel.INFO, "TestEntity", "Test message"); + + const writeMock = jest.fn(); + + await mock.module("Bun", () => ({ + stdout: Bun.stdout, + write: writeMock, + })); + + expect(writeMock).toHaveBeenCalledWith( + Bun.stdout, + expect.stringContaining("[INFO] TestEntity: Test message") + ); + }); + + it("should throw error if output file does not exist", () => { + mockAppend.mockImplementationOnce(() => { + return Promise.reject( + new Error("Output file doesnt exist (and isnt stdout)") + ); + }); + expect( + logManager.log(LogLevel.INFO, "TestEntity", "Test message") + ).rejects.toThrow(Error); + }); + + it("should log error message", async () => { + const error = new Error("Test error"); + await logManager.logError(LogLevel.ERROR, "TestEntity", error); + expect(mockAppend).toHaveBeenCalledWith( + mockOutput.name, + expect.stringContaining("[ERROR] TestEntity: Test error") + ); + }); + + it("should log basic request details", async () => { + const req = new Request("http://localhost/test", { method: "GET" }); + await logManager.logRequest(req, "127.0.0.1"); + + expect(mockAppend).toHaveBeenCalledWith( + mockOutput.name, + expect.stringContaining("127.0.0.1: GET http://localhost/test") + ); + }); + + describe("Request logger", () => { + it("should log all request details for JSON content type", async () => { + const req = new Request("http://localhost/test", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ test: "value" }), + }); + await logManager.logRequest(req, "127.0.0.1", true); + + const expectedLog = `127.0.0.1: POST http://localhost/test + [Headers] + content-type: application/json + [Body] + { + "test": "value" + } +`; + + expect(mockAppend).toHaveBeenCalledWith( + mockOutput.name, + expect.stringContaining(expectedLog) + ); + }); + + it("should log all request details for text content type", async () => { + const req = new Request("http://localhost/test", { + method: "POST", + headers: { "Content-Type": "text/plain" }, + body: "Test body", + }); + await logManager.logRequest(req, "127.0.0.1", true); + + const expectedLog = `127.0.0.1: POST http://localhost/test + [Headers] + content-type: text/plain + [Body] + Test body +`; + expect(mockAppend).toHaveBeenCalledWith( + mockOutput.name, + expect.stringContaining(expectedLog) + ); + }); + + it("should log all request details for FormData 
content-type", async () => { + const formData = new FormData(); + formData.append("test", "value"); + const req = new Request("http://localhost/test", { + method: "POST", + body: formData, + }); + await logManager.logRequest(req, "127.0.0.1", true); + + const expectedLog = `127.0.0.1: POST http://localhost/test + [Headers] + content-type: multipart/form-data; boundary=${ + req.headers.get("Content-Type")?.split("boundary=")[1] ?? "" + } + [Body] + test: value +`; + + expect(mockAppend).toHaveBeenCalledWith( + mockOutput.name, + expect.stringContaining( + expectedLog.replace("----", expect.any(String)) + ) + ); + }); + }); +}); + +describe("MultiLogManager", () => { + let multiLogManager: MultiLogManager; + let mockLogManagers: LogManager[]; + let mockLog: jest.Mock; + let mockLogError: jest.Mock; + let mockLogRequest: jest.Mock; + + beforeEach(() => { + mockLog = jest.fn(); + mockLogError = jest.fn(); + mockLogRequest = jest.fn(); + mockLogManagers = [ + { + log: mockLog, + logError: mockLogError, + logRequest: mockLogRequest, + }, + { + log: mockLog, + logError: mockLogError, + logRequest: mockLogRequest, + }, + ] as unknown as LogManager[]; + multiLogManager = MultiLogManager.fromLogManagers(...mockLogManagers); + }); + + it("should log message to all logManagers", async () => { + await multiLogManager.log(LogLevel.INFO, "TestEntity", "Test message"); + expect(mockLog).toHaveBeenCalledTimes(2); + expect(mockLog).toHaveBeenCalledWith( + LogLevel.INFO, + "TestEntity", + "Test message", + true + ); + }); + + it("should log error to all logManagers", async () => { + const error = new Error("Test error"); + await multiLogManager.logError(LogLevel.ERROR, "TestEntity", error); + expect(mockLogError).toHaveBeenCalledTimes(2); + expect(mockLogError).toHaveBeenCalledWith( + LogLevel.ERROR, + "TestEntity", + error + ); + }); + + it("should log request to all logManagers", async () => { + const req = new Request("http://localhost/test", { method: "GET" }); + await multiLogManager.logRequest(req, "127.0.0.1", true); + expect(mockLogRequest).toHaveBeenCalledTimes(2); + expect(mockLogRequest).toHaveBeenCalledWith(req, "127.0.0.1", true); + }); +}); diff --git a/packages/media-manager/backends/local.ts b/packages/media-manager/backends/local.ts new file mode 100644 index 00000000..d5a8fb99 --- /dev/null +++ b/packages/media-manager/backends/local.ts @@ -0,0 +1,64 @@ +import type { ConvertableMediaFormats } from "../media-converter"; +import { MediaConverter } from "../media-converter"; +import { MediaBackend, MediaBackendType, MediaHasher } from ".."; +import type { ConfigType } from "config-manager"; + +export class LocalMediaBackend extends MediaBackend { + constructor(config: ConfigType) { + super(config, MediaBackendType.LOCAL); + } + + public async addFile(file: File) { + if (this.shouldConvertImages(this.config)) { + const fileExtension = file.name.split(".").pop(); + const mediaConverter = new MediaConverter( + fileExtension as ConvertableMediaFormats, + this.config.media.conversion + .convert_to as ConvertableMediaFormats + ); + file = await mediaConverter.convert(file); + } + + const hash = await new MediaHasher().getMediaHash(file); + + const newFile = Bun.file( + `${this.config.media.local_uploads_folder}/${hash}` + ); + + if (await newFile.exists()) { + throw new Error("File already exists"); + } + + await Bun.write(newFile, file); + + return { + uploadedFile: file, + path: `./uploads/${file.name}`, + hash: hash, + }; + } + + public async getFileByHash( + hash: string, + databaseHashFetcher: 
(sha256: string) => Promise + ): Promise { + const filename = await databaseHashFetcher(hash); + + if (!filename) return null; + + return this.getFile(filename); + } + + public async getFile(filename: string): Promise { + const file = Bun.file( + `${this.config.media.local_uploads_folder}/${filename}` + ); + + if (!(await file.exists())) return null; + + return new File([await file.arrayBuffer()], filename, { + type: file.type, + lastModified: file.lastModified, + }); + } +} diff --git a/packages/media-manager/backends/s3.ts b/packages/media-manager/backends/s3.ts new file mode 100644 index 00000000..46c2cb41 --- /dev/null +++ b/packages/media-manager/backends/s3.ts @@ -0,0 +1,69 @@ +import { S3Client } from "@bradenmacdonald/s3-lite-client"; +import type { ConvertableMediaFormats } from "../media-converter"; +import { MediaConverter } from "../media-converter"; +import { MediaBackend, MediaBackendType, MediaHasher } from ".."; +import type { ConfigType } from "config-manager"; + +export class S3MediaBackend extends MediaBackend { + constructor( + config: ConfigType, + private s3Client = new S3Client({ + endPoint: config.s3.endpoint, + useSSL: true, + region: config.s3.region || "auto", + bucket: config.s3.bucket_name, + accessKey: config.s3.access_key, + secretKey: config.s3.secret_access_key, + }) + ) { + super(config, MediaBackendType.S3); + } + + public async addFile(file: File) { + if (this.shouldConvertImages(this.config)) { + const fileExtension = file.name.split(".").pop(); + const mediaConverter = new MediaConverter( + fileExtension as ConvertableMediaFormats, + this.config.media.conversion + .convert_to as ConvertableMediaFormats + ); + file = await mediaConverter.convert(file); + } + + const hash = await new MediaHasher().getMediaHash(file); + + await this.s3Client.putObject(file.name, file.stream(), { + size: file.size, + }); + + return { + uploadedFile: file, + hash: hash, + }; + } + + public async getFileByHash( + hash: string, + databaseHashFetcher: (sha256: string) => Promise + ): Promise { + const filename = await databaseHashFetcher(hash); + + if (!filename) return null; + + return this.getFile(filename); + } + + public async getFile(filename: string): Promise { + try { + await this.s3Client.statObject(filename); + } catch { + return null; + } + + const file = await this.s3Client.getObject(filename); + + return new File([await file.arrayBuffer()], filename, { + type: file.headers.get("Content-Type") || "undefined", + }); + } +} diff --git a/packages/media-manager/bun.lockb b/packages/media-manager/bun.lockb new file mode 100755 index 00000000..202c862d Binary files /dev/null and b/packages/media-manager/bun.lockb differ diff --git a/packages/media-manager/bunfig.toml b/packages/media-manager/bunfig.toml new file mode 100644 index 00000000..bea1efe1 --- /dev/null +++ b/packages/media-manager/bunfig.toml @@ -0,0 +1,2 @@ +[install.scopes] +"@jsr" = "https://npm.jsr.io" diff --git a/packages/media-manager/index.ts b/packages/media-manager/index.ts new file mode 100644 index 00000000..1dc24dba --- /dev/null +++ b/packages/media-manager/index.ts @@ -0,0 +1,101 @@ +import type { ConfigType } from "config-manager"; + +export enum MediaBackendType { + LOCAL = "local", + S3 = "s3", +} + +interface UploadedFileMetadata { + uploadedFile: File; + path?: string; + hash: string; +} + +export class MediaHasher { + /** + * Returns the SHA-256 hash of a file in hex format + * @param media The file to hash + * @returns The SHA-256 hash of the file in hex format + */ + public async 
getMediaHash(media: File) { + const hash = new Bun.SHA256() + .update(await media.arrayBuffer()) + .digest("hex"); + + return hash; + } +} + +export class MediaBackend { + constructor( + public config: ConfigType, + public backend: MediaBackendType + ) {} + + static async fromBackendType( + backend: MediaBackendType, + config: ConfigType + ): Promise { + switch (backend) { + case MediaBackendType.LOCAL: + return new (await import("./backends/local")).LocalMediaBackend( + config + ); + case MediaBackendType.S3: + return new (await import("./backends/s3")).S3MediaBackend( + config + ); + default: + throw new Error(`Unknown backend type: ${backend as any}`); + } + } + + public getBackendType() { + return this.backend; + } + + public shouldConvertImages(config: ConfigType) { + return config.media.conversion.convert_images; + } + + /** + * Fetches file from backend from SHA-256 hash + * @param file SHA-256 hash of wanted file + * @param databaseHashFetcher Function that takes in a sha256 hash as input and outputs the filename of that file in the database + * @returns The file as a File object + */ + public getFileByHash( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + file: string, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + databaseHashFetcher: (sha256: string) => Promise + ): Promise { + return Promise.reject( + new Error("Do not call MediaBackend directly: use a subclass") + ); + } + + /** + * Fetches file from backend from filename + * @param filename File name + * @returns The file as a File object + */ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + public getFile(filename: string): Promise { + return Promise.reject( + new Error("Do not call MediaBackend directly: use a subclass") + ); + } + + /** + * Adds file to backend + * @param file File to add + * @returns Metadata about the uploaded file + */ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + public addFile(file: File): Promise { + return Promise.reject( + new Error("Do not call MediaBackend directly: use a subclass") + ); + } +} diff --git a/packages/media-manager/media-converter.ts b/packages/media-manager/media-converter.ts new file mode 100644 index 00000000..2602f4cf --- /dev/null +++ b/packages/media-manager/media-converter.ts @@ -0,0 +1,94 @@ +/** + * @packageDocumentation + * @module MediaManager + * @description Handles media conversion between formats + */ +import sharp from "sharp"; + +export enum ConvertableMediaFormats { + PNG = "png", + WEBP = "webp", + JPEG = "jpeg", + JPG = "jpg", + AVIF = "avif", + JXL = "jxl", + HEIF = "heif", +} + +/** + * Handles media conversion between formats + */ +export class MediaConverter { + constructor( + public fromFormat: ConvertableMediaFormats, + public toFormat: ConvertableMediaFormats + ) {} + + /** + * Returns whether the media is convertable + * @returns Whether the media is convertable + */ + public isConvertable() { + return ( + this.fromFormat !== this.toFormat && + Object.values(ConvertableMediaFormats).includes(this.fromFormat) + ); + } + + /** + * Returns the file name with the extension replaced + * @param fileName File name to replace + * @returns File name with extension replaced + */ + private getReplacedFileName(fileName: string) { + return this.extractFilenameFromPath(fileName).replace( + new RegExp(`\\.${this.fromFormat}$`), + `.${this.toFormat}` + ); + } + + /** + * Extracts the filename from a path + * @param path Path to extract filename from + * @returns Extracted filename + */ + private 
extractFilenameFromPath(path: string) { + // Don't count escaped slashes as path separators + const pathParts = path.split(/(? = { + [P in keyof T]?: DeepPartial; +}; + +describe("MediaBackend", () => { + let mediaBackend: MediaBackend; + let mockConfig: ConfigType; + + beforeEach(() => { + mockConfig = { + media: { + conversion: { + convert_images: true, + }, + }, + } as ConfigType; + mediaBackend = new MediaBackend(mockConfig, MediaBackendType.S3); + }); + + it("should initialize with correct backend type", () => { + expect(mediaBackend.getBackendType()).toEqual(MediaBackendType.S3); + }); + + describe("fromBackendType", () => { + it("should return a LocalMediaBackend instance for LOCAL backend type", async () => { + const backend = await MediaBackend.fromBackendType( + MediaBackendType.LOCAL, + mockConfig + ); + expect(backend).toBeInstanceOf(LocalMediaBackend); + }); + + it("should return a S3MediaBackend instance for S3 backend type", async () => { + const backend = await MediaBackend.fromBackendType( + MediaBackendType.S3, + { + s3: { + endpoint: "localhost:4566", + region: "us-east-1", + bucket_name: "test-bucket", + access_key: "test-access", + public_url: "test", + secret_access_key: "test-secret", + }, + } as ConfigType + ); + expect(backend).toBeInstanceOf(S3MediaBackend); + }); + + it("should throw an error for unknown backend type", () => { + expect( + MediaBackend.fromBackendType("unknown" as any, mockConfig) + ).rejects.toThrow("Unknown backend type: unknown"); + }); + }); + + it("should check if images should be converted", () => { + expect(mediaBackend.shouldConvertImages(mockConfig)).toBe(true); + mockConfig.media.conversion.convert_images = false; + expect(mediaBackend.shouldConvertImages(mockConfig)).toBe(false); + }); + + it("should throw error when calling getFileByHash", () => { + const mockHash = "test-hash"; + const databaseHashFetcher = jest.fn().mockResolvedValue("test.jpg"); + + expect( + mediaBackend.getFileByHash(mockHash, databaseHashFetcher) + ).rejects.toThrow(Error); + }); + + it("should throw error when calling getFile", () => { + const mockFilename = "test.jpg"; + + expect(mediaBackend.getFile(mockFilename)).rejects.toThrow(Error); + }); + + it("should throw error when calling addFile", () => { + const mockFile = new File([""], "test.jpg"); + + expect(mediaBackend.addFile(mockFile)).rejects.toThrow(); + }); +}); + +describe("S3MediaBackend", () => { + let s3MediaBackend: S3MediaBackend; + let mockS3Client: Partial; + let mockConfig: DeepPartial; + let mockFile: File; + let mockMediaHasher: MediaHasher; + + beforeEach(() => { + mockConfig = { + s3: { + endpoint: "http://localhost:4566", + region: "us-east-1", + bucket_name: "test-bucket", + access_key: "test-access-key", + secret_access_key: "test-secret-access-key", + public_url: "test", + }, + media: { + conversion: { + convert_to: ConvertableMediaFormats.PNG, + }, + }, + }; + mockFile = new File([new TextEncoder().encode("test")], "test.jpg"); + mockMediaHasher = new MediaHasher(); + mockS3Client = { + putObject: jest.fn().mockResolvedValue({}), + statObject: jest.fn().mockResolvedValue({}), + getObject: jest.fn().mockResolvedValue({ + blob: jest.fn().mockResolvedValue(new Blob()), + headers: new Headers({ "Content-Type": "image/jpeg" }), + }), + } as Partial; + s3MediaBackend = new S3MediaBackend( + mockConfig as ConfigType, + mockS3Client as S3Client + ); + }); + + it("should initialize with correct type", () => { + expect(s3MediaBackend.getBackendType()).toEqual(MediaBackendType.S3); + }); + + 
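    // S3MediaBackend.addFile() converts the image when conversion is enabled,
    // hashes the contents with SHA-256 (64 hex characters) and streams the file
    // to the bucket via putObject with its size; the assertions below check that.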
it("should add file", async () => { + const mockHash = "test-hash"; + spyOn(mockMediaHasher, "getMediaHash").mockResolvedValue(mockHash); + + const result = await s3MediaBackend.addFile(mockFile); + + expect(result.uploadedFile).toEqual(mockFile); + expect(result.hash).toHaveLength(64); + expect(mockS3Client.putObject).toHaveBeenCalledWith( + mockFile.name, + expect.any(ReadableStream), + { size: mockFile.size } + ); + }); + + it("should get file by hash", async () => { + const mockHash = "test-hash"; + const mockFilename = "test.jpg"; + const databaseHashFetcher = jest.fn().mockResolvedValue(mockFilename); + mockS3Client.statObject = jest.fn().mockResolvedValue({}); + mockS3Client.getObject = jest.fn().mockResolvedValue({ + arrayBuffer: jest.fn().mockResolvedValue(new ArrayBuffer(10)), + headers: new Headers({ "Content-Type": "image/jpeg" }), + }); + + const file = await s3MediaBackend.getFileByHash( + mockHash, + databaseHashFetcher + ); + + expect(file).not.toBeNull(); + expect(file?.name).toEqual(mockFilename); + expect(file?.type).toEqual("image/jpeg"); + }); + + it("should get file", async () => { + const mockFilename = "test.jpg"; + mockS3Client.statObject = jest.fn().mockResolvedValue({}); + mockS3Client.getObject = jest.fn().mockResolvedValue({ + arrayBuffer: jest.fn().mockResolvedValue(new ArrayBuffer(10)), + headers: new Headers({ "Content-Type": "image/jpeg" }), + }); + + const file = await s3MediaBackend.getFile(mockFilename); + + expect(file).not.toBeNull(); + expect(file?.name).toEqual(mockFilename); + expect(file?.type).toEqual("image/jpeg"); + }); +}); + +describe("LocalMediaBackend", () => { + let localMediaBackend: LocalMediaBackend; + let mockConfig: ConfigType; + let mockFile: File; + let mockMediaHasher: MediaHasher; + + beforeEach(() => { + mockConfig = { + media: { + conversion: { + convert_images: true, + convert_to: ConvertableMediaFormats.PNG, + }, + local_uploads_folder: "./uploads", + }, + } as ConfigType; + mockFile = Bun.file(__dirname + "/megamind.jpg") as unknown as File; + mockMediaHasher = new MediaHasher(); + localMediaBackend = new LocalMediaBackend(mockConfig); + }); + + it("should initialize with correct type", () => { + expect(localMediaBackend.getBackendType()).toEqual( + MediaBackendType.LOCAL + ); + }); + + it("should add file", async () => { + const mockHash = "test-hash"; + spyOn(mockMediaHasher, "getMediaHash").mockResolvedValue(mockHash); + const mockMediaConverter = new MediaConverter( + ConvertableMediaFormats.JPG, + ConvertableMediaFormats.PNG + ); + spyOn(mockMediaConverter, "convert").mockResolvedValue(mockFile); + // @ts-expect-error This is a mock + spyOn(Bun, "file").mockImplementationOnce(() => ({ + exists: () => Promise.resolve(false), + })); + spyOn(Bun, "write").mockImplementationOnce(() => + Promise.resolve(mockFile.size) + ); + + const result = await localMediaBackend.addFile(mockFile); + + expect(result.uploadedFile).toEqual(mockFile); + expect(result.path).toEqual(`./uploads/megamind.png`); + expect(result.hash).toHaveLength(64); + }); + + it("should get file by hash", async () => { + const mockHash = "test-hash"; + const mockFilename = "test.jpg"; + const databaseHashFetcher = jest.fn().mockResolvedValue(mockFilename); + // @ts-expect-error This is a mock + spyOn(Bun, "file").mockImplementationOnce(() => ({ + exists: () => Promise.resolve(true), + arrayBuffer: () => Promise.resolve(new ArrayBuffer(8)), + type: "image/jpeg", + lastModified: 123456789, + })); + + const file = await localMediaBackend.getFileByHash( + mockHash, + 
databaseHashFetcher + ); + + expect(file).not.toBeNull(); + expect(file?.name).toEqual(mockFilename); + expect(file?.type).toEqual("image/jpeg"); + }); + + it("should get file", async () => { + const mockFilename = "test.jpg"; + // @ts-expect-error This is a mock + spyOn(Bun, "file").mockImplementationOnce(() => ({ + exists: () => Promise.resolve(true), + arrayBuffer: () => Promise.resolve(new ArrayBuffer(8)), + type: "image/jpeg", + lastModified: 123456789, + })); + + const file = await localMediaBackend.getFile(mockFilename); + + expect(file).not.toBeNull(); + expect(file?.name).toEqual(mockFilename); + expect(file?.type).toEqual("image/jpeg"); + }); +}); diff --git a/packages/media-manager/tests/media-manager.test.ts b/packages/media-manager/tests/media-manager.test.ts new file mode 100644 index 00000000..017f3b6a --- /dev/null +++ b/packages/media-manager/tests/media-manager.test.ts @@ -0,0 +1,65 @@ +// FILEPATH: /home/jessew/Dev/lysand/packages/media-manager/media-converter.test.ts +import { describe, it, expect, beforeEach } from "bun:test"; +import { MediaConverter, ConvertableMediaFormats } from "../media-converter"; + +describe("MediaConverter", () => { + let mediaConverter: MediaConverter; + + beforeEach(() => { + mediaConverter = new MediaConverter( + ConvertableMediaFormats.JPG, + ConvertableMediaFormats.PNG + ); + }); + + it("should initialize with correct formats", () => { + expect(mediaConverter.fromFormat).toEqual(ConvertableMediaFormats.JPG); + expect(mediaConverter.toFormat).toEqual(ConvertableMediaFormats.PNG); + }); + + it("should check if media is convertable", () => { + expect(mediaConverter.isConvertable()).toBe(true); + mediaConverter.toFormat = ConvertableMediaFormats.JPG; + expect(mediaConverter.isConvertable()).toBe(false); + }); + + it("should replace file name extension", () => { + const fileName = "test.jpg"; + const expectedFileName = "test.png"; + // Written like this because it's a private function + expect(mediaConverter["getReplacedFileName"](fileName)).toEqual( + expectedFileName + ); + }); + + describe("Filename extractor", () => { + it("should extract filename from path", () => { + const path = "path/to/test.jpg"; + const expectedFileName = "test.jpg"; + expect(mediaConverter["extractFilenameFromPath"](path)).toEqual( + expectedFileName + ); + }); + + it("should handle escaped slashes", () => { + const path = "path/to/test\\/test.jpg"; + const expectedFileName = "test\\/test.jpg"; + expect(mediaConverter["extractFilenameFromPath"](path)).toEqual( + expectedFileName + ); + }); + }); + + it("should convert media", async () => { + const file = Bun.file(__dirname + "/megamind.jpg"); + + const convertedFile = await mediaConverter.convert( + file as unknown as File + ); + + expect(convertedFile.name).toEqual("megamind.png"); + expect(convertedFile.type).toEqual( + `image/${ConvertableMediaFormats.PNG}` + ); + }); +}); diff --git a/packages/media-manager/tests/megamind.jpg b/packages/media-manager/tests/megamind.jpg new file mode 100644 index 00000000..0f8f035a Binary files /dev/null and b/packages/media-manager/tests/megamind.jpg differ diff --git a/packages/request-parser/index.ts b/packages/request-parser/index.ts new file mode 100644 index 00000000..6351fecc --- /dev/null +++ b/packages/request-parser/index.ts @@ -0,0 +1,170 @@ +/** + * RequestParser + * @file index.ts + * @module request-parser + * @description Parses Request object into a JavaScript object based on the content type + */ + +/** + * RequestParser + * Parses Request object into a 
JavaScript object + * based on the Content-Type header + * @param request Request object + * @returns JavaScript object of type T + */ +export class RequestParser { + constructor(public request: Request) {} + + /** + * Parse request body into a JavaScript object + * @returns JavaScript object of type T + * @throws Error if body is invalid + */ + async toObject<T>() { + try { + switch (await this.determineContentType()) { + case "application/json": + return this.parseJson<T>(); + case "application/x-www-form-urlencoded": + return this.parseFormUrlencoded<T>(); + case "multipart/form-data": + return this.parseFormData<T>(); + default: + return this.parseQuery<T>(); + } + } catch { + return {} as T; + } + } + + /** + * Determine body content type + * If there is no Content-Type header, automatically + * guess content type. Cuts off after ";" character + * @returns Content-Type header value, or empty string if there is no body + * @throws Error if body is invalid + * @private + */ + private async determineContentType() { + if (this.request.headers.get("Content-Type")) { + return ( + this.request.headers.get("Content-Type")?.split(";")[0] ?? "" + ); + } + + // Check if body is valid JSON + try { + await this.request.json(); + return "application/json"; + } catch { + // This is not JSON + } + + // Check if body is valid FormData + try { + await this.request.formData(); + return "multipart/form-data"; + } catch { + // This is not FormData + } + + if (this.request.body) { + throw new Error("Invalid body"); + } + + // If there is no body, return query parameters + return ""; + } + + /** + * Parse FormData body into a JavaScript object + * @returns JavaScript object of type T + * @private + * @throws Error if body is invalid + */ + private async parseFormData<T>(): Promise<Partial<T>> { + const formData = await this.request.formData(); + const result: Partial<T> = {}; + + for (const [key, value] of formData.entries()) { + if (value instanceof File) { + result[key as keyof T] = value as any; + } else if (key.endsWith("[]")) { + const arrayKey = key.slice(0, -2) as keyof T; + if (!result[arrayKey]) { + result[arrayKey] = [] as T[keyof T]; + } + + (result[arrayKey] as any[]).push(value); + } else { + result[key as keyof T] = value as any; + } + } + + return result; + } + + /** + * Parse application/x-www-form-urlencoded body into a JavaScript object + * @returns JavaScript object of type T + * @private + * @throws Error if body is invalid + */ + private async parseFormUrlencoded<T>(): Promise<Partial<T>> { + const formData = await this.request.formData(); + const result: Partial<T> = {}; + + for (const [key, value] of formData.entries()) { + if (key.endsWith("[]")) { + const arrayKey = key.slice(0, -2) as keyof T; + if (!result[arrayKey]) { + result[arrayKey] = [] as T[keyof T]; + } + + (result[arrayKey] as any[]).push(value); + } else { + result[key as keyof T] = value as any; + } + } + + return result; + } + + /** + * Parse JSON body into a JavaScript object + * @returns JavaScript object of type T + * @private + * @throws Error if body is invalid + */ + private async parseJson<T>(): Promise<Partial<T>> { + try { + return (await this.request.json()) as T; + } catch { + return {}; + } + } + + /** + * Parse query parameters into a JavaScript object + * @private + * @throws Error if body is invalid + * @returns JavaScript object of type T + */ + private parseQuery<T>(): Partial<T> { + const result: Partial<T> = {}; + const url = new URL(this.request.url); + + for (const [key, value] of url.searchParams.entries()) { + if (key.endsWith("[]")) { + const arrayKey = key.slice(0,
-2) as keyof T; + if (!result[arrayKey]) { + result[arrayKey] = [] as T[keyof T]; + } + (result[arrayKey] as string[]).push(value); + } else { + result[key as keyof T] = value as any; + } + } + return result; + } +} diff --git a/packages/request-parser/package.json b/packages/request-parser/package.json new file mode 100644 index 00000000..89d30d2c --- /dev/null +++ b/packages/request-parser/package.json @@ -0,0 +1,6 @@ +{ + "name": "request-parser", + "version": "0.0.0", + "main": "index.ts", + "dependencies": {} +} \ No newline at end of file diff --git a/packages/request-parser/tests/request-parser.test.ts b/packages/request-parser/tests/request-parser.test.ts new file mode 100644 index 00000000..d6f4bf20 --- /dev/null +++ b/packages/request-parser/tests/request-parser.test.ts @@ -0,0 +1,158 @@ +import { describe, it, expect, test } from "bun:test"; +import { RequestParser } from ".."; + +describe("RequestParser", () => { + describe("Should parse query parameters correctly", () => { + test("With text parameters", async () => { + const request = new Request( + "http://localhost?param1=value1&param2=value2" + ); + const result = await new RequestParser(request).toObject<{ + param1: string; + param2: string; + }>(); + expect(result).toEqual({ param1: "value1", param2: "value2" }); + }); + + test("With Array", async () => { + const request = new Request( + "http://localhost?test[]=value1&test[]=value2" + ); + const result = await new RequestParser(request).toObject<{ + test: string[]; + }>(); + expect(result.test).toEqual(["value1", "value2"]); + }); + + test("With both at once", async () => { + const request = new Request( + "http://localhost?param1=value1&param2=value2&test[]=value1&test[]=value2" + ); + const result = await new RequestParser(request).toObject<{ + param1: string; + param2: string; + test: string[]; + }>(); + expect(result).toEqual({ + param1: "value1", + param2: "value2", + test: ["value1", "value2"], + }); + }); + }); + + it("should parse JSON body correctly", async () => { + const request = new Request("http://localhost", { + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ param1: "value1", param2: "value2" }), + }); + const result = await new RequestParser(request).toObject<{ + param1: string; + param2: string; + }>(); + expect(result).toEqual({ param1: "value1", param2: "value2" }); + }); + + it("should handle invalid JSON body", async () => { + const request = new Request("http://localhost", { + headers: { "Content-Type": "application/json" }, + body: "invalid json", + }); + const result = await new RequestParser(request).toObject<{ + param1: string; + param2: string; + }>(); + expect(result).toEqual({}); + }); + + describe("should parse form data correctly", () => { + test("With basic text parameters", async () => { + const formData = new FormData(); + formData.append("param1", "value1"); + formData.append("param2", "value2"); + const request = new Request("http://localhost", { + method: "POST", + body: formData, + }); + const result = await new RequestParser(request).toObject<{ + param1: string; + param2: string; + }>(); + expect(result).toEqual({ param1: "value1", param2: "value2" }); + }); + + test("With File object", async () => { + const file = new File(["content"], "filename.txt", { + type: "text/plain", + }); + const formData = new FormData(); + formData.append("file", file); + const request = new Request("http://localhost", { + method: "POST", + body: formData, + }); + const result = await new RequestParser(request).toObject<{ + file: File;
+ }>(); + expect(result.file).toBeInstanceOf(File); + expect(await result.file?.text()).toEqual("content"); + }); + + test("With Array", async () => { + const formData = new FormData(); + formData.append("test[]", "value1"); + formData.append("test[]", "value2"); + const request = new Request("http://localhost", { + method: "POST", + body: formData, + }); + const result = await new RequestParser(request).toObject<{ + test: string[]; + }>(); + expect(result.test).toEqual(["value1", "value2"]); + }); + + test("With all three at once", async () => { + const file = new File(["content"], "filename.txt", { + type: "text/plain", + }); + const formData = new FormData(); + formData.append("param1", "value1"); + formData.append("param2", "value2"); + formData.append("file", file); + formData.append("test[]", "value1"); + formData.append("test[]", "value2"); + const request = new Request("http://localhost", { + method: "POST", + body: formData, + }); + const result = await new RequestParser(request).toObject<{ + param1: string; + param2: string; + file: File; + test: string[]; + }>(); + expect(result).toEqual({ + param1: "value1", + param2: "value2", + file: file, + test: ["value1", "value2"], + }); + }); + + test("URL Encoded", async () => { + const request = new Request("http://localhost", { + method: "POST", + headers: { + "Content-Type": "application/x-www-form-urlencoded", + }, + body: "param1=value1&param2=value2", + }); + const result = await new RequestParser(request).toObject<{ + param1: string; + param2: string; + }>(); + expect(result).toEqual({ param1: "value1", param2: "value2" }); + }); + }); +}); diff --git a/pages/App.vue b/pages/App.vue new file mode 100644 index 00000000..3b798503 --- /dev/null +++ b/pages/App.vue @@ -0,0 +1,10 @@ + + + \ No newline at end of file diff --git a/pages/components/LoginInput.vue b/pages/components/LoginInput.vue new file mode 100644 index 00000000..b1ed5123 --- /dev/null +++ b/pages/components/LoginInput.vue @@ -0,0 +1,30 @@ + + + \ No newline at end of file diff --git a/pages/favicon.png b/pages/favicon.png new file mode 100644 index 00000000..c56caef9 Binary files /dev/null and b/pages/favicon.png differ diff --git a/pages/index.html b/pages/index.html new file mode 100644 index 00000000..5a0ba2d4 --- /dev/null +++ b/pages/index.html @@ -0,0 +1,16 @@ + + + + + + + + Lysand + + + +
+ + + + \ No newline at end of file diff --git a/pages/login.html b/pages/login.html deleted file mode 100644 index 4eb3abf5..00000000 --- a/pages/login.html +++ /dev/null @@ -1,445 +0,0 @@ - - - - Login with Lysand - {{STYLES}} - - - - - -
- \ No newline at end of file diff --git a/pages/main.ts b/pages/main.ts new file mode 100644 index 00000000..92f1b4c5 --- /dev/null +++ b/pages/main.ts @@ -0,0 +1,16 @@ +import { createApp } from "vue"; +import "./style.css"; +import "virtual:uno.css"; +import { createRouter, createWebHistory } from "vue-router"; +import App from "./App.vue"; +import routes from "./routes"; + +const router = createRouter({ + history: createWebHistory(), + routes: routes, +}); + +const app = createApp(App); +app.use(router); + +app.mount("#app"); diff --git a/pages/pages/index.vue b/pages/pages/index.vue new file mode 100644 index 00000000..0c65d837 --- /dev/null +++ b/pages/pages/index.vue @@ -0,0 +1,44 @@ + + + \ No newline at end of file diff --git a/pages/pages/oauth/authorize.vue b/pages/pages/oauth/authorize.vue new file mode 100644 index 00000000..a4aac9f1 --- /dev/null +++ b/pages/pages/oauth/authorize.vue @@ -0,0 +1,79 @@ + + + \ No newline at end of file diff --git a/pages/pages/register/index.vue b/pages/pages/register/index.vue new file mode 100644 index 00000000..731e579f --- /dev/null +++ b/pages/pages/register/index.vue @@ -0,0 +1,149 @@ +
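For reference, a minimal usage sketch of how the new `RequestParser` is consumed, based on the tests in this diff. The `StatusForm` shape, the `handleCreateStatus` handler, and the `request-parser` import specifier are illustrative assumptions, not code from this PR.

```ts
import { RequestParser } from "request-parser";

// Hypothetical form shape; any field may be absent in the parsed body.
type StatusForm = {
    status: string;
    media_ids?: string[];
};

// Hypothetical route handler: the same call handles JSON, multipart/form-data,
// x-www-form-urlencoded bodies, and falls back to query parameters.
async function handleCreateStatus(request: Request): Promise<Response> {
    const body = await new RequestParser(request).toObject<StatusForm>();

    if (!body.status) {
        return new Response("Missing status", { status: 422 });
    }

    return new Response(JSON.stringify({ status: body.status }), {
        headers: { "Content-Type": "application/json" },
    });
}
```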