Compare commits

No commits in common. "main" and "v0.2.0" have entirely different histories.
main ... v0.2.0

689 changed files with 18199 additions and 165216 deletions

@@ -1,18 +0,0 @@
version = 1
test_patterns = ["**/*.test.ts"]
[[analyzers]]
name = "shell"
[[analyzers]]
name = "javascript"
[analyzers.meta]
environment = ["nodejs"]
[[analyzers]]
name = "docker"
[analyzers.meta]
dockerfile_paths = ["Dockerfile"]

@ -14,6 +14,4 @@ helm-charts
.idea
coverage*
uploads
logs
dist
pages/dist
logs

@@ -1,9 +0,0 @@
root = true
[*]
charset = utf-8
end_of_line = lf
indent_style = space
insert_final_newline = true
tab_width = 4
trim_trailing_whitespace = true

.envrc

@@ -1 +0,0 @@
use flake

.eslintrc.cjs

@@ -0,0 +1,22 @@
module.exports = {
extends: [
"eslint:recommended",
"plugin:@typescript-eslint/strict-type-checked",
"plugin:@typescript-eslint/stylistic",
"plugin:prettier/recommended",
],
parser: "@typescript-eslint/parser",
parserOptions: {
project: "./tsconfig.json",
},
ignorePatterns: ["node_modules/", "dist/", ".eslintrc.cjs"],
plugins: ["@typescript-eslint"],
root: true,
rules: {
"@typescript-eslint/no-unsafe-assignment": "off",
"@typescript-eslint/no-unsafe-argument": "off",
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/consistent-type-exports": "error",
"@typescript-eslint/consistent-type-imports": "error"
},
};

.github/ISSUE_TEMPLATE/bug_report.md

@@ -0,0 +1,39 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
## Describe the bug
A clear and concise description of what the bug is.
## Steps to reproduce
Steps to reproduce the behavior, such as a cURL command, HTTP request, scenario, or code repository
## Expected behavior
A clear and concise description of what you expected to happen.
## Screenshots
If applicable, add screenshots to help explain your problem.
## Logs
Please upload logs onto a service like [Pastebin](https://pastebin.com/) or [Hastebin](https://hastebin.com/) and paste the link here. Don't paste the logs directly into the GitHub issue, as it just looks ugly and is hard to read.
## Environment
- OS: [e.g. Fedora 39]
- Bun version
- Postgres version
- Lysand commit ID or version
## Additional context
Add any other context about the problem here.

@@ -0,0 +1,28 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---
## Is your feature request related to a problem? Please describe.
A clear and concise description of what the problem is, such as "I'm always frustrated when [...]" or "I can't do [...]"
## Describe the solution you'd like
What would you like to see implemented?
## Describe alternatives you've considered
If applicable, describe any alternative solutions or features you've considered.
## Additional context
Add any other context or screenshots about the feature request here.
## Are you willing to work on this feature?
If you are willing to work on this feature, please say so here.

@@ -1,474 +0,0 @@
# You can change the URL to the commit/tag you are using
#:schema https://raw.githubusercontent.com/versia-pub/server/main/config/config.schema.json
# All values marked as "sensitive" can be set to "PATH:/path/to/file" to read the value from a file (e.g. a secret manager)
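# As an illustration only (the path below is an example, not a real default),
# a sensitive value such as the PostgreSQL password could be read from a secrets file:
# password = "PATH:/run/secrets/versia_db_password"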
[postgres]
# PostgreSQL database configuration
host = "localhost"
port = 5432
username = "versia"
# Sensitive value
password = "versia"
database = "versia"
# Additional read-only replicas
# [[postgres.replicas]]
# host = "other-host"
# port = 5432
# username = "versia"
# password = "mycoolpassword2"
# database = "replica1"
[redis.queue]
# A Redis database used for managing queues.
# Required for federation
host = "localhost"
port = 6379
# Sensitive value
# password = "test"
database = 0
# A Redis database used for caching SQL queries.
# Optional, can be the same as the queue instance
# [redis.cache]
# host = "localhost"
# port = 6380
# database = 1
# password = ""
# Search and indexing configuration
[search]
# Enable indexing and searching?
enabled = false
# Optional if search is disabled
[search.sonic]
host = "localhost"
port = 40007
# Sensitive value
password = ""
[registration]
# Can users sign up freely?
allow = true
# NOT IMPLEMENTED
require_approval = false
# Message to show to users when registration is disabled
# message = "ran out of spoons to moderate registrations, sorry"
[http]
# URL that the instance will be accessible at
base_url = "http://0.0.0.0:8080"
# Address to bind to (0.0.0.0 is suggested for proxies)
bind = "0.0.0.0"
bind_port = 8080
# Bans IPv4 or IPv6 IPs (wildcards, networks and ranges are supported)
banned_ips = []
# Banned user agents, regex format
banned_user_agents = [
# "curl\/7.68.0",
# "wget\/1.20.3",
]
# URL to an eventual HTTP proxy
# Will be used for all outgoing requests
# proxy_address = "http://localhost:8118"
# TLS configuration. You should probably be using a reverse proxy instead of this
# [http.tls]
# key = "/path/to/key.pem"
# cert = "/path/to/cert.pem"
# Sensitive value
# passphrase = "awawa"
# ca = "/path/to/ca.pem"
[frontend]
# Enable custom frontends (warning: not enabling this will make Versia Server only accessible via the Mastodon API)
# Frontends also control the OpenID flow, so if you disable this, you will need to use the Mastodon frontend
enabled = true
# Path that frontend files are served from
# Edit this property to serve custom frontends
# If this is not set, Versia Server will also check
# the VERSIA_FRONTEND_PATH environment variable
# path = ""
[frontend.routes]
# Special routes for your frontend, below are the defaults for Versia-FE
# Can be set to a route already used by Versia Server, as long as it is on a different HTTP method
# e.g. /oauth/authorize is a POST-only route, so you can serve a GET route at /oauth/authorize
# home = "/"
# login = "/oauth/authorize"
# consent = "/oauth/consent"
# register = "/register"
# password_reset = "/oauth/reset"
[frontend.settings]
# Arbitrary key/value pairs to be passed to the frontend
# This can be used to set up custom themes, etc on supported frontends.
# theme = "dark"
# NOT IMPLEMENTED
[email]
# Enable email sending
send_emails = false
# If send_emails is true, the following settings are required
# [email.smtp]
# SMTP server to use for sending emails
# server = "smtp.example.com"
# port = 465
# username = "test@example.com"
# Sensitive value
# password = "password123"
# tls = true
[media]
# Can be "s3" or "local", where "local" uploads the file to the local filesystem
# Changing this value will not retroactively apply to existing data
# Don't forget to fill in the s3 config :3
backend = "local"
# If media backend is "local", this is the folder where the files will be stored
# Can be any path
uploads_path = "uploads"
[media.conversion]
# Whether to automatically convert images to another format on upload
convert_images = false
# Can be: "image/jxl", "image/webp", "image/avif", "image/png", "image/jpeg", "image/heif", "image/gif"
# JXL support will likely not work
convert_to = "image/webp"
# Also convert SVG images?
convert_vectors = false
# [s3]
# Can be left commented if you don't use the S3 media backend
# endpoint = "https://s3.example.com"
# Sensitive value
# access_key = "XXXXX"
# Sensitive value
# secret_access_key = "XXX"
# region = "us-east-1"
# bucket_name = "versia"
# public_url = "https://cdn.example.com"
[validation]
# Checks user data
# Does not retroactively apply to previously entered data
[validation.accounts]
max_displayname_characters = 50
max_username_characters = 30
max_bio_characters = 5000
max_avatar_bytes = 5_000_000
max_header_bytes = 5_000_000
# Regex is allowed here
disallowed_usernames = [
"well-known",
"about",
"activities",
"api",
"auth",
"dev",
"inbox",
"internal",
"main",
"media",
"nodeinfo",
"notice",
"oauth",
"objects",
"proxy",
"push",
"registration",
"relay",
"settings",
"status",
"tag",
"users",
"web",
"search",
"mfa",
]
max_field_count = 10
max_field_name_characters = 1000
max_field_value_characters = 1000
max_pinned_notes = 20
[validation.notes]
max_characters = 5000
allowed_url_schemes = [
"http",
"https",
"ftp",
"dat",
"dweb",
"gopher",
"hyper",
"ipfs",
"ipns",
"irc",
"xmpp",
"ircs",
"magnet",
"mailto",
"mumble",
"ssb",
"gemini",
]
max_attachments = 16
[validation.media]
max_bytes = 40_000_000
max_description_characters = 1000
# An empty array allows all MIME types
allowed_mime_types = []
[validation.emojis]
max_bytes = 1_000_000
max_shortcode_characters = 100
max_description_characters = 1000
[validation.polls]
max_options = 20
max_option_characters = 500
min_duration_seconds = 60
# 100 days
max_duration_seconds = 8_640_000
[validation.emails]
# Blocks over 10,000 common tempmail domains
disallow_tempmail = false
# Regex is allowed here
disallowed_domains = []
[validation.challenges]
# "Challenges" (aka captchas) are a way to verify that a user is human
# Versia Server's challenges use no external services, and are proof-of-work based
# This means that they do not require any user interaction; instead,
# they require the user's computer to do a small amount of work.
# The difficulty of the challenge; higher values take longer to solve
difficulty = 50000
# Challenge expiration time in seconds
expiration = 300 # 5 minutes
# Leave this empty to generate a new key
# Sensitive value
key = "YBpAV0KZOeM/MZ4kOb2E9moH9gCUr00Co9V7ncGRJ3wbd/a9tLDKKFdI0BtOcnlpfx0ZBh0+w3WSvsl0TsesTg=="
# Block content that matches these regular expressions
[validation.filters]
note_content = [
# "(https?://)?(www\\.)?youtube\\.com/watch\\?v=[a-zA-Z0-9_-]+",
# "(https?://)?(www\\.)?youtu\\.be/[a-zA-Z0-9_-]+",
]
emoji_shortcode = []
username = []
displayname = []
bio = []
[notifications]
# Web Push Notifications configuration.
# Leave out to disable.
[notifications.push]
# Subject field embedded in the push notification
# subject = "mailto:joe@example.com"
#
[notifications.push.vapid_keys]
# VAPID keys for push notifications
# Run Versia Server with those values missing to generate new keys
# Sensitive value
public = "BBanhyj2_xWwbTsWld3T49VcAoKZHrVJTzF1f6Av2JwQY_wUi3CF9vZ0WeEcACRj6EEqQ7N35CkUh5epF7n4P_s"
# Sensitive value
private = "Eujaz7NsF0rKZOVrAFL7mMpFdl96f591ERsRn81unq0"
[defaults]
# Default visibility for new notes
# Can be public, unlisted, private or direct
# Private only sends to followers, unlisted doesn't show up in timelines
visibility = "public"
# Default language for new notes (ISO code)
language = "en"
# Default avatar, must be a valid URL or left out for a placeholder avatar
# avatar = ""
# Default header, must be a valid URL or left out for none
# header = ""
# A style name from https://www.dicebear.com/styles
placeholder_style = "thumbs"
[queues]
# Controls the delivery queue (for outbound federation)
[queues.delivery]
# Time in seconds to remove completed jobs
remove_after_complete_seconds = 31536000
# Time in seconds to remove failed jobs
remove_after_failure_seconds = 31536000
# Controls the inbox processing queue (for inbound federation)
[queues.inbox]
# Time in seconds to remove completed jobs
remove_after_complete_seconds = 31536000
# Time in seconds to remove failed jobs
remove_after_failure_seconds = 31536000
# Controls the fetch queue (for remote data refreshes)
[queues.fetch]
# Time in seconds to remove completed jobs
remove_after_complete_seconds = 31536000
# Time in seconds to remove failed jobs
remove_after_failure_seconds = 31536000
# Controls the push queue (for push notification delivery)
[queues.push]
# Time in seconds to remove completed jobs
remove_after_complete_seconds = 31536000
# Time in seconds to remove failed jobs
remove_after_failure_seconds = 31536000
# Controls the media queue (for media processing)
[queues.media]
# Time in seconds to remove completed jobs
remove_after_complete_seconds = 31536000
# Time in seconds to remove failed jobs
remove_after_failure_seconds = 31536000
[federation]
# This is a list of domain names, such as "mastodon.social" or "pleroma.site"
# These changes will not retroactively apply to existing data before they were changed
# For that, please use the CLI (in a later release)
# These instances will not be federated with
blocked = []
# These instances' data will only be shown to followers, not in public timelines
followers_only = []
[federation.discard]
# These objects will be discarded when received from these instances
reports = []
deletes = []
updates = []
media = []
follows = []
# If instance reactions are blocked, likes will also be discarded
likes = []
reactions = []
banners = []
avatars = []
# For bridge software, such as versia-pub/activitypub
# Bridges must be hosted separately from the main Versia Server process
# [federation.bridge]
# Only versia-ap exists for now
# software = "versia-ap"
# If this is empty, any bridge with the correct token
# will be able to send data to your instance
# v4, v6, ranges and wildcards are supported
# allowed_ips = ["192.168.1.0/24"]
# Token for the bridge software
# Bridge must have the same token!
# Sensitive value
# token = "mycooltoken"
# url = "https://ap.versia.social"
[instance]
name = "Versia"
description = "A Versia Server instance"
# Paths to instance long description, terms of service, and privacy policy
# These will be parsed as Markdown
#
# extended_description_path = "config/extended_description.md"
# tos_path = "config/tos.md"
# privacy_policy_path = "config/privacy_policy.md"
# Primary instance languages. ISO 639-1 codes.
languages = ["en"]
[instance.contact]
email = "staff@yourinstance.com"
[instance.branding]
# logo = "https://cdn.example.com/logo.png"
# banner = "https://cdn.example.com/banner.png"
# Used for federation. If left empty or missing, the server will generate one for you.
[instance.keys]
# Sensitive value
public = "MCowBQYDK2VwAyEASN0V5OWRbhRCnuhxfRLqpUOfszHozvrLLVhlIYLNTZM="
# Sensitive value
private = "MC4CAQAwBQYDK2VwBCIEIKaxDGMaW71OcCGMY+GKTZPtLPNlTvMFe3G5qXVHPhQM"
[[instance.rules]]
# Short description of the rule
text = "No hate speech"
# Longer version of the rule with additional information
hint = "Hate speech includes slurs, threats, and harassment."
[[instance.rules]]
text = "No spam"
# [[instance.rules]]
# ...etc
[permissions]
# Control default permissions for users
# Note that an anonymous user having a permission will not allow them
# to do things that require authentication (e.g. 'owner:notes' -> posting a note will need
# auth, but viewing a note will not)
# See https://server.versia.pub/api/roles#list-of-permissions for a list of all permissions
# Defaults to being able to login and manage their own content
# anonymous = []
# Defaults to identical to anonymous
# default = []
# Defaults to being able to manage all instance data, content, and users
# admin = []
[logging]
# Available levels: trace, debug, info, warning, error, fatal
log_level = "info" # For console output
# [logging.file]
# path = "logs/versia.log"
# log_level = "info"
#
# [logging.file.rotation]
# max_size = 10_000_000 # 10 MB
# max_files = 10 # Keep 10 rotated files
#
# https://sentry.io support
# [logging.sentry]
# dsn = "https://example.com"
# debug = false
# sample_rate = 1.0
# traces_sample_rate = 1.0
# Can also be regex
# trace_propagation_targets = []
# max_breadcrumbs = 100
# environment = "production"
# log_level = "info"
[authentication]
# Run Versia Server with this value missing to generate a new key
key = "ZWcwanRaQAqY3ChUro/Jey9XGQjzsxEed5iqTp4yFr8W6vEnXdz91F/Pu/uf7HBMbNeIK7V6aHsM0lq9onrO8Q=="
# The provider MUST support OpenID Connect with .well-known discovery
# Most notably, GitHub does not support this
# Redirect URLs in your OpenID provider can be set to this:
# <base_url>/oauth/sso/<provider_id>/callback*
# The asterisk is important, as it allows for any query parameters to be passed
# Authentik for example uses regex so it can be set to (regex):
# <base_url>/oauth/sso/<provider_id>/callback.*
# [[authentication.openid_providers]]
# name = "CPlusPatch ID"
# id = "cpluspatch-id"
# This MUST match the provider's issuer URI, including the trailing slash (or lack thereof)
# url = "https://id.cpluspatch.com/application/o/versia-testing/"
# client_id = "XXXX"
# Sensitive value
# client_secret = "XXXXX"
# icon = "https://cpluspatch.com/images/icons/logo.svg"

@@ -1,22 +0,0 @@
We use full TypeScript and ESM with Bun for our codebase. Please include relevant and detailed JSDoc comments for all functions and classes. Use explicit type annotations for all variables and function return values, such as:
```typescript
/**
* Adds two numbers together.
*
* @param {number} a
* @param {number} b
* @returns {number}
*/
const add = (a: number, b: number): number => a + b;
```
We always write TypeScript with double quotes and four spaces for indentation, so when your responses include TypeScript code, please follow those conventions.
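For instance, a minimal illustrative snippet (not taken from the codebase) following those conventions could look like this:
```typescript
/**
 * Joins a list of tags into a single display string.
 *
 * @param {string[]} tags
 * @returns {string}
 */
const joinTags = (tags: string[]): string => {
    const separator = ", ";
    return tags.join(separator);
};
```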
Our codebase uses Drizzle as an ORM, which is exposed in the `@versia-server/kit/db` and `@versia-server/kit/tables` packages. This project uses a monorepo structure with Bun as the package manager.
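As a rough sketch only (the `db` and `Users` exports below are assumed names for illustration, not confirmed APIs of those packages), a typical Drizzle query might look like this:
```typescript
import { eq } from "drizzle-orm";
// Assumed exports for illustration: the Drizzle instance and a users table definition.
import { db } from "@versia-server/kit/db";
import { Users } from "@versia-server/kit/tables";

/**
 * Fetches a single user row by its ID.
 *
 * @param {string} id
 * @returns {Promise<typeof Users.$inferSelect | undefined>}
 */
const findUserById = async (
    id: string,
): Promise<typeof Users.$inferSelect | undefined> => {
    const rows = await db.select().from(Users).where(eq(Users.id, id));
    return rows[0];
};
```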
The app has two modes: worker and API. The worker mode is used for background tasks, while the API mode serves HTTP requests. The entry point for the worker is `worker.ts`, and for the API, it is `api.ts`.
Run the typechecker with `bun run typecheck` to ensure that all TypeScript code is type-checked correctly. Run tests with `bun test` to ensure that all tests pass. Run the linter and formatter with `bun lint` to ensure that the code adheres to our style guidelines, and `bun lint --write` to automatically fix minor/formatting issues.
Cover all new functionality with tests, and ensure that all tests pass before submitting your code.

@@ -1,27 +0,0 @@
name: Check Types
on:
workflow_call:
jobs:
tests:
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup Bun
uses: oven-sh/setup-bun@v2
- name: Install NPM packages
run: |
bun install
- name: Run typechecks
run: |
bun run typecheck

@@ -1,27 +0,0 @@
name: Check Circular Imports
on:
workflow_call:
jobs:
tests:
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup Bun
uses: oven-sh/setup-bun@v2
- name: Install NPM packages
run: |
bun install
- name: Detect circular imports
run: |
bun run detect-circular

@ -9,7 +9,7 @@
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL Scan"
name: "CodeQL"
on:
push:
@ -46,11 +46,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@ -63,7 +63,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@ -76,6 +76,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
uses: github/codeql-action/analyze@v2
with:
category: "/language:${{matrix.language}}"

.github/workflows/docker-publish.yml

@@ -0,0 +1,96 @@
name: Build and publish Docker image
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
on:
schedule:
- cron: "22 5 * * *"
push:
branches: ["main"]
# Publish semver tags as releases.
tags: ["v*.*.*"]
pull_request:
branches: ["main"]
env:
# Use docker.io for Docker Hub if empty
REGISTRY: ghcr.io
# github.repository as <account>/<repo>
IMAGE_NAME: ${{ github.repository }}
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
# This is used to complete the identity challenge
# with sigstore/fulcio when running outside of PRs.
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
# Install the cosign tool except on PR
# https://github.com/sigstore/cosign-installer
- name: Install cosign
if: github.event_name != 'pull_request'
uses: sigstore/cosign-installer@6e04d228eb30da1757ee4e1dd75a0ec73a653e06 #v3.1.1
with:
cosign-release: "v2.1.1"
# Set up BuildKit Docker container builder to be able to build
# multi-platform images and export cache
# https://github.com/docker/setup-buildx-action
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0
# Login against a Docker registry except on PR
# https://github.com/docker/login-action
- name: Log into registry ${{ env.REGISTRY }}
if: github.event_name != 'pull_request'
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
# Extract metadata (tags, labels) for Docker
# https://github.com/docker/metadata-action
- name: Extract Docker metadata
id: meta
uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # v5.0.0
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
# Build and push Docker image with Buildx (don't push on PR)
# https://github.com/docker/build-push-action
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
# Sign the resulting Docker image digest except on PRs.
# This will only write to the public Rekor transparency log when the Docker
# repository is public to avoid leaking data. If you would like to publish
# transparency data even for private images, pass --force to cosign below.
# https://github.com/sigstore/cosign
- name: Sign the published Docker image
if: ${{ github.event_name != 'pull_request' }}
env:
# https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable
TAGS: ${{ steps.meta.outputs.tags }}
DIGEST: ${{ steps.build-and-push.outputs.digest }}
# This step uses the identity token to provision an ephemeral certificate
# against the sigstore community Fulcio instance.
run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}

@@ -1,98 +0,0 @@
name: Build Docker Images
on:
push:
branches: ["*"]
# Publish semver tags as releases.
tags: ["v*.*.*"]
pull_request:
branches: ["main"]
jobs:
lint:
uses: ./.github/workflows/lint.yml
check:
uses: ./.github/workflows/check.yml
tests:
uses: ./.github/workflows/tests.yml
detect-circular:
uses: ./.github/workflows/circular-imports.yml
build:
if: ${{ success() }}
needs: [lint, check, tests, detect-circular]
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
# This is used to complete the identity challenge
# with sigstore/fulcio when running outside of PRs.
id-token: write
strategy:
matrix:
include:
- container: worker
image_name: ${{ github.repository_owner }}/worker
dockerfile: Worker.Dockerfile
- container: server
image_name: ${{ github.repository_owner }}/server
dockerfile: Dockerfile
env:
REGISTRY: ghcr.io
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup QEMU
uses: docker/setup-qemu-action@v3
with:
platforms: all
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log into registry ${{ env.REGISTRY }}
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract Docker metadata
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ matrix.image_name }}
tags: |
type=schedule
type=ref,event=branch
type=ref,event=tag
type=ref,event=pr
type=sha
- name: Get the commit hash
run: echo "GIT_COMMIT=$(git rev-parse --short ${{ github.sha }})" >> $GITHUB_ENV
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v5
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_COMMIT=${{ env.GIT_COMMIT }}
file: ${{ matrix.dockerfile }}
provenance: mode=max
sbom: true
platforms: linux/amd64,linux/arm64
cache-from: type=gha
cache-to: type=gha,mode=max

@@ -1,56 +0,0 @@
name: Deploy Docs to GitHub Pages
on:
push:
branches: [main]
workflow_dispatch:
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
contents: read
pages: write
id-token: write
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
group: pages
cancel-in-progress: false
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: oven-sh/setup-bun@v2
- name: Setup Pages
uses: actions/configure-pages@v4
- name: Install dependencies
run: bun install
- name: Build with VitePress
run: bun run --filter="@versia-server/api" docs:build
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: packages/api/docs/.vitepress/dist
# Deployment job
deploy:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
needs: build
runs-on: ubuntu-latest
name: Deploy
steps:
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4

.github/workflows/eslint.yml

@@ -0,0 +1,52 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
# ESLint is a tool for identifying and reporting on patterns
# found in ECMAScript/JavaScript code.
# More details at https://github.com/eslint/eslint
# and https://eslint.org
name: ESLint Checks
on:
push:
branches: ["main"]
pull_request:
# The branches below must be a subset of the branches above
branches: ["main"]
schedule:
- cron: "35 17 * * 3"
jobs:
eslint:
name: Run eslint scanning
runs-on: ubuntu-latest
permissions:
contents: read
security-events: write
actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Setup Bun
uses: oven-sh/setup-bun@v1
- name: Install NPM packages
run: |
bun install
- name: Generate Prisma Client
run: |
bunx prisma generate
- name: Run ESLint
run: |
bunx eslint . --config .eslintrc.cjs --ext .js,.jsx,.ts,.tsx --format @microsoft/eslint-formatter-sarif --output-file eslint-results.sarif
continue-on-error: true
- name: Upload analysis results to GitHub
uses: github/codeql-action/upload-sarif@v2
with:
sarif_file: eslint-results.sarif
wait-for-processing: true

@@ -1,27 +0,0 @@
name: Lint & Format
on:
workflow_call:
jobs:
tests:
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup Bun
uses: oven-sh/setup-bun@v2
- name: Install NPM packages
run: |
bun install
- name: Run linting
run: |
bunx @biomejs/biome ci .

@@ -1,8 +0,0 @@
name: Mirror to Codeberg
on: [push]
jobs:
mirror:
name: Mirror
uses: versia-pub/.github/.github/workflows/mirror.yml@main
secrets: inherit

@@ -1,25 +0,0 @@
name: Nix Build
on:
pull_request:
push:
branches: ["*"]
workflow_dispatch:
jobs:
check:
runs-on: ubuntu-latest
permissions:
id-token: "write"
contents: "read"
steps:
- uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main
with:
extra-conf: accept-flake-config = true
- uses: DeterminateSystems/magic-nix-cache-action@main
- uses: DeterminateSystems/flake-checker-action@main
- name: Build default package
run: nix build .
- name: Check flakes
run: nix flake check --allow-import-from-derivation

@@ -1,48 +0,0 @@
name: Build & Publish Packages
on:
workflow_dispatch:
inputs:
package:
description: "Package to publish"
required: true
type: choice
options:
- client
- sdk
tag:
description: "NPM tag to use"
required: true
type: choice
default: nightly
options:
- latest
- nightly
permissions:
contents: read
# For provenance generation
id-token: write
jobs:
publish:
runs-on: ubuntu-latest
environment: NPM Deploy
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: oven-sh/setup-bun@v2
- name: Install
run: bun install --frozen-lockfile
- name: Publish to NPM
working-directory: packages/${{ inputs.package }}
run: bun publish --provenance --tag ${{ inputs.tag }} --access public
env:
NPM_CONFIG_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Publish to JSR
working-directory: packages/${{ inputs.package }}
run: bunx jsr publish --allow-slow-types --allow-dirty

@@ -1,36 +0,0 @@
name: Test Publish
on:
push:
permissions:
contents: read
# For provenance generation
id-token: write
jobs:
# Build job
build:
runs-on: ubuntu-latest
environment: NPM Deploy
strategy:
matrix:
package: ["sdk", "client"]
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: oven-sh/setup-bun@v2
- name: Install
run: bun install --frozen-lockfile
- name: Publish to NPM
working-directory: packages/${{ matrix.package }}
env:
NPM_CONFIG_TOKEN: ${{ secrets.NPM_TOKEN }}
run: bun publish --dry-run --access public
- name: Publish to JSR
working-directory: packages/${{ matrix.package }}
run: bunx jsr publish --allow-slow-types --allow-dirty --dry-run

@@ -1,53 +0,0 @@
name: Tests
on:
workflow_call:
jobs:
tests:
runs-on: ubuntu-latest
services:
postgres:
image: postgres:17-alpine
ports:
- 5432:5432
env:
POSTGRES_DB: versia
POSTGRES_USER: versia
POSTGRES_PASSWORD: versia
volumes:
- versia-data:/var/lib/postgresql/data
options: --health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
redis:
image: redis:latest
ports:
- 6379:6379
options: --health-cmd "redis-cli ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup Bun
uses: oven-sh/setup-bun@v2
- name: Install NPM packages
run: |
bun install
- name: Move workflow config to config folder
run: |
mv .github/config.workflow.toml config/config.toml
- name: Run tests
run: |
bun run test

.gitignore

@ -117,10 +117,6 @@ out
.nuxt
dist
# Nix build output
result
# Gatsby files
.cache/
@ -172,19 +168,4 @@ result
.yarn/install-state.gz
.pnp.\*
config/config.toml
config/config.internal.toml
uploads/
pages/dist
log.txt
*.log
build
config/extended_description_test.md
*.pem
oclif.manifest.json
.direnv/
tsconfig.tsbuildinfo
# Vitepress Docs
*/.vitepress/dist
*/.vitepress/cache
uploads/

@@ -1,7 +0,0 @@
{
"detectiveOptions": {
"ts": {
"skipTypeImports": true
}
}
}

.npmrc

@@ -1 +0,0 @@
@jsr:registry=https://npm.jsr.io

.prettierrc

@@ -0,0 +1,9 @@
{
"tabWidth": 4,
"useTabs": true,
"arrowParens": "avoid",
"bracketSameLine": true,
"bracketSpacing": true,
"jsxSingleQuote": false,
"trailingComma": "es5"
}

@@ -1,13 +0,0 @@
{
"recommendations": [
"biomejs.biome",
"ms-vscode-remote.remote-containers",
"oven.bun-vscode",
"vivaxy.vscode-conventional-commits",
"EditorConfig.EditorConfig",
"tamasfe.even-better-toml",
"YoavBls.pretty-ts-errors",
"eamodio.gitlens"
],
"unwantedRecommendations": []
}

.vscode/launch.json

@@ -1,48 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"type": "bun",
"internalConsoleOptions": "neverOpen",
"request": "launch",
"name": "Debug File",
"program": "${file}",
"cwd": "${workspaceFolder}",
"stopOnEntry": false,
"watchMode": false
},
{
"type": "bun",
"internalConsoleOptions": "neverOpen",
"request": "launch",
"name": "Run File",
"program": "${file}",
"cwd": "${workspaceFolder}",
"watchMode": false
},
{
"type": "bun",
"internalConsoleOptions": "neverOpen",
"request": "attach",
"name": "Attach Bun",
"url": "ws://localhost:6499/",
"stopOnEntry": false
},
{
"type": "bun",
"internalConsoleOptions": "neverOpen",
"request": "launch",
"name": "Run index.ts",
"program": "${workspaceFolder}/index.ts",
"cwd": "${workspaceFolder}",
"watchMode": true
},
{
"type": "bun",
"internalConsoleOptions": "neverOpen",
"request": "launch",
"name": "Run tests",
"program": "test"
}
]
}

.vscode/settings.json

@@ -1,15 +0,0 @@
{
"typescript.tsdk": "node_modules/typescript/lib",
"conventionalCommits.scopes": [
"database",
"api",
"cli",
"federation",
"config",
"worker",
"media",
"packages/client",
"packages/sdk"
],
"languageToolLinter.languageTool.ignoredWordsInWorkspace": ["versia"]
}

@@ -1,184 +0,0 @@
# `0.9.0` (upcoming)
## Features
### API
- [x] 🥺 Emoji Reactions are now available! You can react to any note with custom emojis.
- [x] 🔎 Added support for [batch account data API](https://docs.joinmastodon.org/methods/accounts/#index).
### Backend
- [x] 🚀 Upgraded Bun to `1.3.2`
# `0.8.0` • Federation 2: Electric Boogaloo
## Backwards Compatibility
Versia Server `0.8.0` is **not** backwards-compatible with `0.7.0`. This release includes some breaking changes to the database schema and configuration file.
Please see [Database Changes](#database-changes) and [New Configuration](#new-configuration) for more information.
## Features
### Federation
- [x] 🦄 Updated to [`Versia 0.5`](https://versia.pub/changelog).
- [x] 📦 Added support for new Versia features:
- [x] [**Instance Messaging Extension**](https://versia.pub/extensions/instance-messaging)
- [x] [**Shared Inboxes**](https://versia.pub/federation#inboxes)
- [x] 🔗 Changed entity URIs to be more readable (`example.org/objects/:id` → `example.org/{notes,likes,...}/:id`)
### API
- [x] 📲 Added [Push Notifications](https://docs.joinmastodon.org/methods/push) support.
- [x] 📖 Overhauled OpenAPI schemas to match [Mastodon API docs](https://docs.joinmastodon.org)
- [x] 👷 Improved [**Roles API**](https://server.versia.pub/api/roles) to allow for full role control (create, update, delete, assign).
- [x] ✏️ `<div>` and `<span>` tags are now allowed in Markdown.
- [x] 🔥 Removed nonstandard `/api/v1/accounts/id` endpoint (the same functionality was already possible with other endpoints).
- [x] ✨️ Implemented rate limiting support for API endpoints.
- [x] 🔒 Implemented `is_indexable` and `is_hiding_collections` fields to the [**Accounts API**](https://docs.joinmastodon.org/methods/accounts/#update_credentials).
- [x] ✨️ Muting other users now lets you specify a duration, after which the mute will be automatically removed.
- [x] 📰 All accounts now have an RSS/Atom feed attached to them.
### CLI
- [x] ⌨️ New commands!
- [x] ✨️ `cli user token` to generate API tokens.
- [x] 👷 Error messages are now prettier!
### Frontend
The way frontend is built and served has been changed. In the past, it was required to have a second process serving a frontend, which `versia-server` would proxy requests to. This is no longer the case.
Versia Server now serves static files directly from a configurable path, and `versia-fe` has been updated to support this.
### Backend
- [x] 🚀 Upgraded Bun to `1.2.13`
- [x] 🔥 Removed dependency on the `pg_uuidv7` extension. Versia Server can now be used with "vanilla" PostgreSQL.
- [x] 🖼️ Simplified media pipeline: this will improve S3 performance
- [x] 📈 It is now possible to disable media proxying for your CDN (offloading considerable bandwidth to your more optimized CDN).
- [x] 👷 Outbound federation, inbox processing, data fetching and media processing are now handled by a queue system.
- [x] 🌐 An administration panel is available at `/admin/queues` to monitor and manage queues.
- [x] 🔥 Removed support for **from-source** installations, as Versia Server is designed around containerization and maintaining support was a large burden.
- [x] ❄️ A [**Nix**](https://nixos.org/) package is now available for this project, packaged as a [Flake](https://wiki.nixos.org/wiki/Flakes). A **NixOS** module is also provided.
## New Configuration
Configuration parsing and validation have been overhauled. Unfortunately, since a number of options have been renamed, you'll need to redownload [the default configuration file](config/config.example.toml) and reapply your changes.
## Database Changes
Various media-related attributes have been merged into a single `Medias` table. This will require a migration in order to preserve the old data.
Since very few instances are running `0.7.0`, we have decided to "rawdog it" instead of making a proper migration script (as that would take a ton of time that we don't have).
In the case that you've been running secret instances in the shadows, let us know and we'll help you out.
## Bug Fixes
- 🐛 All URIs in custom Markdown text are now correctly proxied.
- 🐛 Fixed several issues with the [ActivityPub Federation Bridge](https://github.com/versia-pub/activitypub) preventing it from operating properly.
- 🐛 Fixed incorrect content-type on some media when using S3.
- 🐛 All media content-type is now correctly fetched, instead of guessed from the file extension as before.
- 🐛 Fixed OpenAPI schema generation and `/docs` endpoint.
- 🐛 Logs folder is now automatically created if it doesn't exist.
- 🐛 Media hosted on the configured S3 bucket and on the local filesystem is no longer unnecessarily proxied.
- 🐛 Likes and Shares now federate properly.
# `0.7.0` • The Auth and APIs Update
> [!WARNING]
> This release marks the rename of the project from `Lysand` to `Versia`.
## Backwards Compatibility
Versia Server `0.7.0` is backwards compatible with `0.6.0`. However, some new features may not be available to older clients. Notably, `versia-fe` has had major improvements and will not work with `0.6.0`.
## Features
- Upgraded Bun to `1.1.34`. This brings performance upgrades and better stability.
- Added support for the [ActivityPub Federation Bridge](https://github.com/versia-pub/activitypub).
- Added support for the [Sonic](https://github.com/valeriansaliou/sonic) search indexer.
- Note deletions are now federated.
- Note edits are now federated.
- Added support for [Sentry](https://sentry.io).
- Added option for more federation debug logging.
- Added [**Roles API**](https://server.versia.pub/api/roles).
- Added [**Permissions API**](https://server.versia.pub/api/roles) and enabled it for every route.
- Added [**TOS and Privacy Policy**](https://server.versia.pub/api/mastodon) endpoints.
- Added [**Challenge API**](https://server.versia.pub/api/challenges). (basically CAPTCHAS). This can be enabled/disabled by administrators. No `versia-fe` support yet.
- Added ability to refetch user data from remote instances.
- Added ability to change the `username` of a user. ([Mastodon API extension](https://server.versia.pub/api/mastodon#api-v1-accounts-update-credentials)).
- Added an endpoint to get a user by its username.
- Add OpenID Connect registration support. Admins can now disable username/password registration entirely and still allow users to sign up via OpenID Connect.
- Add option to never convert vector images to a raster format.
- Refactor logging system to be more robust and easier to use. Log files are now automatically rotated.
- Add support for HTTP proxies.
- Add support for serving Versia over a Tor hidden service.
- Add global server error handler, to properly return 500 error messages to clients.
- Sign all federation HTTP requests.
- Add JSON schema for configuration file.
- Rewrite federation stack
- Updated federation to Versia 0.4
- Implement OAuth2 token revocation
- Add new **Plugin API**
## Plugin System
A new plugin system for extending Versia Server has been added in this release!
> [!NOTE]
>
> This is an internal feature and is not documented. Support for third-party plugins will be given on an "if we have time" basis until the API is fully stabilized and documented.
Plugins using this framework support:
- [x] Plugin hotswapping and hotreloading
- [x] Manifest files (JSON, JSON5, JSONC supported) with metadata (JSON schema provided)
- [x] Installation by dropping a folder into the plugins/ directory
- [x] Support for plugins having their own NPM dependencies
- [x] Support for storing plugins' configuration in the main config.toml (single source of truth)
- [x] Schema-based strict config validation (plugins can specify their own schemas)
- [x] Full type-safety
- [x] Custom hooks
- [x] FFI compatibility (with `bun:ffi` or Node's FFI)
- [x] Custom API route registration or overriding or middlewaring
- [x] Automatic OpenAPI schema generation for all installed plugins
- [x] End-to-end and unit testing supported
- [x] Automatic user input validation for API routes with schemas (specify a schema for the route and the server will take care of validating everything)
- [x] Access to internal database abstractions
- [x] Support for sending raw SQL to database (type-safe!)
- [x] Plugin autoload on startup with override controls (enable/disable)
As a demonstration of the power of this system and an effort to modularize the codebase further, OpenID functionality has been moved to a plugin. This plugin is required for login.
## Bug Fixes
- Fix favouriting/unfavouriting sometimes returning negative counts.
- Non-images will now properly be uploaded to object storage.
- Make account searches case-insensitive
- Fix image decoding error when passing media through proxy.
- OpenID Connect now correctly remembers and passes `state` parameter.
- OpenID Connect no longer rejects some valid but unusual redirect URIs.
- Markdown posts will not have invisible anchor tags anymore (this messed up accessibility).
- Reverse proxies incorrectly reporting an HTTPS request as HTTP will now be handled correctly during OpenID Connect flows.
- API Relationships will now correctly return `requested_by`.
- Make process wait for Ctrl+C to exit on error, instead of exiting immediately. This fixes some issues with Docker restarting endlessly.
- Animated media will now stay animated when uploaded.
- Some instance metadata will no longer be missing from the `/api/v2/instance` endpoint. In fact, it will now be more complete than Mastodon's implementation.
- The Origin HTTP header will no longer be used to determine the origin of a request. This was a security issue.
- New notes will no longer incorrectly be federated to _all_ remote users at once.
- Fix [Elk Client](https://elk.zone/) not being able to log in.
## Removals
- Remove old logging system, to be replaced by a new one.
- Removed Meilisearch support, in favor of Sonic. Follow instructions in the [installation guide](https://server.versia.pub/setup/installation) to set up Sonic.
- Removed explicit Glitch-FE support. Glitch-FE will still work, but must be hosted separately like any other frontend.
## Miscellaneous
- Remove Node.js from Docker build.
- Update all dependencies.

CODE_OF_CONDUCT.md

@@ -0,0 +1,133 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, caste, color, religion, or sexual
identity and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall
community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or advances of
any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address,
without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
[INSERT CONTACT METHOD].
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series of
actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or permanent
ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within the
community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.1, available at
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
[https://www.contributor-covenant.org/translations][translations].
[homepage]: https://www.contributor-covenant.org
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations

@ -1,73 +1,84 @@
# Contributing to Versia
# Contributing to Lysand
> [!NOTE]
> This document was authored by [@CPlusPatch](https://github.com/CPlusPatch).
Thank you for your interest in contributing to Versia Server! We welcome contributions from everyone, regardless of their level of experience or expertise.
Thank you for your interest in contributing to Lysand! We welcome contributions from everyone, regardless of their level of experience or expertise.
# Tech Stack
Versia Server is built using the following technologies:
Lysand is built using the following technologies:
- [Bun](https://bun.sh) - A JavaScript runtime similar to Node.js, but faster and with more features
- [Bun](https://bun.sh) - A JavaScript runtime similar to Node.js, but improved
- [PostgreSQL](https://www.postgresql.org/) - A relational database
- [`pg_uuidv7`](https://github.com/fboulnois/pg_uuidv7) - A PostgreSQL extension that provides a UUIDv7 data type
- [UnoCSS](https://unocss.dev) - A utility-first CSS framework, used for the login page
- [Docker](https://www.docker.com/) - A containerization platform, used for development and deployment
- [Sharp](https://sharp.pixelplumbing.com/) - An image processing library, used for fast image resizing and converting
- [TypeScript](https://www.typescriptlang.org/) - A typed superset of JavaScript
- [ESLint](https://eslint.org/) - A JavaScript linter
- [Prettier](https://prettier.io/) - A code formatter
## Getting Started
To get started, please follow these steps:
1. Install the [Bun](https://bun.sh) runtime:
1. Fork the repository, clone it on your local system and make your own branch
2. Install the [Bun](https://bun.sh) runtime:
```sh
curl -fsSL https://bun.sh/install | bash
```
2. Clone this repository
1. Clone this repository
```bash
git clone https://github.com/versia-pub/server.git
git clone https://github.com/lysand-org/lysand.git
```
3. Install the dependencies
2. Install the dependencies
```bash
bun install
```
1. Set up a PostgreSQL database (you need a special extension, please look at [the database documentation](https://server.versia.pub/setup/database))
3. Set up a PostgreSQL database, using the `pg_uuidv7` extension
2. Copy the `config/config.example.toml` file to `config/config.toml` and edit it to set up the database connection and other settings.
You may use the following [Dockerfile](Postgres.Dockerfile) to set it up:
```Dockerfile
# Use the latest Postgres Docker image based on Alpine
FROM postgres:alpine
## HTTPS development
# Set working directory
WORKDIR /usr/src/app
To develop with HTTPS, you need to generate a self-signed certificate. We will use [`mkcert`](https://github.com/FiloSottile/mkcert) for this purpose.
# Install curl
RUN apk add --no-cache curl
1. Install `mkcert`:
2. Generate a certificate for the domain you are using:
```sh
mkcert -install
# You can change the domain to whatever you want, but it must resolve via /etc/hosts
# *.localhost domains are automatically aliased to localhost by DNS
mkcert -key-file config/versia.localhost-key.pem -cert-file config/versia.localhost.pem versia.localhost
```
3. Edit the config to use your database and HTTPS certificates, e.g:
```toml
[http]
base_url = "https://versia.localhost:9900"
bind = "versia.localhost"
bind_port = 9900 # Change the port to whatever you want
RUN cd "$(mktemp -d)" \
&& curl -LO "https://github.com/fboulnois/pg_uuidv7/releases/download/v1.3.0/{pg_uuidv7.tar.gz,SHA256SUMS}" \
&& tar xf pg_uuidv7.tar.gz \
&& sha256sum -c SHA256SUMS \
&& PG_MAJOR=$(pg_config --version | sed 's/^.* \([0-9]\{1,\}\).*$/\1/') \
&& cp "$PG_MAJOR/pg_uuidv7.so" "$(pg_config --pkglibdir)" \
&& cp sql/pg_uuidv7--1.3.sql pg_uuidv7.control "$(pg_config --sharedir)/extension"
# Add a script to run the CREATE EXTENSION command
RUN echo '#!/bin/sh\npsql -U "$POSTGRES_USER" -d "$POSTGRES_DB" -c "CREATE EXTENSION pg_uuidv7;"' > /docker-entrypoint-initdb.d/init.sh
[http.tls]
enabled = true
key = "config/versia.localhost-key.pem"
cert = "config/versia.localhost.pem"
passphrase = ""
ca = ""
# Make the entrypoint script executable
RUN chmod +x /docker-entrypoint-initdb.d/init.sh
```
Now, running the server will use the certificate you generated.
4. Copy the `config.toml.example` file to `config.toml` and fill in the values (you can leave most things to the default, but you will need to configure things such as the database connection)
5. Generate the Prisma client:
```bash
bun prisma generate
```
6. Run migrations:
```bash
bun migrate
```
## Testing your changes
@ -82,42 +93,40 @@ If your port number is lower than 1024, you may need to run the command as root.
To run the tests, run:
```sh
bun run test
bun test
```
The `bun test` command will cause errors due to Bun bugs ([oven-sh/bun#7823](https://github.com/oven-sh/bun/issues/7823)). Use the `test` script instead.
The tests are located all around the codebase (filename `*.test.ts`) and follow a Jest-like syntax. The server should be shut down before running the tests.
The tests are located in the `tests/` directory and follow a Jest-like syntax. The server must be started with `bun dev` before running the tests.
## Code style
We use [Biome](https://biomejs.dev) to enforce a consistent code style. To check if your code is compliant, run:
We use ESLint and Prettier to enforce a consistent code style. To check if your code is compliant, run:
```sh
bun lint
```
To automatically fix the issues, run:
```sh
bun lint --write
bun lint --fix
```
You can also install the Biome Visual Studio Code extension and have it format your code automatically on save.
You should have the [ESLint](https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint) and [Prettier](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode) extensions installed in VSCode, if you use it. From the ESLint extension, you can automatically fix the issues with `Ctrl+Shift+P` and `ESLint: Fix all auto-fixable Problems`.
### TypeScript
ESLint and Prettier are also integrated in the CI pipeline, so your code will be automatically checked when you push it. If the pipeline fails, you will need to fix the issues before your pull request can be merged.
Linting should not be ignored, except if they are false positives, in which case you can use a comment to disable the rule for the line or the file. If you need to disable a rule, please add a comment explaining why.
Code style such as brackets, spaces/tabs, etc are enforced by Prettier's ESLint plugin. You can find the simple configuration in the `.prettierrc` file.
### ESLint rules
ESLint errors should not be ignored, except if they are false positives, in which case you can use a comment to disable the rule for the line or the file. If you need to disable a rule, please add a comment explaining why.
TypeScript errors should be ignored with `// @ts-expect-error` comments, as well as with a reason for being ignored.
To scan for all TypeScript errors, run:
```sh
bun typecheck
```
### Commit messages
We use [Conventional Commits](https://www.conventionalcommits.org) for our commit messages. This allows us to automatically generate the changelog and the version number, while also making it easier to understand what changes were made in each commit.
We use [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) for our commit messages. This allows us to automatically generate the changelog and the version number.
> **Note**: I don't actually enforce this rule, but it would be great if you could follow it.
### Pull requests
@ -129,15 +138,13 @@ We use Bun's integrated testing system to write tests. You can find more informa
Tests **should** be written for all API routes and all functions that are not trivial. If you are not sure whether you should write a test for something, you probably should.
#### Adding per-route tests
To add tests for a route, create a `route_file_name.test.ts` file in the same directory as the route itself. See [this example](/api/api/v1/timelines/home.test.ts) for help writing tests.
To help with the creation of tests, you may find [GitHub Copilot](https://copilot.github.com/) useful (or some of its free alternatives like [Codeium](https://codeium.com/)). Please do not blindly copy the code that it generates, but use it as a starting point for your own tests. I recognize that writing tests is very tedious, which is why LLMs can come in handy.
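As a rough sketch (reusing the `getTestUsers` and `fakeRequest` helpers that appear elsewhere in the codebase, with invented assertions), a per-route test could look something like this:

```ts
// home.test.ts (hypothetical sketch, not the actual test file)
import { afterAll, describe, expect, it } from "bun:test";
import { fakeRequest, getTestUsers } from "@versia-server/tests";

const { tokens, deleteUsers } = await getTestUsers(1);

afterAll(async () => {
    await deleteUsers();
});

describe("/api/v1/timelines/home", () => {
    it("returns 200 for an authenticated user", async () => {
        const response = await fakeRequest("/api/v1/timelines/home", {
            headers: {
                Authorization: `Bearer ${tokens[0].data.accessToken}`,
            },
        });

        expect(response.status).toBe(200);
    });
});
```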
### Writing documentation
Documentation for the Versia protocol is available on [versia.pub](https://versia.pub/). If you are thinking of modifying the protocol, please make sure to send a pull request over there to get it approved and merged before you send your pull request here.
Documentation for the Lysand protocol is available on [lysand.org](https://lysand.org/). If you are thinking of modifying the protocol, please make sure to send a pull request over there to get it approved and merged before you send your pull request here.
This project should not need much documentation, but if you think that something needs to be documented, please add it to the README, docs or contribution guide.
This project should not need much documentation, but if you think that something needs to be documented, please add it to the README or contribution guide.
## Reporting bugs
@ -146,11 +153,11 @@ If you find a bug, please open an issue on GitHub. Please make sure to include t
- The steps to reproduce the bug
- The expected behavior
- The actual behavior
- The version of Versia Server you are using
- The version of Lysand you are using
- The version of Bun you are using
- The version of PostgreSQL you are using
- Your operating system and version
# License
Versia Server is licensed under the [AGPLv3 or later](https://www.gnu.org/licenses/agpl-3.0.en.html) license. By contributing to Versia, you agree to license your contributions under the same license.
Lysand is licensed under the [AGPLv3](https://www.gnu.org/licenses/agpl-3.0.en.html) license. By contributing to Lysand, you agree to license your contributions under the same license.
@ -1,51 +1,44 @@
# Node is required for building the project
FROM imbios/bun-node:latest-23-alpine AS base
# use the official Bun image
# see all versions at https://hub.docker.com/r/oven/bun/tags
FROM oven/bun:1.0.14-alpine as base
WORKDIR /usr/src/app
# Install dependencies into temp directory
# This will cache them and speed up future builds
RUN apk add vips-dev
# Required for Prisma to work
COPY --from=node:18-alpine /usr/local/bin/node /usr/local/bin/node
# install dependencies into temp directory
# this will cache them and speed up future builds
FROM base AS install
RUN mkdir -p /temp/dev
COPY package.json bun.lockb /temp/dev/
RUN cd /temp/dev && bun install --frozen-lockfile
RUN mkdir -p /temp
COPY . /temp
WORKDIR /temp
RUN bun install --production
# install with --production (exclude devDependencies)
RUN mkdir -p /temp/prod
COPY package.json bun.lockb /temp/prod/
RUN cd /temp/prod && bun install --frozen-lockfile --production
FROM base AS build
# copy production dependencies and source code into final image
FROM base AS release
# Copy the project
RUN mkdir -p /temp
COPY . /temp
# Copy dependencies
COPY --from=install /temp/node_modules /temp/node_modules
# Build the project
WORKDIR /temp
RUN bun run build api
WORKDIR /temp/dist
# Copy production dependencies and source code into final image
FROM oven/bun:1.3.2-alpine
# Install libstdc++ for Bun and create app directory
# Create app directory
RUN mkdir -p /app
COPY --from=install /temp/prod/node_modules /app/node_modules
COPY . /app
COPY --from=build /temp/dist /app/dist
COPY entrypoint.sh /app
LABEL org.opencontainers.image.authors="Gaspard Wierzbinski (https://cpluspatch.com)"
LABEL org.opencontainers.image.source="https://github.com/versia-pub/server"
LABEL org.opencontainers.image.vendor="Versia Pub"
LABEL org.opencontainers.image.licenses="AGPL-3.0-or-later"
LABEL org.opencontainers.image.title="Versia Server"
LABEL org.opencontainers.image.description="Versia Server Docker image"
# Set current Git commit hash as an environment variable
ARG GIT_COMMIT
ENV GIT_COMMIT=$GIT_COMMIT
LABEL org.opencontainers.image.authors "Gaspard Wierzbinski (https://cpluspatch.dev)"
LABEL org.opencontainers.image.source "https://github.com/lysand-org/lysand"
LABEL org.opencontainers.image.vendor "Lysand Org"
LABEL org.opencontainers.image.licenses "AGPL-3.0"
LABEL org.opencontainers.image.title "Lysand Server"
LABEL org.opencontainers.image.description "Lysand Server docker image"
# CD to app
WORKDIR /app
RUN bunx prisma generate
# CD to app
WORKDIR /app
ENV NODE_ENV=production
ENTRYPOINT [ "/bin/sh", "/app/entrypoint.sh" ]
# Run migrations and start the server
CMD [ "bun", "run", "api.js" ]
ENTRYPOINT [ "/bin/sh", "-c", "bun migrate && bun run index.ts" ]
21
Postgres.Dockerfile Normal file
@ -0,0 +1,21 @@
# Use the latest Postgres Docker image based on Alpine
FROM postgres:alpine
# Set working directory
WORKDIR /usr/src/app
# Install curl
RUN apk add --no-cache curl
RUN cd "$(mktemp -d)" \
&& curl -LO "https://github.com/fboulnois/pg_uuidv7/releases/download/v1.3.0/{pg_uuidv7.tar.gz,SHA256SUMS}" \
&& tar xf pg_uuidv7.tar.gz \
&& sha256sum -c SHA256SUMS \
&& PG_MAJOR=$(pg_config --version | sed 's/^.* \([0-9]\{1,\}\).*$/\1/') \
&& cp "$PG_MAJOR/pg_uuidv7.so" "$(pg_config --pkglibdir)" \
&& cp sql/pg_uuidv7--1.3.sql pg_uuidv7.control "$(pg_config --sharedir)/extension"
# Add a script to run the CREATE EXTENSION command
RUN echo '#!/bin/sh\npsql -U "$POSTGRES_USER" -d "$POSTGRES_DB" -c "CREATE EXTENSION pg_uuidv7;"' > /docker-entrypoint-initdb.d/init.sh
# Make the entrypoint script executable
RUN chmod +x /docker-entrypoint-initdb.d/init.sh
512
README.md
@ -1,52 +1,21 @@
<div align="center">
<a href="https://versia.pub">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://cdn.versia.pub/branding/logo-dark.svg">
<source media="(prefers-color-scheme: light)" srcset="https://cdn.versia.pub/branding/logo-light.svg">
<img src="https://cdn.versia.pub/branding/logo-dark.svg" alt="Versia Logo" height="110" />
</picture>
</a>
</div>
<p align="center">
<a href="https://lysand.org"><img src="https://cdn-web.cpluspatch.com/lysand.webp" alt="Lysand Logo" height=130></a>
</p>
![Postgres](https://img.shields.io/badge/postgres-%23316192.svg?style=for-the-badge&logo=postgresql&logoColor=white) ![Bun](https://img.shields.io/badge/Bun-%23000000.svg?style=for-the-badge&logo=bun&logoColor=white) ![VS Code Insiders](https://img.shields.io/badge/VS%20Code%20Insiders-35b393.svg?style=for-the-badge&logo=visual-studio-code&logoColor=white) ![TypeScript](https://img.shields.io/badge/typescript-%23007ACC.svg?style=for-the-badge&logo=typescript&logoColor=white) ![Linux](https://img.shields.io/badge/Linux-FCC624?style=for-the-badge&logo=linux&logoColor=black) ![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white) ![ESLint](https://img.shields.io/badge/ESLint-4B3263?style=for-the-badge&logo=eslint&logoColor=white) [![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa?style=for-the-badge)](code_of_conduct.md)
<h2 align="center">
<strong><code>Versia Server</code></strong>
</h2>
<div align="center">
<img src="https://cdn.jsdelivr.net/gh/devicons/devicon@latest/icons/typescript/typescript-original.svg" height="42" width="52" alt="TypeScript logo">
<img src="https://cdn.jsdelivr.net/gh/devicons/devicon/icons/postgresql/postgresql-original.svg" height="42" width="52" alt="PostgreSQL logo">
<img src="https://cdn.jsdelivr.net/gh/devicons/devicon/icons/docker/docker-original.svg" height="42" width="52" alt="Docker logo">
<img src="https://cdn.jsdelivr.net/gh/devicons/devicon/icons/bun/bun-original.svg" height="42" width="52" alt="Bun logo">
<img src="https://cdn.jsdelivr.net/gh/devicons/devicon/icons/vscode/vscode-original.svg" height="42" width="52" alt="VSCode logo">
<img src="https://cdn.jsdelivr.net/gh/devicons/devicon/icons/sentry/sentry-original.svg" height="42" width="52" alt="Sentry logo">
<img src="https://cdn.jsdelivr.net/gh/devicons/devicon/icons/linux/linux-original.svg" height="42" width="52" alt="Linux logo">
</div>
<br/>
## What is this?
**Versia Server** (formerly Lysand Server) is a federated social network server based on the [Versia](https://versia.pub) protocol. It is currently in beta phase, with basic federation and almost complete Mastodon API support.
This is a project to create a federated social network based on the [Lysand](https://lysand.org) protocol. It is currently in alpha phase, with basic federation and API support.
### Goals
This project aims to be a fully featured social network, with a focus on privacy, security, and performance. It will implement the Mastodon API for support with clients that already support Mastodon or Pleroma.
- **Privacy**: Versia Server is designed to be as private as possible. Unnecessary data is not stored, and data that is stored is done so securely.
- **Configurability**: High configurability is a key feature of Versia Server. Almost every aspect of the server can be configured to suit your needs. If you feel like something is missing, please open an issue.
- **Security**: Versia Server is designed with security in mind. It is built with modern security practices and technologies, and is constantly updated to ensure the highest level of security.
- **Performance**: Efficiency and speed are a key focus of Versia Server. The design is non-monolithic, and is built to be as fast as possible.
- **Mastodon API compatibility**: Versia Server is designed to be compatible with the Mastodon API, with [`glitch-soc`](https://github.com/glitch-soc/mastodon) extensions.
### Anti-Goals
- **Monolithic design**: Modularity and scaling is important to this project. This means that it is not a single, monolithic application, but rather a collection of smaller, more focused applications. (API layer, queue, database, frontend, etc.)
- **Complexity**: Both in code and in function, Versia Server should be as simple as possible. This does not mean adding no features or functionality, but rather that the features and functionality that are added should be well-written and easy to understand.
- **Bloat**: Versia Server should not be bloated with unnecessary features, packages, dependencies or code. It should be as lightweight as possible, while still being feature-rich.
> **Note:** This project is not affiliated with Mastodon or Pleroma, and is not a fork of either project. It is a new project built from the ground up.
## Features
- [x] Versia Working Draft 4 federation (partial)
- [x] Inbound federation
- [x] Hyper fast (thousands of HTTP requests per second)
- [x] S3 or local media storage
- [x] Deduplication of uploaded files
@ -54,204 +23,323 @@
- [x] Configurable defaults
- [x] Full regex-based filters for posts, users and media
- [x] Custom emoji support
- [x] Users can upload their own emojis for themselves
- [x] Automatic image conversion to WebP or other formats
- [x] Scripting-compatible CLI with JSON and CSV outputs
- [x] Markdown support just about everywhere: posts, profiles, profile fields, etc. Code blocks, tables, and more are supported.
- [ ] Advanced moderation tools (work in progress)
- [x] Fully compliant Mastodon API support (partial)
- [x] Glitch-SOC extensions
- [x] Full compatibility with many clients such as Megalodon
- [x] Ability to use your own frontends
- [x] Non-monolithic architecture, microservices can be hosted in infinite amounts on infinite servers
- [x] Ability to use all your threads
- [x] Support for SSO providers, as well as SSO-only registration.
- [x] Fully written in TypeScript and thoroughly unit tested
- [x] Automatic signed container builds for easy deployment
- [x] Docker and Podman supported
- [x] Invisible, Proof-of-Work local CAPTCHA for API requests
- [x] Advanced Roles and Permissions API.
- [x] HTTP proxy support
- [x] Tor hidden service support
- [x] Sentry logging support
- [x] Ability to change the domain name in a single config change, without any database edits
- [ ] Moderation tools
- [ ] Full Mastodon API support
- [ ] Outbound federation
## Screenshots
## Benchmarks
You can visit [social.lysand.org](https://social.lysand.org) to see a live instance of Versia Server with Versia-FE.
> **Note**: These benchmarks are not representative of real-world performance, and are only meant to be used as a rough guide.
### Timeline Benchmarks
You may run the following command to benchmark the `/api/v1/timelines/home` endpoint:
```bash
TOKEN=token_here bun benchmark:timeline <request_count>
```
The `request_count` argument is optional and defaults to 100. `TOKEN` is your personal user token, used to log in to the API.
On a quad-core laptop:
```
$ bun run benchmarks/timelines.ts 100
✓ All requests succeeded
✓ 100 requests fulfilled in 0.12611s
```
```
$ bun run benchmarks/timelines.ts 1000
✓ All requests succeeded
✓ 1000 requests fulfilled in 0.90925s
```
```
$ bun run benchmarks/timelines.ts 10000
✓ All requests succeeded
✓ 10000 requests fulfilled in 12.44852s
```
Lysand is extremely fast and can handle tens of thousands of HTTP requests per second on a good server.
## How do I run it?
Please see the [installation guide](https://server.versia.pub/setup/installation) for more information on how to install Versia.
### Requirements
- The [Bun Runtime](https://bun.sh), version 1.0.5 or later (usage of the latest version is recommended)
- A PostgreSQL database
- (Optional but recommended) A Linux-based operating system
- (Only needed if you want search) A working Meilisearch instance
> **Note**: We will not be offering support to Windows or macOS users. If you are using one of these operating systems, please use a virtual machine or container to run Lysand.
### Installation
1. Clone this repository
```bash
git clone https://github.com/lysand-org/lysand.git
```
2. Install the dependencies
```bash
bun install
```
3. Set up a PostgreSQL database, using the `pg_uuidv7` extension
You may use the following [Dockerfile](Postgres.Dockerfile) to set it up:
```Dockerfile
# Use the latest Postgres Docker image based on Alpine
FROM postgres:alpine
# Set working directory
WORKDIR /usr/src/app
# Install curl
RUN apk add --no-cache curl
RUN cd "$(mktemp -d)" \
&& curl -LO "https://github.com/fboulnois/pg_uuidv7/releases/download/v1.3.0/{pg_uuidv7.tar.gz,SHA256SUMS}" \
&& tar xf pg_uuidv7.tar.gz \
&& sha256sum -c SHA256SUMS \
&& PG_MAJOR=$(pg_config --version | sed 's/^.* \([0-9]\{1,\}\).*$/\1/') \
&& cp "$PG_MAJOR/pg_uuidv7.so" "$(pg_config --pkglibdir)" \
&& cp sql/pg_uuidv7--1.3.sql pg_uuidv7.control "$(pg_config --sharedir)/extension"
# Add a script to run the CREATE EXTENSION command
RUN echo '#!/bin/sh\npsql -U "$POSTGRES_USER" -d "$POSTGRES_DB" -c "CREATE EXTENSION pg_uuidv7;"' > /docker-entrypoint-initdb.d/init.sh
# Make the entrypoint script executable
RUN chmod +x /docker-entrypoint-initdb.d/init.sh
```
4. Copy the `config.toml.example` file to `config.toml` and fill in the values (you can leave most settings at their defaults, but you will need to configure things such as the database connection)
5. Run migrations:
```bash
bun migrate
```
6. (If you want search)
Create a Meilisearch instance (using Docker is recommended). For a `docker-compose` setup, copy the `meilisearch` service from the [`docker-compose.yml`](docker-compose.yml) file.
Set up Meilisearch's API key by passing the `MEILI_MASTER_KEY` environment variable to the server. Then, enable and configure search in the config file.
### Running
To run the server, simply run the following command:
```bash
bun start
```
### Using the CLI
Lysand includes a built-in CLI for managing the server. To use it, simply run the following command:
```bash
bun cli
```
You can use the `help` command to see a list of available commands. These include creating users, deleting users and more.
#### Scripting with the CLI
Some CLI commands that return data as tables can be used in scripts. To do so, you can use the `--json` flag to output the data as JSON instead of a table, or even `--csv` to output the data as CSV. See `bun cli help` for more information.
Flags can be passed in any order and anywhere in the command line (except before `bun cli` itself). The positional arguments themselves must still be given in the correct order, however.
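As a hedged example of scripting against the CLI (the `user list` subcommand and the shape of its JSON output are assumptions, not documented behaviour):

```ts
// cli-script-example.ts (hypothetical scripting sketch)

// Run the CLI with --json to get machine-readable output instead of a table
const proc = Bun.spawnSync(["bun", "cli", "user", "list", "--json"]);

// The exact shape of the output depends on the command; `username` is assumed here
const users = JSON.parse(proc.stdout.toString()) as { username: string }[];

for (const user of users) {
    console.log(user.username);
}
```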
### Rebuilding the Search Index
You may use the `bun cli index rebuild` command to automatically push all posts and users to Meilisearch, if it is configured. This is useful if you have just set up Meilisearch, or if you accidentally deleted something.
### Using Database Commands
The `bun prisma` command allows you to use Prisma commands without needing to set environment variables for the database config. Just run Prisma commands as you would normally, replacing `bunx prisma` with `bun prisma`.
## With Docker
> **Note**: Docker is currently broken, as Bun with Prisma does not work well with Docker yet for unknown reasons. The following instructions are for when this is fixed.
>
> These instructions will probably also work with Podman and other container runtimes.
You can also run Lysand using Docker. To do so, you can:
1. Acquire the Postgres Dockerfile from above
2. Use this repository's [`docker-compose.yml`](docker-compose.yml) file
3. Create the `lysand-net` docker network:
```bash
docker network create lysand-net
```
4. Fill in the config file (see [Installation](#installation))
5. Run the following command:
```bash
docker-compose up -d
```
You may need root privileges to run Docker commands.
### Running CLI commands inside Docker
You can run CLI commands inside Docker using the following command:
```bash
sudo docker exec -it lysand bun cli ...
```
### Running migrations inside Docker
You can run migrations inside Docker using the following command (if needed):
```bash
sudo docker exec -it lysand bun migrate
```
## Contributing
Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information.
## Planned Extra Features
- Send notifications to moderators when a report is received
- Email notifications on certain actions
## Federation
> **Warning**: Federation has not been tested outside of automated tests. It is not recommended to use this software in production.
The following extensions are currently supported or being worked on:
- `pub.versia:custom_emojis`: Custom emojis
- `pub.versia:instance_messaging`: Instance Messaging
- `pub.versia:likes`: Likes
- `pub.versia:share`: Share
- `pub.versia:reactions`: Reactions
- `org.lysand:custom_emojis`: Custom emojis
## API
Versia Server implements the Mastodon API (as well as `glitch-soc` extensions). The API is currently almost fully complete, with some fringe functionality still being worked on.
Lysand implements the Mastodon API, with some extensions. The API is currently in early alpha, and is not recommended for use in production.
Working endpoints are:
- [x] `/api/v1/accounts/:id/block`
- [x] `/api/v1/accounts/:id/follow`
- [x] `/api/v1/accounts/:id/followers`
- [x] `/api/v1/accounts/:id/following`
- [ ] `/api/v1/accounts/:id/lists`
- [x] `/api/v1/accounts/:id/mute`
- [x] `/api/v1/accounts/:id/note`
- [x] `/api/v1/accounts/:id/pin`
- [x] `/api/v1/accounts/:id/remove_from_followers`
- [x] `/api/v1/accounts/:id/statuses`
- [x] `/api/v1/accounts/:id/unblock`
- [x] `/api/v1/accounts/:id/unfollow`
- [x] `/api/v1/accounts/:id/unmute`
- [x] `/api/v1/accounts/:id/unpin`
- [x] `/api/v1/accounts/:id`
- [x] `/api/v1/accounts/familiar_followers`
- [x] `/api/v1/accounts/relationships`
- [x] `/api/v1/accounts/update_credentials`
- [x] `/api/v1/accounts/verify_credentials`
- [x] `/api/v1/accounts`
- [ ] `/api/v1/announcements/:id/dismiss`
- [ ] `/api/v1/announcements/:id/reactions/:name` (`PUT`, `DELETE`)
- [ ] `/api/v1/announcements`
- [x] `/api/v1/apps/verify_credentials`
- [x] `/api/v1/apps`
- [x] `/api/v1/blocks`
- [ ] `/api/v1/conversations/:id/read`
- [ ] `/api/v1/conversations/:id`
- [ ] `/api/v1/conversations`
- [x] `/api/v1/custom_emojis`
- [ ] `/api/v1/directory`
- [ ] `/api/v1/domain_blocks` (`GET`, `POST`, `DELETE`)
- [ ] `/api/v1/endorsements`
- [x] `/api/v1/favourites`
- [ ] `/api/v1/featured_tags/:id` (`DELETE`)
- [ ] `/api/v1/featured_tags/suggestions`
- [ ] `/api/v1/featured_tags` (`GET`, `POST`)
- [x] `/api/v1/follow_requests/:account_id/authorize`
- [x] `/api/v1/follow_requests/:account_id/reject`
- [x] `/api/v1/follow_requests`
- [ ] `/api/v1/follow_suggestions`
- [ ] `/api/v1/followed_tags`
- [ ] `/api/v1/instance/activity`
- [ ] `/api/v1/instance/domain_blocks`
- [x] `/api/v1/instance/extended_description`
- [ ] `/api/v1/instance/peers`
- [x] `/api/v1/instance/rules`
- [x] `/api/v1/instance`
- [ ] `/api/v1/lists/:id/accounts` (`GET`, `POST`, `DELETE`)
- [ ] `/api/v1/lists/:id` (`GET`, `PUT`, `DELETE`)
- [ ] `/api/v1/lists` (`GET`, `POST`)
- [x] `/api/v1/markers` (`GET`, `POST`)
- [x] `/api/v1/media/:id`
- [x] `/api/v1/mutes`
- [x] `/api/v1/notifications/:id/dismiss`
- [x] `/api/v1/notifications/:id`
- [x] `/api/v1/notifications/clear`
- [x] `/api/v1/notifications`
- [ ] `/api/v1/polls/:id/votes`
- [ ] `/api/v1/polls/:id`
- [ ] `/api/v1/preferences`
- [x] `/api/v1/profile/avatar` (`DELETE`)
- [x] `/api/v1/profile/header` (`DELETE`)
- [ ] `/api/v1/reports`
- [ ] `/api/v1/scheduled_statuses/:id` (`GET`, `PUT`, `DELETE`)
- [ ] `/api/v1/scheduled_statuses`
- [ ] `/api/v1/statuses/:id/bookmark`
- [x] `/api/v1/statuses/:id/context`
- [x] `/api/v1/statuses/:id/favourite`
- [x] `/api/v1/statuses/:id/favourited_by`
- [ ] `/api/v1/statuses/:id/history`
- [x] `/api/v1/statuses/:id/mute`
- [x] `/api/v1/statuses/:id/pin`
- [x] `/api/v1/statuses/:id/reblog`
- [x] `/api/v1/statuses/:id/reblogged_by`
- [x] `/api/v1/statuses/:id/source`
- [ ] `/api/v1/statuses/:id/translate`
- [ ] `/api/v1/statuses/:id/unbookmark`
- [x] `/api/v1/statuses/:id/unfavourite`
- [x] `/api/v1/statuses/:id/unmute`
- [x] `/api/v1/statuses/:id/unpin`
- [x] `/api/v1/statuses/:id/unreblog`
- [x] `/api/v1/statuses/:id` (`GET`, `DELETE`)
- [x] `/api/v1/statuses/:id` (`PUT`)
- [x] `/api/v1/statuses`
- [ ] `/api/v1/suggestions/:account_id` (`DELETE`)
- [ ] `/api/v1/tags/:id/follow`
- [ ] `/api/v1/tags/:id/unfollow`
- [ ] `/api/v1/tags/:id`
- [x] `/api/v1/timelines/home`
- [ ] `/api/v1/timelines/list/:list_id`
- [x] `/api/v1/timelines/public`
- [ ] `/api/v1/timelines/tag/:hashtag`
- [ ] `/api/v1/trends/links`
- [ ] `/api/v1/trends/statuses`
- [ ] `/api/v1/trends/tags`
- [ ] `/api/v2/filters/:filter_id/keywords` (`GET`, `POST`)
- [ ] `/api/v2/filters/:filter_id/statuses` (`GET`, `POST`)
- [x] `/api/v2/filters/:id` (`GET`, `PUT`, `DELETE`)
- [ ] `/api/v2/filters/keywords/:id` (`GET`, `PUT`, `DELETE`)
- [ ] `/api/v2/filters/statuses/:id` (`GET`, `DELETE`)
- [x] `/api/v2/filters` (`GET`, `POST`)
- [x] `/api/v2/instance`
- [x] `/api/v2/media`
- [x] `/api/v2/search`
- [ ] `/api/v2/suggestions`
- [x] `/oauth/authorize`
- [x] `/oauth/token`
- [x] `/oauth/revoke`
- Admin API
- `/api/v1/accounts`
- `/api/v1/accounts/:id`
- `/api/v1/accounts/:id/statuses`
- `/api/v1/accounts/:id/follow`
- `/api/v1/accounts/:id/unfollow`
- `/api/v1/accounts/:id/block`
- `/api/v1/accounts/:id/unblock`
- `/api/v1/accounts/:id/mute`
- `/api/v1/accounts/:id/unmute`
- `/api/v1/accounts/:id/pin`
- `/api/v1/accounts/:id/unpin`
- `/api/v1/accounts/:id/note`
- `/api/v1/accounts/:id/remove_from_followers`
- `/api/v1/accounts/relationships`
- `/api/v1/accounts/update_credentials`
- `/api/v1/accounts/verify_credentials`
- `/api/v1/accounts/familiar_followers`
- `/api/v1/profile/avatar` (`DELETE`)
- `/api/v1/profile/header` (`DELETE`)
- `/api/v1/statuses/:id` (`GET`, `DELETE`)
- `/api/v1/statuses/:id/context`
- `/api/v1/statuses/:id/favourite`
- `/api/v1/statuses/:id/unfavourite`
- `/api/v1/statuses/:id/favourited_by`
- `/api/v1/statuses/:id/reblogged_by`
- `/api/v1/statuses/:id/reblog`
- `/api/v1/statuses/:id/unreblog`
- `/api/v1/statuses/:id/pin`
- `/api/v1/statuses/:id/unpin`
- `/api/v1/statuses`
- `/api/v1/timelines/public`
- `/api/v1/timelines/home`
- `/api/v1/apps`
- `/api/v1/instance`
- `/api/v1/custom_emojis`
- `/api/v1/apps/verify_credentials`
- `/oauth/authorize`
- `/oauth/token`
- `/api/v1/blocks`
- `/api/v1/mutes`
- `/api/v2/media`
### Main work to do for API
Completed, but tests still needed:
- [ ] Announcements
- [ ] Polls
- [ ] Tags
- [ ] Lists
- [ ] Scheduled statuses
- [ ] WebSockets
- [ ] Push notifications
- [ ] Trends
- [ ] Suggestions
- [ ] Bookmarks
- [ ] Translation
- [ ] Reports
- [ ] Admin API
- `/api/v1/media/:id`
- `/api/v1/favourites`
- `/api/v1/accounts/:id/followers`
- `/api/v1/accounts/:id/following`
- `/api/v2/search`
## Versia Server API
Endpoints left:
For Versia Server's own custom API, please see the [API documentation](https://server.versia.pub/api/emojis).
- `/api/v1/reports`
- `/api/v1/accounts/:id/lists`
- `/api/v1/follow_requests`
- `/api/v1/follow_requests/:account_id/authorize`
- `/api/v1/follow_requests/:account_id/reject`
- `/api/v1/follow_suggestions`
- `/api/v1/domain_blocks` (`GET`, `POST`, `DELETE`)
- `/api/v2/filters` (`GET`, `POST`)
- `/api/v2/filters/:id` (`GET`, `PUT`, `DELETE`)
- `/api/v2/filters/:filter_id/keywords` (`GET`, `POST`)
- `/api/v2/filters/keywords/:id` (`GET`, `PUT`, `DELETE`)
- `/api/v2/filters/:filter_id/statuses` (`GET`, `POST`)
- `/api/v2/filters/statuses/:id` (`GET`, `DELETE`)
- `/api/v1/endorsements`
- `/api/v1/featured_tags` (`GET`, `POST`)
- `/api/v1/featured_tags/:id` (`DELETE`)
- `/api/v1/featured_tags/suggestions`
- `/api/v1/preferences`
- `/api/v1/followed_tags`
- `/api/v2/suggestions`
- `/api/v1/suggestions/:account_id` (`DELETE`)
- `/api/v1/tags/:id`
- `/api/v1/tags/:id/follow`
- `/api/v1/tags/:id/unfollow`
- `/api/v1/statuses/:id/translate`
- `/api/v1/statuses/:id/bookmark`
- `/api/v1/statuses/:id/unbookmark`
- `/api/v1/statuses/:id/mute`
- `/api/v1/statuses/:id/unmute`
- `/api/v1/statuses/:id` (`PUT`)
- `/api/v1/statuses/:id/history`
- `/api/v1/statuses/:id/source`
- `/api/v1/polls/:id`
- `/api/v1/polls/:id/votes`
- `/api/v1/scheduled_statuses`
- `/api/v1/scheduled_statuses/:id` (`GET`, `PUT`, `DELETE`)
- `/api/v1/timelines/tag/:hashtag`
- `/api/v1/timelines/list/:list_id`
- `/api/v1/conversations`
- `/api/v1/conversations/:id`
- `/api/v1/conversations/:id/read`
- `/api/v1/lists` (`GET`, `POST`)
- `/api/v1/lists/:id` (`GET`, `PUT`, `DELETE`)
- `/api/v1/markers` (`GET`, `POST`)
- `/api/v1/lists/:id/accounts` (`GET`, `POST`, `DELETE`)
- `/api/v1/notifications`
- `/api/v1/notifications/:id`
- `/api/v1/notifications/clear`
- `/api/v1/notifications/:id/dismiss`
- `/api/v2/instance`
- `/api/v1/instance/peers`
- `/api/v1/instance/activity`
- `/api/v1/instance/rules`
- `/api/v1/instance/domain_blocks`
- `/api/v1/instance/extended_description`
- `/api/v1/directory`
- `/api/v1/trends/tags`
- `/api/v1/trends/statuses`
- `/api/v1/trends/links`
- `/api/v1/announcements`
- `/api/v1/announcements/:id/dismiss`
- `/api/v1/announcements/:id/reactions/:name` (`PUT`, `DELETE`)
- Admin API
A WebSocket streaming API also needs to be added (along with push notifications).
## License
This project is licensed under the [AGPL-3.0-or-later](LICENSE).
All Versia assets (icon, logo, banners, etc) are licensed under [CC-BY-NC-SA-4.0](https://creativecommons.org/licenses/by-nc-sa/4.0)
## Thanks!
Thanks to [**Fastly**](https://fastly.com) for providing us with support and resources to build Versia!
<br />
<p align="center">
<a href="https://fastly.com">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="assets/fastly-red.svg">
<source media="(prefers-color-scheme: light)" srcset="assets/fastly-red.svg">
<img src="assets/fastly-red.svg" alt="Fastly Logo" height="110" />
</picture>
</a>
</p>
This project is licensed under the [AGPL-3.0](LICENSE).
22
SECURITY.md Normal file
@ -0,0 +1,22 @@
# Security Policy
## Supported Versions
For now, only the latest released version of Lysand is supported with security updates. This will change as Lysand exits alpha status.
## Reporting a Vulnerability
If you find a vulnerability, please report it to [@CPlusPatch](https://github.com/CPlusPatch) at the following contact endpoints:
- [Matrix](https://matrix.to/#/@jesse:cpluspatch.dev)
- [E-mail](mailto:contact@cpluspatch.com)
Please do not report vulnerabilities publicly until they have been patched. If you would like to be credited for your discovery, please include your name and/or GitHub username in your report.
## Vulnerability Disclosure Policy
Lysand is an open-source project, and as such, we welcome security researchers to audit our code and report vulnerabilities. We will do our best to patch vulnerabilities as quickly as possible, and will credit researchers for their discoveries if they wish to be credited.
For security reasons, we ask that you do not publicly disclose vulnerabilities until they have been patched.
Thank you for helping to keep Lysand secure! :3
@ -1,51 +0,0 @@
# Node is required for building the project
FROM imbios/bun-node:latest-23-alpine AS base
# Install dependencies into temp directory
# This will cache them and speed up future builds
FROM base AS install
RUN mkdir -p /temp
COPY . /temp
WORKDIR /temp
RUN bun install --production
FROM base AS build
# Copy the project
RUN mkdir -p /temp
COPY . /temp
# Copy dependencies
COPY --from=install /temp/node_modules /temp/node_modules
# Build the project
WORKDIR /temp
RUN bun run build worker
WORKDIR /temp/dist
# Copy production dependencies and source code into final image
FROM oven/bun:1.3.2-alpine
# Install libstdc++ for Bun and create app directory
RUN mkdir -p /app
COPY --from=build /temp/dist /app/dist
COPY entrypoint.sh /app
LABEL org.opencontainers.image.authors="Gaspard Wierzbinski (https://cpluspatch.com)"
LABEL org.opencontainers.image.source="https://github.com/versia-pub/server"
LABEL org.opencontainers.image.vendor="Versia Pub"
LABEL org.opencontainers.image.licenses="AGPL-3.0-or-later"
LABEL org.opencontainers.image.title="Versia Server Worker"
LABEL org.opencontainers.image.description="Versia Server Worker Docker image"
# Set current Git commit hash as an environment variable
ARG GIT_COMMIT
ENV GIT_COMMIT=$GIT_COMMIT
# CD to app
WORKDIR /app
ENV NODE_ENV=production
ENTRYPOINT [ "/bin/sh", "/app/entrypoint.sh" ]
# Run migrations and start the server
CMD [ "bun", "run", "worker.js" ]
19
api.ts
@ -1,19 +0,0 @@
import process from "node:process";
import { appFactory } from "@versia-server/api";
import { config } from "@versia-server/config";
import { Youch } from "youch";
import { createServer } from "@/server.ts";
process.on("SIGINT", () => {
process.exit();
});
process.on("uncaughtException", async (error) => {
const youch = new Youch();
console.error(await youch.toANSI(error));
});
await import("@versia-server/api/setup");
createServer(config, await appFactory());
@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 198.27"><g id="Layer_2" data-name="Layer 2"><g id="Layer_1-2" data-name="Layer 1"><g id="Fastly_Logo_-_Red" data-name="Fastly Logo - Red"><g id="Fastly_Logo_-_Red-2" data-name="Fastly Logo - Red"><polygon points="348.44 20.35 348.44 153.94 388.57 153.94 388.57 133.53 375.31 133.53 375.31 0 348.44 0 348.44 20.35"/><path d="M0,133.53H13.64V69.08H0V51.35l13.64-2.24V31.17C13.64,9.43,18.37,0,46.09,0A115.17,115.17,0,0,1,65.38,2L61.7,23.85a49.78,49.78,0,0,0-9-.78c-9.76,0-12.23,1-12.23,10.51V49.11H60.79v20H40.51v64.45H54v20.4H0Z"/><path d="M334.78,127.08a53.11,53.11,0,0,1-10.54.84c-11.06.27-10.1-3.36-10.1-13.78V69.08h21v-20h-21V0H287.27V119.71c0,23.5,5.8,34.23,31.08,34.23,6,0,14.21-1.54,20.42-2.87Z"/><path d="M501.7,133.63a10.14,10.14,0,1,1-10.19,10.14,10.14,10.14,0,0,1,10.19-10.14m0,18.68a8.55,8.55,0,0,0,8.51-8.54,8.5,8.5,0,1,0-8.51,8.54m1.88-3.56-2.05-3h-1.42v3h-2.29v-10H502c2.46,0,4,1.24,4,3.45a3,3,0,0,1-2.08,3.09l2.49,3.42Zm-3.47-5h1.82c1,0,1.74-.4,1.74-1.5s-.7-1.45-1.68-1.45h-1.88Z"/><path d="M253.72,69V65.46A115.8,115.8,0,0,0,233.14,64c-12.5,0-14,6.63-14,10.23,0,5.08,1.74,7.83,15.29,10.79,19.8,4.45,39.69,9.09,39.69,33.64,0,23.29-12,35.32-37.21,35.32-16.88,0-33.26-3.63-45.76-6.8V127.08h20.35v3.56c8.75,1.69,17.93,1.52,22.73,1.52,13.34,0,15.49-7.17,15.49-11,0-5.29-3.82-7.83-16.32-10.37-23.56-4-42.25-12.07-42.25-36,0-22.65,15.14-31.54,40.37-31.54,17.09,0,30.08,2.65,42.59,5.83V69Z"/><path d="M127.84,85.09,118,93.69a5.25,5.25,0,1,0,3.19,3.2l8.72-9.75Z"/><path d="M171.25,127.07V43.46H144.37V51a55,55,0,0,0-18.11-6.77v-9.1h3.28V28.28H102.48v6.83h3.28v9.17a55.32,55.32,0,1,0,38.76,101.87l4.77,7.78h28.38V127.07Zm-26.64-26.83A28.42,28.42,0,0,1,117.73,127v-3.18h-3.22V127a28.43,28.43,0,0,1-26.68-26.89H91V96.91H87.85a28.42,28.42,0,0,1,26.66-26.65v3.16h3.22V70.25A28.42,28.42,0,0,1,144.61,97h-3.2v3.22Z"/><path d="M456.58,49.11H512v20H498.75l-34,83.62c-9.74,23.48-25.74,45.59-50.1,45.59a93.67,93.67,0,0,1-19.5-2l2.43-24.39a68.7,68.7,0,0,0,10.63,1.1c11.3,0,24-7,28-19.19L401.82,69.06H388.57v-20H444v20H430.78l19.51,48h0l19.51-48H456.58Z"/></g></g></g></g></svg>
@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 198.27"><defs><style>.cls-1{fill:#ff282d;}</style></defs><g id="Layer_2" data-name="Layer 2"><g id="Layer_1-2" data-name="Layer 1"><g id="Fastly_Logo_-_Red" data-name="Fastly Logo - Red"><g id="Fastly_Logo_-_Red-2" data-name="Fastly Logo - Red"><polygon class="cls-1" points="348.44 20.35 348.44 153.94 388.57 153.94 388.57 133.53 375.31 133.53 375.31 0 348.44 0 348.44 20.35"/><path class="cls-1" d="M0,133.53H13.64V69.08H0V51.35l13.64-2.24V31.17C13.64,9.43,18.37,0,46.09,0A115.17,115.17,0,0,1,65.38,2L61.7,23.85a49.78,49.78,0,0,0-9-.78c-9.76,0-12.23,1-12.23,10.51V49.11H60.79v20H40.51v64.45H54v20.4H0Z"/><path class="cls-1" d="M334.78,127.08a53.11,53.11,0,0,1-10.54.84c-11.06.27-10.1-3.36-10.1-13.78V69.08h21v-20h-21V0H287.27V119.71c0,23.5,5.8,34.23,31.08,34.23,6,0,14.21-1.54,20.42-2.87Z"/><path class="cls-1" d="M501.7,133.63a10.14,10.14,0,1,1-10.19,10.14,10.14,10.14,0,0,1,10.19-10.14m0,18.68a8.55,8.55,0,0,0,8.51-8.54,8.5,8.5,0,1,0-8.51,8.54m1.88-3.56-2.05-3h-1.42v3h-2.29v-10H502c2.46,0,4,1.24,4,3.45a3,3,0,0,1-2.08,3.09l2.49,3.42Zm-3.47-5h1.82c1,0,1.74-.4,1.74-1.5s-.7-1.45-1.68-1.45h-1.88Z"/><path class="cls-1" d="M253.72,69V65.46A115.8,115.8,0,0,0,233.14,64c-12.5,0-14,6.63-14,10.23,0,5.08,1.74,7.83,15.29,10.79,19.8,4.45,39.69,9.09,39.69,33.64,0,23.29-12,35.32-37.21,35.32-16.88,0-33.26-3.63-45.76-6.8V127.08h20.35v3.56c8.75,1.69,17.93,1.52,22.73,1.52,13.34,0,15.49-7.17,15.49-11,0-5.29-3.82-7.83-16.32-10.37-23.56-4-42.25-12.07-42.25-36,0-22.65,15.14-31.54,40.37-31.54,17.09,0,30.08,2.65,42.59,5.83V69Z"/><path class="cls-1" d="M127.84,85.09,118,93.69a5.25,5.25,0,1,0,3.19,3.2l8.72-9.75Z"/><path class="cls-1" d="M171.25,127.07V43.46H144.37V51a55,55,0,0,0-18.11-6.77v-9.1h3.28V28.28H102.48v6.83h3.28v9.17a55.32,55.32,0,1,0,38.76,101.87l4.77,7.78h28.38V127.07Zm-26.64-26.83A28.42,28.42,0,0,1,117.73,127v-3.18h-3.22V127a28.43,28.43,0,0,1-26.68-26.89H91V96.91H87.85a28.42,28.42,0,0,1,26.66-26.65v3.16h3.22V70.25A28.42,28.42,0,0,1,144.61,97h-3.2v3.22Z"/><path class="cls-1" d="M456.58,49.11H512v20H498.75l-34,83.62c-9.74,23.48-25.74,45.59-50.1,45.59a93.67,93.67,0,0,1-19.5-2l2.43-24.39a68.7,68.7,0,0,0,10.63,1.1c11.3,0,24-7,28-19.19L401.82,69.06H388.57v-20H444v20H430.78l19.51,48h0l19.51-48H456.58Z"/></g></g></g></g></svg>
@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 198.27"><defs><style>.cls-1{fill:#fff;}</style></defs><g id="Layer_2" data-name="Layer 2"><g id="Layer_1-2" data-name="Layer 1"><g id="Fastly_Logo_-_Red" data-name="Fastly Logo - Red"><g id="Fastly_Logo_-_Red-2" data-name="Fastly Logo - Red"><polygon class="cls-1" points="348.44 20.35 348.44 153.94 388.57 153.94 388.57 133.53 375.31 133.53 375.31 0 348.44 0 348.44 20.35"/><path class="cls-1" d="M0,133.53H13.64V69.08H0V51.35l13.64-2.24V31.17C13.64,9.43,18.37,0,46.09,0A115.17,115.17,0,0,1,65.38,2L61.7,23.85a49.78,49.78,0,0,0-9-.78c-9.76,0-12.23,1-12.23,10.51V49.11H60.79v20H40.51v64.45H54v20.4H0Z"/><path class="cls-1" d="M334.78,127.08a53.11,53.11,0,0,1-10.54.84c-11.06.27-10.1-3.36-10.1-13.78V69.08h21v-20h-21V0H287.27V119.71c0,23.5,5.8,34.23,31.08,34.23,6,0,14.21-1.54,20.42-2.87Z"/><path class="cls-1" d="M501.7,133.63a10.14,10.14,0,1,1-10.19,10.14,10.14,10.14,0,0,1,10.19-10.14m0,18.68a8.55,8.55,0,0,0,8.51-8.54,8.5,8.5,0,1,0-8.51,8.54m1.88-3.56-2.05-3h-1.42v3h-2.29v-10H502c2.46,0,4,1.24,4,3.45a3,3,0,0,1-2.08,3.09l2.49,3.42Zm-3.47-5h1.82c1,0,1.74-.4,1.74-1.5s-.7-1.45-1.68-1.45h-1.88Z"/><path class="cls-1" d="M253.72,69V65.46A115.8,115.8,0,0,0,233.14,64c-12.5,0-14,6.63-14,10.23,0,5.08,1.74,7.83,15.29,10.79,19.8,4.45,39.69,9.09,39.69,33.64,0,23.29-12,35.32-37.21,35.32-16.88,0-33.26-3.63-45.76-6.8V127.08h20.35v3.56c8.75,1.69,17.93,1.52,22.73,1.52,13.34,0,15.49-7.17,15.49-11,0-5.29-3.82-7.83-16.32-10.37-23.56-4-42.25-12.07-42.25-36,0-22.65,15.14-31.54,40.37-31.54,17.09,0,30.08,2.65,42.59,5.83V69Z"/><path class="cls-1" d="M127.84,85.09,118,93.69a5.25,5.25,0,1,0,3.19,3.2l8.72-9.75Z"/><path class="cls-1" d="M171.25,127.07V43.46H144.37V51a55,55,0,0,0-18.11-6.77v-9.1h3.28V28.28H102.48v6.83h3.28v9.17a55.32,55.32,0,1,0,38.76,101.87l4.77,7.78h28.38V127.07Zm-26.64-26.83A28.42,28.42,0,0,1,117.73,127v-3.18h-3.22V127a28.43,28.43,0,0,1-26.68-26.89H91V96.91H87.85a28.42,28.42,0,0,1,26.66-26.65v3.16h3.22V70.25A28.42,28.42,0,0,1,144.61,97h-3.2v3.22Z"/><path class="cls-1" d="M456.58,49.11H512v20H498.75l-34,83.62c-9.74,23.48-25.74,45.59-50.1,45.59a93.67,93.67,0,0,1-19.5-2l2.43-24.39a68.7,68.7,0,0,0,10.63,1.1c11.3,0,24-7,28-19.19L401.82,69.06H388.57v-20H444v20H430.78l19.51,48h0l19.51-48H456.58Z"/></g></g></g></g></svg>
File diff suppressed because it is too large
18
benchmarks/fetch.ts Normal file
@ -0,0 +1,18 @@
const timeBefore = performance.now();
const requests: Promise<Response>[] = [];
// Repeat 1000 times
for (let i = 0; i < 1000; i++) {
requests.push(
fetch(`https://mastodon.social`, {
method: "GET",
})
);
}
await Promise.all(requests);
const timeAfter = performance.now();
console.log(`Time taken: ${timeAfter - timeBefore}ms`);
1
benchmarks/posting.ts Normal file
@ -0,0 +1 @@
//
@ -1,46 +0,0 @@
import type { Status } from "@versia/client/schemas";
import {
fakeRequest,
getTestStatuses,
getTestUsers,
} from "@versia-server/tests";
import { bench, run } from "mitata";
import type { z } from "zod";
const { users, tokens, deleteUsers } = await getTestUsers(5);
await getTestStatuses(40, users[0]);
const testTimeline = async (): Promise<void> => {
const response = await fakeRequest("/api/v1/timelines/home", {
headers: {
Authorization: `Bearer ${tokens[0].data.accessToken}`,
},
});
const objects = (await response.json()) as z.infer<typeof Status>[];
if (objects.length !== 20) {
throw new Error("Invalid response (not 20 objects)");
}
};
const testInstance = async (): Promise<void> => {
const response = await fakeRequest("/api/v2/instance", {
headers: {
Authorization: `Bearer ${tokens[0].data.accessToken}`,
},
});
const object = (await response.json()) as Record<string, unknown>;
if (typeof object !== "object") {
throw new Error("Invalid response (not an object)");
}
};
bench("timeline", testTimeline).range("amount", 1, 1000);
bench("instance", testInstance).range("amount", 1, 1000);
await run();
await deleteUsers();
56
benchmarks/timelines.ts Normal file
@ -0,0 +1,56 @@
/**
* Usage: TOKEN=your_token_here bun benchmark:timeline <request_count>
*/
import { getConfig } from "@config";
import chalk from "chalk";
const config = getConfig();
const token = process.env.TOKEN;
const requestCount = Number(process.argv[2]) || 100;
if (!token) {
console.log(
`${chalk.red(
"✗"
)} No token provided. Provide one via the TOKEN environment variable.`
);
process.exit(1);
}
const fetchTimeline = () =>
fetch(`${config.http.base_url}/api/v1/timelines/home`, {
headers: {
Authorization: `Bearer ${token}`,
},
}).then(res => res.ok);
const timeNow = performance.now();
const requests = Array.from({ length: requestCount }, () => fetchTimeline());
Promise.all(requests)
.then(results => {
const timeTaken = performance.now() - timeNow;
if (results.every(t => t)) {
console.log(`${chalk.green("✓")} All requests succeeded`);
} else {
console.log(
`${chalk.red("✗")} ${
results.filter(t => !t).length
} requests failed`
);
}
console.log(
`${chalk.green("✓")} ${
requests.length
} requests fulfilled in ${chalk.bold(
(timeTaken / 1000).toFixed(5)
)}s`
);
})
.catch(err => {
console.log(`${chalk.red("✗")} ${err}`);
process.exit(1);
});
@ -1,178 +0,0 @@
{
"$schema": "https://biomejs.dev/schemas/2.3.4/schema.json",
"assist": {
"actions": {
"source": {
"organizeImports": "on"
}
}
},
"vcs": {
"clientKind": "git",
"enabled": true,
"useIgnoreFile": true
},
"linter": {
"enabled": true,
"rules": {
"style": {
"useNamingConvention": {
"level": "warn",
"options": {
"requireAscii": false,
"strictCase": false,
"conventions": [
{
"selector": {
"kind": "typeProperty"
},
"formats": [
"camelCase",
"CONSTANT_CASE",
"PascalCase",
"snake_case"
]
},
{
"selector": {
"kind": "objectLiteralProperty",
"scope": "any"
},
"formats": [
"camelCase",
"CONSTANT_CASE",
"PascalCase",
"snake_case"
]
}
]
}
},
"useLiteralEnumMembers": "error",
"noNegationElse": "error",
"noYodaExpression": "error",
"useBlockStatements": "error",
"useCollapsedElseIf": "error",
"useConsistentArrayType": {
"level": "error",
"options": {
"syntax": "shorthand"
}
},
"useConsistentBuiltinInstantiation": "error",
"useExplicitLengthCheck": "error",
"useForOf": "error",
"useNodeAssertStrict": "error",
"useShorthandAssign": "error",
"useThrowNewError": "error",
"useThrowOnlyError": "error",
"useNodejsImportProtocol": "error",
"useAsConstAssertion": "error",
"useEnumInitializers": "error",
"useSelfClosingElements": "error",
"useConst": "error",
"useSingleVarDeclarator": "error",
"noUnusedTemplateLiteral": "error",
"useNumberNamespace": "error",
"useAtIndex": "warn",
"noInferrableTypes": "error",
"useCollapsedIf": "warn",
"useExponentiationOperator": "error",
"useTemplate": "error",
"noParameterAssign": "error",
"noNonNullAssertion": "error",
"useDefaultParameterLast": "error",
"useConsistentMemberAccessibility": {
"level": "warn",
"options": {
"accessibility": "explicit"
}
},
"useImportType": "error",
"useExportType": "error",
"noUselessElse": "error",
"noProcessEnv": "error",
"useShorthandFunctionType": "error",
"useArrayLiterals": "error",
"noCommonJs": "warn",
"noExportedImports": "warn",
"noSubstr": "warn",
"useTrimStartEnd": "warn",
"noRestrictedImports": {
"options": {
"paths": {
"~/packages/": "Use the appropriate package instead of importing from the packages directory directly."
}
},
"level": "error"
}
},
"performance": {
"noDynamicNamespaceImportAccess": "warn"
},
"correctness": {
"useImportExtensions": "error",
"noConstantMathMinMaxClamp": "error",
"noUndeclaredDependencies": "error",
"noUnusedFunctionParameters": "error",
"noUnusedImports": "error",
"noUnusedPrivateClassMembers": "error"
},
"nursery": {
"noFloatingPromises": "error"
},
"complexity": {
"noForEach": "error",
"noImportantStyles": "off",
"noUselessStringConcat": "error",
"useDateNow": "error",
"noUselessStringRaw": "warn",
"noUselessEscapeInRegex": "warn",
"useSimplifiedLogicExpression": "error",
"useWhile": "error",
"useNumericLiterals": "error",
"noArguments": "error",
"noCommaOperator": "error"
},
"suspicious": {
"noDuplicateTestHooks": "error",
"noOctalEscape": "error",
"noTemplateCurlyInString": "warn",
"noEmptyBlockStatements": "error",
"useAdjacentOverloadSignatures": "warn",
"useGuardForIn": "warn",
"noDuplicateElseIf": "warn",
"noEvolvingTypes": "error",
"noIrregularWhitespace": "warn",
"noExportsInTest": "error",
"noVar": "error",
"useAwait": "error",
"useErrorMessage": "error",
"useNumberToFixedDigitsArgument": "error"
}
}
},
"overrides": [
{
"includes": ["**/packages/client/versia/client.ts"],
"linter": {
"rules": {
"style": {
"useNamingConvention": "off"
}
}
}
}
],
"formatter": {
"enabled": true,
"indentStyle": "space",
"indentWidth": 4
},
"javascript": {
"globals": ["HTMLRewriter", "BufferEncoding"]
},
"files": {
"includes": ["**"]
}
}
@ -1,55 +0,0 @@
import process from "node:process";
import { $, build, file, write } from "bun";
import manifest from "./package.json" with { type: "json" };
console.log("Building...");
await $`rm -rf dist && mkdir dist`;
const type = process.argv[2] as "api" | "worker";
if (type !== "api" && type !== "worker") {
throw new Error("Invalid build type. Use 'api' or 'worker'.");
}
const packages = Object.keys(manifest.dependencies)
.filter((dep) => dep.startsWith("@versia"))
.filter((dep) => dep !== "@versia-server/tests");
await build({
entrypoints: [`./${type}.ts`],
outdir: "dist",
target: "bun",
splitting: true,
minify: true,
external: [...packages],
});
console.log("Copying files...");
// Copy each package into dist/node_modules
for (const pkg of packages) {
const directory = pkg.split("/")[1] || pkg;
await $`mkdir -p dist/node_modules/${pkg}`;
// Copy the built package files
await $`cp -rL packages/${directory}/{dist,package.json} dist/node_modules/${pkg}`;
// Rewrite package.json "exports" field to point to the dist directory and use .js extension
const packageJsonPath = `dist/node_modules/${pkg}/package.json`;
const packageJson = await file(packageJsonPath).json();
for (const [key, value] of Object.entries(packageJson.exports) as [
string,
{ import?: string },
][]) {
if (value.import) {
packageJson.exports[key] = {
import: value.import
.replace("./", "./dist/")
.replace(/\.ts$/, ".js"),
};
}
}
await write(packageJsonPath, JSON.stringify(packageJson, null, 4));
}
console.log("Build complete!");
1739
bun.lock
File diff suppressed because it is too large
BIN
bun.lockb Executable file
Binary file not shown.
@ -1,8 +0,0 @@
[install.scopes]
"@jsr" = "https://npm.jsr.io"
[test]
preload = ["./packages/tests/setup.ts"]
[install]
linker = "hoisted"
64
classes/activitypub.ts Normal file
@ -0,0 +1,64 @@
import type { APActivity, APActor } from "activitypub-types";
export class RemoteActor {
private actorData: APActor | null;
private actorUri: string;
constructor(actor: APActor | string) {
if (typeof actor === "string") {
this.actorUri = actor;
this.actorData = null;
} else {
this.actorUri = actor.id || "";
this.actorData = actor;
}
}
public async fetch() {
const response = await fetch(this.actorUri);
const actorJson = (await response.json()) as APActor;
this.actorData = actorJson;
}
public getData() {
return this.actorData;
}
}
export class RemoteActivity {
private data: APActivity | null;
private uri: string;
constructor(uri: string, data: APActivity | null) {
this.uri = uri;
this.data = data;
}
public async fetch() {
const response = await fetch(this.uri);
const json = (await response.json()) as APActivity;
this.data = json;
}
public getData() {
return this.data;
}
public async getActor() {
if (!this.data) {
throw new Error("No data");
}
if (Array.isArray(this.data.actor)) {
throw new Error("Multiple actors");
}
if (typeof this.data.actor === "string") {
const actor = new RemoteActor(this.data.actor);
await actor.fetch();
return actor.getData();
}
return new RemoteActor(this.data.actor as any).getData();
}
}
273
classes/media.ts Normal file
@ -0,0 +1,273 @@
import type { GetObjectCommandOutput } from "@aws-sdk/client-s3";
import {
GetObjectCommand,
PutObjectCommand,
S3Client,
} from "@aws-sdk/client-s3";
import type { ConfigType } from "@config";
import sharp from "sharp";
import { exists, mkdir } from "fs/promises";
class MediaBackend {
backend: string;
constructor(backend: string) {
this.backend = backend;
}
/**
* Adds media to the media backend
* @param media
* @returns The hash of the file in SHA-256 (hex format) with the file extension added to it
*/
async addMedia(media: File) {
const hash = new Bun.SHA256()
.update(await media.arrayBuffer())
.digest("hex");
return `${hash}.${media.name.split(".").pop()}`;
}
async convertMedia(media: File, config: ConfigType) {
const sharpCommand = sharp(await media.arrayBuffer());
// Rename ".jpg" files to ".jpeg" to avoid sharp errors
let name = media.name;
if (media.name.endsWith(".jpg")) {
name = media.name.replace(".jpg", ".jpeg");
}
const fileFormatToConvertTo = config.media.conversion.convert_to;
switch (fileFormatToConvertTo) {
case "png":
return new File(
[(await sharpCommand.png().toBuffer()).buffer],
// Replace the file extension with PNG
name.replace(/\.[^/.]+$/, ".png"),
{
type: "image/png",
}
);
case "webp":
return new File(
[(await sharpCommand.webp().toBuffer()).buffer],
// Replace the file extension with WebP
name.replace(/\.[^/.]+$/, ".webp"),
{
type: "image/webp",
}
);
case "jpeg":
return new File(
[(await sharpCommand.jpeg().toBuffer()).buffer],
// Replace the file extension with JPEG
name.replace(/\.[^/.]+$/, ".jpeg"),
{
type: "image/jpeg",
}
);
case "avif":
return new File(
[(await sharpCommand.avif().toBuffer()).buffer],
// Replace the file extension with AVIF
name.replace(/\.[^/.]+$/, ".avif"),
{
type: "image/avif",
}
);
// Needs special build of libvips
case "jxl":
return new File(
[(await sharpCommand.jxl().toBuffer()).buffer],
// Replace the file extension with JXL
name.replace(/\.[^/.]+$/, ".jxl"),
{
type: "image/jxl",
}
);
case "heif":
return new File(
[(await sharpCommand.heif().toBuffer()).buffer],
// Replace the file extension with HEIF
name.replace(/\.[^/.]+$/, ".heif"),
{
type: "image/heif",
}
);
default:
return media;
}
}
/**
* Retrieves element from media backend by hash
* @param hash The hash of the element in SHA-256 hex format
* @param extension The extension of the file
* @returns The file as a File object
*/
// eslint-disable-next-line @typescript-eslint/require-await, @typescript-eslint/no-unused-vars
async getMediaByHash(
// eslint-disable-next-line @typescript-eslint/no-unused-vars
hash: string
): Promise<File | null> {
return new File([], "test");
}
}
/**
* S3 Backend, stores files in S3
*/
export class S3Backend extends MediaBackend {
client: S3Client;
config: ConfigType;
constructor(config: ConfigType) {
super("s3");
this.config = config;
this.client = new S3Client({
endpoint: this.config.s3.endpoint,
region: this.config.s3.region || "auto",
credentials: {
accessKeyId: this.config.s3.access_key,
secretAccessKey: this.config.s3.secret_access_key,
},
});
}
async addMedia(media: File): Promise<string> {
if (this.config.media.conversion.convert_images) {
media = await this.convertMedia(media, this.config);
}
const hash = await super.addMedia(media);
if (!hash) {
throw new Error("Failed to hash file");
}
// Check if file is already present
const existingFile = await this.getMediaByHash(hash);
if (existingFile) {
// File already exists, so return the hash without uploading it
return hash;
}
const command = new PutObjectCommand({
Bucket: this.config.s3.bucket_name,
Key: hash,
Body: Buffer.from(await media.arrayBuffer()),
ContentType: media.type,
ContentLength: media.size,
Metadata: {
"x-amz-meta-original-name": media.name,
},
});
const response = await this.client.send(command);
if (response.$metadata.httpStatusCode !== 200) {
throw new Error("Failed to upload file");
}
return hash;
}
async getMediaByHash(hash: string): Promise<File | null> {
const command = new GetObjectCommand({
Bucket: this.config.s3.bucket_name,
Key: hash,
});
let response: GetObjectCommandOutput;
try {
response = await this.client.send(command);
} catch {
return null;
}
if (response.$metadata.httpStatusCode !== 200) {
throw new Error("Failed to get file");
}
const body = await response.Body?.transformToByteArray();
if (!body) {
throw new Error("Failed to get file");
}
return new File([body], hash, {
type: response.ContentType,
});
}
}
/**
* Local backend, stores files on filesystem
*/
export class LocalBackend extends MediaBackend {
config: ConfigType;
constructor(config: ConfigType) {
super("local");
this.config = config;
}
async addMedia(media: File): Promise<string> {
if (this.config.media.conversion.convert_images) {
media = await this.convertMedia(media, this.config);
}
const hash = await super.addMedia(media);
if (!(await exists(`${process.cwd()}/uploads`))) {
await mkdir(`${process.cwd()}/uploads`);
}
await Bun.write(Bun.file(`${process.cwd()}/uploads/${hash}`), media);
return hash;
}
async getMediaByHash(hash: string): Promise<File | null> {
const file = Bun.file(`${process.cwd()}/uploads/${hash}`);
if (!(await file.exists())) {
return null;
}
return new File([await file.arrayBuffer()], `${hash}`, {
type: file.type,
});
}
}
export const uploadFile = (file: File, config: ConfigType) => {
const backend = config.media.backend;
if (backend === "local") {
return new LocalBackend(config).addMedia(file);
} else if (backend === "s3") {
return new S3Backend(config).addMedia(file);
}
};
export const getFile = (
hash: string,
extension: string,
config: ConfigType
) => {
const backend = config.media.backend;
if (backend === "local") {
return new LocalBackend(config).getMediaByHash(hash);
} else if (backend === "s3") {
return new S3Backend(config).getMediaByHash(hash);
}
return null;
};
@ -1,17 +0,0 @@
/**
* @packageDocumentation
* @module MediaManager/Utils
*/
import { SHA256 } from "bun";
/**
* Generates a SHA-256 hash for a given file.
* @param file - The file to hash.
* @returns A promise that resolves to the SHA-256 hash of the file in hex format.
*/
export const getMediaHash = async (file: File): Promise<string> => {
const arrayBuffer = await file.arrayBuffer();
const hash = new SHA256().update(arrayBuffer).digest("hex");
return hash;
};
@ -1,63 +0,0 @@
import { describe, expect, it } from "bun:test";
import { mockModule } from "@versia-server/tests";
import sharp from "sharp";
import { calculateBlurhash } from "./blurhash.ts";
describe("BlurhashPreprocessor", () => {
it("should calculate blurhash for a valid image", async () => {
const inputBuffer = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r: 255, g: 0, b: 0 },
},
})
.png()
.toBuffer();
const inputFile = new File([inputBuffer as BlobPart], "test.png", {
type: "image/png",
});
const result = await calculateBlurhash(inputFile);
expect(result).toBeTypeOf("string");
expect(result).not.toBe("");
});
it("should return null blurhash for an invalid image", async () => {
const invalidFile = new File(["invalid image data"], "invalid.png", {
type: "image/png",
});
const result = await calculateBlurhash(invalidFile);
expect(result).toBeNull();
});
it("should handle errors during blurhash calculation", async () => {
const inputBuffer = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r: 255, g: 0, b: 0 },
},
})
.png()
.toBuffer();
const inputFile = new File([inputBuffer as BlobPart], "test.png", {
type: "image/png",
});
using __ = await mockModule("blurhash", () => ({
encode: (): void => {
throw new Error("Test error");
},
}));
const result = await calculateBlurhash(inputFile);
expect(result).toBeNull();
});
});
@ -1,37 +0,0 @@
import { encode } from "blurhash";
import sharp from "sharp";
export const calculateBlurhash = async (file: File): Promise<string | null> => {
try {
const arrayBuffer = await file.arrayBuffer();
const metadata = await sharp(arrayBuffer).metadata();
return new Promise<string | null>((resolve) => {
sharp(arrayBuffer)
.raw()
.ensureAlpha()
.toBuffer((err, buffer) => {
if (err) {
resolve(null);
return;
}
try {
resolve(
encode(
new Uint8ClampedArray(buffer),
metadata?.width ?? 0,
metadata?.height ?? 0,
4,
4,
) as string,
);
} catch {
resolve(null);
}
});
});
} catch {
return null;
}
};
@ -1,134 +0,0 @@
import { describe, expect, it } from "bun:test";
import sharp from "sharp";
import { convertImage } from "./image-conversion.ts";
describe("ImageConversionPreprocessor", () => {
it("should convert a JPEG image to WebP", async () => {
const inputBuffer = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r: 255, g: 0, b: 0 },
},
})
.jpeg()
.toBuffer();
const inputFile = new File([inputBuffer as BlobPart], "test.jpg", {
type: "image/jpeg",
});
const result = await convertImage(inputFile, "image/webp");
expect(result.type).toBe("image/webp");
expect(result.name).toBe("test.webp");
const resultBuffer = await result.arrayBuffer();
const metadata = await sharp(resultBuffer).metadata();
expect(metadata.format).toBe("webp");
});
it("should not convert SVG when convert_vector is false", async () => {
const svgContent =
'<svg xmlns="http://www.w3.org/2000/svg"><rect width="100" height="100" fill="red"/></svg>';
const inputFile = new File([svgContent], "test.svg", {
type: "image/svg+xml",
});
const result = await convertImage(inputFile, "image/webp");
expect(result).toBe(inputFile);
});
it("should convert SVG when convert_vector is true", async () => {
const svgContent =
'<svg xmlns="http://www.w3.org/2000/svg"><rect width="100" height="100" fill="red"/></svg>';
const inputFile = new File([svgContent], "test.svg", {
type: "image/svg+xml",
});
const result = await convertImage(inputFile, "image/webp", {
convertVectors: true,
});
expect(result.type).toBe("image/webp");
expect(result.name).toBe("test.webp");
});
it("should not convert unsupported file types", async () => {
const inputFile = new File(["test content"], "test.txt", {
type: "text/plain",
});
const result = await convertImage(inputFile, "image/webp");
expect(result).toBe(inputFile);
});
it("should throw an error for unsupported output format", async () => {
const inputBuffer = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r: 255, g: 0, b: 0 },
},
})
.png()
.toBuffer();
const inputFile = new File([inputBuffer as BlobPart], "test.png", {
type: "image/png",
});
await expect(convertImage(inputFile, "image/bmp")).rejects.toThrow(
"Unsupported output format: image/bmp",
);
});
it("should convert animated GIF to WebP while preserving animation", async () => {
// Create a simple animated GIF
const inputBuffer = await sharp({
create: {
width: 100,
height: 100,
channels: 4,
background: { r: 255, g: 0, b: 0, alpha: 1 },
},
})
.gif()
.toBuffer();
const inputFile = new File([inputBuffer as BlobPart], "animated.gif", {
type: "image/gif",
});
const result = await convertImage(inputFile, "image/webp");
expect(result.type).toBe("image/webp");
expect(result.name).toBe("animated.webp");
const resultBuffer = await result.arrayBuffer();
const metadata = await sharp(resultBuffer).metadata();
expect(metadata.format).toBe("webp");
});
it("should handle files with spaces in the name", async () => {
const inputBuffer = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r: 255, g: 0, b: 0 },
},
})
.png()
.toBuffer();
const inputFile = new File(
[inputBuffer as BlobPart],
"test image with spaces.png",
{ type: "image/png" },
);
const result = await convertImage(inputFile, "image/webp");
expect(result.type).toBe("image/webp");
expect(result.name).toBe("test image with spaces.webp");
});
});

View file

@ -1,109 +0,0 @@
/**
* @packageDocumentation
* @module MediaManager/Preprocessors
*/
import sharp from "sharp";
/**
* Supported input media formats.
*/
const supportedInputFormats = [
"image/png",
"image/jpeg",
"image/webp",
"image/avif",
"image/svg+xml",
"image/gif",
"image/tiff",
];
/**
* Supported output media formats.
*/
const supportedOutputFormats = [
"image/jpeg",
"image/png",
"image/webp",
"image/avif",
"image/gif",
"image/tiff",
];
/**
* Checks if a file is convertible.
* @param file - The file to check.
* @returns True if the file is convertible, false otherwise.
*/
const isConvertible = (
file: File,
options?: { convertVectors?: boolean },
): boolean => {
if (file.type === "image/svg+xml" && !options?.convertVectors) {
return false;
}
return supportedInputFormats.includes(file.type);
};
/**
* Extracts the filename from a path.
* @param path - The path to extract the filename from.
* @returns The extracted filename.
*/
const extractFilenameFromPath = (path: string): string => {
const pathParts = path.split(/(?<!\\)\//);
return pathParts.at(-1) as string;
};
/**
* Replaces the file extension in the filename.
* @param fileName - The original filename.
* @param newExtension - The new extension.
* @returns The filename with the new extension.
*/
const getReplacedFileName = (fileName: string, newExtension: string): string =>
extractFilenameFromPath(fileName).replace(/\.[^/.]+$/, `.${newExtension}`);
/**
* Converts an image file to the format specified in the configuration.
*
* @param file - The image file to convert.
* @param targetFormat - The target format to convert to.
* @returns The converted image file.
*/
export const convertImage = async (
file: File,
targetFormat: string,
options?: {
convertVectors?: boolean;
},
): Promise<File> => {
if (!isConvertible(file, options)) {
return file;
}
if (!supportedOutputFormats.includes(targetFormat)) {
throw new Error(`Unsupported output format: ${targetFormat}`);
}
const sharpCommand = sharp(await file.arrayBuffer(), {
animated: true,
});
const commandName = targetFormat.split("/")[1] as
| "jpeg"
| "png"
| "webp"
| "avif"
| "gif"
| "tiff";
const convertedBuffer = await sharpCommand[commandName]().toBuffer();
return new File(
[convertedBuffer as BlobPart],
getReplacedFileName(file.name, commandName),
{
type: targetFormat,
lastModified: Date.now(),
},
);
};
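A sketch of how these options line up with the [media.conversion] block in the example config further down this diff; the config import path is a placeholder, and the runtime property names are assumed to mirror the TOML keys.
// Sketch: apply the instance's [media.conversion] settings to an uploaded image.
// The import paths and exact config property names are assumptions.
import { config } from "./config.ts";
import { convertImage } from "./image-conversion.ts";

const preprocessUpload = async (upload: File): Promise<File> => {
    if (!config.media.conversion.convert_images) {
        return upload; // conversion disabled, keep the original
    }
    // e.g. convert_to = "image/webp", convert_vectors = false
    return convertImage(upload, config.media.conversion.convert_to, {
        convertVectors: config.media.conversion.convert_vectors,
    });
};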

995
cli.ts Normal file
View file

@ -0,0 +1,995 @@
import type { Prisma } from "@prisma/client";
import chalk from "chalk";
import { client } from "~database/datasource";
import { createNewLocalUser } from "~database/entities/User";
import Table from "cli-table";
import { rebuildSearchIndexes, MeiliIndexType } from "@meilisearch";
import { getConfig } from "@config";
import { uploadFile } from "~classes/media";
import { getUrl } from "~database/entities/Attachment";
import { mkdir, exists } from "fs/promises";
import extract from "extract-zip";
const args = process.argv;
/**
* Make the text have a width of 20 characters, padding with gray dots
* Text can be a Chalk string, in which case formatting codes should not be counted in text length
* @param text The text to align
*/
const alignDots = (text: string, length = 20) => {
// Remove formatting codes
// eslint-disable-next-line no-control-regex
const textLength = text.replace(/\u001b\[\d+m/g, "").length;
const dots = ".".repeat(length - textLength);
return `${text}${chalk.gray(dots)}`;
};
const alignDotsSmall = (text: string, length = 16) => alignDots(text, length);
const help = `
${chalk.bold(`Usage: bun cli <command> ${chalk.blue("[...flags]")} [...args]`)}
${chalk.bold("Commands:")}
${alignDots(chalk.blue("help"), 24)} Show this help message
${alignDots(chalk.blue("user"), 24)} Manage users
${alignDots(chalk.blue("create"))} Create a new user
${alignDotsSmall(chalk.green("username"))} Username of the user
${alignDotsSmall(chalk.green("password"))} Password of the user
${alignDotsSmall(chalk.green("email"))} Email of the user
${alignDotsSmall(
chalk.yellow("--admin")
)} Make the user an admin (optional)
${chalk.bold("Example:")} ${chalk.bgGray(
`bun cli user create admin password123 admin@gmail.com --admin`
)}
${alignDots(chalk.blue("delete"))} Delete a user
${alignDotsSmall(chalk.green("username"))} Username of the user
${chalk.bold("Example:")} ${chalk.bgGray(
`bun cli user delete admin`
)}
${alignDots(chalk.blue("list"))} List all users
${alignDotsSmall(
chalk.yellow("--admins")
)} List only admins (optional)
${chalk.bold("Example:")} ${chalk.bgGray(`bun cli user list`)}
${alignDots(chalk.blue("search"))} Search for a user
${alignDotsSmall(chalk.green("query"))} Query to search for
${alignDotsSmall(
chalk.yellow("--displayname")
)} Search by display name (optional)
${alignDotsSmall(chalk.yellow("--bio"))} Search in bio (optional)
${alignDotsSmall(
chalk.yellow("--local")
)} Search in local users (optional)
${alignDotsSmall(
chalk.yellow("--remote")
)} Search in remote users (optional)
${alignDotsSmall(
chalk.yellow("--email")
)} Search in emails (optional)
${alignDotsSmall(chalk.yellow("--json"))} Output as JSON (optional)
${alignDotsSmall(chalk.yellow("--csv"))} Output as CSV (optional)
${chalk.bold("Example:")} ${chalk.bgGray(
`bun cli user search admin`
)}
${alignDots(chalk.blue("note"), 24)} Manage notes
${alignDots(chalk.blue("delete"))} Delete a note
${alignDotsSmall(chalk.green("id"))} ID of the note
${chalk.bold("Example:")} ${chalk.bgGray(
`bun cli note delete 018c1838-6e0b-73c4-a157-a91ea4e25d1d`
)}
${alignDots(chalk.blue("search"))} Search for a status
${alignDotsSmall(chalk.green("query"))} Query to search for
${alignDotsSmall(
chalk.yellow("--local")
)} Search in local statuses (optional)
${alignDotsSmall(
chalk.yellow("--remote")
)} Search in remote statuses (optional)
${alignDotsSmall(chalk.yellow("--json"))} Output as JSON (optional)
${alignDotsSmall(chalk.yellow("--csv"))} Output as CSV (optional)
${chalk.bold("Example:")} ${chalk.bgGray(
`bun cli note search hello`
)}
${alignDots(chalk.blue("index"), 24)} Manage user and status indexes
${alignDots(chalk.blue("rebuild"))} Rebuild the index
${alignDotsSmall(
chalk.green("batch-size")
)} The number of items to index at once (optional, default 100)
${alignDotsSmall(
chalk.yellow("--statuses")
)} Only rebuild the statuses index (optional)
${alignDotsSmall(
chalk.yellow("--users")
)} Only rebuild the users index (optional)
${chalk.bold("Example:")} ${chalk.bgGray(
`bun cli index rebuild --users 200`
)}
${alignDots(chalk.blue("emoji"), 24)} Manage custom emojis
${alignDots(chalk.blue("add"))} Add a custom emoji
${alignDotsSmall(chalk.green("name"))} Name of the emoji
${alignDotsSmall(chalk.green("url"))} URL of the emoji
${chalk.bold("Example:")} ${chalk.bgGray(
`bun cli emoji add bun https://bun.com/bun.png`
)}
${alignDots(chalk.blue("delete"))} Delete a custom emoji
${alignDotsSmall(chalk.green("name"))} Name of the emoji
${chalk.bold("Example:")} ${chalk.bgGray(
`bun cli emoji delete bun`
)}
${alignDots(chalk.blue("list"))} List all custom emojis
${chalk.bold("Example:")} ${chalk.bgGray(`bun cli emoji list`)}
${alignDots(chalk.blue("search"))} Search for a custom emoji
${alignDotsSmall(chalk.green("query"))} Query to search for
${alignDotsSmall(
chalk.yellow("--local")
)} Search in local emojis (optional, default)
${alignDotsSmall(
chalk.yellow("--remote")
)} Search in remote emojis (optional)
${alignDotsSmall(chalk.yellow("--json"))} Output as JSON (optional)
${alignDotsSmall(chalk.yellow("--csv"))} Output as CSV (optional)
${chalk.bold("Example:")} ${chalk.bgGray(
`bun cli emoji search bun`
)}
${alignDots(chalk.blue("import"))} Import a Pleroma emoji pack
${alignDotsSmall(chalk.green("url"))} URL of the emoji pack
${chalk.bold("Example:")} ${chalk.bgGray(
`bun cli emoji import https://site.com/neofox/manifest.json`
)}
`;
if (args.length < 3) {
console.log(help);
process.exit(0);
}
const command = args[2];
const config = getConfig();
switch (command) {
case "help":
console.log(help);
break;
case "user":
switch (args[3]) {
case "create": {
// Check if --admin flag is provided
const argsWithFlags = args.filter(arg => arg.startsWith("--"));
const argsWithoutFlags = args.filter(
arg => !arg.startsWith("--")
);
const username = argsWithoutFlags[4];
const password = argsWithoutFlags[5];
const email = argsWithoutFlags[6];
const admin = argsWithFlags.includes("--admin");
// Check if username, password and email are provided
if (!username || !password || !email) {
console.log(
`${chalk.red(``)} Missing username, password or email`
);
process.exit(1);
}
// Check if user already exists
const user = await client.user.findFirst({
where: {
OR: [{ username }, { email }],
},
});
if (user) {
console.log(`${chalk.red(``)} User already exists`);
process.exit(1);
}
// Create user
const newUser = await createNewLocalUser({
email: email,
password: password,
username: username,
admin: admin,
});
console.log(
`${chalk.green(``)} Created user ${chalk.blue(
newUser.username
)}${admin ? chalk.green(" (admin)") : ""}`
);
break;
}
case "delete": {
const username = args[4];
if (!username) {
console.log(`${chalk.red(``)} Missing username`);
process.exit(1);
}
const user = await client.user.findFirst({
where: {
username: username,
},
});
if (!user) {
console.log(`${chalk.red(``)} User not found`);
process.exit(1);
}
await client.user.delete({
where: {
id: user.id,
},
});
console.log(
`${chalk.green(``)} Deleted user ${chalk.blue(
user.username
)}`
);
break;
}
case "list": {
const admins = args.includes("--admins");
const users = await client.user.findMany({
where: {
isAdmin: admins || undefined,
},
take: 200,
});
console.log(
`${chalk.green(``)} Found ${chalk.blue(
users.length
)} users`
);
for (const user of users) {
console.log(
`\t${chalk.blue(user.username)} ${chalk.gray(
user.email
)} ${chalk.green(user.isAdmin ? "Admin" : "User")}`
);
}
break;
}
case "search": {
const argsWithoutFlags = args.filter(
arg => !arg.startsWith("--")
);
const query = argsWithoutFlags[4];
if (!query) {
console.log(`${chalk.red(``)} Missing query`);
process.exit(1);
}
const displayname = args.includes("--displayname");
const bio = args.includes("--bio");
const local = args.includes("--local");
const remote = args.includes("--remote");
const email = args.includes("--email");
const json = args.includes("--json");
const csv = args.includes("--csv");
const queries: Prisma.UserWhereInput[] = [];
if (displayname) {
queries.push({
displayName: {
contains: query,
mode: "insensitive",
},
});
}
if (bio) {
queries.push({
note: {
contains: query,
mode: "insensitive",
},
});
}
if (local) {
queries.push({
instanceId: null,
});
}
if (remote) {
queries.push({
instanceId: {
not: null,
},
});
}
if (email) {
queries.push({
email: {
contains: query,
mode: "insensitive",
},
});
}
const users = await client.user.findMany({
where: {
AND: queries,
},
include: {
instance: true,
},
take: 40,
});
if (json || csv) {
if (json) {
console.log(JSON.stringify(users, null, 4));
}
if (csv) {
// Convert the outputted JSON to CSV
// Remove all object children from each object
const items = users.map(user => {
const item = {
...user,
instance: undefined,
endpoints: undefined,
source: undefined,
};
return item;
});
const replacer = (key: string, value: any): any =>
value === null ? "" : value; // Null values are returned as empty strings
const header = Object.keys(items[0]);
const csv = [
header.join(","), // header row first
...items.map(row =>
header
.map(fieldName =>
// @ts-expect-error This is fine
JSON.stringify(row[fieldName], replacer)
)
.join(",")
),
].join("\r\n");
console.log(csv);
}
} else {
console.log(
`${chalk.green(``)} Found ${chalk.blue(
users.length
)} users`
);
const table = new Table({
head: [
chalk.white(chalk.bold("Username")),
chalk.white(chalk.bold("Email")),
chalk.white(chalk.bold("Display Name")),
chalk.white(chalk.bold("Admin?")),
chalk.white(chalk.bold("Instance URL")),
],
});
for (const user of users) {
table.push([
chalk.yellow(`@${user.username}`),
chalk.green(user.email),
chalk.blue(user.displayName),
chalk.red(user.isAdmin ? "Yes" : "No"),
chalk.blue(
user.instanceId
? user.instance?.base_url
: "Local"
),
]);
}
console.log(table.toString());
}
break;
}
default:
console.log(`Unknown command ${chalk.blue(command)}`);
break;
}
break;
case "note": {
switch (args[3]) {
case "delete": {
const id = args[4];
if (!id) {
console.log(`${chalk.red(``)} Missing ID`);
process.exit(1);
}
const note = await client.status.findFirst({
where: {
id: id,
},
});
if (!note) {
console.log(`${chalk.red(``)} Note not found`);
process.exit(1);
}
await client.status.delete({
where: {
id: note.id,
},
});
console.log(
`${chalk.green(``)} Deleted note ${chalk.blue(note.id)}`
);
break;
}
case "search": {
const argsWithoutFlags = args.filter(
arg => !arg.startsWith("--")
);
const query = argsWithoutFlags[4];
if (!query) {
console.log(`${chalk.red(``)} Missing query`);
process.exit(1);
}
const local = args.includes("--local");
const remote = args.includes("--remote");
const json = args.includes("--json");
const csv = args.includes("--csv");
const queries: Prisma.StatusWhereInput[] = [];
if (local) {
queries.push({
instanceId: null,
});
}
if (remote) {
queries.push({
instanceId: {
not: null,
},
});
}
const statuses = await client.status.findMany({
where: {
AND: queries,
content: {
contains: query,
mode: "insensitive",
},
},
take: 40,
include: {
author: true,
instance: true,
},
});
if (json || csv) {
if (json) {
console.log(JSON.stringify(statuses, null, 4));
}
if (csv) {
// Convert the outputted JSON to CSV
// Remove all object children from each object
const items = statuses.map(status => {
const item = {
...status,
author: undefined,
instance: undefined,
};
return item;
});
const replacer = (key: string, value: any): any =>
value === null ? "" : value; // Null values are returned as empty strings
const header = Object.keys(items[0]);
const csv = [
header.join(","), // header row first
...items.map(row =>
header
.map(fieldName =>
// @ts-expect-error This is fine
JSON.stringify(row[fieldName], replacer)
)
.join(",")
),
].join("\r\n");
console.log(csv);
}
} else {
console.log(
`${chalk.green(``)} Found ${chalk.blue(
statuses.length
)} statuses`
);
const table = new Table({
head: [
chalk.white(chalk.bold("Username")),
chalk.white(chalk.bold("Instance URL")),
chalk.white(chalk.bold("Content")),
],
});
for (const status of statuses) {
table.push([
chalk.yellow(`@${status.author.username}`),
chalk.blue(
status.instanceId
? status.instance?.base_url
: "Local"
),
chalk.green(status.content.slice(0, 50)),
]);
}
console.log(table.toString());
}
break;
}
default:
console.log(`Unknown command ${chalk.blue(command)}`);
break;
}
break;
}
case "index": {
if (!config.meilisearch.enabled) {
console.log(
`${chalk.red(``)} Meilisearch is not enabled in the config`
);
process.exit(1);
}
switch (args[3]) {
case "rebuild": {
const statuses = args.includes("--statuses");
const users = args.includes("--users");
const argsWithoutFlags = args.filter(
arg => !arg.startsWith("--")
);
const batchSize = Number(argsWithoutFlags[4]) || 100;
const neither = !statuses && !users;
if (statuses || neither) {
console.log(
`${chalk.yellow(``)} ${chalk.bold(
`Rebuilding Meilisearch index for statuses`
)}`
);
const timeBefore = performance.now();
await rebuildSearchIndexes(
[MeiliIndexType.Statuses],
batchSize
);
console.log(
`${chalk.green(``)} ${chalk.bold(
`Meilisearch index for statuses rebuilt in ${chalk.bgGreen(
(performance.now() - timeBefore).toFixed(2)
)}ms`
)}`
);
}
if (users || neither) {
console.log(
`${chalk.yellow(``)} ${chalk.bold(
`Rebuilding Meilisearch index for users`
)}`
);
const timeBefore = performance.now();
await rebuildSearchIndexes(
[MeiliIndexType.Accounts],
batchSize
);
console.log(
`${chalk.green(``)} ${chalk.bold(
`Meilisearch index for users rebuilt in ${chalk.bgGreen(
(performance.now() - timeBefore).toFixed(2)
)}ms`
)}`
);
}
break;
}
default:
console.log(`Unknown command ${chalk.blue(command)}`);
break;
}
break;
}
case "emoji": {
switch (args[3]) {
case "add": {
const name = args[4];
const url = args[5];
if (!name || !url) {
console.log(`${chalk.red(``)} Missing name or URL`);
process.exit(1);
}
const content_type = `image/${url
.split(".")
.pop()
?.replace("jpg", "jpeg")}}`;
const emoji = await client.emoji.create({
data: {
shortcode: name,
url: url,
visible_in_picker: true,
content_type: content_type,
},
});
console.log(
`${chalk.green(``)} Created emoji ${chalk.blue(
emoji.shortcode
)}`
);
break;
}
case "delete": {
const name = args[4];
if (!name) {
console.log(`${chalk.red(``)} Missing name`);
process.exit(1);
}
const emoji = await client.emoji.findFirst({
where: {
shortcode: name,
},
});
if (!emoji) {
console.log(`${chalk.red(``)} Emoji not found`);
process.exit(1);
}
await client.emoji.delete({
where: {
id: emoji.id,
},
});
console.log(
`${chalk.green(``)} Deleted emoji ${chalk.blue(
emoji.shortcode
)}`
);
break;
}
case "list": {
const emojis = await client.emoji.findMany();
console.log(
`${chalk.green(``)} Found ${chalk.blue(
emojis.length
)} emojis`
);
for (const emoji of emojis) {
console.log(
`\t${chalk.blue(emoji.shortcode)} ${chalk.gray(
emoji.url
)}`
);
}
break;
}
case "search": {
const argsWithoutFlags = args.filter(
arg => !arg.startsWith("--")
);
const query = argsWithoutFlags[4];
if (!query) {
console.log(`${chalk.red(``)} Missing query`);
process.exit(1);
}
const local = args.includes("--local");
const remote = args.includes("--remote");
const json = args.includes("--json");
const csv = args.includes("--csv");
const queries: Prisma.EmojiWhereInput[] = [];
if (local) {
queries.push({
instanceId: null,
});
}
if (remote) {
queries.push({
instanceId: {
not: null,
},
});
}
const emojis = await client.emoji.findMany({
where: {
AND: queries,
shortcode: {
contains: query,
mode: "insensitive",
},
},
take: 40,
include: {
instance: true,
},
});
if (json || csv) {
if (json) {
console.log(JSON.stringify(emojis, null, 4));
}
if (csv) {
// Convert the outputted JSON to CSV
// Remove all object children from each object
const items = emojis.map(emoji => {
const item = {
...emoji,
instance: undefined,
};
return item;
});
const replacer = (key: string, value: any): any =>
value === null ? "" : value; // Null values are returned as empty strings
const header = Object.keys(items[0]);
const csv = [
header.join(","), // header row first
...items.map(row =>
header
.map(fieldName =>
// @ts-expect-error This is fine
JSON.stringify(row[fieldName], replacer)
)
.join(",")
),
].join("\r\n");
console.log(csv);
}
} else {
console.log(
`${chalk.green(``)} Found ${chalk.blue(
emojis.length
)} emojis`
);
const table = new Table({
head: [
chalk.white(chalk.bold("Shortcode")),
chalk.white(chalk.bold("Instance URL")),
chalk.white(chalk.bold("URL")),
],
});
for (const emoji of emojis) {
table.push([
chalk.yellow(`:${emoji.shortcode}:`),
chalk.blue(
emoji.instanceId
? emoji.instance?.base_url
: "Local"
),
chalk.gray(emoji.url),
]);
}
console.log(table.toString());
}
break;
}
case "import": {
const url = args[4];
if (!url) {
console.log(`${chalk.red(``)} Missing URL`);
process.exit(1);
}
const response = await fetch(url);
if (!response.ok) {
console.log(`${chalk.red(``)} Failed to fetch emoji pack`);
process.exit(1);
}
const res = (await response.json()) as Record<
string,
{
description: string;
files: string;
homepage: string;
src: string;
src_sha256?: string;
license?: string;
}
>;
const pack = Object.values(res)[0];
// Fetch emoji list from `files`, can be a relative URL
if (!pack.files) {
console.log(`${chalk.red(``)} Missing files`);
process.exit(1);
}
let pack_url = pack.files;
if (!pack.files.includes("http")) {
// Is relative URL to pack manifest URL
pack_url =
url.split("/").slice(0, -1).join("/") +
"/" +
pack.files;
}
const zip = new File(
[await (await fetch(pack.src)).arrayBuffer()],
"emoji.zip",
{
type: "application/zip",
}
);
// Check if the SHA256 hash matches
const hasher = new Bun.SHA256();
hasher.update(await zip.arrayBuffer());
const hash = hasher.digest("hex");
if (pack.src_sha256 && pack.src_sha256 !== hash) {
console.log(`${chalk.red(``)} SHA256 hash does not match`);
console.log(
`${chalk.red(``)} Expected ${chalk.blue(
pack.src_sha256
)}, got ${chalk.blue(hash)}`
);
process.exit(1);
}
// Store file in /tmp
const tempDirectory = `/tmp/lysand-${hash}`;
if (!(await exists(tempDirectory))) {
await mkdir(tempDirectory);
}
await Bun.write(`${tempDirectory}/emojis.zip`, zip);
// Extract zip
await extract(`${tempDirectory}/emojis.zip`, {
dir: tempDirectory,
});
// In the format
// emoji_name: emoji_url
const pack_response = (await (
await fetch(pack_url)
).json()) as Record<string, string>;
let emojisCreated = 0;
for (const [name, path] of Object.entries(pack_response)) {
// Check if emoji already exists
const existingEmoji = await client.emoji.findFirst({
where: {
shortcode: name,
instanceId: null,
},
});
if (existingEmoji) {
console.log(
`${chalk.red(``)} Emoji ${chalk.blue(
name
)} already exists`
);
continue;
}
// Get emoji URL, as it can be relative
const emoji = Bun.file(`${tempDirectory}/${path}`);
const content_type = emoji.type;
const hash = await uploadFile(emoji as File, config);
if (!hash) {
console.log(
`${chalk.red(``)} Failed to upload emoji ${name}`
);
process.exit(1);
}
const finalUrl = getUrl(hash, config);
// Create emoji
await client.emoji.create({
data: {
shortcode: name,
url: finalUrl,
visible_in_picker: true,
content_type: content_type,
},
});
emojisCreated++;
console.log(
`${chalk.green(``)} Created emoji ${chalk.blue(name)}`
);
}
console.log(
`${chalk.green(``)} Imported ${chalk.blue(
emojisCreated
)} emojis`
);
break;
}
default:
console.log(`Unknown command ${chalk.blue(command)}`);
break;
}
break;
}
default:
console.log(`Unknown command ${chalk.blue(command)}`);
break;
}
process.exit(0);
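The emoji import branch above expects the Pleroma emoji pack layout: a manifest keyed by pack name that points to a zip archive (src, optionally checksummed via src_sha256) plus a second JSON document (files, resolved relative to the manifest URL) mapping shortcodes to paths inside that archive. A hypothetical pair of documents matching the Record types the code parses; every name, URL, and hash here is invented for illustration.
// Hypothetical manifest.json: pack name -> pack metadata (shape taken from the "import" case above).
export const manifest = {
    neofox: {
        description: "Neofox emoji pack",
        homepage: "https://example.com/neofox",
        src: "https://example.com/neofox/neofox.zip", // zip containing the images
        src_sha256: "placeholder-sha256-hex-of-the-zip", // optional integrity check
        files: "neofox.json", // relative to the manifest URL
        license: "CC BY-NC-SA 4.0",
    },
};

// Hypothetical neofox.json: shortcode -> path inside the extracted zip.
export const files: Record<string, string> = {
    neofox: "neofox.png",
    neofox_wave: "neofox_wave.png",
};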

View file

@ -1,36 +0,0 @@
import { completionsPlugin } from "@clerc/plugin-completions";
import { friendlyErrorPlugin } from "@clerc/plugin-friendly-error";
import { helpPlugin } from "@clerc/plugin-help";
import { notFoundPlugin } from "@clerc/plugin-not-found";
import { versionPlugin } from "@clerc/plugin-version";
import { setupDatabase } from "@versia-server/kit/db";
import { searchManager } from "@versia-server/kit/search";
import { Clerc } from "clerc";
import pkg from "../package.json" with { type: "json" };
import { rebuildIndexCommand } from "./index/rebuild.ts";
import { refetchInstanceCommand } from "./instance/refetch.ts";
import { createUserCommand } from "./user/create.ts";
import { deleteUserCommand } from "./user/delete.ts";
import { refetchUserCommand } from "./user/refetch.ts";
import { generateTokenCommand } from "./user/token.ts";
await setupDatabase(false);
await searchManager.connect(true);
Clerc.create()
.scriptName("cli")
.name("Versia Server CLI")
.description("CLI interface for Versia Server")
.version(pkg.version)
.use(helpPlugin())
.use(versionPlugin())
.use(completionsPlugin())
.use(notFoundPlugin())
.use(friendlyErrorPlugin())
.command(createUserCommand)
.command(deleteUserCommand)
.command(generateTokenCommand)
.command(refetchUserCommand)
.command(rebuildIndexCommand)
.command(refetchInstanceCommand)
.parse();

View file

@ -1,62 +0,0 @@
import { config } from "@versia-server/config";
import { SonicIndexType, searchManager } from "@versia-server/kit/search";
// @ts-expect-error - Root import is required or the Clerc type definitions won't work
// biome-ignore lint/correctness/noUnusedImports: Root import is required or the Clerc type definitions won't work
import { defineCommand, type Root } from "clerc";
import ora from "ora";
export const rebuildIndexCommand = defineCommand(
{
name: "index rebuild",
description: "Rebuild the search index.",
parameters: ["<type>"],
flags: {
"batch-size": {
description: "Number of records to process at once",
type: Number,
alias: "b",
default: 100,
},
},
},
async (context) => {
const { "batch-size": batchSize } = context.flags;
const { type } = context.parameters;
if (!config.search.enabled) {
throw new Error(
"Search is not enabled in the instance configuration.",
);
}
const spinner = ora("Rebuilding search indexes").start();
switch (type) {
case "accounts":
await searchManager.rebuildSearchIndexes(
[SonicIndexType.Accounts],
batchSize,
(progress) => {
spinner.text = `Rebuilding search indexes (${(progress * 100).toFixed(2)}%)`;
},
);
break;
case "statuses":
await searchManager.rebuildSearchIndexes(
[SonicIndexType.Statuses],
batchSize,
(progress) => {
spinner.text = `Rebuilding search indexes (${(progress * 100).toFixed(2)}%)`;
},
);
break;
default: {
throw new Error(
"Invalid index type. Can be 'accounts' or 'statuses'.",
);
}
}
spinner.succeed("Search indexes rebuilt");
},
);

View file

@ -1,37 +0,0 @@
import { Instance } from "@versia-server/kit/db";
import { FetchJobType, fetchQueue } from "@versia-server/kit/queues/fetch";
import { Instances } from "@versia-server/kit/tables";
import chalk from "chalk";
// @ts-expect-error - Root import is required or the Clerc type definitions won't work
// biome-ignore lint/correctness/noUnusedImports: Root import is required or the Clerc type definitions won't work
import { defineCommand, type Root } from "clerc";
import { eq } from "drizzle-orm";
export const refetchInstanceCommand = defineCommand(
{
name: "instance refetch",
description: "Refetches metadata from remote instances.",
parameters: ["<url_or_host>"],
},
async (context) => {
const { urlOrHost } = context.parameters;
const host = URL.canParse(urlOrHost)
? new URL(urlOrHost).host
: urlOrHost;
const instance = await Instance.fromSql(eq(Instances.baseUrl, host));
if (!instance) {
throw new Error(`Instance ${chalk.gray(host)} not found.`);
}
await fetchQueue.add(FetchJobType.Instance, {
uri: new URL(`https://${instance.data.baseUrl}`).origin,
});
console.info(
`Refresh job enqueued for ${chalk.gray(instance.data.baseUrl)}.`,
);
},
);

View file

@ -1,90 +0,0 @@
import { config } from "@versia-server/config";
import { User } from "@versia-server/kit/db";
import { searchManager } from "@versia-server/kit/search";
import { Users } from "@versia-server/kit/tables";
import chalk from "chalk";
// @ts-expect-error - Root import is required or the Clerc type definitions won't work
// biome-ignore lint/correctness/noUnusedImports: Root import is required or the Clerc type definitions won't work
import { defineCommand, type Root } from "clerc";
import { and, eq, isNull } from "drizzle-orm";
import { renderUnicodeCompact } from "uqr";
export const createUserCommand = defineCommand(
{
name: "user create",
description: "Create a new user.",
parameters: ["<username>"],
flags: {
password: {
description: "Password for the new user",
type: String,
alias: "p",
},
email: {
description: "Email for the new user",
type: String,
alias: "e",
},
admin: {
description: "Make the new user an admin",
type: Boolean,
alias: "a",
},
},
},
async (context) => {
const { admin, email, password } = context.flags;
const { username } = context.parameters;
if (!/^[a-z0-9_-]+$/.test(username)) {
throw new Error("Username must be alphanumeric and lowercase.");
}
// Check if user already exists
const existingUser = await User.fromSql(
and(eq(Users.username, username), isNull(Users.instanceId)),
);
if (existingUser) {
throw new Error(`User ${chalk.gray(username)} is taken.`);
}
const user = await User.register(username, {
email,
password,
isAdmin: admin,
});
// Add to search index
await searchManager.addUser(user);
if (!user) {
throw new Error("Failed to create user.");
}
console.info(`User ${chalk.gray(username)} created.`);
if (!password) {
const token = await user.resetPassword();
const link = new URL(
`${config.frontend.routes.password_reset}?${new URLSearchParams(
{
token,
},
)}`,
config.http.base_url,
);
console.info(`Password reset link for ${chalk.gray(username)}:`);
console.info(chalk.blue(link.href));
const qrcode = renderUnicodeCompact(link.href, {
border: 2,
});
// Pad all lines of QR code with spaces
console.info(`\n ${qrcode.replaceAll("\n", "\n ")}`);
}
},
);

View file

@ -1,60 +0,0 @@
import confirm from "@inquirer/confirm";
import chalk from "chalk";
// @ts-expect-error - Root import is required or the Clerc type definitions won't work
// biome-ignore lint/correctness/noUnusedImports: Root import is required or the Clerc type definitions won't work
import { defineCommand, type Root } from "clerc";
import { retrieveUser } from "../utils.ts";
export const deleteUserCommand = defineCommand(
{
name: "user delete",
alias: "user rm",
description:
"Delete a user from the database. Can use username or handle.",
parameters: ["<username_or_handle>"],
flags: {
confirm: {
description: "Ask for confirmation before deleting the user",
type: Boolean,
alias: "c",
default: true,
},
},
},
async (context) => {
const { confirm: confirmFlag } = context.flags;
const { usernameOrHandle } = context.parameters;
const user = await retrieveUser(usernameOrHandle);
if (!user) {
throw new Error(`User ${chalk.gray(usernameOrHandle)} not found.`);
}
console.info(`About to delete user ${chalk.gray(user.data.username)}!`);
console.info(`Username: ${chalk.blue(user.data.username)}`);
console.info(`Display Name: ${chalk.blue(user.data.displayName)}`);
console.info(`Created At: ${chalk.blue(user.data.createdAt)}`);
console.info(
`Instance: ${chalk.blue(user.data.instance?.baseUrl || "Local")}`,
);
if (confirmFlag) {
const choice = await confirm({
message: `Are you sure you want to delete this user? ${chalk.red(
"This is irreversible.",
)}`,
});
if (!choice) {
throw new Error("Operation aborted.");
}
}
await user.delete();
console.info(
`User ${chalk.gray(user.data.username)} has been deleted.`,
);
},
);

View file

@ -1,43 +0,0 @@
import { User } from "@versia-server/kit/db";
import chalk from "chalk";
// @ts-expect-error - Root import is required or the Clerc type definitions won't work
// biome-ignore lint/correctness/noUnusedImports: Root import is required or the Clerc type definitions won't work
import { defineCommand, type Root } from "clerc";
import ora from "ora";
import { retrieveUser } from "../utils.ts";
export const refetchUserCommand = defineCommand(
{
name: "user refetch",
description: "Refetches user data from their remote instance.",
parameters: ["<handle>"],
},
async (context) => {
const { handle } = context.parameters;
const user = await retrieveUser(handle);
if (!user) {
throw new Error(`User ${chalk.gray(handle)} not found.`);
}
if (user.local) {
throw new Error(
"This user is local and as such cannot be refetched.",
);
}
const spinner = ora("Refetching user").start();
try {
await User.fromVersia(user.uri);
} catch (error) {
spinner.fail(
`Failed to refetch user ${chalk.gray(user.data.username)}`,
);
throw error;
}
spinner.succeed(`User ${chalk.gray(user.data.username)} refetched.`);
},
);

View file

@ -1,50 +0,0 @@
import { Client, Token } from "@versia-server/kit/db";
import { randomUUIDv7 } from "bun";
import chalk from "chalk";
// @ts-expect-error - Root import is required or the Clerc type definitions won't work
// biome-ignore lint/correctness/noUnusedImports: Root import is required or the Clerc type definitions won't work
import { defineCommand, type Root } from "clerc";
import { randomString } from "@/math.ts";
import { retrieveUser } from "../utils.ts";
export const generateTokenCommand = defineCommand(
{
name: "user token",
description: "Generates a new access token for a user.",
parameters: ["<username>"],
},
async (context) => {
const { username } = context.parameters;
const user = await retrieveUser(username);
if (!user) {
throw new Error(`User ${chalk.gray(username)} not found.`);
}
const application = await Client.insert({
id:
user.id +
Buffer.from(
crypto.getRandomValues(new Uint8Array(32)),
).toString("base64"),
name: "Versia",
redirectUris: [],
scopes: ["openid", "profile", "email"],
secret: "",
});
const token = await Token.insert({
id: randomUUIDv7(),
accessToken: randomString(64, "base64url"),
scopes: ["read", "write", "follow"],
userId: user.id,
clientId: application.id,
});
console.info(
`Token generated for user ${chalk.gray(user.data.username)}.`,
);
console.info(`Access Token: ${chalk.blue(token.data.accessToken)}`);
},
);

View file

@ -1,23 +0,0 @@
import { Instance, User } from "@versia-server/kit/db";
import { parseUserAddress } from "@versia-server/kit/parsers";
import { Users } from "@versia-server/kit/tables";
import { and, eq, isNull } from "drizzle-orm";
export const retrieveUser = async (
usernameOrHandle: string,
): Promise<User | null> => {
const { username, domain } = parseUserAddress(usernameOrHandle);
const instance = domain ? await Instance.resolveFromHost(domain) : null;
const user = await User.fromSql(
and(
eq(Users.username, username),
instance
? eq(Users.instanceId, instance.data.id)
: isNull(Users.instanceId),
),
);
return user;
};

View file

@ -1 +0,0 @@
../config

View file

@ -1,480 +1,261 @@
# You can change the URL to the commit/tag you are using
#:schema https://raw.githubusercontent.com/versia-pub/server/main/config/config.schema.json
# All values marked as "sensitive" can be set to "PATH:/path/to/file" to read the value from a file (e.g. a secret manager)
[postgres]
# PostgreSQL database configuration
[database]
host = "localhost"
port = 5432
username = "versia"
# Sensitive value
password = "mycoolpassword"
database = "versia"
# Additional read-only replicas
# [[postgres.replicas]]
# host = "other-host"
# port = 5432
# username = "versia"
# password = "mycoolpassword2"
# database = "replica1"
username = "lysand"
password = "password123"
database = "lysand"
[redis.queue]
# A Redis database used for managing queues.
# Required for federation
host = "localhost"
port = 6379
# Sensitive value
# password = "test"
password = ""
database = 0
# A Redis database used for caching SQL queries.
# Optional, can be the same as the queue instance
# [redis.cache]
# host = "localhost"
# port = 6380
# database = 1
# password = ""
# Search and indexing configuration
[search]
# Enable indexing and searching?
[redis.cache]
host = "localhost"
port = 6379
password = ""
database = 1
enabled = false
# Optional if search is disabled
# [search.sonic]
# host = "localhost"
# port = 7700
# Sensitive value
# password = "test"
[registration]
# Can users sign up freely?
allow = true
# NOT IMPLEMENTED
require_approval = false
# Message to show to users when registration is disabled
# message = "ran out of spoons to moderate registrations, sorry"
[meilisearch]
host = "localhost"
port = 40007
api_key = ""
enabled = true
[http]
# URL that the instance will be accessible at
base_url = "https://example.com"
# Address to bind to (0.0.0.0 is suggested for proxies)
bind = "0.0.0.0"
bind_port = 8080
base_url = "https://lysand.social"
bind = "http://localhost"
bind_port = "8080"
# Bans IPv4 or IPv6 IPs (wildcards, networks and ranges are supported)
banned_ips = []
# Banned user agents, regex format
banned_user_agents = [
# "curl\/7.68.0",
# "wget\/1.20.3",
]
# URL to an eventual HTTP proxy
# Will be used for all outgoing requests
# proxy_address = "http://localhost:8118"
# TLS configuration. You should probably be using a reverse proxy instead of this
# [http.tls]
# key = "/path/to/key.pem"
# cert = "/path/to/cert.pem"
# Sensitive value
# passphrase = "awawa"
# ca = "/path/to/ca.pem"
[frontend]
# Enable custom frontends (warning: not enabling this will make Versia Server only accessible via the Mastodon API)
# Frontends also control the OpenID flow, so if you disable this, you will need to use the Mastodon frontend
enabled = true
# Path that frontend files are served from
# Edit this property to serve custom frontends
# If this is not set, Versia Server will also check
# the VERSIA_FRONTEND_PATH environment variable
# path = ""
[frontend.routes]
# Special routes for your frontend, below are the defaults for Versia-FE
# Can be set to a route already used by Versia Server, as long as it is on a different HTTP method
# e.g. /oauth/authorize is a POST-only route, so you can serve a GET route at /oauth/authorize
# home = "/"
# login = "/oauth/authorize"
# consent = "/oauth/consent"
# register = "/register"
# password_reset = "/oauth/reset"
[frontend.settings]
# Arbitrary key/value pairs to be passed to the frontend
# This can be used to set up custom themes, etc on supported frontends.
# theme = "dark"
# NOT IMPLEMENTED
[email]
# Enable email sending
send_emails = false
# If send_emails is true, the following settings are required
# [email.smtp]
[smtp]
# SMTP server to use for sending emails
# server = "smtp.example.com"
# port = 465
# username = "test@example.com"
# Sensitive value
# password = "password123"
# tls = true
server = "smtp.example.com"
port = 465
username = "test@example.com"
password = "password123"
tls = true
[media]
# Can be "s3" or "local", where "local" uploads the file to the local filesystem
# Changing this value will not retroactively apply to existing data
# Don't forget to fill in the s3 config :3
# If you need to change this value after setting up your instance, you must move all the files
# from one backend to the other manually
backend = "s3"
# If media backend is "local", this is the folder where the files will be stored
# Can be any path
uploads_path = "uploads"
# Whether to check the hash of media when uploading to avoid duplication
deduplicate_media = true
[media.conversion]
# Whether to automatically convert images to another format on upload
convert_images = true
# Can be: "image/jxl", "image/webp", "image/avif", "image/png", "image/jpeg", "image/heif", "image/gif"
convert_images = false
# Can be: "jxl", "webp", "avif", "png", "jpg", "heif"
# JXL support will likely not work
convert_to = "image/webp"
# Also convert SVG images?
convert_vectors = false
convert_to = "webp"
# [s3]
# Can be left commented if you don't use the S3 media backend
# endpoint = "https://s3.example.com"
# Sensitive value
# access_key = "XXXXX"
# Sensitive value
# secret_access_key = "XXX"
# region = "us-east-1"
# bucket_name = "versia"
# public_url = "https://cdn.example.com"
# Adds a prefix to the uploaded files
# path = "versia"
# Use path-style URLs during upload (e.g. https://s3.example.com/versia)
# instead of the default virtual-hosted style (e.g. https://versia.s3.example.com)
# This is required for some S3-compatible services, such as MinIO
# path_style = true
[s3]
# Can be left blank if you don't use the S3 media backend
endpoint = "https://s3-us-west-2.amazonaws.com"
access_key = ""
secret_access_key = ""
region = "us-west-2"
bucket_name = "lysand"
public_url = "https://cdn.example.com"
[email]
# Sends an email to moderators when a report is received
# NOT IMPLEMENTED
send_on_report = false
# Sends an email to moderators when a user is suspended
# NOT IMPLEMENTED
send_on_suspend = false
# Sends an email to moderators when a user is unsuspended
# NOT IMPLEMENTED
send_on_unsuspend = false
[validation]
# Checks user data
# Does not retroactively apply to previously entered data
[validation.accounts]
max_displayname_characters = 50
max_username_characters = 30
max_bio_characters = 5000
max_avatar_bytes = 5_000_000
max_header_bytes = 5_000_000
# Regex is allowed here
disallowed_usernames = [
"well-known",
"about",
"activities",
"api",
"auth",
"dev",
"inbox",
"internal",
"main",
"media",
"nodeinfo",
"notice",
"oauth",
"objects",
"proxy",
"push",
"registration",
"relay",
"settings",
"status",
"tag",
"users",
"web",
"search",
"mfa",
# Self explanatory
max_displayname_size = 50
max_bio_size = 160
max_note_size = 5000
max_avatar_size = 5_000_000
max_header_size = 5_000_000
max_media_size = 40_000_000
max_media_attachments = 10
max_media_description_size = 1000
max_poll_options = 20
max_poll_option_size = 500
min_poll_duration = 60
max_poll_duration = 1893456000
max_username_size = 30
# An array of strings, defaults are from Akkoma
username_blacklist = [
".well-known",
"~",
"about",
"activities",
"api",
"auth",
"dev",
"inbox",
"internal",
"main",
"media",
"nodeinfo",
"notice",
"oauth",
"objects",
"proxy",
"push",
"registration",
"relay",
"settings",
"status",
"tag",
"users",
"web",
"search",
"mfa",
]
max_field_count = 10
max_field_name_characters = 1000
max_field_value_characters = 1000
max_pinned_notes = 20
# Whether to blacklist known temporary email providers
blacklist_tempmail = false
# Additional email providers to blacklist
email_blacklist = []
# Valid URL schemes, otherwise the URL is parsed as text
url_scheme_whitelist = [
"http",
"https",
"ftp",
"dat",
"dweb",
"gopher",
"hyper",
"ipfs",
"ipns",
"irc",
"xmpp",
"ircs",
"magnet",
"mailto",
"mumble",
"ssb",
"gemini",
] # NOT IMPLEMENTED
[validation.notes]
max_characters = 5000
allowed_url_schemes = [
"http",
"https",
"ftp",
"dat",
"dweb",
"gopher",
"hyper",
"ipfs",
"ipns",
"irc",
"xmpp",
"ircs",
"magnet",
"mailto",
"mumble",
"ssb",
"gemini",
enforce_mime_types = false
allowed_mime_types = [
"image/jpeg",
"image/png",
"image/gif",
"image/heic",
"image/heif",
"image/webp",
"image/avif",
"video/webm",
"video/mp4",
"video/quicktime",
"video/ogg",
"audio/wave",
"audio/wav",
"audio/x-wav",
"audio/x-pn-wave",
"audio/vnd.wave",
"audio/ogg",
"audio/vorbis",
"audio/mpeg",
"audio/mp3",
"audio/webm",
"audio/flac",
"audio/aac",
"audio/m4a",
"audio/x-m4a",
"audio/mp4",
"audio/3gpp",
"video/x-ms-asf",
]
max_attachments = 16
[validation.media]
max_bytes = 40_000_000
max_description_characters = 1000
# An empty array allows all MIME types
allowed_mime_types = []
[validation.emojis]
max_bytes = 1_000_000
max_shortcode_characters = 100
max_description_characters = 1000
[validation.polls]
max_options = 20
max_option_characters = 500
min_duration_seconds = 60
# 100 days
max_duration_seconds = 8_640_000
[validation.emails]
# Blocks over 10,000 common tempmail domains
disallow_tempmail = false
# Regex is allowed here
disallowed_domains = []
# [validation.challenges]
# "Challenges" (aka captchas) are a way to verify that a user is human
# Versia Server's challenges use no external services, and are proof-of-work based
# This means that they do not require any user interaction; instead,
# they require the user's computer to do a small amount of work
# The difficulty of the challenge, higher is will take more time to solve
# difficulty = 50000
# Challenge expiration time in seconds
# expiration = 300 # 5 minutes
# Leave this empty to generate a new key
# Sensitive value
# key = ""
# Block content that matches these regular expressions
[validation.filters]
note_content = [
# "(https?://)?(www\\.)?youtube\\.com/watch\\?v=[a-zA-Z0-9_-]+",
# "(https?://)?(www\\.)?youtu\\.be/[a-zA-Z0-9_-]+",
]
emoji_shortcode = []
username = []
displayname = []
bio = []
[notifications]
# Web Push Notifications configuration.
# Leave out to disable.
# [notifications.push]
# Subject field embedded in the push notification
# subject = "mailto:joe@example.com"
#
# [notifications.push.vapid_keys]
# VAPID keys for push notifications
# Run Versia Server with those values missing to generate new keys
# Sensitive value
# public = ""
# Sensitive value
# private = ""
[defaults]
# Default visibility for new notes
# Can be public, unlisted, private or direct
# Private only sends to followers, unlisted doesn't show up in timelines
visibility = "public"
# Default language for new notes (ISO code)
# Default language for new notes
language = "en"
# Default avatar, must be a valid URL or left out for a placeholder avatar
# avatar = ""
# Default header, must be a valid URL or left out for none
# header = ""
# A style name from https://www.dicebear.com/styles
placeholder_style = "thumbs"
# Default avatar, must be a valid URL or ""
avatar = ""
# Default header, must be a valid URL or ""
header = ""
[queues]
# Controls the delivery queue (for outbound federation)
[queues.delivery]
# Time in seconds to remove completed jobs
remove_after_complete_seconds = 31536000
# Time in seconds to remove failed jobs
remove_after_failure_seconds = 31536000
[activitypub]
# Use ActivityPub Tombstones instead of deleting objects
use_tombstones = true
# Fetch all members of collections (followers, following, etc) when receiving them
# WARNING: This can be a lot of data, and is not recommended
fetch_all_collection_members = false # NOT IMPLEMENTED
# Controls the inbox processing queue (for inbound federation)
[queues.inbox]
# Time in seconds to remove completed jobs
remove_after_complete_seconds = 31536000
# Time in seconds to remove failed jobs
remove_after_failure_seconds = 31536000
# The following values must be instance domain names without "https" or glob patterns
# Rejects all activities from these instances (fediblocking)
reject_activities = []
# Force posts from this instance to be followers only
force_followers_only = [] # NOT IMPLEMENTED
# Discard all reports from these instances
discard_reports = [] # NOT IMPLEMENTED
# Discard all deletes from these instances
discard_deletes = []
# Discard all updates (edits) from these instances
discard_updates = []
# Discard all banners from these instances
discard_banners = [] # NOT IMPLEMENTED
# Discard all avatars from these instances
discard_avatars = [] # NOT IMPLEMENTED
# Discard all follow requests from these instances
discard_follows = []
# Force set these instances' media as sensitive
force_sensitive = [] # NOT IMPLEMENTED
# Remove these instances' media
remove_media = [] # NOT IMPLEMENTED
# Controls the fetch queue (for remote data refreshes)
[queues.fetch]
# Time in seconds to remove completed jobs
remove_after_complete_seconds = 31536000
# Time in seconds to remove failed jobs
remove_after_failure_seconds = 31536000
# Controls the push queue (for push notification delivery)
[queues.push]
# Time in seconds to remove completed jobs
remove_after_complete_seconds = 31536000
# Time in seconds to remove failed jobs
remove_after_failure_seconds = 31536000
# Controls the media queue (for media processing)
[queues.media]
# Time in seconds to remove completed jobs
remove_after_complete_seconds = 31536000
# Time in seconds to remove failed jobs
remove_after_failure_seconds = 31536000
[federation]
# This is a list of domain names, such as "mastodon.social" or "pleroma.site"
# These changes will not retroactively apply to existing data before they were changed
# For that, please use the CLI (in a later release)
# These instances will not be federated with
blocked = []
# These instances' data will only be shown to followers, not in public timelines
followers_only = []
[federation.discard]
# These objects will be discarded when received from these instances
reports = []
deletes = []
updates = []
media = []
follows = []
# If instance reactions are blocked, likes will also be discarded
likes = []
reactions = []
banners = []
avatars = []
# For bridge software, such as versia-pub/activitypub
# Bridges must be hosted separately from the main Versia Server process
# [federation.bridge]
# Only versia-ap exists for now
# software = "versia-ap"
# If this is empty, any bridge with the correct token
# will be able to send data to your instance
# v4, v6, ranges and wildcards are supported
# allowed_ips = ["192.168.1.0/24"]
# Token for the bridge software
# Bridge must have the same token!
# Sensitive value
# token = "mycooltoken"
# url = "https://ap.versia.social"
# Whether to verify HTTP signatures for every request (warning: can slow down your server
# significantly depending on processing power)
authorized_fetch = false
[instance]
name = "Versia"
description = "A Versia Server instance"
name = "Lysand"
description = "A test instance of Lysand"
# URL to your instance logo (jpg files should be renamed to jpeg)
logo = ""
# URL to your instance banner (jpg files should be renamed to jpeg)
banner = ""
# Paths to instance long description, terms of service, and privacy policy
# These will be parsed as Markdown
#
# extended_description_path = "config/extended_description.md"
# tos_path = "config/tos.md"
# privacy_policy_path = "config/privacy_policy.md"
# Primary instance languages. ISO 639-1 codes.
languages = ["en"]
[instance.contact]
# email = "staff@yourinstance.com"
[instance.branding]
# logo = "https://cdn.example.com/logo.png"
# banner = "https://cdn.example.com/banner.png"
# Used for federation. If left empty or missing, the server will generate one for you.
# [instance.keys]
# Sensitive value
# public = ""
# Sensitive value
# private = ""
[[instance.rules]]
# Short description of the rule
text = "No hate speech"
# Longer version of the rule with additional information
hint = "Hate speech includes slurs, threats, and harassment."
[[instance.rules]]
text = "No spam"
# [[instance.rules]]
# ...etc
[permissions]
# Control default permissions for users
# Note that an anonymous user having a permission will not allow them
# to do things that require authentication (e.g. 'owner:notes' -> posting a note will need
# auth, but viewing a note will not)
# See https://server.versia.pub/api/roles#list-of-permissions for a list of all permissions
# Defaults to being able to login and manage their own content
# anonymous = []
# Defaults to identical to anonymous
# default = []
# Defaults to being able to manage all instance data, content, and users
# admin = []
[filters]
# Drop notes with these regex filters (only applies to new activities)
note_filters = [
# "(https?://)?(www\\.)?youtube\\.com/watch\\?v=[a-zA-Z0-9_-]+",
# "(https?://)?(www\\.)?youtu\\.be/[a-zA-Z0-9_-]+",
]
# Drop users with these regex filters (only applies to new activities)
username_filters = []
# Drop users with these regex filters (only applies to new activities)
displayname_filters = []
# Drop users with these regex filters (only applies to new activities)
bio_filters = []
emoji_filters = [] # NOT IMPLEMENTED
[logging]
# Log all requests (warning: this is a lot of data)
log_requests = true
# Log requests and their contents (warning: this is a lot of data)
log_requests_verbose = false
# Available levels: trace, debug, info, warning, error, fatal
log_level = "info" # For console output
# Log all filtered objects
log_filters = true
# [logging.file]
# path = "logs/versia.log"
# log_level = "info"
#
# [logging.file.rotation]
# max_size = 10_000_000 # 10 MB
# max_files = 10 # Keep 10 rotated files
#
# https://sentry.io support
# [logging.sentry]
# dsn = "https://example.com"
# debug = false
# sample_rate = 1.0
# traces_sample_rate = 1.0
# Can also be regex
# trace_propagation_targets = []
# max_breadcrumbs = 100
# environment = "production"
# log_level = "info"
[ratelimits]
# Amount to multiply every route's duration by
duration_coeff = 1.0
# Amount to multiply every route's max by
max_coeff = 1.0
[authentication]
# Run Versia Server with this value missing to generate a new key
# key = ""
# The provider MUST support OpenID Connect with .well-known discovery
# Most notably, GitHub does not support this
# Redirect URLs in your OpenID provider can be set to this:
# <base_url>/oauth/sso/<provider_id>/callback*
# The asterisk is important, as it allows for any query parameters to be passed
# Authentik for example uses regex so it can be set to (regex):
# <base_url>/oauth/sso/<provider_id>/callback.*
# [[authentication.openid_providers]]
# name = "CPlusPatch ID"
# id = "cpluspatch-id"
# This MUST match the provider's issuer URI, including the trailing slash (or lack thereof)
# url = "https://id.cpluspatch.com/application/o/versia-testing/"
# client_id = "XXXX"
# Sensitive value
# client_secret = "XXXXX"
# icon = "https://cpluspatch.com/images/icons/logo.svg"
[custom_ratelimits]
# Add in any API route in this style here
"/api/v1/timelines/public" = { duration = 60, max = 200 }

File diff suppressed because it is too large

20
database/datasource.ts Normal file
View file

@ -0,0 +1,20 @@
import { Queue } from "bullmq";
import { getConfig } from "../utils/config";
import { PrismaClient } from "@prisma/client";
const config = getConfig();
const client = new PrismaClient({
datasourceUrl: `postgresql://${config.database.username}:${config.database.password}@${config.database.host}:${config.database.port}/${config.database.database}`,
});
const federationQueue = new Queue("federation", {
connection: {
host: config.redis.queue.host,
port: Number(config.redis.queue.port),
password: config.redis.queue.password || undefined,
db: config.redis.queue.database || undefined,
},
});
export { client, federationQueue };
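A short usage sketch for the two exports; the job name and payload passed to the queue are placeholders, since the federation worker's contract is not part of this file.
// Sketch: query through the shared Prisma client and enqueue a federation job.
// "deliver" and the payload shape are illustrative, not the actual queue contract.
import { client, federationQueue } from "~database/datasource";

export const queueDelivery = async (statusId: string): Promise<void> => {
    const status = await client.status.findUnique({ where: { id: statusId } });
    if (!status) {
        return;
    }
    await federationQueue.add("deliver", { statusId }); // BullMQ Queue#add(name, data)
};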

View file

@ -0,0 +1,39 @@
import type { APIApplication } from "~types/entities/application";
import type { Application } from "@prisma/client";
import { client } from "~database/datasource";
/**
* Represents an application that can authenticate with the API.
*/
/**
* Retrieves the application associated with the given access token.
* @param token The access token to retrieve the application for.
* @returns The application associated with the given access token, or null if no such application exists.
*/
export const getFromToken = async (
token: string
): Promise<Application | null> => {
const dbToken = await client.token.findFirst({
where: {
access_token: token,
},
include: {
application: true,
},
});
return dbToken?.application || null;
};
/**
* Converts this application to an API application.
* @returns The API application representation of this application.
*/
export const applicationToAPI = (app: Application): APIApplication => {
return {
name: app.name,
website: app.website,
vapid_key: app.vapid_key,
};
};
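A sketch of the bearer-token check this enables; everything around getFromToken (the header parsing and the module path) is illustrative scaffolding rather than the server's actual middleware.
// Sketch: resolve the application behind an "Authorization: Bearer <token>" header.
import { getFromToken } from "~database/entities/Application"; // assumed path

export const applicationFromRequest = async (req: Request) => {
    const header = req.headers.get("Authorization") ?? "";
    if (!header.startsWith("Bearer ")) {
        return null;
    }
    return getFromToken(header.slice("Bearer ".length)); // Application row, or null
};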

View file

@ -0,0 +1,66 @@
import type { ConfigType } from "@config";
import type { Attachment } from "@prisma/client";
import type { APIAsyncAttachment } from "~types/entities/async_attachment";
import type { APIAttachment } from "~types/entities/attachment";
export const attachmentToAPI = (
attachment: Attachment
): APIAsyncAttachment | APIAttachment => {
let type = "unknown";
if (attachment.mime_type.startsWith("image/")) {
type = "image";
} else if (attachment.mime_type.startsWith("video/")) {
type = "video";
} else if (attachment.mime_type.startsWith("audio/")) {
type = "audio";
}
return {
id: attachment.id,
type: type as any,
url: attachment.url,
remote_url: attachment.remote_url,
preview_url: attachment.thumbnail_url,
text_url: null,
meta: {
width: attachment.width || undefined,
height: attachment.height || undefined,
fps: attachment.fps || undefined,
size:
attachment.width && attachment.height
? `${attachment.width}x${attachment.height}`
: undefined,
duration: attachment.duration || undefined,
length: attachment.size?.toString() || undefined,
aspect:
attachment.width && attachment.height
? attachment.width / attachment.height
: undefined,
original: {
width: attachment.width || undefined,
height: attachment.height || undefined,
size:
attachment.width && attachment.height
? `${attachment.width}x${attachment.height}`
: undefined,
aspect:
attachment.width && attachment.height
? attachment.width / attachment.height
: undefined,
},
// Unclear whether `size` or `length` is the right field here
},
description: attachment.description,
blurhash: attachment.blurhash,
};
};
export const getUrl = (hash: string, config: ConfigType) => {
if (config.media.backend === "local") {
return `${config.http.base_url}/media/${hash}`;
} else if (config.media.backend === "s3") {
return `${config.s3.public_url}/${hash}`;
}
return "";
};

View file

@ -0,0 +1,78 @@
import type { APIEmoji } from "~types/entities/emoji";
import type { Emoji as LysandEmoji } from "~types/lysand/extensions/org.lysand/custom_emojis";
import { client } from "~database/datasource";
import type { Emoji } from "@prisma/client";
/**
* Represents an emoji entity in the database.
*/
/**
* Used for parsing emojis from local text
* @param text The text to parse
* @returns An array of emojis
*/
export const parseEmojis = async (text: string): Promise<Emoji[]> => {
const regex = /:[a-zA-Z0-9_]+:/g;
const matches = text.match(regex);
if (!matches) return [];
return await client.emoji.findMany({
where: {
shortcode: {
in: matches.map(match => match.replace(/:/g, "")),
},
instanceId: null,
},
include: {
instance: true,
},
});
};
export const addEmojiIfNotExists = async (emoji: LysandEmoji) => {
const existingEmoji = await client.emoji.findFirst({
where: {
shortcode: emoji.name,
instance: null,
},
});
if (existingEmoji) return existingEmoji;
return await client.emoji.create({
data: {
shortcode: emoji.name,
url: emoji.url[0].content,
alt: emoji.alt || null,
content_type: emoji.url[0].content_type,
visible_in_picker: true,
},
});
};
/**
* Converts the emoji to an APIEmoji object.
* @returns The APIEmoji object.
*/
export const emojiToAPI = (emoji: Emoji): APIEmoji => {
return {
shortcode: emoji.shortcode,
static_url: emoji.url, // TODO: Add static version
url: emoji.url,
visible_in_picker: emoji.visible_in_picker,
category: undefined,
};
};
export const emojiToLysand = (emoji: Emoji): LysandEmoji => {
return {
name: emoji.shortcode,
url: [
{
content: emoji.url,
content_type: emoji.content_type,
},
],
alt: emoji.alt || undefined,
};
};
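A small hedged usage example for `parseEmojis` (the text and shortcodes are invented):

```ts
// parseEmojis is the helper defined above; it matches the :shortcode: pattern and
// returns only local emojis (instanceId === null)
const emojis = await parseEmojis("Hello :wave: welcome to the instance :blobcat:");
// -> Emoji rows whose shortcode is "wave" or "blobcat", if they exist locally
```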

View file

@ -0,0 +1,49 @@
import type { Instance } from "@prisma/client";
import { client } from "~database/datasource";
import type { ServerMetadata } from "~types/lysand/Object";
/**
* Represents an instance in the database.
*/
/**
* Adds an instance to the database if it doesn't already exist.
* @param url
* @returns Either the database instance if it already exists, or a newly created instance.
*/
export const addInstanceIfNotExists = async (
url: string
): Promise<Instance> => {
const origin = new URL(url).origin;
const hostname = new URL(url).hostname;
const found = await client.instance.findFirst({
where: {
base_url: hostname,
},
});
if (found) return found;
// Fetch the instance configuration
const metadata = (await fetch(`${origin}/.well-known/lysand`).then(res =>
res.json()
)) as Partial<ServerMetadata>;
if (metadata.type !== "ServerMetadata") {
throw new Error("Invalid instance metadata");
}
if (!(metadata.name && metadata.version)) {
throw new Error("Invalid instance metadata");
}
return await client.instance.create({
data: {
base_url: hostname,
name: metadata.name,
version: metadata.version,
logo: metadata.logo as any,
},
});
};
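For reference, a hedged sketch of a `/.well-known/lysand` payload that would satisfy the checks above; only `type`, `name` and `version` are actually validated, the rest of the values are invented:

```ts
import type { ServerMetadata } from "~types/lysand/Object";

const exampleMetadata: Partial<ServerMetadata> = {
    type: "ServerMetadata",
    name: "Example Instance",
    version: "0.2.0",
    // logo is stored as-is ("as any") and is not validated by addInstanceIfNotExists
};
```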

84
database/entities/Like.ts Normal file
View file

@ -0,0 +1,84 @@
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
import type { Like as LysandLike } from "~types/lysand/Object";
import { getConfig } from "@config";
import type { Like } from "@prisma/client";
import { client } from "~database/datasource";
import type { UserWithRelations } from "./User";
import type { StatusWithRelations } from "./Status";
/**
* Represents a Like entity in the database.
*/
export const toLysand = (like: Like): LysandLike => {
return {
id: like.id,
author: (like as any).liker?.uri,
type: "Like",
created_at: new Date(like.createdAt).toISOString(),
object: (like as any).liked?.uri,
uri: `${getConfig().http.base_url}/actions/${like.id}`,
};
};
/**
* Create a like
* @param user User liking the status
* @param status Status being liked
*/
export const createLike = async (
user: UserWithRelations,
status: StatusWithRelations
) => {
await client.like.create({
data: {
likedId: status.id,
likerId: user.id,
},
});
if (status.author.instanceId === user.instanceId) {
// Notify the user that their post has been favourited
await client.notification.create({
data: {
accountId: user.id,
type: "favourite",
notifiedId: status.authorId,
statusId: status.id,
},
});
} else {
// TODO: Add database jobs for federating this
}
};
/**
* Delete a like
* @param user User deleting their like
* @param status Status being unliked
*/
export const deleteLike = async (
user: UserWithRelations,
status: StatusWithRelations
) => {
await client.like.deleteMany({
where: {
likedId: status.id,
likerId: user.id,
},
});
// Remove the favourite notification that was created when the post was liked
await client.notification.deleteMany({
where: {
accountId: user.id,
type: "favourite",
notifiedId: status.authorId,
statusId: status.id,
},
});
if (user.instanceId === null && status.author.instanceId !== null) {
// User is local, federate the delete
// TODO: Federate this
}
};

View file

@ -0,0 +1,23 @@
import type { Notification } from "@prisma/client";
import type { APINotification } from "~types/entities/notification";
import { type StatusWithRelations, statusToAPI } from "./Status";
import { type UserWithRelations, userToAPI } from "./User";
export type NotificationWithRelations = Notification & {
status: StatusWithRelations | null;
account: UserWithRelations;
};
export const notificationToAPI = async (
notification: NotificationWithRelations
): Promise<APINotification> => {
return {
account: userToAPI(notification.account),
created_at: new Date(notification.createdAt).toISOString(),
id: notification.id,
type: notification.type,
status: notification.status
? await statusToAPI(notification.status, notification.account)
: undefined,
};
};

View file

@ -0,0 +1,87 @@
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
import type { LysandObject } from "@prisma/client";
import { client } from "~database/datasource";
import type { LysandObjectType } from "~types/lysand/Object";
/**
* Represents a Lysand object in the database.
*/
export const createFromObject = async (object: LysandObjectType) => {
const foundObject = await client.lysandObject.findFirst({
where: { remote_id: object.id },
include: {
author: true,
},
});
if (foundObject) {
return foundObject;
}
const author = await client.lysandObject.findFirst({
where: { uri: (object as any).author },
});
return await client.lysandObject.create({
data: {
authorId: author?.id,
created_at: new Date(object.created_at),
extensions: object.extensions || {},
remote_id: object.id,
type: object.type,
uri: object.uri,
// Rest of data (remove id, author, created_at, extensions, type, uri)
extra_data: Object.fromEntries(
Object.entries(object).filter(
([key]) =>
![
"id",
"author",
"created_at",
"extensions",
"type",
"uri",
].includes(key)
)
),
},
});
};
export const toLysand = (lyObject: LysandObject): LysandObjectType => {
return {
id: lyObject.remote_id || lyObject.id,
created_at: new Date(lyObject.created_at).toISOString(),
type: lyObject.type,
uri: lyObject.uri,
// @ts-expect-error This works, I promise
...lyObject.extra_data,
extensions: lyObject.extensions,
};
};
export const isPublication = (lyObject: LysandObject): boolean => {
return lyObject.type === "Note" || lyObject.type === "Patch";
};
export const isAction = (lyObject: LysandObject): boolean => {
return [
"Like",
"Follow",
"Dislike",
"FollowAccept",
"FollowReject",
"Undo",
"Announce",
].includes(lyObject.type);
};
export const isActor = (lyObject: LysandObject): boolean => {
return lyObject.type === "User";
};
export const isExtension = (lyObject: LysandObject): boolean => {
return lyObject.type === "Extension";
};
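To make the `extra_data` extraction above concrete, a hedged example with an invented incoming `Like`; only the keys in the exclusion list get dedicated columns, everything else lands in `extra_data`:

```ts
import type { LysandObjectType } from "~types/lysand/Object";

const incoming = {
    id: "11111111-2222-3333-4444-555555555555",
    type: "Like",
    uri: "https://remote.example/actions/11111111",
    created_at: "2024-01-01T00:00:00.000Z",
    author: "https://remote.example/users/aaaa",
    object: "https://local.example/statuses/bbbb",
    extensions: {},
};

// createFromObject is the helper defined just above
await createFromObject(incoming as LysandObjectType);
// Stored row (sketch): remote_id, type, uri, created_at and extensions get their own columns,
// "author" is resolved to authorId via a uri lookup, and the remainder becomes
// extra_data = { object: "https://local.example/statuses/bbbb" }
```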

197
database/entities/Queue.ts Normal file
View file

@ -0,0 +1,197 @@
import { getConfig } from "@config";
import { Worker } from "bullmq";
import { client, federationQueue } from "~database/datasource";
import {
statusAndUserRelations,
statusToLysand,
type StatusWithRelations,
} from "./Status";
import type { User } from "@prisma/client";
const config = getConfig();
export const federationWorker = new Worker(
"federation",
async job => {
await job.updateProgress(0);
switch (job.name) {
case "federation": {
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
const statusId = job.data.id as string;
const status = await client.status.findUnique({
where: { id: statusId },
include: statusAndUserRelations,
});
if (!status) return;
// Only get remote users that follow the author of the status, and the remote mentioned users
const peopleToSendTo = await client.user.findMany({
where: {
OR: [
["public", "unlisted", "private"].includes(
status.visibility
)
? {
relationships: {
some: {
subjectId: status.authorId,
following: true,
},
},
instanceId: {
not: null,
},
}
: {},
// Mentioned users
{
id: {
in: status.mentions.map(m => m.id),
},
instanceId: {
not: null,
},
},
],
},
});
let peopleDone = 0;
// Spawn sendToServer job for each user
for (const person of peopleToSendTo) {
await federationQueue.add("sendToServer", {
id: statusId,
user: person,
});
peopleDone++;
await job.updateProgress(
Math.round((peopleDone / peopleToSendTo.length) * 100)
);
}
break;
}
case "sendToServer": {
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
const statusId = job.data.id as string;
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
const user = job.data.user as User;
const status = await client.status.findUnique({
where: { id: statusId },
include: statusAndUserRelations,
});
if (!status) return;
const response = await federateStatusTo(
status,
status.author,
user
);
if (response.status !== 200) {
throw new Error(
`Federation error: ${response.status} ${response.statusText}`
);
}
break;
}
}
await job.updateProgress(100);
return true;
},
{
connection: {
host: config.redis.queue.host,
port: config.redis.queue.port,
password: config.redis.queue.password,
db: config.redis.queue.database || undefined,
},
removeOnComplete: {
count: 400,
},
removeOnFail: {
count: 3000,
},
}
);
/**
* Convert a string into an ArrayBuffer
* from https://developers.google.com/web/updates/2012/06/How-to-convert-ArrayBuffer-to-and-from-String
*/
export const str2ab = (str: string) => {
const buf = new ArrayBuffer(str.length);
const bufView = new Uint8Array(buf);
for (let i = 0, strLen = str.length; i < strLen; i++) {
bufView[i] = str.charCodeAt(i);
}
return buf;
};
export const federateStatusTo = async (
status: StatusWithRelations,
sender: User,
user: User
) => {
const privateKey = await crypto.subtle.importKey(
"pkcs8",
str2ab(atob(user.privateKey ?? "")),
"Ed25519",
false,
["sign"]
);
const digest = await crypto.subtle.digest(
"SHA-256",
new TextEncoder().encode("request_body")
);
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
const userInbox = new URL((user.endpoints as any).inbox);
const date = new Date();
const signature = await crypto.subtle.sign(
"Ed25519",
privateKey,
new TextEncoder().encode(
`(request-target): post ${userInbox.pathname}\n` +
`host: ${userInbox.host}\n` +
`date: ${date.toUTCString()}\n` +
`digest: SHA-256=${btoa(
String.fromCharCode(...new Uint8Array(digest))
)}\n`
)
);
const signatureBase64 = btoa(
String.fromCharCode(...new Uint8Array(signature))
);
return fetch(userInbox, {
method: "POST",
headers: {
"Content-Type": "application/json",
Date: date.toUTCString(),
Origin: config.http.base_url,
Signature: `keyId="${sender.uri}",algorithm="ed25519",headers="(request-target) host date digest",signature="${signatureBase64}"`,
},
body: JSON.stringify(statusToLysand(status)),
});
};
export const addStatusFederationJob = async (statusId: string) => {
await federationQueue.add("federation", {
id: statusId,
});
};
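For context, a hedged sketch of what signature verification on the receiving side could look like for requests produced by `federateStatusTo`. It reuses `str2ab` from above; the function name and the naive `Signature` header parsing are illustrative, and the digest is recomputed over the literal string `"request_body"` because that is what the sender hashes above:

```ts
export const verifyFederationRequest = async (
    req: Request,
    senderPublicKeyBase64: string // base64-encoded SPKI key, as produced by generateUserKeys
): Promise<boolean> => {
    const publicKey = await crypto.subtle.importKey(
        "spki",
        str2ab(atob(senderPublicKeyBase64)),
        "Ed25519",
        false,
        ["verify"]
    );

    // Naively pull the base64 signature out of the Signature header format built above
    const signatureHeader = req.headers.get("Signature") ?? "";
    const signatureBase64 = /signature="([^"]+)"/.exec(signatureHeader)?.[1] ?? "";

    const digest = await crypto.subtle.digest(
        "SHA-256",
        new TextEncoder().encode("request_body")
    );

    const url = new URL(req.url);
    const signedString =
        `(request-target): post ${url.pathname}\n` +
        `host: ${url.host}\n` +
        `date: ${req.headers.get("Date") ?? ""}\n` +
        `digest: SHA-256=${btoa(String.fromCharCode(...new Uint8Array(digest)))}\n`;

    return await crypto.subtle.verify(
        "Ed25519",
        publicKey,
        str2ab(atob(signatureBase64)),
        new TextEncoder().encode(signedString)
    );
};
```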

View file

@ -0,0 +1,88 @@
import type { Relationship, User } from "@prisma/client";
import type { APIRelationship } from "~types/entities/relationship";
import { client } from "~database/datasource";
/**
* Stores Mastodon API relationships
*/
/**
* Creates a new relationship between two users.
* @param owner The user who owns the relationship.
* @param other The user who is the subject of the relationship.
* @returns The newly created relationship.
*/
export const createNewRelationship = async (
owner: User,
other: User
): Promise<Relationship> => {
return await client.relationship.create({
data: {
ownerId: owner.id,
subjectId: other.id,
languages: [],
following: false,
showingReblogs: false,
notifying: false,
followedBy: false,
blocking: false,
blockedBy: false,
muting: false,
mutingNotifications: false,
requested: false,
domainBlocking: false,
endorsed: false,
note: "",
},
});
};
export const checkForBidirectionalRelationships = async (
user1: User,
user2: User,
createIfNotExists = true
): Promise<boolean> => {
const relationship1 = await client.relationship.findFirst({
where: {
ownerId: user1.id,
subjectId: user2.id,
},
});
const relationship2 = await client.relationship.findFirst({
where: {
ownerId: user2.id,
subjectId: user1.id,
},
});
if (!relationship1 && !relationship2 && createIfNotExists) {
await createNewRelationship(user1, user2);
await createNewRelationship(user2, user1);
}
return !!relationship1 && !!relationship2;
};
/**
* Converts the relationship to an API-friendly format.
* @returns The API-friendly relationship.
*/
export const relationshipToAPI = (rel: Relationship): APIRelationship => {
return {
blocked_by: rel.blockedBy,
blocking: rel.blocking,
domain_blocking: rel.domainBlocking,
endorsed: rel.endorsed,
followed_by: rel.followedBy,
following: rel.following,
id: rel.subjectId,
muting: rel.muting,
muting_notifications: rel.mutingNotifications,
notifying: rel.notifying,
requested: rel.requested,
showing_reblogs: rel.showingReblogs,
languages: rel.languages,
note: rel.note,
};
};

647
database/entities/Status.ts Normal file
View file

@ -0,0 +1,647 @@
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
import { getConfig } from "@config";
import type { UserWithRelations } from "./User";
import {
fetchRemoteUser,
parseMentionsUris,
userRelations,
userToAPI,
} from "./User";
import { client } from "~database/datasource";
import type { LysandPublication, Note } from "~types/lysand/Object";
import { htmlToText } from "html-to-text";
import { getBestContentType } from "@content_types";
import {
Prisma,
type Application,
type Emoji,
type Relationship,
type Status,
type User,
} from "@prisma/client";
import { emojiToAPI, emojiToLysand, parseEmojis } from "./Emoji";
import type { APIStatus } from "~types/entities/status";
import { applicationToAPI } from "./Application";
import { attachmentToAPI } from "./Attachment";
import type { APIAttachment } from "~types/entities/attachment";
import { sanitizeHtml } from "@sanitization";
import { parse } from "marked";
import linkifyStr from "linkify-string";
import linkifyHtml from "linkify-html";
import { addStausToMeilisearch } from "@meilisearch";
const config = getConfig();
export const statusAndUserRelations: Prisma.StatusInclude = {
author: {
include: userRelations,
},
application: true,
emojis: true,
inReplyToPost: {
include: {
author: {
include: userRelations,
},
application: true,
emojis: true,
inReplyToPost: {
include: {
author: true,
},
},
instance: true,
mentions: true,
pinnedBy: true,
_count: {
select: {
replies: true,
},
},
},
},
reblogs: true,
attachments: true,
instance: true,
mentions: {
include: userRelations,
},
pinnedBy: true,
_count: {
select: {
replies: true,
likes: true,
reblogs: true,
},
},
reblog: {
include: {
author: {
include: userRelations,
},
application: true,
emojis: true,
inReplyToPost: {
include: {
author: true,
},
},
instance: true,
mentions: {
include: userRelations,
},
pinnedBy: true,
_count: {
select: {
replies: true,
},
},
},
},
quotingPost: {
include: {
author: {
include: userRelations,
},
application: true,
emojis: true,
inReplyToPost: {
include: {
author: true,
},
},
instance: true,
mentions: true,
pinnedBy: true,
_count: {
select: {
replies: true,
},
},
},
},
likes: {
include: {
liker: true,
},
},
};
const statusRelations = Prisma.validator<Prisma.StatusDefaultArgs>()({
include: statusAndUserRelations,
});
export type StatusWithRelations = Prisma.StatusGetPayload<
typeof statusRelations
>;
/**
* Represents a status (i.e. a post)
*/
/**
* Returns whether this status is viewable by a user.
* @param user The user to check.
* @returns Whether this status is viewable by the user.
*/
export const isViewableByUser = (status: Status, user: User | null) => {
if (status.authorId === user?.id) return true;
if (status.visibility === "public") return true;
else if (status.visibility === "unlisted") return true;
else if (status.visibility === "private") {
// @ts-expect-error Prisma TypeScript types don't include relations
return !!(user?.relationships as Relationship[]).find(
rel => rel.id === status.authorId
);
} else {
// @ts-expect-error Prisma TypeScript types don't include relations
return user && (status.mentions as User[]).includes(user);
}
};
export const fetchFromRemote = async (uri: string): Promise<Status | null> => {
// Check if already in database
const existingStatus: StatusWithRelations | null =
await client.status.findFirst({
where: {
uri: uri,
},
include: statusAndUserRelations,
});
if (existingStatus) return existingStatus;
const status = await fetch(uri);
if (status.status === 404) return null;
const body = (await status.json()) as LysandPublication;
const content = getBestContentType(body.contents);
const emojis = await parseEmojis(content?.content || "");
const author = await fetchRemoteUser(body.author);
let replyStatus: Status | null = null;
let quotingStatus: Status | null = null;
if (body.replies_to.length > 0) {
replyStatus = await fetchFromRemote(body.replies_to[0]);
}
if (body.quotes.length > 0) {
quotingStatus = await fetchFromRemote(body.quotes[0]);
}
return await createNewStatus({
account: author,
content: content?.content || "",
content_type: content?.content_type,
application: null,
// TODO: Add visibility
visibility: "public",
spoiler_text: body.subject || "",
uri: body.uri,
sensitive: body.is_sensitive,
emojis: emojis,
mentions: await parseMentionsUris(body.mentions),
reply: replyStatus
? {
status: replyStatus,
user: (replyStatus as any).author,
}
: undefined,
quote: quotingStatus || undefined,
});
};
/**
* Return all the ancestors of this post.
*/
// eslint-disable-next-line @typescript-eslint/require-await, @typescript-eslint/no-unused-vars
export const getAncestors = async (
status: StatusWithRelations,
fetcher: UserWithRelations | null
) => {
const ancestors: StatusWithRelations[] = [];
let currentStatus = status;
while (currentStatus.inReplyToPostId) {
const parent = await client.status.findFirst({
where: {
id: currentStatus.inReplyToPostId,
},
include: statusAndUserRelations,
});
if (!parent) break;
ancestors.push(parent);
currentStatus = parent;
}
// Filter for posts that are viewable by the user
const viewableAncestors = ancestors.filter(ancestor =>
isViewableByUser(ancestor, fetcher)
);
return viewableAncestors;
};
/**
* Return all the descendants of this post (recursive)
* Temporary implementation, will be replaced with a recursive SQL query when Prisma adds support for it
*/
// eslint-disable-next-line @typescript-eslint/require-await, @typescript-eslint/no-unused-vars
export const getDescendants = async (
status: StatusWithRelations,
fetcher: UserWithRelations | null,
depth = 0
) => {
const descendants: StatusWithRelations[] = [];
const currentStatus = status;
// Fetch all children of children of children recursively calling getDescendants
const children = await client.status.findMany({
where: {
inReplyToPostId: currentStatus.id,
},
include: statusAndUserRelations,
});
for (const child of children) {
descendants.push(child);
if (depth < 20) {
const childDescendants = await getDescendants(
child,
fetcher,
depth + 1
);
descendants.push(...childDescendants);
}
}
// Filter for posts that are viewable by the user
const viewableDescendants = descendants.filter(descendant =>
isViewableByUser(descendant, fetcher)
);
return viewableDescendants;
};
/**
* Creates a new status and saves it to the database.
* @param data The data for the new status.
* @returns A promise that resolves with the new status.
*/
export const createNewStatus = async (data: {
account: User;
application: Application | null;
content: string;
visibility: APIStatus["visibility"];
sensitive: boolean;
spoiler_text: string;
emojis?: Emoji[];
content_type?: string;
uri?: string;
mentions?: User[];
media_attachments?: string[];
reply?: {
status: Status;
user: User;
};
quote?: Status;
}) => {
// Get people mentioned in the content (match @username or @username@domain.com mentions)
const mentionedPeople =
data.content.match(/@[a-zA-Z0-9_]+(@[a-zA-Z0-9_]+)?/g) ?? [];
let mentions = data.mentions || [];
// Parse emojis
const emojis = await parseEmojis(data.content);
data.emojis = data.emojis ? [...data.emojis, ...emojis] : emojis;
// Get list of mentioned users
if (mentions.length === 0) {
mentions = await client.user.findMany({
where: {
OR: mentionedPeople.map(person => ({
username: person.split("@")[1],
instance: {
base_url: person.split("@")[2],
},
})),
},
include: userRelations,
});
}
let formattedContent;
// Get HTML version of content
if (data.content_type === "text/markdown") {
formattedContent = linkifyHtml(await sanitizeHtml(parse(data.content)));
} else if (data.content_type === "text/x.misskeymarkdown") {
// TODO: Parse as MFM (not implemented yet; formattedContent is left undefined here)
} else {
// Parse as plaintext
formattedContent = linkifyStr(data.content);
// Split by newline and add <p> tags
formattedContent = formattedContent
.split("\n")
.map(line => `<p>${line}</p>`)
.join("\n");
}
let status = await client.status.create({
data: {
authorId: data.account.id,
applicationId: data.application?.id,
content: formattedContent,
contentSource: data.content,
contentType: data.content_type,
visibility: data.visibility,
sensitive: data.sensitive,
spoilerText: data.spoiler_text,
emojis: {
connect: data.emojis.map(emoji => {
return {
id: emoji.id,
};
}),
},
attachments: data.media_attachments
? {
connect: data.media_attachments.map(attachment => {
return {
id: attachment,
};
}),
}
: undefined,
inReplyToPostId: data.reply?.status.id,
quotingPostId: data.quote?.id,
instanceId: data.account.instanceId || undefined,
isReblog: false,
uri:
data.uri ||
`${config.http.base_url}/statuses/FAKE-${crypto.randomUUID()}`,
mentions: {
connect: mentions.map(mention => {
return {
id: mention.id,
};
}),
},
},
include: statusAndUserRelations,
});
// Update URI
status = await client.status.update({
where: {
id: status.id,
},
data: {
uri: data.uri || `${config.http.base_url}/statuses/${status.id}`,
},
include: statusAndUserRelations,
});
// Create notification
if (status.inReplyToPost) {
await client.notification.create({
data: {
notifiedId: status.inReplyToPost.authorId,
accountId: status.authorId,
type: "mention",
statusId: status.id,
},
});
}
// Add to search index
await addStausToMeilisearch(status);
return status;
};
export const editStatus = async (
status: StatusWithRelations,
data: {
content: string;
visibility?: APIStatus["visibility"];
sensitive: boolean;
spoiler_text: string;
emojis?: Emoji[];
content_type?: string;
uri?: string;
mentions?: User[];
media_attachments?: string[];
}
) => {
// Get people mentioned in the content (match @username or @username@domain.com mentions)
const mentionedPeople =
data.content.match(/@[a-zA-Z0-9_]+(@[a-zA-Z0-9_]+)?/g) ?? [];
let mentions = data.mentions || [];
// Parse emojis
const emojis = await parseEmojis(data.content);
data.emojis = data.emojis ? [...data.emojis, ...emojis] : emojis;
// Get list of mentioned users
if (mentions.length === 0) {
mentions = await client.user.findMany({
where: {
OR: mentionedPeople.map(person => ({
username: person.split("@")[1],
instance: {
base_url: person.split("@")[2],
},
})),
},
include: userRelations,
});
}
let formattedContent;
// Get HTML version of content
if (data.content_type === "text/markdown") {
formattedContent = linkifyHtml(await sanitizeHtml(parse(data.content)));
} else if (data.content_type === "text/x.misskeymarkdown") {
// TODO: Parse as MFM (not implemented yet; formattedContent is left undefined here)
} else {
// Parse as plaintext
formattedContent = linkifyStr(data.content);
// Split by newline and add <p> tags
formattedContent = formattedContent
.split("\n")
.map(line => `<p>${line}</p>`)
.join("\n");
}
const newStatus = await client.status.update({
where: {
id: status.id,
},
data: {
content: formattedContent,
contentSource: data.content,
contentType: data.content_type,
visibility: data.visibility,
sensitive: data.sensitive,
spoilerText: data.spoiler_text,
emojis: {
connect: data.emojis.map(emoji => {
return {
id: emoji.id,
};
}),
},
attachments: data.media_attachments
? {
connect: data.media_attachments.map(attachment => {
return {
id: attachment,
};
}),
}
: undefined,
mentions: {
connect: mentions.map(mention => {
return {
id: mention.id,
};
}),
},
},
include: statusAndUserRelations,
});
return newStatus;
};
export const isFavouritedBy = async (status: Status, user: User) => {
return !!(await client.like.findFirst({
where: {
likerId: user.id,
likedId: status.id,
},
}));
};
/**
* Converts this status to an API status.
* @returns A promise that resolves with the API status.
*/
export const statusToAPI = async (
status: StatusWithRelations,
user?: UserWithRelations
): Promise<APIStatus> => {
return {
id: status.id,
in_reply_to_id: status.inReplyToPostId || null,
in_reply_to_account_id: status.inReplyToPost?.authorId || null,
// @ts-expect-error Prisma TypeScript types don't include relations
account: userToAPI(status.author),
created_at: new Date(status.createdAt).toISOString(),
application: status.application
? applicationToAPI(status.application)
: null,
card: null,
content: status.content,
emojis: status.emojis.map(emoji => emojiToAPI(emoji)),
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
favourited: !!(status.likes ?? []).find(
like => like.likerId === user?.id
),
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
favourites_count: (status.likes ?? []).length,
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
media_attachments: (status.attachments ?? []).map(
a => attachmentToAPI(a) as APIAttachment
),
// @ts-expect-error Prisma TypeScript types don't include relations
mentions: status.mentions.map(mention => userToAPI(mention)),
language: null,
muted: user
? user.relationships.find(r => r.subjectId == status.authorId)
?.muting || false
: false,
pinned: status.pinnedBy.find(u => u.id === user?.id) ? true : false,
// TODO: Add polls
poll: null,
reblog: status.reblog
? await statusToAPI(status.reblog as unknown as StatusWithRelations)
: null,
reblogged: !!(await client.status.findFirst({
where: {
authorId: user?.id,
reblogId: status.id,
},
})),
reblogs_count: status._count.reblogs,
replies_count: status._count.replies,
sensitive: status.sensitive,
spoiler_text: status.spoilerText,
tags: [],
uri: `${config.http.base_url}/statuses/${status.id}`,
visibility: "public",
url: `${config.http.base_url}/statuses/${status.id}`,
bookmarked: false,
quote: status.quotingPost
? await statusToAPI(
status.quotingPost as unknown as StatusWithRelations
)
: null,
quote_id: status.quotingPost?.id || undefined,
};
};
export const statusToLysand = (status: StatusWithRelations): Note => {
return {
type: "Note",
created_at: new Date(status.createdAt).toISOString(),
id: status.id,
author: status.authorId,
uri: `${config.http.base_url}/users/${status.authorId}/statuses/${status.id}`,
contents: [
{
content: status.content,
content_type: "text/html",
},
{
// Content converted to plaintext
content: htmlToText(status.content),
content_type: "text/plain",
},
],
// TODO: Add attachments
attachments: [],
is_sensitive: status.sensitive,
mentions: status.mentions.map(mention => mention.uri),
quotes: status.quotingPost ? [status.quotingPost.uri] : [],
replies_to: status.inReplyToPostId ? [status.inReplyToPostId] : [],
subject: status.spoilerText,
extensions: {
"org.lysand:custom_emojis": {
emojis: status.emojis.map(emoji => emojiToLysand(emoji)),
},
// TODO: Add polls and reactions
},
};
};
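A hedged end-to-end sketch of how the status helpers could be wired together for a local post; `user` and the field values are invented, and `addStatusFederationJob` comes from Queue.ts earlier in this diff:

```ts
// `user` is assumed to be a local account loaded with userRelations (UserWithRelations)
const status = await createNewStatus({
    account: user,
    application: null,
    content: "Hello, fediverse! :wave:",
    content_type: "text/markdown",
    visibility: "public",
    sensitive: false,
    spoiler_text: "",
});

// Convert for a Mastodon-compatible API response...
const apiStatus = await statusToAPI(status, user);

// ...and queue delivery to remote followers and mentioned users
await addStatusFederationJob(status.id);
```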

View file

@ -0,0 +1,6 @@
/**
* The type of token.
*/
export enum TokenType {
BEARER = "Bearer",
}

470
database/entities/User.ts Normal file
View file

@ -0,0 +1,470 @@
import type { ConfigType } from "@config";
import { getConfig } from "@config";
import type { APIAccount } from "~types/entities/account";
import type { User as LysandUser } from "~types/lysand/Object";
import { htmlToText } from "html-to-text";
import type { User } from "@prisma/client";
import { Prisma } from "@prisma/client";
import { client } from "~database/datasource";
import { addEmojiIfNotExists, emojiToAPI, emojiToLysand } from "./Emoji";
import { addInstanceIfNotExists } from "./Instance";
import type { APISource } from "~types/entities/source";
import { addUserToMeilisearch } from "@meilisearch";
export interface AuthData {
user: UserWithRelations | null;
token: string;
}
/**
* Represents a user in the database.
* Stores local and remote users
*/
export const userRelations: Prisma.UserInclude = {
emojis: true,
instance: true,
likes: true,
relationships: true,
relationshipSubjects: true,
pinnedNotes: true,
_count: {
select: {
statuses: true,
likes: true,
},
},
};
const userRelations2 = Prisma.validator<Prisma.UserDefaultArgs>()({
include: userRelations,
});
export type UserWithRelations = Prisma.UserGetPayload<typeof userRelations2>;
/**
* Get the user's avatar in raw URL format
* @param config The config to use
* @returns The raw URL for the user's avatar
*/
export const getAvatarUrl = (user: User, config: ConfigType) => {
if (!user.avatar) return config.defaults.avatar;
if (config.media.backend === "local") {
return `${config.http.base_url}/media/${user.avatar}`;
} else if (config.media.backend === "s3") {
return `${config.s3.public_url}/${user.avatar}`;
}
return "";
};
/**
* Get the user's header in raw URL format
* @param config The config to use
* @returns The raw URL for the user's header
*/
export const getHeaderUrl = (user: User, config: ConfigType) => {
if (!user.header) return config.defaults.header;
if (config.media.backend === "local") {
return `${config.http.base_url}/media/${user.header}`;
} else if (config.media.backend === "s3") {
return `${config.s3.public_url}/${user.header}`;
}
return "";
};
export const getFromRequest = async (req: Request): Promise<AuthData> => {
// Check auth token
const token = req.headers.get("Authorization")?.split(" ")[1] || "";
return { user: await retrieveUserFromToken(token), token };
};
export const fetchRemoteUser = async (uri: string) => {
// Check if user not already in database
const foundUser = await client.user.findUnique({
where: {
uri,
},
include: userRelations,
});
if (foundUser) return foundUser;
const response = await fetch(uri, {
method: "GET",
headers: {
"Content-Type": "application/json",
Accept: "application/json",
},
});
const data = (await response.json()) as Partial<LysandUser>;
if (
!(
data.id &&
data.username &&
data.uri &&
data.created_at &&
data.disliked &&
data.featured &&
data.liked &&
data.followers &&
data.following &&
data.inbox &&
data.outbox &&
data.public_key
)
) {
throw new Error("Invalid user data");
}
// Parse emojis and add them to database
const userEmojis =
data.extensions?.["org.lysand:custom_emojis"]?.emojis ?? [];
const user = await client.user.create({
data: {
username: data.username,
uri: data.uri,
createdAt: new Date(data.created_at),
endpoints: {
disliked: data.disliked,
featured: data.featured,
liked: data.liked,
followers: data.followers,
following: data.following,
inbox: data.inbox,
outbox: data.outbox,
},
avatar: (data.avatar && data.avatar[0].content) || "",
header: (data.header && data.header[0].content) || "",
displayName: data.display_name ?? "",
note: data.bio?.[0].content ?? "",
publicKey: data.public_key.public_key,
source: {
language: null,
note: "",
privacy: "public",
sensitive: false,
fields: [],
},
},
});
// Add to Meilisearch
await addUserToMeilisearch(user);
const emojis = [];
for (const emoji of userEmojis) {
emojis.push(await addEmojiIfNotExists(emoji));
}
const uriData = new URL(data.uri);
return await client.user.update({
where: {
id: user.id,
},
data: {
emojis: {
connect: emojis.map(emoji => ({
id: emoji.id,
})),
},
instanceId: (await addInstanceIfNotExists(uriData.origin)).id,
},
include: userRelations,
});
};
/**
* Fetches the list of followers associated with the actor and updates the user's followers
*/
export const fetchFollowers = () => {
//
};
/**
* Creates a new LOCAL user.
* @param data The data for the new user.
* @returns The newly created user.
*/
export const createNewLocalUser = async (data: {
username: string;
display_name?: string;
password: string;
email: string;
bio?: string;
avatar?: string;
header?: string;
admin?: boolean;
}) => {
const config = getConfig();
const keys = await generateUserKeys();
const user = await client.user.create({
data: {
username: data.username,
displayName: data.display_name ?? data.username,
password: await Bun.password.hash(data.password),
email: data.email,
note: data.bio ?? "",
avatar: data.avatar ?? config.defaults.avatar,
header: data.header ?? config.defaults.avatar,
isAdmin: data.admin ?? false,
uri: "",
publicKey: keys.public_key,
privateKey: keys.private_key,
source: {
language: null,
note: "",
privacy: "public",
sensitive: false,
fields: [],
},
},
});
// Add to Meilisearch
await addUserToMeilisearch(user);
return await client.user.update({
where: {
id: user.id,
},
data: {
uri: `${config.http.base_url}/users/${user.id}`,
endpoints: {
disliked: `${config.http.base_url}/users/${user.id}/disliked`,
featured: `${config.http.base_url}/users/${user.id}/featured`,
liked: `${config.http.base_url}/users/${user.id}/liked`,
followers: `${config.http.base_url}/users/${user.id}/followers`,
following: `${config.http.base_url}/users/${user.id}/following`,
inbox: `${config.http.base_url}/users/${user.id}/inbox`,
outbox: `${config.http.base_url}/users/${user.id}/outbox`,
},
},
include: userRelations,
});
};
/**
* Parses mentions from a list of URIs
*/
export const parseMentionsUris = async (mentions: string[]) => {
return await client.user.findMany({
where: {
uri: {
in: mentions,
},
},
include: userRelations,
});
};
/**
* Retrieves a user from a token.
* @param access_token The access token to retrieve the user from.
* @returns The user associated with the given access token.
*/
export const retrieveUserFromToken = async (access_token: string) => {
if (!access_token) return null;
const token = await client.token.findFirst({
where: {
access_token,
},
include: {
user: {
include: userRelations,
},
},
});
if (!token) return null;
return token.user;
};
/**
* Gets the relationship to another user.
* @param other The other user to get the relationship to.
* @returns The relationship to the other user.
*/
export const getRelationshipToOtherUser = async (
user: UserWithRelations,
other: User
) => {
return await client.relationship.findFirst({
where: {
ownerId: user.id,
subjectId: other.id,
},
});
};
/**
* Generates keys for the user.
*/
export const generateUserKeys = async () => {
const keys = (await crypto.subtle.generateKey("Ed25519", true, [
"sign",
"verify",
])) as CryptoKeyPair;
const privateKey = btoa(
String.fromCharCode.apply(null, [
...new Uint8Array(
// Export the private key as PKCS#8 DER bytes and base64-encode them
await crypto.subtle.exportKey("pkcs8", keys.privateKey)
),
])
);
const publicKey = btoa(
String.fromCharCode(
...new Uint8Array(
// Export the public key as SPKI DER bytes and base64-encode them
await crypto.subtle.exportKey("spki", keys.publicKey)
)
)
);
// PEM header/footer and newlines can be added later on if needed
// These keys are base64-encoded (not encrypted)
return {
private_key: privateKey,
public_key: publicKey,
};
};
export const userToAPI = (
user: UserWithRelations,
isOwnAccount = false
): APIAccount => {
const config = getConfig();
return {
id: user.id,
username: user.username,
display_name: user.displayName,
note: user.note,
url: user.uri,
avatar: getAvatarUrl(user, config),
header: getHeaderUrl(user, config),
locked: user.isLocked,
created_at: new Date(user.createdAt).toISOString(),
followers_count: user.relationshipSubjects.filter(r => r.following)
.length,
following_count: user.relationships.filter(r => r.following).length,
statuses_count: user._count.statuses,
emojis: user.emojis.map(emoji => emojiToAPI(emoji)),
// TODO: Add fields
fields: [],
bot: user.isBot,
source:
isOwnAccount && user.source
? (user.source as any as APISource)
: undefined,
// TODO: Add static avatar and header
avatar_static: "",
header_static: "",
acct:
user.instance === null
? `${user.username}`
: `${user.username}@${user.instance.base_url}`,
// TODO: Add these fields
limited: false,
moved: null,
noindex: false,
suspended: false,
discoverable: undefined,
mute_expires_at: undefined,
group: false,
pleroma: {
is_admin: user.isAdmin,
is_moderator: user.isAdmin,
},
};
};
/**
* Should only return local users
*/
export const userToLysand = (user: UserWithRelations): LysandUser => {
if (user.instanceId !== null) {
throw new Error("Cannot convert remote user to Lysand format");
}
return {
id: user.id,
type: "User",
uri: user.uri,
bio: [
{
content: user.note,
content_type: "text/html",
},
{
content: htmlToText(user.note),
content_type: "text/plain",
},
],
created_at: new Date(user.createdAt).toISOString(),
disliked: `${user.uri}/disliked`,
featured: `${user.uri}/featured`,
liked: `${user.uri}/liked`,
followers: `${user.uri}/followers`,
following: `${user.uri}/following`,
inbox: `${user.uri}/inbox`,
outbox: `${user.uri}/outbox`,
indexable: false,
username: user.username,
avatar: [
{
content: getAvatarUrl(user, getConfig()) || "",
content_type: `image/${user.avatar.split(".")[1]}`,
},
],
header: [
{
content: getHeaderUrl(user, getConfig()) || "",
content_type: `image/${user.header.split(".")[1]}`,
},
],
display_name: user.displayName,
fields: (user.source as any as APISource).fields.map(field => ({
key: [
{
content: field.name,
content_type: "text/html",
},
{
content: htmlToText(field.name),
content_type: "text/plain",
},
],
value: [
{
content: field.value,
content_type: "text/html",
},
{
content: htmlToText(field.value),
content_type: "text/plain",
},
],
})),
public_key: {
actor: `${getConfig().http.base_url}/users/${user.id}`,
public_key: user.publicKey,
},
extensions: {
"org.lysand:custom_emojis": {
emojis: user.emojis.map(emoji => emojiToLysand(emoji)),
},
},
};
};

View file

@ -1,65 +1,52 @@
---
# Run `docker network create lysand-net` before running docker-compose up
version: "3"
services:
versia:
image: ghcr.io/versia-pub/server:main
lysand:
image: ghcr.io/lysand-org/lysand:main
volumes:
- ./logs:/app/dist/logs
- ./config:/app/dist/config:ro
- ./uploads:/app/dist/uploads
#- ./logs:/app/logs
- ./config:/app/config
- ./.env:/app/.env
- ./uploads:/app/uploads
restart: unless-stopped
container_name: versia
tty: true
container_name: lysand
networks:
- versia-net
depends_on:
- db
- redis
- sonic
worker:
image: ghcr.io/versia-pub/worker:main
volumes:
- ./logs:/app/dist/logs
- ./config:/app/dist/config:ro
restart: unless-stopped
container_name: versia-worker
tty: true
networks:
- versia-net
depends_on:
- db
- redis
- lysand-net
db:
image: postgres:17-alpine
container_name: versia-db
build:
context: .
dockerfile: Postgres.Dockerfile
container_name: lysand-db
restart: unless-stopped
environment:
POSTGRES_DB: versia
POSTGRES_USER: versia
POSTGRES_PASSWORD: versia
POSTGRES_DB: lysand
POSTGRES_USER: lysand
POSTGRES_PASSWORD: lysand
networks:
- versia-net
- lysand-net
volumes:
- ./db-data:/var/lib/postgresql/data
redis:
image: redis:alpine
container_name: versia-redis
image: "redis:latest"
container_name: lysand-redis
volumes:
- ./redis-data:/data
restart: unless-stopped
networks:
- versia-net
sonic:
volumes:
- ./config.cfg:/etc/sonic.cfg
- ./store:/var/lib/sonic/store/
image: valeriansaliou/sonic:v1.4.9
container_name: versia-sonic
restart: unless-stopped
- lysand-net
meilisearch:
stdin_open: true
environment:
- MEILI_MASTER_KEY=add_your_key_here
tty: true
networks:
- versia-net
- lysand-net
volumes:
- ./meili-data:/meili_data
image: getmeili/meilisearch:v1.5
container_name: lysand-meilisearch
networks:
versia-net:
lysand-net:
external: true

View file

@ -1,98 +0,0 @@
import taskLists from "@hackmd/markdown-it-task-lists";
import implicitFigures from "markdown-it-image-figures";
import { defineConfig } from "vitepress";
import { tabsMarkdownPlugin } from "vitepress-plugin-tabs";
// https://vitepress.dev/reference/site-config
export default defineConfig({
title: "Versia Server Docs",
lang: "en-US",
description: "Documentation for Versia Server APIs",
markdown: {
config: (md): void => {
md.use(implicitFigures, {
figcaption: "alt",
copyAttrs: "^class$",
});
md.use(taskLists);
md.use(tabsMarkdownPlugin);
},
math: true,
},
cleanUrls: true,
themeConfig: {
// https://vitepress.dev/reference/default-theme-config
nav: [
{ text: "Home", link: "/" },
{
text: "Versia Protocol",
link: "https://versia.pub",
target: "_blank",
},
],
sidebar: [
{
text: "Installation",
items: [
{
text: "Normal",
link: "/setup/installation",
},
{
text: "Nix",
link: "/setup/nix",
},
],
},
{
text: "CLI",
link: "/cli",
},
{
text: "API",
items: [
{
text: "Reactions",
link: "/api/reactions",
},
{
text: "Challenges",
link: "/api/challenges",
},
{
text: "Mastodon Extensions",
link: "/api/mastodon",
},
],
},
{
text: "Frontend",
items: [
{
text: "Authentication",
link: "/frontend/auth",
},
{
text: "Routes",
link: "/frontend/routes",
},
],
},
],
socialLinks: [
{ icon: "github", link: "https://github.com/versia-pub/server" },
],
search: {
provider: "local",
},
logo: "https://cdn.versia.pub/branding/icon.svg",
},
head: [["link", { rel: "icon", href: "/favicon.png", type: "image/png" }]],
titleTemplate: ":title • Versia Server Docs",
});

View file

@ -1,14 +0,0 @@
import type { Theme } from "vitepress";
import DefaultTheme from "vitepress/theme";
// https://vitepress.dev/guide/custom-theme
import { h, type VNode } from "vue";
import "./style.css";
export default {
extends: DefaultTheme,
Layout: (): VNode => {
return h(DefaultTheme.Layout, null, {
// https://vitepress.dev/guide/extending-default-theme#layout-slots
});
},
} satisfies Theme;

View file

@ -1,138 +0,0 @@
/**
* Customize default theme styling by overriding CSS variables:
* https://github.com/vuejs/vitepress/blob/main/src/client/theme-default/styles/vars.css
*/
/**
* Colors
*
* Each colors have exact same color scale system with 3 levels of solid
* colors with different brightness, and 1 soft color.
*
* - `XXX-1`: The most solid color used mainly for colored text. It must
* satisfy the contrast ratio against when used on top of `XXX-soft`.
*
* - `XXX-2`: The color used mainly for hover state of the button.
*
* - `XXX-3`: The color for solid background, such as bg color of the button.
* It must satisfy the contrast ratio with pure white (#ffffff) text on
* top of it.
*
* - `XXX-soft`: The color used for subtle background such as custom container
* or badges. It must satisfy the contrast ratio when putting `XXX-1` colors
* on top of it.
*
* The soft color must be semi transparent alpha channel. This is crucial
* because it allows adding multiple "soft" colors on top of each other
* to create a accent, such as when having inline code block inside
* custom containers.
*
* - `default`: The color used purely for subtle indication without any
* special meanings attached to it such as bg color for menu hover state.
*
* - `brand`: Used for primary brand colors, such as link text, button with
* brand theme, etc.
*
* - `tip`: Used to indicate useful information. The default theme uses the
* brand color for this by default.
*
* - `warning`: Used to indicate warning to the users. Used in custom
* container, badges, etc.
*
* - `danger`: Used to show error, or dangerous message to the users. Used
* in custom container, badges, etc.
* -------------------------------------------------------------------------- */
:root {
--vp-c-default-1: var(--vp-c-gray-1);
--vp-c-default-2: var(--vp-c-gray-2);
--vp-c-default-3: var(--vp-c-gray-3);
--vp-c-default-soft: var(--vp-c-gray-soft);
--vp-c-brand-1: var(--vp-c-indigo-1);
--vp-c-brand-2: var(--vp-c-indigo-2);
--vp-c-brand-3: var(--vp-c-indigo-3);
--vp-c-brand-soft: var(--vp-c-indigo-soft);
--vp-c-tip-1: var(--vp-c-brand-1);
--vp-c-tip-2: var(--vp-c-brand-2);
--vp-c-tip-3: var(--vp-c-brand-3);
--vp-c-tip-soft: var(--vp-c-brand-soft);
--vp-c-warning-1: var(--vp-c-yellow-1);
--vp-c-warning-2: var(--vp-c-yellow-2);
--vp-c-warning-3: var(--vp-c-yellow-3);
--vp-c-warning-soft: var(--vp-c-yellow-soft);
--vp-c-danger-1: var(--vp-c-red-1);
--vp-c-danger-2: var(--vp-c-red-2);
--vp-c-danger-3: var(--vp-c-red-3);
--vp-c-danger-soft: var(--vp-c-red-soft);
}
/**
* Component: Button
* -------------------------------------------------------------------------- */
:root {
--vp-button-brand-border: transparent;
--vp-button-brand-text: var(--vp-c-white);
--vp-button-brand-bg: var(--vp-c-brand-3);
--vp-button-brand-hover-border: transparent;
--vp-button-brand-hover-text: var(--vp-c-white);
--vp-button-brand-hover-bg: var(--vp-c-brand-2);
--vp-button-brand-active-border: transparent;
--vp-button-brand-active-text: var(--vp-c-white);
--vp-button-brand-active-bg: var(--vp-c-brand-1);
}
/**
* Component: Home
* -------------------------------------------------------------------------- */
:root {
--vp-home-hero-name-color: transparent;
--vp-home-hero-name-background: -webkit-linear-gradient(
120deg,
#e6a9fe 30%,
#bd34fe
);
--vp-home-hero-image-background-image: linear-gradient(
-45deg,
#e6a9fe 50%,
#bd34fe 50%
);
--vp-home-hero-image-filter: blur(44px);
}
@media (min-width: 640px) {
:root {
--vp-home-hero-image-filter: blur(56px);
}
}
@media (min-width: 960px) {
:root {
--vp-home-hero-image-filter: blur(68px);
}
}
/**
* Component: Custom Block
* -------------------------------------------------------------------------- */
:root {
--vp-custom-block-tip-border: transparent;
--vp-custom-block-tip-text: var(--vp-c-text-1);
--vp-custom-block-tip-bg: var(--vp-c-brand-soft);
--vp-custom-block-tip-code-bg: var(--vp-c-brand-soft);
}
/**
* Component: Algolia
* -------------------------------------------------------------------------- */
.DocSearch {
--docsearch-primary-color: var(--vp-c-brand-1) !important;
}

View file

@ -1,37 +0,0 @@
# Challenges API
Some API routes may require a cryptographic challenge to be solved before the request can be made. This is to prevent abuse of the API by bots and other malicious actors. The challenge is a simple mathematical problem that can be solved by any client.
This is a form of proof-of-work CAPTCHA and should be mostly invisible to users. The challenge is generated by the server and sent to the client, which must solve it and send the solution back to the server.
## Solving a Challenge
Challenges are powered by the [Altcha](https://altcha.org/) library. You may either reimplement their solution code (which is very simple), or use [`altcha-lib`](https://github.com/altcha-org/altcha-lib) to solve the challenges.
## Request Challenge
To request a challenge, you may use the [`POST /api/v1/challenges`](https://vs.cpluspatch.com/docs#tag/challenges/POST/api/v1/challenges) endpoint.
## Sending a Solution
To send a solution with any request, add the following headers:
- `X-Challenge-Solution`: A base64 encoded string of the following JSON object:
```ts
{
number: number; // Solution to the challenge
algorithm: "SHA-256" | "SHA-384" | "SHA-512";
challenge: string;
salt: string,
signature: string,
}
```
Example: `{"number": 42, "algorithm": "SHA-256", "challenge": "xxxx", "salt": "abc", "signature": "def"}` -> `eyJudW1iZXIiOjQyLCJhbGdvcml0aG0iOiJTSEEtMjU2IiwiY2hhbGxlbmdlIjoieHh4eCIsInNhbHQiOiJhYmMiLCJzaWduYXR1cmUiOiJkZWYifQ==`
A challenge solution is valid for 5 minutes (configurable) after the challenge is generated. No solved challenge may be used more than once.
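The sketch below shows one way a client could solve a challenge and build the header by hand. It assumes the challenge object returned by [`POST /api/v1/challenges`](https://vs.cpluspatch.com/docs#tag/challenges/POST/api/v1/challenges) follows Altcha's published shape (`algorithm`, `challenge`, `salt`, `signature` and optionally `maxnumber`); check the actual response before relying on it.
```ts
const sha256Hex = async (input: string): Promise<string> => {
    const digest = await crypto.subtle.digest(
        "SHA-256",
        new TextEncoder().encode(input)
    );
    return [...new Uint8Array(digest)]
        .map(byte => byte.toString(16).padStart(2, "0"))
        .join("");
};

// Brute-force the number whose hash matches the challenge, then base64-encode the
// solution object described above for use in the X-Challenge-Solution header.
const solveChallenge = async (challenge: {
    algorithm: "SHA-256";
    challenge: string;
    salt: string;
    signature: string;
    maxnumber?: number;
}): Promise<string> => {
    for (let number = 0; number <= (challenge.maxnumber ?? 1_000_000); number++) {
        if ((await sha256Hex(challenge.salt + number)) === challenge.challenge) {
            return btoa(
                JSON.stringify({
                    number,
                    algorithm: challenge.algorithm,
                    challenge: challenge.challenge,
                    salt: challenge.salt,
                    signature: challenge.signature,
                })
            );
        }
    }
    throw new Error("No solution found within the search bound");
};
```
The resulting string goes into the `X-Challenge-Solution` header of the protected request, for example `POST /api/v1/accounts`.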
## Routes Requiring Challenges
If challenges are enabled, the following routes will require a challenge to be solved before the request can be made:
- `POST /api/v1/accounts`
Routes requiring challenges may eventually be expanded or made configurable.

View file

@ -1,420 +0,0 @@
# Mastodon API Extensions
Versia Server extends several Mastodon API endpoints to provide additional functionality. These endpoints are not part of the official Mastodon API; they are Versia Server additions intended to enhance the user experience.
## Refetch User
```http
POST /api/v1/accounts/:id/refetch
```
Refetches the user's profile information from remote servers. Does not work for local users.
- **Returns**: [`Account`](https://docs.joinmastodon.org/entities/Account/)
- **Authentication**: Required
- **Permissions**: `read:account`
- **Version History**:
- `0.7.0`: Added.
### Request
#### Example
```http
POST /api/v1/accounts/364fd13f-28b5-4e88-badd-ce3e533f0d02/refetch
Authorization: Bearer ...
```
### Response
#### `400 Bad Request`
The user is a local user and cannot be refetched.
#### `200 OK`
New user data.
Example from the [Mastodon API documentation](https://docs.joinmastodon.org/entities/Account/):
```json
{
"id": "23634",
"username": "noiob",
"acct": "noiob@awoo.space",
"display_name": "ikea shark fan account",
"locked": false,
"bot": false,
"created_at": "2017-02-08T02:00:53.274Z",
"note": "<p>:ms_rainbow_flag: :ms_bisexual_flagweb: :ms_nonbinary_flag: <a href=\"https://awoo.space/tags/awoo\" class=\"mention hashtag\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">#<span>awoo</span}.space <a href=\"https://awoo.space/tags/admin\" class=\"mention hashtag\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">#<span>admin</span} ~ <a href=\"https://awoo.space/tags/bi\" class=\"mention hashtag\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">#<span>bi</span} ~ <a href=\"https://awoo.space/tags/nonbinary\" class=\"mention hashtag\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">#<span>nonbinary</span} ~ compsci student ~ likes video <a href=\"https://awoo.space/tags/games\" class=\"mention hashtag\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">#<span>games</span} and weird/ old electronics and will post obsessively about both ~ avatar by <span class=\"h-card\"><a href=\"https://weirder.earth/@dzuk\" class=\"u-url mention\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">@<span>dzuk</span}</span></p>",
"url": "https://awoo.space/@noiob",
"avatar": "https://files.mastodon.social/accounts/avatars/000/023/634/original/6ca8804dc46800ad.png",
"avatar_static": "https://files.mastodon.social/accounts/avatars/000/023/634/original/6ca8804dc46800ad.png",
"header": "https://files.mastodon.social/accounts/headers/000/023/634/original/256eb8d7ac40f49a.png",
"header_static": "https://files.mastodon.social/accounts/headers/000/023/634/original/256eb8d7ac40f49a.png",
"followers_count": 547,
"following_count": 404,
"statuses_count": 28468,
"last_status_at": "2019-11-17",
"emojis": [
{
"shortcode": "ms_rainbow_flag",
"url": "https://files.mastodon.social/custom_emojis/images/000/028/691/original/6de008d6281f4f59.png",
"static_url": "https://files.mastodon.social/custom_emojis/images/000/028/691/static/6de008d6281f4f59.png",
"visible_in_picker": true
},
{
"shortcode": "ms_bisexual_flag",
"url": "https://files.mastodon.social/custom_emojis/images/000/050/744/original/02f94a5fca7eaf78.png",
"static_url": "https://files.mastodon.social/custom_emojis/images/000/050/744/static/02f94a5fca7eaf78.png",
"visible_in_picker": true
},
{
"shortcode": "ms_nonbinary_flag",
"url": "https://files.mastodon.social/custom_emojis/images/000/105/099/original/8106088bd4782072.png",
"static_url": "https://files.mastodon.social/custom_emojis/images/000/105/099/static/8106088bd4782072.png",
"visible_in_picker": true
}
],
"fields": [
{
"name": "Pronouns",
"value": "they/them",
"verified_at": null
},
{
"name": "Alt",
"value": "<span class=\"h-card\"><a href=\"https://cybre.space/@noiob\" class=\"u-url mention\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">@<span>noiob</span}</span>",
"verified_at": null
},
{
"name": "Bots",
"value": "<span class=\"h-card\"><a href=\"https://botsin.space/@darksouls\" class=\"u-url mention\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">@<span>darksouls</span}</span>, <span class=\"h-card\"><a href=\"https://botsin.space/@nierautomata\" class=\"u-url mention\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">@<span>nierautomata</span}</span>, <span class=\"h-card\"><a href=\"https://mastodon.social/@fedi\" class=\"u-url mention\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">@<span>fedi</span}</span>, code for <span class=\"h-card\"><a href=\"https://botsin.space/@awoobot\" class=\"u-url mention\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">@<span>awoobot</span}</span>",
"verified_at": null
},
{
"name": "Website",
"value": "<a href=\"http://shork.xyz\" rel=\"nofollow noopener noreferrer\" target=\"_blank\"><span class=\"invisible\">http://</span><span class=\"\">shork.xyz</span><span class=\"invisible\"></span}",
"verified_at": "2019-11-10T10:31:10.744+00:00"
}
]
}
```
## Get User By Username
```http
GET /api/v1/accounts/id?username=:username
```
Retrieves a user by their username.
- **Returns**: [`Account`](https://docs.joinmastodon.org/entities/Account/)
- **Authentication**: Not required
- **Permissions**: `read:account`
- **Version History**:
- `0.7.0`: Added.
### Request
#### Example
```http
GET /api/v1/accounts/id?username=bobleponge
```
### Response
#### `404 Not Found`
No user with that username was found.
#### `200 OK`
User data.
Example from the [Mastodon API documentation](https://docs.joinmastodon.org/entities/Account/):
```json
{
"id": "23634",
"username": "noiob",
"acct": "noiob@awoo.space",
"display_name": "ikea shark fan account",
"locked": false,
"bot": false,
"created_at": "2017-02-08T02:00:53.274Z",
"note": "<p>:ms_rainbow_flag: :ms_bisexual_flagweb: :ms_nonbinary_flag: <a href=\"https://awoo.space/tags/awoo\" class=\"mention hashtag\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">#<span>awoo</span}.space <a href=\"https://awoo.space/tags/admin\" class=\"mention hashtag\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">#<span>admin</span} ~ <a href=\"https://awoo.space/tags/bi\" class=\"mention hashtag\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">#<span>bi</span} ~ <a href=\"https://awoo.space/tags/nonbinary\" class=\"mention hashtag\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">#<span>nonbinary</span} ~ compsci student ~ likes video <a href=\"https://awoo.space/tags/games\" class=\"mention hashtag\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">#<span>games</span} and weird/ old electronics and will post obsessively about both ~ avatar by <span class=\"h-card\"><a href=\"https://weirder.earth/@dzuk\" class=\"u-url mention\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">@<span>dzuk</span}</span></p>",
"url": "https://awoo.space/@noiob",
"avatar": "https://files.mastodon.social/accounts/avatars/000/023/634/original/6ca8804dc46800ad.png",
"avatar_static": "https://files.mastodon.social/accounts/avatars/000/023/634/original/6ca8804dc46800ad.png",
"header": "https://files.mastodon.social/accounts/headers/000/023/634/original/256eb8d7ac40f49a.png",
"header_static": "https://files.mastodon.social/accounts/headers/000/023/634/original/256eb8d7ac40f49a.png",
"followers_count": 547,
"following_count": 404,
"statuses_count": 28468,
"last_status_at": "2019-11-17",
"emojis": [
{
"shortcode": "ms_rainbow_flag",
"url": "https://files.mastodon.social/custom_emojis/images/000/028/691/original/6de008d6281f4f59.png",
"static_url": "https://files.mastodon.social/custom_emojis/images/000/028/691/static/6de008d6281f4f59.png",
"visible_in_picker": true
},
{
"shortcode": "ms_bisexual_flag",
"url": "https://files.mastodon.social/custom_emojis/images/000/050/744/original/02f94a5fca7eaf78.png",
"static_url": "https://files.mastodon.social/custom_emojis/images/000/050/744/static/02f94a5fca7eaf78.png",
"visible_in_picker": true
},
{
"shortcode": "ms_nonbinary_flag",
"url": "https://files.mastodon.social/custom_emojis/images/000/105/099/original/8106088bd4782072.png",
"static_url": "https://files.mastodon.social/custom_emojis/images/000/105/099/static/8106088bd4782072.png",
"visible_in_picker": true
}
],
"fields": [
{
"name": "Pronouns",
"value": "they/them",
"verified_at": null
},
{
"name": "Alt",
"value": "<span class=\"h-card\"><a href=\"https://cybre.space/@noiob\" class=\"u-url mention\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">@<span>noiob</span}</span>",
"verified_at": null
},
{
"name": "Bots",
"value": "<span class=\"h-card\"><a href=\"https://botsin.space/@darksouls\" class=\"u-url mention\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">@<span>darksouls</span}</span>, <span class=\"h-card\"><a href=\"https://botsin.space/@nierautomata\" class=\"u-url mention\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">@<span>nierautomata</span}</span>, <span class=\"h-card\"><a href=\"https://mastodon.social/@fedi\" class=\"u-url mention\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">@<span>fedi</span}</span>, code for <span class=\"h-card\"><a href=\"https://botsin.space/@awoobot\" class=\"u-url mention\" rel=\"nofollow noopener noreferrer\" target=\"_blank\">@<span>awoobot</span}</span>",
"verified_at": null
},
{
"name": "Website",
"value": "<a href=\"http://shork.xyz\" rel=\"nofollow noopener noreferrer\" target=\"_blank\"><span class=\"invisible\">http://</span><span class=\"\">shork.xyz</span><span class=\"invisible\"></span}",
"verified_at": "2019-11-10T10:31:10.744+00:00"
}
]
}
```
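As a rough illustration, a client could wrap this endpoint as follows. The `AccountSummary` type is a simplified subset of the full `Account` entity, and the instance URL is a placeholder.
```ts
// Minimal sketch of calling the lookup-by-username endpoint.
// "https://versia.example" is a placeholder instance URL.
interface AccountSummary {
    id: string;
    username: string;
    acct: string;
    display_name: string;
}

async function getAccountByUsername(username: string): Promise<AccountSummary | null> {
    const url = new URL("/api/v1/accounts/id", "https://versia.example");
    url.searchParams.set("username", username);

    const response = await fetch(url);

    // 404 means no user with that username exists on this instance.
    if (response.status === 404) {
        return null;
    }
    if (!response.ok) {
        throw new Error(`Unexpected status: ${response.status}`);
    }

    return (await response.json()) as AccountSummary;
}
```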
## Get Instance TOS
```http
GET /api/v1/instance/tos
```
Returns the instance's Terms of Service, as configured in the instance settings.
- **Returns**: [`ExtendedDescription`](https://docs.joinmastodon.org/entities/ExtendedDescription/)
- **Authentication**: Not required
- **Permissions**: None
- **Version History**:
- `0.7.0`: Added.
### Request
#### Example
```http
GET /api/v1/instance/tos
```
### Response
#### `200 OK`
Instance's Terms of Service.
```json
{
"updated_at": "2019-11-17T00:00:00.000Z",
"content": "<h1>TOS</h1>\n<p>These are the terms of service for this instance.</p>",
}
```
## Get Instance Privacy Policy
```http
GET /api/v1/instance/privacy_policy
```
Returns the instance's Privacy Policy, as configured in the instance settings.
- **Returns**: [`ExtendedDescription`](https://docs.joinmastodon.org/entities/ExtendedDescription/)
- **Authentication**: Not required
- **Permissions**: None
- **Version History**:
- `0.7.0`: Added.
### Request
#### Example
```http
GET /api/v1/instance/privacy_policy
```
### Response
#### `200 OK`
Instance's Privacy Policy.
```json
{
"updated_at": "2019-11-17T00:00:00.000Z",
"content": "<h1>Privacy Policy</h1>\n<p>This is the privacy policy for this instance.</p>",
}
```
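Since this endpoint and the Terms of Service endpoint above share the same response shape, a client can fetch either with one helper. A minimal sketch, assuming a placeholder instance URL:
```ts
// Sketch: fetch either legal document; both endpoints return an
// ExtendedDescription-style object ({ updated_at, content }).
interface ExtendedDescription {
    updated_at: string;
    content: string; // HTML
}

async function getLegalDocument(kind: "tos" | "privacy_policy"): Promise<ExtendedDescription> {
    // "https://versia.example" is a placeholder instance URL.
    const response = await fetch(`https://versia.example/api/v1/instance/${kind}`);
    if (!response.ok) {
        throw new Error(`Unexpected status: ${response.status}`);
    }
    return (await response.json()) as ExtendedDescription;
}
```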
## `/api/v1/instance`
Extra attributes have been added to the `/api/v1/instance` endpoint.
```ts
interface SSOProvider {
id: string;
name: string;
icon?: string;
}
type ExtendedInstance = Instance & {
banner: string | null;
versia_version: string;
sso: {
forced: boolean;
providers: SSOProvider[];
};
}
```
### `banner`
The URL of the instance's banner image.
### `versia_version`
The version of Versia Server running on the instance.
The normal `version` field is always set to `"4.3.0+glitch"` or similar, so that clients expecting a Mastodon instance are not confused.
### `sso`
Single Sign-On (SSO) settings for the instance. This object contains two fields:
- `forced`: If this is enabled, normal identifier/password login is disabled and login must be done through SSO.
- `providers`: An array of external OpenID Connect providers that users can link their accounts to. Each provider object contains the following fields:
- `id`: The issuer ID of the OpenID Connect provider.
- `name`: The name of the provider.
- `icon`: The URL of the provider's icon. Optional.
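As a non-normative sketch, a login page could read the `sso` object to decide whether to show the password form and which provider buttons to render. The instance URL below is a placeholder.
```ts
interface SSOProvider {
    id: string;
    name: string;
    icon?: string;
}

// Decide what to show on the login screen based on the instance's `sso` field.
async function getLoginOptions(): Promise<{ showPasswordForm: boolean; providers: SSOProvider[] }> {
    // "https://versia.example" is a placeholder instance URL.
    const response = await fetch("https://versia.example/api/v1/instance");
    const instance = (await response.json()) as {
        sso: { forced: boolean; providers: SSOProvider[] };
    };

    return {
        // When SSO is forced, identifier/password login is disabled entirely.
        showPasswordForm: !instance.sso.forced,
        providers: instance.sso.providers,
    };
}
```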
## `/api/v2/instance`
Extra attributes have been added to the `/api/v2/instance` endpoint. These are identical to the additions on `/api/v1/instance`, except that the banner is exposed through the standard Mastodon API attribute instead of the custom `banner` field.
```ts
type ExtendedInstanceV2 = InstanceV2 & {
versia_version: string;
configuration: Instance["configuration"] & {
emojis: {
// In bytes
emoji_size_limit: number;
max_emoji_shortcode_characters: number;
max_emoji_description_characters: number;
};
};
sso: {
forced: boolean;
providers: SSOProvider[];
};
}
```
### `versia_version`
The version of Versia Server running on the instance.
The normal `version` field is always set to `"4.3.0+glitch"` or similar, so that clients expecting a Mastodon instance are not confused.
### `sso`
Single Sign-On (SSO) settings for the instance. This object contains two fields:
- `forced`: If this is enabled, normal identifier/password login is disabled and login must be done through SSO.
- `providers`: An array of external OpenID Connect providers that users can link their accounts to. Each provider object contains the following fields:
- `id`: The issuer ID of the OpenID Connect provider.
- `name`: The name of the provider.
- `icon`: The URL of the provider's icon. Optional.
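For illustration, a client could validate a prospective custom emoji against these advertised limits before uploading it. This is only a sketch; the limits object comes from `configuration.emojis` in the `/api/v2/instance` response, and the upload endpoint itself is not shown.
```ts
// Limits advertised under `configuration.emojis` in the /api/v2/instance response.
interface EmojiLimits {
    emoji_size_limit: number; // bytes
    max_emoji_shortcode_characters: number;
    max_emoji_description_characters: number;
}

// Returns a list of human-readable problems; an empty list means the emoji
// fits within the instance's advertised limits.
function validateEmojiUpload(
    limits: EmojiLimits,
    shortcode: string,
    description: string,
    file: Blob,
): string[] {
    const problems: string[] = [];
    if (file.size > limits.emoji_size_limit) {
        problems.push(`File is larger than ${limits.emoji_size_limit} bytes`);
    }
    if (shortcode.length > limits.max_emoji_shortcode_characters) {
        problems.push("Shortcode is too long");
    }
    if (description.length > limits.max_emoji_description_characters) {
        problems.push("Description is too long");
    }
    return problems;
}
```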
## `Account`
Two extra attributes have been added to all returned [`Account`](https://docs.joinmastodon.org/entities/Account/) objects.
This object is returned on routes such as `/api/v1/accounts/:id`, `/api/v1/accounts/verify_credentials`, etc.
```ts
type ExtendedAccount = Account & {
roles: Role[];
uri: string;
}
```
### `roles`
An array of `Role` objects that the user has.
### `uri`
URI of the account's Versia entity (for federation). Similar to Mastodon's `uri` field on notes.
## `Status`
One attribute has been added to all returned [`Status`](https://docs.joinmastodon.org/entities/Status/) objects.
This object is returned on routes such as `/api/v1/statuses/:id`, `/api/v1/statuses/:id/context`, etc.
```ts
type URL = string;
interface NoteReaction {
name: string;
count: number;
me: boolean;
url: URL;
}
type ExtendedStatus = Status & {
reactions: NoteReaction[];
}
```
```json
{
...
"reactions": [
{
"name": "like",
"count": 3,
"me": true,
},
{
"name": "blobfox",
"count": 1,
"me": false,
}
]
}
```
### `reactions`
An array of all the [`NoteReaction`](./reactions.md#reaction) objects for the note, as illustrated in the sketch below. Data for each custom emoji (e.g. its URL) can be found in the `emojis` field of the [`Status`](https://docs.joinmastodon.org/entities/Status#emojis).
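This pairing is a non-normative sketch: it looks up each reaction's shortcode in the status's `emojis` array, and Unicode emoji (which have no matching entry) fall back to plain text. The types are simplified.
```ts
// Simplified shapes; see the interfaces above and the Mastodon CustomEmoji entity.
interface CustomEmoji {
    shortcode: string;
    url: string;
    static_url: string;
    visible_in_picker: boolean;
}

interface NoteReaction {
    name: string;
    count: number;
    me: boolean;
    url: string;
}

// Resolve the image URL for each reaction, if it is a custom emoji.
function resolveReactionImages(
    reactions: NoteReaction[],
    emojis: CustomEmoji[],
): { reaction: NoteReaction; imageUrl?: string }[] {
    const byShortcode = new Map(
        emojis.map((emoji) => [emoji.shortcode, emoji] as [string, CustomEmoji]),
    );
    return reactions.map((reaction) => ({
        reaction,
        // Unicode emoji (e.g. "👍") have no entry here and are rendered as text.
        imageUrl: byShortcode.get(reaction.name)?.url,
    }));
}
```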
## `/api/v1/accounts/update_credentials`
The `username` parameter can now (optionally) be set to change the user's handle.
> [!WARNING]
> Clients should indicate to users that changing their handle will break existing links to their profile. This is reversible, but the old handle will be available for anyone to claim.
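A hedged sketch of changing the handle: only the `username` parameter is documented here, and the rest follows the standard Mastodon `PATCH /api/v1/accounts/update_credentials` form submission. The instance URL is a placeholder.
```ts
// Change the authenticated user's handle. The instance URL and token are placeholders.
async function changeUsername(newUsername: string, accessToken: string): Promise<Response> {
    const body = new FormData();
    body.set("username", newUsername);

    return fetch("https://versia.example/api/v1/accounts/update_credentials", {
        method: "PATCH",
        headers: { Authorization: `Bearer ${accessToken}` },
        body,
    });
}
```
Clients should surface the warning above (broken profile links, the old handle becoming claimable) before submitting the change.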

View file

@ -1,175 +0,0 @@
# Reactions API
This API is used to send reactions to notes.
## Reaction
```typescript
type UUID = string;
interface NoteReaction {
name: string;
count: number;
me: boolean;
}
type NoteReactionWithAccounts = NoteReaction & {
account_ids: UUID[];
}
```
## Get Reactions
All reactions attached to a [`Status`](https://docs.joinmastodon.org/entities/Status) can be found on the note itself, [in the `reactions` field](./mastodon.md#reactions).
## Get Users Who Reacted
```http
GET /api/v1/statuses/:id/reactions
```
Get a list of all the users who reacted to a note. Only IDs are returned, not full account objects, to improve performance on very popular notes.
- **Returns:** [`NoteReactionWithAccounts[]`](#reaction)
- **Authentication:** Not required
- **Permissions:** `read:reaction`
- **Version History**:
- `0.8.0`: Added.
### Request
#### Example
```http
GET /api/v1/statuses/123/reactions
```
### Response
#### `200 OK`
List of reactions and associated users. The `me` field is `true` if the current user has reacted with that emoji.
Data for the custom emoji (e.g. URL) can be found in the `emojis` field of the [`Status`](https://docs.joinmastodon.org/entities/Status#emojis).
```json
[
{
"name": "like",
"count": 3,
"me": true,
"account_ids": ["1", "2", "3"]
},
{
"name": "blobfox-coffee",
"count": 1,
"me": false,
"account_ids": ["4"]
}
]
```
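Because only account IDs are returned, a client that needs display names has to resolve them separately. A minimal sketch using this route together with the standard `GET /api/v1/accounts/:id` route; the instance URL is a placeholder.
```ts
interface NoteReactionWithAccounts {
    name: string;
    count: number;
    me: boolean;
    account_ids: string[];
}

// Fetch the reactions for a status, then resolve the reacting accounts
// through the standard Mastodon accounts route.
async function getReactionsWithAccounts(statusId: string) {
    const base = "https://versia.example"; // placeholder instance URL
    const response = await fetch(`${base}/api/v1/statuses/${statusId}/reactions`);
    const reactions = (await response.json()) as NoteReactionWithAccounts[];

    const uniqueIds = [...new Set(reactions.flatMap((reaction) => reaction.account_ids))];
    const accounts = await Promise.all(
        uniqueIds.map(async (id) => (await fetch(`${base}/api/v1/accounts/${id}`)).json()),
    );

    return { reactions, accounts };
}
```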
## Add Reaction
```http
PUT /api/v1/statuses/:id/reactions/:name
```
Add a reaction to a note.
- **Returns:** [`Status`](https://docs.joinmastodon.org/entities/Status)
- **Authentication:** Required
- **Permissions:** `owner:reaction`
- **Version History**:
- `0.8.0`: Added.
### Request
- `name` (string, required): Either a custom emoji shortcode or a Unicode emoji.
#### Example
```http
PUT /api/v1/statuses/123/reactions/blobfox-coffee
Authorization: Bearer ...
```
```http
PUT /api/v1/statuses/123/reactions/👍
Authorization: Bearer ...
```
### Response
#### `201 Created`
Returns the updated note.
```json
{
"id": "123",
...
"reactions": [
{
"name": "👍",
"count": 3,
"me": true
},
{
"name": "blobfox-coffee",
"count": 1,
"me": false
}
]
}
```
## Remove Reaction
```http
DELETE /api/v1/statuses/:id/reactions/:name
```
Remove a reaction from a note.
- **Returns:** [`Status`](https://docs.joinmastodon.org/entities/Status)
- **Authentication:** Required
- **Permissions:** `owner:reaction`
- **Version History**:
- `0.8.0`: Added.
### Request
- `name` (string, required): Either a custom emoji shortcode or a Unicode emoji.
#### Example
```http
DELETE /api/v1/statuses/123/reactions/blobfox-coffee
Authorization: Bearer ...
```
```http
DELETE /api/v1/statuses/123/reactions/👍
Authorization: Bearer ...
```
### Response
#### `200 OK`
Returns the updated note. If the reaction was not found, the note is returned as is.
```json
{
"id": "123",
...
"reactions": [
{
"name": "👍",
"count": 3,
"me": true
}
]
}
```
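Putting the add and remove routes together, a client-side toggle might look like the sketch below. The instance URL is a placeholder, and whether the user has already reacted comes from the `me` flag in the status's `reactions` field.
```ts
// Add the reaction if the current user has not reacted yet, otherwise remove it.
// Returns the updated note as sent back by the server.
async function toggleReaction(
    statusId: string,
    name: string,
    alreadyReacted: boolean,
    accessToken: string,
): Promise<unknown> {
    const base = "https://versia.example"; // placeholder instance URL
    const response = await fetch(
        `${base}/api/v1/statuses/${statusId}/reactions/${encodeURIComponent(name)}`,
        {
            method: alreadyReacted ? "DELETE" : "PUT",
            headers: { Authorization: `Bearer ${accessToken}` },
        },
    );
    if (!response.ok) {
        throw new Error(`Unexpected status: ${response.status}`);
    }
    return response.json();
}
```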

View file

@ -1,21 +0,0 @@
# Versia Server CLI
Versia Server includes a built-in, scripting-compatible CLI that can be used to manage the server. This CLI can be used to create and delete users, manage the database and more. It can also output data in JSON or CSV format, making it easy to use in scripts.
## Using the CLI
To use the CLI, run the following command:
```bash
# Docker
# Replace `versia` with the name of your container
docker compose exec -it versia sh /app/entrypoint.sh cli help
```
Use the `help` command to see a list of available commands; these include creating users, deleting users and more. Each command also has a `--help` (`-h`) flag that shows more information about that command.
## Scripting with the CLI
CLI commands that return data as tables can be used in scripts: such commands accept a `--format` flag that can be set to either `"json"` or `"csv"` to change the output format. See `cli help` or `cli <command> -h` for more information.
Flags can be used in any order and anywhere in the invocation, as long as they come after the `cli` command itself.
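As a sketch, a script could run the CLI through the container and parse its JSON output. The container name `versia` matches the earlier example; the `user list` subcommand shown in the usage comment is hypothetical, so substitute a real command from `cli help`.
```ts
import { execFile } from "node:child_process";
import { promisify } from "node:util";

const run = promisify(execFile);

// Run a CLI command inside the container and parse its JSON output.
// "versia" is the container name from the example above; the subcommand is
// whatever you pass in (see `cli help` for real commands).
async function runCliAsJson(args: string[]): Promise<unknown> {
    const { stdout } = await run("docker", [
        "compose",
        "exec",
        "-T", // no TTY, so stdout can be captured cleanly
        "versia",
        "sh",
        "/app/entrypoint.sh",
        "cli",
        ...args,
        "--format",
        "json",
    ]);
    return JSON.parse(stdout);
}

// Hypothetical usage: const users = await runCliAsJson(["user", "list"]);
```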

View file

@ -1,87 +0,0 @@
# Frontend Authentication
Multiple API routes are exposed for authentication, to be used by frontend developers.
> [!NOTE]
>
> These are different from the Client API routes, which are used by clients to interact with the Mastodon API.
A frontend is a web application that is designed to be the primary user interface for an instance. It is also used by clients to perform authentication.
## Get Frontend Configuration
```http
GET /api/v1/frontend/config
```
Retrieves the frontend configuration for the instance. This returns whatever the `frontend.settings` object is set to in the Versia Server configuration.
This behaves like the `/api/v1/preferences` endpoint in the Mastodon API, but is specific to the frontend. These values are arbitrary and can be used for anything.
Frontend developers should always namespace their keys to avoid conflicts with other keys.
- **Returns**: Object with arbitrary keys and values.
- **Authentication**: Not required
- **Permissions**: None
- **Version History**:
- `0.7.0`: Added.
### Request
#### Example
```http
GET /api/v1/frontend/config
```
### Response
#### `200 OK`
Frontend configuration.
```json
{
"pub.versia.fe:theme": "dark",
"pub.versia.fe:custom_css": "body { background-color: black; }",
"net.googly.frontend:spoiler_image": "https://example.com/spoiler.png"
}
```
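For example, a frontend might read its own namespaced keys with a small typed helper and a fallback. This is a sketch only; the `net.example.frontend` namespace and the `theme` key are hypothetical, and the instance URL is a placeholder.
```ts
type FrontendConfig = Record<string, unknown>;

// Read a namespaced key from the frontend configuration, falling back to a default.
// "net.example.frontend" is a hypothetical namespace chosen by the frontend developer.
function getFrontendSetting<T>(config: FrontendConfig, key: string, fallback: T): T {
    const value = config[`net.example.frontend:${key}`];
    return value === undefined ? fallback : (value as T);
}

// Usage sketch: fetch the config once, then read settings from it.
async function loadTheme(): Promise<string> {
    // "https://versia.example" is a placeholder instance URL.
    const response = await fetch("https://versia.example/api/v1/frontend/config");
    const config = (await response.json()) as FrontendConfig;
    return getFrontendSetting(config, "theme", "light");
}
```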
## SSO Sign In
```http
POST /oauth/sso
```
Allows users to sign in to the instance using an external OpenID Connect provider.
- **Returns**: `302 Found` with a `Location` header to redirect the user to the next step.
- **Authentication**: Not required
- **Permissions**: None
- **Version History**:
- `0.7.0`: First documented.
### Request
#### Query Parameters
- `client_id` (string, required): Client ID of the [application](https://docs.joinmastodon.org/entities/Application/) that is making the request.
- `issuer` (string, required): The ID of the OpenID Connect provider, as found in `/api/{v1,v2}/instance`.
#### Example
```http
POST /oauth/sso?client_id=123&issuer=google
```
### Response
#### `302 Found`
Redirects the user to the OpenID Connect provider's login page.
```http
HTTP/2.0 302 Found
Location: https://accounts.google.com/o/oauth2/auth?client_id=123&redirect_uri=https%3A%2F%2Fexample.com%2Fauth&response_type=code&scope=openid%20email&state=123
```
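A frontend might start this flow with a plain form submission, letting the browser follow the resulting redirect. The sketch below assumes a browser environment; `clientId` and `issuerId` are placeholders.
```ts
// Start the SSO flow by POSTing to /oauth/sso; the server answers with a 302
// to the provider's login page, which the browser follows automatically for a
// regular (non-AJAX) form submission. clientId and issuerId are placeholders.
function startSsoSignIn(clientId: string, issuerId: string): void {
    const action = new URL("/oauth/sso", window.location.origin);
    action.searchParams.set("client_id", clientId);
    action.searchParams.set("issuer", issuerId);

    const form = document.createElement("form");
    form.method = "post";
    form.action = action.toString();
    document.body.appendChild(form);
    form.submit();
}
```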

View file

@ -1,53 +0,0 @@
# Frontend Routes
Frontend implementors must implement these routes for correct operation of the instance.
The location of these routes can be configured in the Versia Server configuration at `frontend.routes`:
## Login Form
```http
GET /oauth/authorize
```
This route should display a login form for the user to enter their username and password, as well as a list of OpenID providers to use if available.
The form should submit to the OpenID Connect flow.
Configurable in the Versia Server configuration at `frontend.routes.login`.
## Consent Form
```http
GET /oauth/consent
```
This route should display a consent form for the user to approve the requested application permissions, after logging in.
The form should submit an OpenID Connect authorization request at `POST /oauth/authorize`, with the correct [application](https://docs.joinmastodon.org/entities/Application/) data (client ID, redirect URI, etc.). Do not forget the JWT cookie.
### Submission Example
```http
POST /oauth/authorize
Content-Type: application/json
Cookie: jwt=...
{
"client_id": "client_id",
"response_type": "code",
"redirect_uri": "https://example.com/callback",
"scope": "read write",
"state": "state123",
"code_challenge": "code_challenge",
"code_challenge_method": "S256",
"response_type": "code"
}
```
### Submission Response
```http
HTTP/2.0 302 Found
Location: https://example.com/callback?code=code&state=state123
```
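The sketch below exercises this submission from a script (e.g. with Bun's or Node's `fetch`), where the `302` response and its `Location` header can be inspected directly; in a real frontend, a regular form submission would let the browser follow the redirect. All values are placeholders, and the JWT cookie is supplied explicitly here.
```ts
// Exercise the consent submission from a script. In a browser frontend, the
// form submission itself would carry the cookie and follow the redirect.
async function submitConsent(jwt: string): Promise<string | null> {
    const response = await fetch("https://versia.example/oauth/authorize", {
        method: "POST",
        headers: {
            "Content-Type": "application/json",
            Cookie: `jwt=${jwt}`,
        },
        // Do not follow the redirect, so the Location header can be read.
        redirect: "manual",
        body: JSON.stringify({
            client_id: "client_id",
            response_type: "code",
            redirect_uri: "https://example.com/callback",
            scope: "read write",
            state: "state123",
            code_challenge: "code_challenge",
            code_challenge_method: "S256",
        }),
    });

    // Expected: 302 Found with the callback URL (including code and state).
    return response.headers.get("Location");
}
```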

View file

@ -1,18 +0,0 @@
---
layout: home
hero:
name: Versia Server Docs
features:
- icon: 🛠️
title: Installation
details: Details on how to install Versia Server
link: ./setup/installation
- icon: 🖥
title: API Reference
details: Writing your own client? Check out the API reference
link: ./api/challenges
- icon: 📚
title: Frontend Building
details: Information on developing your own frontend
link: ./frontend/routes
---

Binary file not shown.


View file

@ -1,98 +0,0 @@
# Installation
## Requirements
- A Linux-based operating system. Kernel version `6.1` or later is recommended.
- Basic knowledge of Docker and Docker Compose.
Traditional "from-source" installation is not supported, as this software is designed to be run in a containerized environment. This guide will cover how to run the server using Docker.
## Installation
1. Download the `docker-compose.yml` file from the repository
> [!NOTE]
> You may need to change the image from `ghcr.io/versia-pub/server:latest` to `ghcr.io/versia-pub/server:main` if you want to use the latest changes from the `main` branch.
```bash
# Set this to "main" for the development build
TAG=v0.7.0
curl -o docker-compose.yml https://raw.githubusercontent.com/versia-pub/server/$TAG/docker-compose.yml
```
2. Edit the `docker-compose.yml` file to your liking, e.g. removing the `db` service if you want to use an existing database.
3. Download the `config.example.toml` file from the repository
```bash
# This should be the same as the TAG variable above
TAG=v0.7.0
curl -o config.example.toml https://raw.githubusercontent.com/versia-pub/server/$TAG/config/config.example.toml
```
4. Edit the `config.example.toml` to your liking. You will at least need to change the `postgres`, `redis` and `http` sections to match your environment.
> [!WARNING]
> The first time you start the server, it will complain about missing keys in the configuration file.
>
> These will be autogenerated and printed to the console, so you can copy them to your `config.toml` file.
5. Run the following command to start the server:
```bash
docker compose up
```
You may need root privileges to run Docker commands.
To check server logs, run `docker compose logs versia`. The server will likely stop if there is an error, so you can check the logs to see what went wrong.
## Installing the frontend
The frontend is not included in the Docker image, so you will need to install it separately.
To do this, you may copy the static files from our frontend's Docker image:
```bash
# The frontend does not have a stable tag, so we use the main branch
TAG=main
OUTDIR=./frontend
TEMP=$(sudo docker create ghcr.io/versia-pub/frontend:$TAG)
sudo docker cp $TEMP:/app/public $OUTDIR
sudo docker rm $TEMP
```
> [!TIP]
>
> This command can be re-run to update the frontend to the latest version.
Then, set the following bind mount in your `docker-compose.yml` file:
```yaml
services:
versia:
...
volumes:
# If you set OUTDIR to a different directory, change this to match
# e.g. - ./custom-frontend:/frontend
- ./frontend:/frontend
```
Finally, update the config to point to the frontend:
```toml
[frontend]
path = "/frontend"
```
## Running the Server
Database migrations are run automatically on startup.
Please see the [CLI documentation](../cli/index.md) for more information on how to use the CLI.
## Updating the server
Updating the server is as simple as running `docker compose pull` to update the Docker images, then `docker compose up` to restart the server.
Sometimes, new configuration options are added to `config.example.toml`. If you see a new option in the example file, you should add it to your `config.toml` file.

View file

@ -1,87 +0,0 @@
# Nix Module
This project is packaged as a [Nix Flake](https://nixos.wiki/wiki/Flakes), which can be used to build and run the project in a reproducible environment.
## Installation
### Flake-based NixOS installs
Add the following to your `inputs` in your `flake.nix`:
```nix
inputs = {
# ...
versia-server = {
url = "github:versia-pub/server";
inputs.nixpkgs.follows = "nixpkgs";
};
};
```
Then, add this to your `nixosConfigurations`:
```nix
nixosConfigurations = {
# ...
my-server = nixpkgs.lib.nixosSystem {
system = "x86_64-linux"; # arm64-linux is also supported
modules = [
# ...
{
nixpkgs.overlays = [versia-server.overlays.default];
}
versia-server.nixosModules.versia-server
];
};
};
```
You are now ready to use the NixOS module.
## Usage
This module exposes the following configuration option:
```nix
services.versia-server = {
enable = true;
user = "versia-server";
group = "versia-server";
nodes = {
api = {
main = {};
backup = {
configOverrides.http.port = 2734;
};
};
worker = {
one = {};
two = {};
three = {
configOverrides.postgres.port = 5433;
};
};
};
config = {
# ...
http = {
# ...
bind = "0.0.0.0";
port = 8080;
base_url = "https://versia.example";
};
# ...
};
};
```
### Configuration Options
- `enable`: Whether to enable the service. Default: `true`.
- `user`: The user under which the service will run. Default: `versia-server`.
- `group`: The group under which the service will run. Default: `versia-server`.
- `nodes`: A set of nodes to run. Each node can have its own configuration overrides, which will be merged with the default configuration. You must have at least one of each type (`api` and `worker`).
- `config`: Contents of the config file, which is serialized to TOML. Check the Versia Server documentation for information on its contents.

View file

@ -1,28 +0,0 @@
import { config } from "@versia-server/config";
import type { Config } from "drizzle-kit";
/**
* Drizzle can't properly resolve imports with top-level await, so uncomment
* the hardcoded credentials below (and comment out the config-based ones)
* when generating migrations.
*/
export default {
dialect: "postgresql",
out: "./packages/kit/tables/migrations",
schema: "./packages/kit/tables/schema.ts",
dbCredentials: {
/* host: "localhost",
port: 40000,
user: "lysand",
password: "lysand",
database: "lysand", */
host: config.postgres.host,
port: config.postgres.port,
user: config.postgres.username,
password: config.postgres.password,
database: config.postgres.database,
},
// Print all statements
verbose: true,
// Always ask for confirmation
strict: true,
} satisfies Config;

View file

@ -1,27 +0,0 @@
#!/bin/sh
# This script is a wrapper for the main server and CLI binaries.
# Commands:
# - `start`: Starts the server
# - `cli`: Starts the CLI, sends all arguments to it
# Exit immediately if a command exits with a non-zero status.
set -eu
cd /app/dist
# Parse first argument
case "$1" in
"start")
NODE_ENV=production bun run ./index.js --prod
;;
"cli")
# Start the CLI
shift 1
bun run ./cli/index.js "$@"
;;
*)
# Run custom commands
exec "$@"
;;
esac

View file

@ -1,61 +0,0 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1763421233,
"narHash": "sha256-Stk9ZYRkGrnnpyJ4eqt9eQtdFWRRIvMxpNRf4sIegnw=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "89c2b2330e733d6cdb5eae7b899326930c2c0648",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

Some files were not shown because too many files have changed in this diff.