Compare commits


3 commits

Author     SHA1         Message                                                                Date
nexy7574   57eae642be   feat: Only inject vias when manual ones aren't provided during join    2025-07-31 17:48:30 +01:00
nexy7574   6a85b6d5b0   fix: Make remote leave helper a public fn                              2025-07-30 19:29:33 +01:00
nexy7574   f6ef95c365   feat: Force leave remote rooms admin command                           2025-07-30 19:19:32 +01:00
77 changed files with 526 additions and 2166 deletions

.envrc (4 lines changed)

@ -2,8 +2,6 @@
dotenv_if_exists
if [ -f /etc/os-release ] && grep -q '^ID=nixos' /etc/os-release; then
use flake ".#${DIRENV_DEVSHELL:-default}"
fi
# use flake ".#${DIRENV_DEVSHELL:-default}"
PATH_add bin

.forgejo/actions/detect-runner-os (action definition, deleted)

@ -1,39 +0,0 @@
name: detect-runner-os
description: |
Detect the actual OS name and version of the runner.
Provides separate outputs for name, version, and a combined slug.
outputs:
name:
description: 'OS name (e.g. Ubuntu, Debian)'
value: ${{ steps.detect.outputs.name }}
version:
description: 'OS version (e.g. 22.04, 11)'
value: ${{ steps.detect.outputs.version }}
slug:
description: 'Combined OS slug (e.g. Ubuntu-22.04)'
value: ${{ steps.detect.outputs.slug }}
runs:
using: composite
steps:
- name: Detect runner OS
id: detect
shell: bash
run: |
# Detect OS version (try lsb_release first, fall back to /etc/os-release)
OS_VERSION=$(lsb_release -rs 2>/dev/null || grep VERSION_ID /etc/os-release | cut -d'"' -f2)
# Detect OS name and capitalise (try lsb_release first, fall back to /etc/os-release)
OS_NAME=$(lsb_release -is 2>/dev/null || grep "^ID=" /etc/os-release | cut -d'=' -f2 | tr -d '"' | sed 's/\b\(.\)/\u\1/g')
# Create combined slug
OS_SLUG="${OS_NAME}-${OS_VERSION}"
# Set outputs
echo "name=${OS_NAME}" >> $GITHUB_OUTPUT
echo "version=${OS_VERSION}" >> $GITHUB_OUTPUT
echo "slug=${OS_SLUG}" >> $GITHUB_OUTPUT
# Log detection results
echo "🔍 Detected Runner OS: ${OS_NAME} ${OS_VERSION}"

.forgejo/actions/prefligit (action definition, new file)

@ -0,0 +1,27 @@
name: prefligit
description: |
Runs prefligit, pre-commit reimplemented in Rust.
inputs:
extra_args:
description: options to pass to pre-commit run
required: false
default: '--all-files'
runs:
using: composite
steps:
- name: Install uv
uses: https://github.com/astral-sh/setup-uv@v6
with:
enable-cache: true
ignore-nothing-to-cache: true
- name: Install Prefligit
shell: bash
run: |
curl --proto '=https' --tlsv1.2 -LsSf https://github.com/j178/prefligit/releases/download/v0.0.10/prefligit-installer.sh | sh
- uses: actions/cache@v3
with:
path: ~/.cache/prefligit
key: prefligit-0|${{ hashFiles('.pre-commit-config.yaml') }}
- run: prefligit run --show-diff-on-failure --color=always -v ${{ inputs.extra_args }}
shell: bash

.forgejo/actions/setup-llvm-with-apt (action definition, deleted)

@ -1,167 +0,0 @@
name: setup-llvm-with-apt
description: |
Set up LLVM toolchain with APT package management and smart caching.
Supports cross-compilation architectures and additional package installation.
Creates symlinks in /usr/bin: clang, clang++, lld, llvm-ar, llvm-ranlib
inputs:
dpkg-arch:
description: 'Debian architecture for cross-compilation (e.g. arm64)'
required: false
default: ''
extra-packages:
description: 'Additional APT packages to install (space-separated)'
required: false
default: ''
llvm-version:
description: 'LLVM version to install'
required: false
default: '20'
outputs:
llvm-version:
description: 'Installed LLVM version'
value: ${{ steps.configure.outputs.version }}
runs:
using: composite
steps:
- name: Detect runner OS
id: runner-os
uses: ./.forgejo/actions/detect-runner-os
- name: Configure cross-compilation architecture
if: inputs.dpkg-arch != ''
shell: bash
run: |
echo "🏗️ Adding ${{ inputs.dpkg-arch }} architecture"
sudo dpkg --add-architecture ${{ inputs.dpkg-arch }}
# Restrict default sources to amd64
sudo sed -i 's/^deb http/deb [arch=amd64] http/g' /etc/apt/sources.list
sudo sed -i 's/^deb https/deb [arch=amd64] https/g' /etc/apt/sources.list
# Add ports sources for foreign architecture
sudo tee /etc/apt/sources.list.d/${{ inputs.dpkg-arch }}.list > /dev/null <<EOF
deb [arch=${{ inputs.dpkg-arch }}] http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe multiverse
deb [arch=${{ inputs.dpkg-arch }}] http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe multiverse
deb [arch=${{ inputs.dpkg-arch }}] http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
EOF
echo "✅ Architecture ${{ inputs.dpkg-arch }} configured"
- name: Start LLVM cache group
shell: bash
run: echo "::group::📦 Restoring LLVM cache"
- name: Check for LLVM cache
id: cache
uses: https://github.com/actions/cache@v4
with:
path: |
/usr/bin/clang-*
/usr/bin/clang++-*
/usr/bin/lld-*
/usr/bin/llvm-*
/usr/lib/llvm-*/
/usr/lib/x86_64-linux-gnu/libLLVM*.so*
/usr/lib/x86_64-linux-gnu/libclang*.so*
/etc/apt/sources.list.d/archive_uri-*
/etc/apt/trusted.gpg.d/apt.llvm.org.asc
key: llvm-${{ steps.runner-os.outputs.slug }}-v${{ inputs.llvm-version }}-v3-${{ hashFiles('**/Cargo.lock', 'rust-toolchain.toml') }}
- name: End LLVM cache group
shell: bash
run: echo "::endgroup::"
- name: Check and install LLVM if needed
id: llvm-setup
shell: bash
run: |
echo "🔍 Checking for LLVM ${{ inputs.llvm-version }}..."
# Check both binaries and libraries exist
if [ -f "/usr/bin/clang-${{ inputs.llvm-version }}" ] && \
[ -f "/usr/bin/clang++-${{ inputs.llvm-version }}" ] && \
[ -f "/usr/bin/lld-${{ inputs.llvm-version }}" ] && \
([ -f "/usr/lib/x86_64-linux-gnu/libLLVM.so.${{ inputs.llvm-version }}.1" ] || \
[ -f "/usr/lib/x86_64-linux-gnu/libLLVM-${{ inputs.llvm-version }}.so.1" ] || \
[ -f "/usr/lib/llvm-${{ inputs.llvm-version }}/lib/libLLVM.so" ]); then
echo "✅ LLVM ${{ inputs.llvm-version }} found and verified"
echo "needs-install=false" >> $GITHUB_OUTPUT
else
echo "📦 LLVM ${{ inputs.llvm-version }} not found or incomplete - installing..."
echo "::group::🔧 Installing LLVM ${{ inputs.llvm-version }}"
wget -O - https://apt.llvm.org/llvm.sh | bash -s -- ${{ inputs.llvm-version }}
echo "::endgroup::"
if [ ! -f "/usr/bin/clang-${{ inputs.llvm-version }}" ]; then
echo "❌ Failed to install LLVM ${{ inputs.llvm-version }}"
exit 1
fi
echo "✅ Installed LLVM ${{ inputs.llvm-version }}"
echo "needs-install=true" >> $GITHUB_OUTPUT
fi
- name: Prepare for additional packages
if: inputs.extra-packages != ''
shell: bash
run: |
# Update APT if LLVM was cached (installer script already does apt-get update)
if [[ "${{ steps.llvm-setup.outputs.needs-install }}" != "true" ]]; then
echo "::group::📦 Running apt-get update (LLVM cached, extra packages needed)"
sudo apt-get update
echo "::endgroup::"
fi
echo "::group::📦 Installing additional packages"
- name: Install additional packages
if: inputs.extra-packages != ''
uses: https://github.com/awalsh128/cache-apt-pkgs-action@latest
with:
packages: ${{ inputs.extra-packages }}
version: 1.0
- name: End package installation group
if: inputs.extra-packages != ''
shell: bash
run: echo "::endgroup::"
- name: Configure LLVM environment
id: configure
shell: bash
run: |
echo "::group::🔧 Configuring LLVM ${{ inputs.llvm-version }} environment"
# Create symlinks
sudo ln -sf "/usr/bin/clang-${{ inputs.llvm-version }}" /usr/bin/clang
sudo ln -sf "/usr/bin/clang++-${{ inputs.llvm-version }}" /usr/bin/clang++
sudo ln -sf "/usr/bin/lld-${{ inputs.llvm-version }}" /usr/bin/lld
sudo ln -sf "/usr/bin/llvm-ar-${{ inputs.llvm-version }}" /usr/bin/llvm-ar
sudo ln -sf "/usr/bin/llvm-ranlib-${{ inputs.llvm-version }}" /usr/bin/llvm-ranlib
echo " ✓ Created symlinks"
# Setup library paths
LLVM_LIB_PATH="/usr/lib/llvm-${{ inputs.llvm-version }}/lib"
if [ -d "$LLVM_LIB_PATH" ]; then
echo "LD_LIBRARY_PATH=${LLVM_LIB_PATH}:${LD_LIBRARY_PATH:-}" >> $GITHUB_ENV
echo "LIBCLANG_PATH=${LLVM_LIB_PATH}" >> $GITHUB_ENV
echo "$LLVM_LIB_PATH" | sudo tee "/etc/ld.so.conf.d/llvm-${{ inputs.llvm-version }}.conf" > /dev/null
sudo ldconfig
echo " ✓ Configured library paths"
else
# Fallback to standard library location
if [ -d "/usr/lib/x86_64-linux-gnu" ]; then
echo "LIBCLANG_PATH=/usr/lib/x86_64-linux-gnu" >> $GITHUB_ENV
echo " ✓ Using fallback library path"
fi
fi
# Set output
echo "version=${{ inputs.llvm-version }}" >> $GITHUB_OUTPUT
echo "::endgroup::"
echo "✅ LLVM ready: $(clang --version | head -1)"

.forgejo/actions/setup-rust (action definition, deleted)

@ -1,236 +0,0 @@
name: setup-rust
description: |
Set up Rust toolchain with sccache for compilation caching.
Respects rust-toolchain.toml by default or accepts explicit version override.
inputs:
cache-key-suffix:
description: 'Optional suffix for cache keys (e.g. platform identifier)'
required: false
default: ''
rust-components:
description: 'Additional Rust components to install (space-separated)'
required: false
default: ''
rust-target:
description: 'Rust target triple (e.g. x86_64-unknown-linux-gnu)'
required: false
default: ''
rust-version:
description: 'Rust version to install (e.g. nightly). Defaults to 1.87.0'
required: false
default: '1.87.0'
sccache-cache-limit:
description: 'Maximum size limit for sccache local cache (e.g. 2G, 500M)'
required: false
default: '2G'
github-token:
description: 'GitHub token for downloading sccache from GitHub releases'
required: false
default: ''
outputs:
rust-version:
description: 'Installed Rust version'
value: ${{ steps.rust-setup.outputs.version }}
runs:
using: composite
steps:
- name: Detect runner OS
id: runner-os
uses: ./.forgejo/actions/detect-runner-os
- name: Configure Cargo environment
shell: bash
run: |
# Use workspace-relative paths for better control and consistency
echo "CARGO_HOME=${{ github.workspace }}/.cargo" >> $GITHUB_ENV
echo "CARGO_TARGET_DIR=${{ github.workspace }}/target" >> $GITHUB_ENV
echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> $GITHUB_ENV
echo "RUSTUP_HOME=${{ github.workspace }}/.rustup" >> $GITHUB_ENV
# Limit binstall resolution timeout to avoid GitHub rate limit delays
echo "BINSTALL_MAXIMUM_RESOLUTION_TIMEOUT=10" >> $GITHUB_ENV
# Ensure directories exist for first run
mkdir -p "${{ github.workspace }}/.cargo"
mkdir -p "${{ github.workspace }}/.sccache"
mkdir -p "${{ github.workspace }}/target"
mkdir -p "${{ github.workspace }}/.rustup"
- name: Start cache restore group
shell: bash
run: echo "::group::📦 Restoring caches (registry, toolchain, build artifacts)"
- name: Cache Cargo registry and git
id: registry-cache
uses: https://github.com/actions/cache@v4
with:
path: |
.cargo/registry/index
.cargo/registry/cache
.cargo/git/db
# Registry cache saved per workflow, restored from any workflow's cache
# Each workflow maintains its own registry that accumulates its needed crates
key: cargo-registry-${{ steps.runner-os.outputs.slug }}-${{ github.workflow }}
restore-keys: |
cargo-registry-${{ steps.runner-os.outputs.slug }}-
- name: Cache toolchain binaries
id: toolchain-cache
uses: https://github.com/actions/cache@v4
with:
path: |
.cargo/bin
.rustup/toolchains
.rustup/update-hashes
# Shared toolchain cache across all Rust versions
key: toolchain-${{ steps.runner-os.outputs.slug }}
- name: Debug GitHub token availability
shell: bash
run: |
if [ -z "${{ inputs.github-token }}" ]; then
echo "⚠️ No GitHub token provided - sccache will use fallback download method"
else
echo "✅ GitHub token provided for sccache"
fi
- name: Setup sccache
uses: https://github.com/mozilla-actions/sccache-action@v0.0.9
with:
token: ${{ inputs.github-token }}
- name: Cache build artifacts
id: build-cache
uses: https://github.com/actions/cache@v4
with:
path: |
target/**/deps
!target/**/deps/*.rlib
target/**/build
target/**/.fingerprint
target/**/incremental
target/**/*.d
/timelord/
# Build artifacts - cache per code change, restore from deps when code changes
key: >-
build-${{ steps.runner-os.outputs.slug }}-${{ inputs.rust-version }}${{ inputs.cache-key-suffix && format('-{0}', inputs.cache-key-suffix) || '' }}-${{ hashFiles('rust-toolchain.toml', '**/Cargo.lock') }}-${{ hashFiles('**/*.rs', '**/Cargo.toml') }}
restore-keys: |
build-${{ steps.runner-os.outputs.slug }}-${{ inputs.rust-version }}${{ inputs.cache-key-suffix && format('-{0}', inputs.cache-key-suffix) || '' }}-${{ hashFiles('rust-toolchain.toml', '**/Cargo.lock') }}-
- name: End cache restore group
shell: bash
run: echo "::endgroup::"
- name: Setup Rust toolchain
shell: bash
run: |
# Install rustup if not already cached
if ! command -v rustup &> /dev/null; then
echo "::group::📦 Installing rustup"
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain none
source "$CARGO_HOME/env"
echo "::endgroup::"
else
echo "✅ rustup already available"
fi
# Setup the appropriate Rust version
if [[ -n "${{ inputs.rust-version }}" ]]; then
echo "::group::📦 Setting up Rust ${{ inputs.rust-version }}"
# Set override first to prevent rust-toolchain.toml from auto-installing
rustup override set ${{ inputs.rust-version }} 2>/dev/null || true
# Check if we need to install/update the toolchain
if rustup toolchain list | grep -q "^${{ inputs.rust-version }}-"; then
rustup update ${{ inputs.rust-version }}
else
rustup toolchain install ${{ inputs.rust-version }} --profile minimal -c cargo,clippy,rustfmt
fi
else
echo "::group::📦 Setting up Rust from rust-toolchain.toml"
rustup show
fi
echo "::endgroup::"
- name: Configure PATH and install tools
shell: bash
env:
GITHUB_TOKEN: ${{ inputs.github-token }}
run: |
# Add .cargo/bin to PATH permanently for all subsequent steps
echo "${{ github.workspace }}/.cargo/bin" >> $GITHUB_PATH
# For this step only, we need to add it to PATH since GITHUB_PATH takes effect in the next step
export PATH="${{ github.workspace }}/.cargo/bin:$PATH"
# Install cargo-binstall for fast binary installations
if command -v cargo-binstall &> /dev/null; then
echo "✅ cargo-binstall already available"
else
echo "::group::📦 Installing cargo-binstall"
curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash
echo "::endgroup::"
fi
if command -v prek &> /dev/null; then
echo "✅ prek already available"
else
echo "::group::📦 Installing prek"
# prek isn't regularly published to crates.io, so we use git source
cargo-binstall -y --no-symlinks --git https://github.com/j178/prek prek
echo "::endgroup::"
fi
if command -v timelord &> /dev/null; then
echo "✅ timelord already available"
else
echo "::group::📦 Installing timelord"
cargo-binstall -y --no-symlinks timelord-cli
echo "::endgroup::"
fi
- name: Configure sccache environment
shell: bash
run: |
echo "RUSTC_WRAPPER=sccache" >> $GITHUB_ENV
echo "CMAKE_C_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV
echo "CMAKE_CXX_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV
echo "CMAKE_CUDA_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV
echo "SCCACHE_GHA_ENABLED=true" >> $GITHUB_ENV
# Configure incremental compilation GC
# If we restored from old cache (partial hit), clean up aggressively
if [[ "${{ steps.build-cache.outputs.cache-hit }}" != "true" ]]; then
echo "♻️ Partial cache hit - enabling cache cleanup"
echo "CARGO_INCREMENTAL_GC_THRESHOLD=5" >> $GITHUB_ENV
fi
- name: Install Rust components
if: inputs.rust-components != ''
shell: bash
run: |
echo "📦 Installing components: ${{ inputs.rust-components }}"
rustup component add ${{ inputs.rust-components }}
- name: Install Rust target
if: inputs.rust-target != ''
shell: bash
run: |
echo "📦 Installing target: ${{ inputs.rust-target }}"
rustup target add ${{ inputs.rust-target }}
- name: Output version and summary
id: rust-setup
shell: bash
run: |
RUST_VERSION=$(rustc --version | cut -d' ' -f2)
echo "version=$RUST_VERSION" >> $GITHUB_OUTPUT
echo "📋 Setup complete:"
echo " Rust: $(rustc --version)"
echo " Cargo: $(cargo --version)"
echo " prek: $(prek --version 2>/dev/null || echo 'installed')"
echo " timelord: $(timelord --version 2>/dev/null || echo 'installed')"

Workflow "Checks / Prefligit" (new file)

@ -0,0 +1,22 @@
name: Checks / Prefligit
on:
push:
pull_request:
permissions:
contents: read
jobs:
prefligit:
runs-on: ubuntu-latest
env:
FROM_REF: ${{ github.event.pull_request.base.sha || (!github.event.forced && ( github.event.before != '0000000000000000000000000000000000000000' && github.event.before || github.sha )) || format('{0}~', github.sha) }}
TO_REF: ${{ github.sha }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
persist-credentials: false
- uses: ./.forgejo/actions/prefligit
with:
extra_args: --all-files --hook-stage manual
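
The FROM_REF/TO_REF environment variables computed above are not consumed by the steps shown; presumably they exist so a changed-files-only run can be wired up. A hedged sketch, assuming prefligit accepts pre-commit's `--from-ref`/`--to-ref` flags:

```yaml
# Sketch (assumption): restricting prefligit to files changed between two refs.
- uses: ./.forgejo/actions/prefligit
  with:
    extra_args: --from-ref ${{ env.FROM_REF }} --to-ref ${{ env.TO_REF }}
```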

Workflow "Checks / Prek" (deleted)

@ -1,79 +0,0 @@
name: Checks / Prek
on:
push:
permissions:
contents: read
jobs:
fast-checks:
name: Pre-commit & Formatting
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Rust nightly
uses: ./.forgejo/actions/setup-rust
with:
rust-version: nightly
github-token: ${{ secrets.GH_PUBLIC_RO }}
- name: Run prek
run: |
prek run \
--all-files \
--hook-stage manual \
--show-diff-on-failure \
--color=always \
-v
- name: Check Rust formatting
run: |
cargo +nightly fmt --all -- --check && \
echo "✅ Formatting check passed" || \
exit 1
clippy-and-tests:
name: Clippy and Cargo Tests
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup LLVM
uses: ./.forgejo/actions/setup-llvm-with-apt
with:
extra-packages: liburing-dev liburing2
- name: Setup Rust with caching
uses: ./.forgejo/actions/setup-rust
with:
github-token: ${{ secrets.GH_PUBLIC_RO }}
- name: Run Clippy lints
run: |
cargo clippy \
--workspace \
--features full \
--locked \
--no-deps \
--profile test \
-- \
-D warnings
- name: Run Cargo tests
run: |
cargo test \
--workspace \
--features full \
--locked \
--profile test \
--all-targets \
--no-fail-fast

.forgejo/workflows/renovate.yml (deleted)

@ -1,62 +0,0 @@
name: Maintenance / Renovate
on:
schedule:
# Run at 5am UTC daily to avoid late-night dev
- cron: '0 5 * * *'
workflow_dispatch:
inputs:
dryRun:
description: 'Dry run mode'
required: false
default: null
type: choice
options:
- null
- 'extract'
- 'lookup'
- 'full'
logLevel:
description: 'Log level'
required: false
default: 'info'
type: choice
options:
- 'info'
- 'warning'
- 'critical'
push:
branches:
- main
paths:
# Re-run when config changes
- '.forgejo/workflows/renovate.yml'
- 'renovate.json'
jobs:
renovate:
name: Renovate
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Self-hosted Renovate
uses: https://github.com/renovatebot/github-action@v40.1.0
env:
LOG_LEVEL: ${{ inputs.logLevel || 'info' }}
RENOVATE_AUTODISCOVER: 'false'
RENOVATE_BINARY_SOURCE: 'install'
RENOVATE_DRY_RUN: ${{ inputs.dryRun || 'false' }}
RENOVATE_ENDPOINT: ${{ github.server_url }}/api/v1
RENOVATE_GIT_TIMEOUT: 60000
RENOVATE_GIT_URL: 'endpoint'
RENOVATE_GITHUB_TOKEN_WARN: 'false'
RENOVATE_ONBOARDING: 'false'
RENOVATE_PLATFORM: 'forgejo'
RENOVATE_PR_COMMITS_PER_RUN_LIMIT: 3
RENOVATE_REPOSITORIES: '["${{ github.repository }}"]'
RENOVATE_REQUIRE_CONFIG: 'required'
RENOVATE_TOKEN: ${{ secrets.RENOVATE_TOKEN }}

Workflow "Checks / Rust" (new file)

@ -0,0 +1,144 @@
name: Checks / Rust
on:
push:
jobs:
format:
name: Format
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Install rust
uses: ./.forgejo/actions/rust-toolchain
with:
toolchain: "nightly"
components: "rustfmt"
- name: Check formatting
run: |
cargo +nightly fmt --all -- --check
clippy:
name: Clippy
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Install rust
uses: ./.forgejo/actions/rust-toolchain
- uses: https://github.com/actions/create-github-app-token@v2
id: app-token
with:
app-id: ${{ vars.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
github-api-url: https://api.github.com
owner: ${{ vars.GH_APP_OWNER }}
repositories: ""
- name: Install sccache
uses: ./.forgejo/actions/sccache
with:
token: ${{ steps.app-token.outputs.token }}
- run: sudo apt-get update
- name: Install system dependencies
uses: https://github.com/awalsh128/cache-apt-pkgs-action@v1
with:
packages: clang liburing-dev
version: 1
- name: Cache Rust registry
uses: actions/cache@v3
with:
path: |
~/.cargo/git
!~/.cargo/git/checkouts
~/.cargo/registry
!~/.cargo/registry/src
key: rust-registry-${{hashFiles('**/Cargo.lock') }}
- name: Timelord
uses: ./.forgejo/actions/timelord
with:
key: sccache-v0
path: .
- name: Clippy
run: |
cargo clippy \
--workspace \
--features full \
--locked \
--no-deps \
--profile test \
-- \
-D warnings
- name: Show sccache stats
if: always()
run: sccache --show-stats
cargo-test:
name: Cargo Test
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Install rust
uses: ./.forgejo/actions/rust-toolchain
- uses: https://github.com/actions/create-github-app-token@v2
id: app-token
with:
app-id: ${{ vars.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
github-api-url: https://api.github.com
owner: ${{ vars.GH_APP_OWNER }}
repositories: ""
- name: Install sccache
uses: ./.forgejo/actions/sccache
with:
token: ${{ steps.app-token.outputs.token }}
- run: sudo apt-get update
- name: Install system dependencies
uses: https://github.com/awalsh128/cache-apt-pkgs-action@v1
with:
packages: clang liburing-dev
version: 1
- name: Cache Rust registry
uses: actions/cache@v3
with:
path: |
~/.cargo/git
!~/.cargo/git/checkouts
~/.cargo/registry
!~/.cargo/registry/src
key: rust-registry-${{hashFiles('**/Cargo.lock') }}
- name: Timelord
uses: ./.forgejo/actions/timelord
with:
key: sccache-v0
path: .
- name: Cargo Test
run: |
cargo test \
--workspace \
--features full \
--locked \
--profile test \
--all-targets \
--no-fail-fast
- name: Show sccache stats
if: always()
run: sccache --show-stats

.github/FUNDING.yml (5 lines changed, vendored)

@ -1,4 +1,5 @@
github: [JadedBlueEyes, nexy7574]
github: [JadedBlueEyes]
# Doesn't support an array, so we can only list nex
ko_fi: nexy7574
custom:
- https://ko-fi.com/nexy7574
- https://ko-fi.com/JadedBlueEyes

.pre-commit-config.yaml

@ -9,7 +9,7 @@ repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: fix-byte-order-marker
- id: check-byte-order-marker
- id: check-case-conflict
- id: check-symlinks
- id: destroyed-symlinks

Cargo.lock (413 lines changed, generated)

@ -126,7 +126,7 @@ checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -183,7 +183,7 @@ dependencies = [
"rustc-hash 2.1.1",
"serde",
"serde_derive",
"syn 2.0.104",
"syn",
]
[[package]]
@ -198,45 +198,6 @@ dependencies = [
"winnow",
]
[[package]]
name = "asn1-rs"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f6fd5ddaf0351dff5b8da21b2fb4ff8e08ddd02857f0bf69c47639106c0fff0"
dependencies = [
"asn1-rs-derive",
"asn1-rs-impl",
"displaydoc",
"nom",
"num-traits",
"rusticata-macros",
"thiserror 1.0.69",
"time",
]
[[package]]
name = "asn1-rs-derive"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "726535892e8eae7e70657b4c8ea93d26b8553afb1ce617caee529ef96d7dee6c"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"synstructure 0.12.6",
]
[[package]]
name = "asn1-rs-impl"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "assign"
version = "1.1.1"
@ -289,7 +250,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -300,7 +261,7 @@ checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -472,11 +433,11 @@ dependencies = [
"hyper",
"hyper-util",
"pin-project-lite",
"rustls 0.23.29",
"rustls-pemfile 2.2.0",
"rustls",
"rustls-pemfile",
"rustls-pki-types",
"tokio",
"tokio-rustls 0.26.2",
"tokio-rustls",
"tower-service",
]
@ -491,9 +452,9 @@ dependencies = [
"http",
"http-body-util",
"pin-project",
"rustls 0.23.29",
"rustls",
"tokio",
"tokio-rustls 0.26.2",
"tokio-rustls",
"tokio-util",
"tower-layer",
"tower-service",
@ -560,7 +521,7 @@ dependencies = [
"regex",
"rustc-hash 1.1.0",
"shlex",
"syn 2.0.104",
"syn",
"which",
]
@ -579,7 +540,7 @@ dependencies = [
"regex",
"rustc-hash 2.1.1",
"shlex",
"syn 2.0.104",
"syn",
]
[[package]]
@ -833,7 +794,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -1011,7 +972,7 @@ dependencies = [
"rand 0.8.5",
"regex",
"reqwest",
"ring 0.17.14",
"ring",
"ruma",
"sanitize-filename",
"serde",
@ -1058,7 +1019,7 @@ dependencies = [
"itertools 0.14.0",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -1083,7 +1044,7 @@ dependencies = [
"hyper-util",
"log",
"ruma",
"rustls 0.23.29",
"rustls",
"sd-notify",
"sentry",
"sentry-tower",
@ -1113,7 +1074,6 @@ dependencies = [
"image",
"ipaddress",
"itertools 0.14.0",
"ldap3",
"log",
"loole",
"lru-cache",
@ -1223,16 +1183,6 @@ dependencies = [
"crossterm",
]
[[package]]
name = "core-foundation"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]]
name = "core-foundation"
version = "0.10.1"
@ -1306,7 +1256,7 @@ dependencies = [
"proc-macro2",
"quote",
"strict",
"syn 2.0.104",
"syn",
]
[[package]]
@ -1416,7 +1366,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501"
dependencies = [
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -1443,7 +1393,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -1484,20 +1434,6 @@ dependencies = [
"zeroize",
]
[[package]]
name = "der-parser"
version = "8.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbd676fbbab537128ef0278adb5576cf363cff6aa22a7b24effe97347cfab61e"
dependencies = [
"asn1-rs",
"displaydoc",
"nom",
"num-bigint",
"num-traits",
"rusticata-macros",
]
[[package]]
name = "deranged"
version = "0.4.0"
@ -1525,7 +1461,7 @@ dependencies = [
"convert_case",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -1547,7 +1483,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -1608,7 +1544,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -1628,7 +1564,7 @@ checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -1798,7 +1734,6 @@ checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
dependencies = [
"futures-channel",
"futures-core",
"futures-executor",
"futures-io",
"futures-sink",
"futures-task",
@ -1846,7 +1781,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -2095,7 +2030,7 @@ dependencies = [
"ipnet",
"once_cell",
"rand 0.9.2",
"ring 0.17.14",
"ring",
"serde",
"thiserror 2.0.12",
"tinyvec",
@ -2187,7 +2122,7 @@ dependencies = [
"markup5ever",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -2281,11 +2216,11 @@ dependencies = [
"http",
"hyper",
"hyper-util",
"rustls 0.23.29",
"rustls-native-certs 0.8.1",
"rustls",
"rustls-native-certs",
"rustls-pki-types",
"tokio",
"tokio-rustls 0.26.2",
"tokio-rustls",
"tower-service",
"webpki-roots 1.0.2",
]
@ -2509,7 +2444,7 @@ checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -2678,7 +2613,7 @@ dependencies = [
"proc-macro2",
"quote",
"regex",
"syn 2.0.104",
"syn",
]
[[package]]
@ -2693,43 +2628,6 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "lber"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2df7f9fd9f64cf8f59e1a4a0753fe7d575a5b38d3d7ac5758dcee9357d83ef0a"
dependencies = [
"bytes",
"nom",
]
[[package]]
name = "ldap3"
version = "0.11.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "166199a8207874a275144c8a94ff6eed5fcbf5c52303e4d9b4d53a0c7ac76554"
dependencies = [
"async-trait",
"bytes",
"futures",
"futures-util",
"lazy_static",
"lber",
"log",
"nom",
"percent-encoding",
"ring 0.16.20",
"rustls 0.21.12",
"rustls-native-certs 0.6.3",
"thiserror 1.0.69",
"tokio",
"tokio-rustls 0.24.1",
"tokio-stream",
"tokio-util",
"url",
"x509-parser",
]
[[package]]
name = "lebe"
version = "0.5.2"
@ -2968,7 +2866,7 @@ checksum = "a9882ef5c56df184b8ffc107fc6c61e33ee3a654b021961d790a78571bb9d67a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -3134,7 +3032,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -3196,15 +3094,6 @@ dependencies = [
"memchr",
]
[[package]]
name = "oid-registry"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bedf36ffb6ba96c2eb7144ef6270557b52e54b20c0a8e1eb2ff99a6c6959bff"
dependencies = [
"asn1-rs",
]
[[package]]
name = "once_cell"
version = "1.21.3"
@ -3395,7 +3284,7 @@ dependencies = [
"proc-macro2",
"proc-macro2-diagnostics",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -3469,7 +3358,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -3569,7 +3458,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff24dfcda44452b9816fff4cd4227e1bb73ff5a2f1bc1105aa92fb8565ce44d2"
dependencies = [
"proc-macro2",
"syn 2.0.104",
"syn",
]
[[package]]
@ -3598,7 +3487,7 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
"version_check",
"yansi",
]
@ -3619,7 +3508,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52717f9a02b6965224f95ca2a81e2e0c5c43baacd28ca057577988930b6c3d5b"
dependencies = [
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -3642,7 +3531,7 @@ dependencies = [
"itertools 0.14.0",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -3708,7 +3597,7 @@ dependencies = [
"quinn-proto",
"quinn-udp",
"rustc-hash 2.1.1",
"rustls 0.23.29",
"rustls",
"socket2",
"thiserror 2.0.12",
"tokio",
@ -3726,9 +3615,9 @@ dependencies = [
"getrandom 0.3.3",
"lru-slab",
"rand 0.9.2",
"ring 0.17.14",
"ring",
"rustc-hash 2.1.1",
"rustls 0.23.29",
"rustls",
"rustls-pki-types",
"slab",
"thiserror 2.0.12",
@ -3987,16 +3876,16 @@ dependencies = [
"percent-encoding",
"pin-project-lite",
"quinn",
"rustls 0.23.29",
"rustls-native-certs 0.8.1",
"rustls-pemfile 2.2.0",
"rustls",
"rustls-native-certs",
"rustls-pemfile",
"rustls-pki-types",
"serde",
"serde_json",
"serde_urlencoded",
"sync_wrapper",
"tokio",
"tokio-rustls 0.26.2",
"tokio-rustls",
"tokio-socks",
"tokio-util",
"tower 0.5.2",
@ -4020,21 +3909,6 @@ version = "0.8.52"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c6a884d2998352bb4daf0183589aec883f16a6da1f4dde84d8e2e9a5409a1ce"
[[package]]
name = "ring"
version = "0.16.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc"
dependencies = [
"cc",
"libc",
"once_cell",
"spin",
"untrusted 0.7.1",
"web-sys",
"winapi",
]
[[package]]
name = "ring"
version = "0.17.14"
@ -4045,7 +3919,7 @@ dependencies = [
"cfg-if",
"getrandom 0.2.16",
"libc",
"untrusted 0.9.0",
"untrusted",
"windows-sys 0.52.0",
]
@ -4219,7 +4093,7 @@ dependencies = [
"quote",
"ruma-identifiers-validation",
"serde",
"syn 2.0.104",
"syn",
"toml",
]
@ -4304,15 +4178,6 @@ dependencies = [
"semver",
]
[[package]]
name = "rusticata-macros"
version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632"
dependencies = [
"nom",
]
[[package]]
name = "rustix"
version = "0.38.44"
@ -4339,18 +4204,6 @@ dependencies = [
"windows-sys 0.60.2",
]
[[package]]
name = "rustls"
version = "0.21.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e"
dependencies = [
"log",
"ring 0.17.14",
"rustls-webpki 0.101.7",
"sct",
]
[[package]]
name = "rustls"
version = "0.23.29"
@ -4360,25 +4213,13 @@ dependencies = [
"aws-lc-rs",
"log",
"once_cell",
"ring 0.17.14",
"ring",
"rustls-pki-types",
"rustls-webpki 0.103.4",
"rustls-webpki",
"subtle",
"zeroize",
]
[[package]]
name = "rustls-native-certs"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00"
dependencies = [
"openssl-probe",
"rustls-pemfile 1.0.4",
"schannel",
"security-framework 2.11.1",
]
[[package]]
name = "rustls-native-certs"
version = "0.8.1"
@ -4388,16 +4229,7 @@ dependencies = [
"openssl-probe",
"rustls-pki-types",
"schannel",
"security-framework 3.2.0",
]
[[package]]
name = "rustls-pemfile"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c"
dependencies = [
"base64 0.21.7",
"security-framework",
]
[[package]]
@ -4419,16 +4251,6 @@ dependencies = [
"zeroize",
]
[[package]]
name = "rustls-webpki"
version = "0.101.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
dependencies = [
"ring 0.17.14",
"untrusted 0.9.0",
]
[[package]]
name = "rustls-webpki"
version = "0.103.4"
@ -4436,9 +4258,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc"
dependencies = [
"aws-lc-rs",
"ring 0.17.14",
"ring",
"rustls-pki-types",
"untrusted 0.9.0",
"untrusted",
]
[[package]]
@ -4497,16 +4319,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "sct"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
dependencies = [
"ring 0.17.14",
"untrusted 0.9.0",
]
[[package]]
name = "sd-notify"
version = "0.4.5"
@ -4516,19 +4328,6 @@ dependencies = [
"libc",
]
[[package]]
name = "security-framework"
version = "2.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
dependencies = [
"bitflags 2.9.1",
"core-foundation 0.9.4",
"core-foundation-sys",
"libc",
"security-framework-sys",
]
[[package]]
name = "security-framework"
version = "3.2.0"
@ -4536,7 +4335,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316"
dependencies = [
"bitflags 2.9.1",
"core-foundation 0.10.1",
"core-foundation",
"core-foundation-sys",
"libc",
"security-framework-sys",
@ -4566,7 +4365,7 @@ checksum = "255914a8e53822abd946e2ce8baa41d4cded6b8e938913b7f7b9da5b7ab44335"
dependencies = [
"httpdate",
"reqwest",
"rustls 0.23.29",
"rustls",
"sentry-backtrace",
"sentry-contexts",
"sentry-core",
@ -4710,7 +4509,7 @@ checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -4924,12 +4723,6 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "spin"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]]
name = "spki"
version = "0.7.3"
@ -4998,17 +4791,6 @@ version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.104"
@ -5029,18 +4811,6 @@ dependencies = [
"futures-core",
]
[[package]]
name = "synstructure"
version = "0.12.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"unicode-xid",
]
[[package]]
name = "synstructure"
version = "0.13.2"
@ -5049,7 +4819,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -5140,7 +4910,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -5151,7 +4921,7 @@ checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -5318,7 +5088,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -5333,23 +5103,13 @@ dependencies = [
"tokio-stream",
]
[[package]]
name = "tokio-rustls"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
dependencies = [
"rustls 0.21.12",
"tokio",
]
[[package]]
name = "tokio-rustls"
version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b"
dependencies = [
"rustls 0.23.29",
"rustls",
"tokio",
]
@ -5547,7 +5307,7 @@ source = "git+https://forgejo.ellis.link/continuwuation/tracing?rev=1e64095a8051
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -5701,24 +5461,12 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"
[[package]]
name = "unicode-xid"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
[[package]]
name = "unsafe-libyaml"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
[[package]]
name = "untrusted"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
[[package]]
name = "untrusted"
version = "0.9.0"
@ -5734,7 +5482,7 @@ dependencies = [
"base64 0.22.1",
"log",
"once_cell",
"rustls 0.23.29",
"rustls",
"rustls-pki-types",
"url",
"webpki-roots 0.26.11",
@ -5869,7 +5617,7 @@ dependencies = [
"log",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
"wasm-bindgen-shared",
]
@ -5904,7 +5652,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@ -6084,7 +5832,7 @@ checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -6095,7 +5843,7 @@ checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -6416,23 +6164,6 @@ version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
[[package]]
name = "x509-parser"
version = "0.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7069fba5b66b9193bd2c5d3d4ff12b839118f6bcbef5328efafafb5395cf63da"
dependencies = [
"asn1-rs",
"data-encoding",
"der-parser",
"lazy_static",
"nom",
"oid-registry",
"rusticata-macros",
"thiserror 1.0.69",
"time",
]
[[package]]
name = "xml5ever"
version = "0.18.1"
@ -6490,8 +6221,8 @@ checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"synstructure 0.13.2",
"syn",
"synstructure",
]
[[package]]
@ -6511,7 +6242,7 @@ checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]
@ -6531,8 +6262,8 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"synstructure 0.13.2",
"syn",
"synstructure",
]
[[package]]
@ -6571,7 +6302,7 @@ checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn",
]
[[package]]

Cargo.toml (workspace manifest)

@ -546,11 +546,6 @@ features = ["std"]
[workspace.dependencies.maplit]
version = "1.0.2"
[workspace.dependencies.ldap3]
version = "0.11.5"
default-features = false
features = ["sync", "tls-rustls"]
#
# Patches
#
@ -872,7 +867,7 @@ unused-qualifications = "warn"
#unused-results = "warn" # TODO
## some sadness
mismatched_lifetime_syntaxes = "allow" # TODO!
elided_named_lifetimes = "allow" # TODO!
let_underscore_drop = "allow"
missing_docs = "allow"
# cfgs cannot be limited to expected cfgs or their de facto non-transitive/opt-in use-case e.g.
@ -1011,6 +1006,3 @@ literal_string_with_formatting_args = { level = "allow", priority = 1 }
needless_raw_string_hashes = "allow"
# TODO: Enable this lint & fix all instances
collapsible_if = "allow"

conduwuit.service (systemd unit)

@ -64,7 +64,7 @@ StateDirectory=conduwuit
RuntimeDirectory=conduwuit
RuntimeDirectoryMode=0750
Environment=CONTINUWUITY_CONFIG=%d/config.toml
Environment=CONTINUWUITY_CONFIG=${CREDENTIALS_DIRECTORY}/config.toml
LoadCredential=config.toml:/etc/conduwuit/conduwuit.toml
BindPaths=/var/lib/private/conduwuit:/var/lib/matrix-conduit
BindPaths=/var/lib/private/conduwuit:/var/lib/private/matrix-conduit

conduwuit-example.toml

@ -1696,10 +1696,6 @@
#
#config_reload_signal = true
# This item is undocumented. Please contribute documentation for it.
#
#ldap = false
[global.tls]
# Path to a valid TLS certificate file.
@ -1778,91 +1774,3 @@
# is 33.55MB. Setting it to 0 disables blurhashing.
#
#blurhash_max_raw_size = 33554432
[global.ldap]
# Whether to enable LDAP login.
#
# example: "true"
#
#enable = false
# Whether to force LDAP authentication or authorize classical password
# login.
#
# example: "true"
#
#ldap_only = false
# URI of the LDAP server.
#
# example: "ldap://ldap.example.com:389"
#
#uri = ""
# Root of the searches.
#
# example: "ou=users,dc=example,dc=org"
#
#base_dn = ""
# Bind DN if anonymous search is not enabled.
#
# You can use the variable `{username}` that will be replaced by the
# entered username. In such case, the password used to bind will be the
# one provided for the login and not the one given by
# `bind_password_file`. Beware: automatically granting admin rights will
# not work if you use this direct bind instead of a LDAP search.
#
# example: "cn=ldap-reader,dc=example,dc=org" or
# "cn={username},ou=users,dc=example,dc=org"
#
#bind_dn = ""
# Path to a file on the system that contains the password for the
# `bind_dn`.
#
# The server must be able to access the file, and it must not be empty.
#
#bind_password_file = ""
# Search filter to limit user searches.
#
# You can use the variable `{username}` that will be replaced by the
# entered username for more complex filters.
#
# example: "(&(objectClass=person)(memberOf=matrix))"
#
#filter = "(objectClass=*)"
# Attribute to use to uniquely identify the user.
#
# example: "uid" or "cn"
#
#uid_attribute = "uid"
# Attribute containing the display name of the user.
#
# example: "givenName" or "sn"
#
#name_attribute = "givenName"
# Root of the searches for admin users.
#
# Defaults to `base_dn` if empty.
#
# example: "ou=admins,dc=example,dc=org"
#
#admin_base_dn = ""
# The LDAP search filter to find administrative users for continuwuity.
#
# If left blank, administrative state must be configured manually for each
# user.
#
# You can use the variable `{username}` that will be replaced by the
# entered username for more complex filters.
#
# example: "(objectClass=conduwuitAdmin)" or "(uid={username})"
#
#admin_filter = ""

docker-compose example (Traefik labels)

@ -12,15 +12,6 @@ services:
#- ./continuwuity.toml:/etc/continuwuity.toml
networks:
- proxy
labels:
- "traefik.enable=true"
- "traefik.http.routers.continuwuity.rule=(Host(`matrix.example.com`) || (Host(`example.com`) && PathPrefix(`/.well-known/matrix`)))"
- "traefik.http.routers.continuwuity.entrypoints=websecure" # your HTTPS entry point
- "traefik.http.routers.continuwuity.tls=true"
- "traefik.http.routers.continuwuity.service=continuwuity"
- "traefik.http.services.continuwuity.loadbalancer.server.port=6167"
# possibly, depending on your config:
# - "traefik.http.routers.continuwuity.tls.certresolver=letsencrypt"
environment:
CONTINUWUITY_SERVER_NAME: your.server.name.example # EDIT THIS
CONTINUWUITY_DATABASE_PATH: /var/lib/continuwuity

docker-compose example (Traefik, second variant)

@ -12,14 +12,6 @@ services:
#- ./continuwuity.toml:/etc/continuwuity.toml
networks:
- proxy
labels:
- "traefik.enable=true"
- "traefik.http.routers.continuwuity.rule=(Host(`matrix.example.com`) || (Host(`example.com`) && PathPrefix(`/.well-known/matrix`)))"
- "traefik.http.routers.continuwuity.entrypoints=websecure"
- "traefik.http.routers.continuwuity.tls.certresolver=letsencrypt"
- "traefik.http.services.continuwuity.loadbalancer.server.port=6167"
# Uncomment and adjust the following if you want to use middleware
# - "traefik.http.routers.continuwuity.middlewares=secureHeaders@file"
environment:
CONTINUWUITY_SERVER_NAME: your.server.name.example # EDIT THIS
CONTINUWUITY_TRUSTED_SERVERS: '["matrix.org"]'

fedora/conduwuit.service (deleted)

@ -1,68 +0,0 @@
[Unit]
Description=Continuwuity - Matrix homeserver
Documentation=https://continuwuity.org/
Wants=network-online.target
After=network-online.target
Alias=matrix-conduwuit.service
[Service]
DynamicUser=yes
User=conduwuit
Group=conduwuit
Type=notify
Environment="CONTINUWUITY_CONFIG=/etc/conduwuit/conduwuit.toml"
Environment="CONTINUWUITY_LOG_TO_JOURNALD=true"
Environment="CONTINUWUITY_JOURNALD_IDENTIFIER=%N"
ExecStart=/usr/bin/conduwuit
AmbientCapabilities=
CapabilityBoundingSet=
DevicePolicy=closed
LockPersonality=yes
MemoryDenyWriteExecute=yes
NoNewPrivileges=yes
#ProcSubset=pid
ProtectClock=yes
ProtectControlGroups=yes
ProtectHome=yes
ProtectHostname=yes
ProtectKernelLogs=yes
ProtectKernelModules=yes
ProtectKernelTunables=yes
ProtectProc=invisible
ProtectSystem=strict
PrivateDevices=yes
PrivateMounts=yes
PrivateTmp=yes
PrivateUsers=yes
PrivateIPC=yes
RemoveIPC=yes
RestrictAddressFamilies=AF_INET AF_INET6 AF_UNIX
RestrictNamespaces=yes
RestrictRealtime=yes
RestrictSUIDSGID=yes
SystemCallArchitectures=native
SystemCallFilter=@system-service @resources
SystemCallFilter=~@clock @debug @module @mount @reboot @swap @cpu-emulation @obsolete @timer @chown @setuid @privileged @keyring @ipc
SystemCallErrorNumber=EPERM
StateDirectory=conduwuit
ConfigurationDirectory=conduwuit
RuntimeDirectory=conduwuit
RuntimeDirectoryMode=0750
Restart=on-failure
RestartSec=5
TimeoutStopSec=2m
TimeoutStartSec=2m
StartLimitInterval=1m
StartLimitBurst=5
[Install]
WantedBy=multi-user.target

RPM spec for continuwuity (rpkg, deleted)

@ -1,80 +0,0 @@
# This should be run using rpkg-util: https://docs.pagure.org/rpkg-util
# it requires Internet access and is not suitable for Fedora main repos
# TODO: rpkg-util is no longer maintained, find a replacement
Name: continuwuity
Version: {{{ git_repo_version }}}
Release: 1%{?dist}
Summary: Very cool Matrix chat homeserver written in Rust
License: Apache-2.0 AND MIT
URL: https://continuwuity.org
VCS: {{{ git_repo_vcs }}}
Source: {{{ git_repo_pack }}}
BuildRequires: cargo-rpm-macros >= 25
BuildRequires: systemd-rpm-macros
# Needed to build rust-librocksdb-sys
BuildRequires: clang
BuildRequires: liburing-devel
Requires: liburing
Requires: glibc
Requires: libstdc++
%global _description %{expand:
A cool hard fork of Conduit, a Matrix homeserver written in Rust}
%description %{_description}
%prep
{{{ git_repo_setup_macro }}}
%cargo_prep -N
# Perform an online build so Git dependencies can be retrieved
sed -i 's/^offline = true$//' .cargo/config.toml
%build
%cargo_build
# Here's the one legally required mystery incantation in this file.
# Some of our dependencies have source files which are (for some reason) marked as executable.
# Files in .cargo/registry/ are copied into /usr/src/ by the debuginfo machinery
# at the end of the build step, and then the BRP shebang mangling script checks
# the entire buildroot to find executable files, and fails the build because
# it thinks Rust's file attributes are shebangs because they start with `#!`.
# So we have to clear the executable bit on all of them before that happens.
find .cargo/registry/ -executable -name "*.rs" -exec chmod -x {} +
# TODO: this fails currently because it's forced to run in offline mode
# {cargo_license -- --no-dev} > LICENSE.dependencies
%install
install -Dpm0755 target/rpm/conduwuit -t %{buildroot}%{_bindir}
install -Dpm0644 fedora/conduwuit.service -t %{buildroot}%{_unitdir}
install -Dpm0644 conduwuit-example.toml %{buildroot}%{_sysconfdir}/conduwuit/conduwuit.toml
%files
%license LICENSE
%license src/core/matrix/state_res/LICENSE
%doc CODE_OF_CONDUCT.md
%doc CONTRIBUTING.md
%doc README.md
%doc SECURITY.md
%config %{_sysconfdir}/conduwuit/conduwuit.toml
%{_bindir}/conduwuit
%{_unitdir}/conduwuit.service
# Do not create /var/lib/conduwuit, systemd will create it if necessary
%post
%systemd_post conduwuit.service
%preun
%systemd_preun conduwuit.service
%postun
%systemd_postun_with_restart conduwuit.service
%changelog
{{{ git_repo_changelog }}}

flake.lock (24 lines changed, generated)

@ -153,11 +153,11 @@
"rust-analyzer-src": "rust-analyzer-src"
},
"locked": {
"lastModified": 1755585599,
"narHash": "sha256-tl/0cnsqB/Yt7DbaGMel2RLa7QG5elA8lkaOXli6VdY=",
"lastModified": 1751525020,
"narHash": "sha256-oDO6lCYS5Bf4jUITChj9XV7k3TP38DE0Ckz5n5ORCME=",
"owner": "nix-community",
"repo": "fenix",
"rev": "6ed03ef4c8ec36d193c18e06b9ecddde78fb7e42",
"rev": "a1a5f92f47787e7df9f30e5e5ac13e679215aa1e",
"type": "github"
},
"original": {
@ -516,16 +516,16 @@
"rocksdb": {
"flake": false,
"locked": {
"lastModified": 1753385396,
"narHash": "sha256-/Hvy1yTH/0D5aa7bc+/uqFugCQq4InTdwlRw88vA5IY=",
"ref": "10.4.fb",
"rev": "28d4b7276c16ed3e28af1bd96162d6442ce25923",
"revCount": 13318,
"lastModified": 1741308171,
"narHash": "sha256-YdBvdQ75UJg5ffwNjxizpviCVwVDJnBkM8ZtGIduMgY=",
"ref": "v9.11.1",
"rev": "3ce04794bcfbbb0d2e6f81ae35fc4acf688b6986",
"revCount": 13177,
"type": "git",
"url": "https://forgejo.ellis.link/continuwuation/rocksdb"
},
"original": {
"ref": "10.4.fb",
"ref": "v9.11.1",
"type": "git",
"url": "https://forgejo.ellis.link/continuwuation/rocksdb"
}
@ -546,11 +546,11 @@
"rust-analyzer-src": {
"flake": false,
"locked": {
"lastModified": 1755504847,
"narHash": "sha256-VX0B9hwhJypCGqncVVLC+SmeMVd/GAYbJZ0MiiUn2Pk=",
"lastModified": 1751433876,
"narHash": "sha256-IsdwOcvLLDDlkFNwhdD5BZy20okIQL01+UQ7Kxbqh8s=",
"owner": "rust-lang",
"repo": "rust-analyzer",
"rev": "a905e3b21b144d77e1b304e49f3264f6f8d4db75",
"rev": "11d45c881389dae90b0da5a94cde52c79d0fc7ef",
"type": "github"
},
"original": {

flake.nix

@ -17,7 +17,7 @@
nix-filter.url = "github:numtide/nix-filter?ref=main";
nixpkgs.url = "github:NixOS/nixpkgs?ref=nixpkgs-unstable";
rocksdb = {
url = "git+https://forgejo.ellis.link/continuwuation/rocksdb?ref=10.4.fb";
url = "git+https://forgejo.ellis.link/continuwuation/rocksdb?ref=v9.11.1";
flake = false;
};
};
@ -31,17 +31,13 @@
inherit system;
};
fnx = inputs.fenix.packages.${system};
# The Rust toolchain to use
toolchain = fnx.combine [
(fnx.fromToolchainFile {
file = ./rust-toolchain.toml;
toolchain = inputs.fenix.packages.${system}.fromToolchainFile {
file = ./rust-toolchain.toml;
# See also `rust-toolchain.toml`
sha256 = "sha256-+9FmLhAOezBZCOziO0Qct1NOrfpjNsXxc/8I0c7BdKE=";
})
fnx.complete.rustfmt
];
# See also `rust-toolchain.toml`
sha256 = "sha256-KUm16pHj+cRedf8vxs/Hd2YWxpOrWZ7UOrwhILdSJBU=";
};
mkScope =
pkgs:
@ -66,7 +62,7 @@
}).overrideAttrs
(old: {
src = inputs.rocksdb;
version = "v10.4.fb";
version = "v9.11.1";
cmakeFlags =
pkgs.lib.subtractLists [
# No real reason to have snappy or zlib, no one uses this

renovate.json

@ -13,50 +13,14 @@
"enabled": true
},
"labels": [
"Dependencies",
"Dependencies/Renovate"
"dependencies",
"github_actions"
],
"ignoreDeps": [
"tikv-jemallocator",
"tikv-jemllocator",
"tikv-jemalloc-sys",
"tikv-jemalloc-ctl",
"opentelemetry",
"opentelemetry_sdk",
"opentelemetry-jaeger",
"opentelemetry-rust",
"tracing-opentelemetry"
],
"github-actions": {
"enabled": true,
"fileMatch": [
"(^|/)\\.forgejo/workflows/[^/]+\\.ya?ml$",
"(^|/)\\.forgejo/actions/[^/]+/action\\.ya?ml$",
"(^|/)\\.github/workflows/[^/]+\\.ya?ml$",
"(^|/)\\.github/actions/[^/]+/action\\.ya?ml$"
]
},
"packageRules": [
{
"description": "Batch minor and patch GitHub Actions updates",
"matchManagers": ["github-actions"],
"matchUpdateTypes": ["minor", "patch"],
"groupName": "github-actions-non-major"
},
{
"description": "Group Rust toolchain updates into a single PR",
"matchManagers": ["regex"],
"matchPackageNames": ["rust", "rustc", "cargo"],
"groupName": "rust-toolchain"
},
{
"description": "Group lockfile updates into a single PR",
"matchUpdateTypes": ["lockFileMaintenance"],
"groupName": "lockfile-maintenance"
},
{
"description": "Batch patch-level Rust dependency updates",
"matchManagers": ["cargo"],
"matchUpdateTypes": ["patch"],
"groupName": "rust-patch-updates"
}
]
}

rust-toolchain.toml

@ -9,16 +9,13 @@
# If you're having trouble making the relevant changes, bug a maintainer.
[toolchain]
channel = "1.87.0"
profile = "minimal"
channel = "1.89.0"
components = [
# For rust-analyzer
"rust-src",
"rust-analyzer",
# For CI and editors
"rustfmt",
"clippy",
# you have to install rustfmt nightly yourself (if you're not on NixOS)
#
# The rust-toolchain.toml file doesn't provide any syntax for specifying components from different toolchains
# "rustfmt"
]
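
Because a rust-toolchain.toml cannot mix components from different channels (as the comment above notes), the nightly rustfmt has to be installed separately. A minimal sketch, mirroring the `cargo +nightly fmt --all -- --check` invocation the workflows above use:

```yaml
# Sketch: providing nightly rustfmt alongside the pinned stable toolchain.
- name: Install nightly rustfmt and check formatting
  shell: bash
  run: |
    rustup toolchain install nightly --profile minimal --component rustfmt
    cargo +nightly fmt --all -- --check
```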

Admin user commands (Rust source)

@ -68,8 +68,7 @@ pub(super) async fn create_user(&self, username: String, password: Option<String
// Create user
self.services
.users
.create(&user_id, Some(password.as_str()), None)
.await?;
.create(&user_id, Some(password.as_str()))?;
// Default to pretty displayname
let mut displayname = user_id.localpart().to_owned();
@ -285,7 +284,6 @@ pub(super) async fn reset_password(&self, username: String, password: Option<Str
.services
.users
.set_password(&user_id, Some(new_password.as_str()))
.await
{
| Err(e) => return Err!("Couldn't reset the password for user {user_id}: {e}"),
| Ok(()) => {
@ -949,6 +947,6 @@ pub(super) async fn force_leave_remote_room(
.boxed()
.await?;
self.write_str(&format!("{user_id} has been removed from {room_id} via a remote server.",))
self.write_str(&format!("{user_id} has been joined to {room_id}.",))
.await
}

Cargo.toml (crate feature flags)

@ -49,9 +49,6 @@ jemalloc_stats = [
"conduwuit-core/jemalloc_stats",
"conduwuit-service/jemalloc_stats",
]
ldap = [
"conduwuit-service/ldap"
]
release_max_log_level = [
"conduwuit-core/release_max_log_level",
"conduwuit-service/release_max_log_level",

Client-API account routes (Rust source)

@ -373,7 +373,7 @@ pub(crate) async fn register_route(
let password = if is_guest { None } else { body.password.as_deref() };
// Create user
services.users.create(&user_id, password, None).await?;
services.users.create(&user_id, password)?;
// Default to pretty displayname
let mut displayname = user_id.localpart().to_owned();
@ -659,8 +659,7 @@ pub(crate) async fn change_password_route(
services
.users
.set_password(sender_user, Some(&body.new_password))
.await?;
.set_password(sender_user, Some(&body.new_password))?;
if body.logout_devices {
// Logout all devices except the current one

Client-API message routes (Rust source)

@ -8,7 +8,7 @@ use conduwuit::{
ref_at,
utils::{
IterStream, ReadyExt,
result::LogErr,
result::{FlatOk, LogErr},
stream::{BroadbandExt, TryIgnore, WidebandExt},
},
};
@ -35,7 +35,6 @@ use ruma::{
};
use tracing::warn;
use super::utils::{count_to_token, parse_pagination_token as parse_token};
use crate::Ruma;
/// list of safe and common non-state events to ignore if the user is ignored
@ -85,14 +84,14 @@ pub(crate) async fn get_message_events_route(
let from: PduCount = body
.from
.as_deref()
.map(parse_token)
.map(str::parse)
.transpose()?
.unwrap_or_else(|| match body.dir {
| Direction::Forward => PduCount::min(),
| Direction::Backward => PduCount::max(),
});
let to: Option<PduCount> = body.to.as_deref().map(parse_token).transpose()?;
let to: Option<PduCount> = body.to.as_deref().map(str::parse).flat_ok();
let limit: usize = body
.limit
@ -181,8 +180,8 @@ pub(crate) async fn get_message_events_route(
.collect();
Ok(get_message_events::v3::Response {
start: count_to_token(from),
end: next_token.map(count_to_token),
start: from.to_string(),
end: next_token.as_ref().map(ToString::to_string),
chunk,
state,
})
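
One side of this hunk propagates a malformed `to` token as a request error via `.transpose()?`; the other drops it with a flat-ok style helper. A minimal sketch of the dropping behaviour, using a hypothetical free function (the crate itself uses an extension trait):

// Hypothetical stand-in for a FlatOk-style helper: collapse
// Option<Result<T, E>> into Option<T>, discarding the error instead of
// propagating it the way `.transpose()?` would.
fn flat_ok<T, E>(value: Option<Result<T, E>>) -> Option<T> {
    value.and_then(Result::ok)
}

fn main() {
    let good: Option<Result<u64, ()>> = Some(Ok(7));
    let bad: Option<Result<u64, ()>> = Some(Err(()));
    assert_eq!(flat_ok(good), Some(7));
    assert_eq!(flat_ok(bad), None); // a malformed `to` token is simply ignored
}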

View file

@ -36,7 +36,6 @@ pub(super) mod typing;
pub(super) mod unstable;
pub(super) mod unversioned;
pub(super) mod user_directory;
pub(super) mod utils;
pub(super) mod voip;
pub(super) mod well_known;

View file

@ -90,7 +90,7 @@ pub(crate) async fn get_displayname_route(
.await
{
if !services.users.exists(&body.user_id).await {
services.users.create(&body.user_id, None, None).await?;
services.users.create(&body.user_id, None)?;
}
services
@ -189,7 +189,7 @@ pub(crate) async fn get_avatar_url_route(
.await
{
if !services.users.exists(&body.user_id).await {
services.users.create(&body.user_id, None, None).await?;
services.users.create(&body.user_id, None)?;
}
services
@ -248,7 +248,7 @@ pub(crate) async fn get_profile_route(
.await
{
if !services.users.exists(&body.user_id).await {
services.users.create(&body.user_id, None, None).await?;
services.users.create(&body.user_id, None)?;
}
services

View file

@ -18,7 +18,6 @@ use ruma::{
events::{TimelineEventType, relation::RelationType},
};
use super::utils::{count_to_token, parse_pagination_token as parse_token};
use crate::Ruma;
/// # `GET /_matrix/client/r0/rooms/{roomId}/relations/{eventId}/{relType}/{eventType}`
@ -111,14 +110,14 @@ async fn paginate_relations_with_filter(
dir: Direction,
) -> Result<get_relating_events::v1::Response> {
let start: PduCount = from
.map(parse_token)
.map(str::parse)
.transpose()?
.unwrap_or_else(|| match dir {
| Direction::Forward => PduCount::min(),
| Direction::Backward => PduCount::max(),
});
let to: Option<PduCount> = to.map(parse_token).transpose()?;
let to: Option<PduCount> = to.map(str::parse).flat_ok();
// Use limit or else 30, with maximum 100
let limit: usize = limit
@ -130,11 +129,6 @@ async fn paginate_relations_with_filter(
// Spec (v1.10) recommends depth of at least 3
let depth: u8 = if recurse { 3 } else { 1 };
// Check if this is a thread request
let is_thread = filter_rel_type
.as_ref()
.is_some_and(|rel| *rel == RelationType::Thread);
let events: Vec<_> = services
.rooms
.pdu_metadata
@ -158,58 +152,23 @@ async fn paginate_relations_with_filter(
.collect()
.await;
// For threads, check if we should include the root event
let mut root_event = None;
if is_thread && dir == Direction::Backward {
// Check if we've reached the beginning of the thread
// (fewer events than requested means we've exhausted the thread)
if events.len() < limit {
// Try to get the thread root event
if let Ok(root_pdu) = services.rooms.timeline.get_pdu(target).await {
// Check visibility
if services
.rooms
.state_accessor
.user_can_see_event(sender_user, room_id, target)
.await
{
// Store the root event to add to the response
root_event = Some(root_pdu);
}
}
}
let next_batch = match dir {
| Direction::Forward => events.last(),
| Direction::Backward => events.first(),
}
// Determine if there are more events to fetch
let has_more = if root_event.is_some() {
false // We've included the root, no more events
} else {
// Check if we got a full page of results (might be more)
events.len() >= limit
};
let next_batch = if has_more {
match dir {
| Direction::Forward => events.last(),
| Direction::Backward => events.first(),
}
.map(|(count, _)| count_to_token(*count))
} else {
None
};
// Build the response chunk with thread root if needed
let chunk: Vec<_> = root_event
.into_iter()
.map(Event::into_format)
.chain(events.into_iter().map(at!(1)).map(Event::into_format))
.collect();
.map(at!(0))
.as_ref()
.map(ToString::to_string);
Ok(get_relating_events::v1::Response {
next_batch,
prev_batch: from.map(Into::into),
recursion_depth: recurse.then_some(depth.into()),
chunk,
chunk: events
.into_iter()
.map(at!(1))
.map(Event::into_format)
.collect(),
})
}
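
One side of this hunk gates `next_batch` on two conditions: once the thread root has been included there is nothing left to page, and a short page means the results are exhausted. A standalone sketch of that cursor decision, with hypothetical types standing in for the event counts:

#[derive(Clone, Copy)]
enum Direction { Forward, Backward }

// Hypothetical, simplified cursor decision mirroring the logic above.
fn next_batch(counts: &[u64], limit: usize, dir: Direction, root_included: bool) -> Option<u64> {
    // Root included => thread exhausted; short page => no more events.
    if root_included || counts.len() < limit {
        return None;
    }
    match dir {
        Direction::Forward => counts.last().copied(),
        Direction::Backward => counts.first().copied(),
    }
}

fn main() {
    let page = [10, 11, 12];
    assert_eq!(next_batch(&page, 3, Direction::Backward, false), Some(10));
    assert_eq!(next_batch(&page, 5, Direction::Backward, false), None); // short page
}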

View file

@ -3,14 +3,13 @@ use std::time::Duration;
use axum::extract::State;
use axum_client_ip::InsecureClientIp;
use conduwuit::{
Err, Error, Result, debug, err, info,
utils::{self, ReadyExt, hash},
Err, Error, Result, debug, err, info, utils,
utils::{ReadyExt, hash},
};
use conduwuit_core::{debug_error, debug_warn};
use conduwuit_service::{Services, uiaa::SESSION_ID_LENGTH};
use conduwuit_service::uiaa::SESSION_ID_LENGTH;
use futures::StreamExt;
use ruma::{
OwnedUserId, UserId,
UserId,
api::client::{
session::{
get_login_token,
@ -50,154 +49,6 @@ pub(crate) async fn get_login_types_route(
]))
}
/// Authenticates the given user by its ID and its password.
///
/// Returns the user ID if successful, and an error otherwise.
#[tracing::instrument(skip_all, fields(%user_id), name = "password")]
pub(crate) async fn password_login(
services: &Services,
user_id: &UserId,
lowercased_user_id: &UserId,
password: &str,
) -> Result<OwnedUserId> {
	// Restrict login to accounts of type 'password' only, including untyped
	// legacy accounts, which are equivalent to 'password'.
if services
.users
.origin(user_id)
.await
.is_ok_and(|origin| origin != "password")
{
return Err!(Request(Forbidden("Account does not permit password login.")));
}
let (hash, user_id) = match services.users.password_hash(user_id).await {
| Ok(hash) => (hash, user_id),
| Err(_) => services
.users
.password_hash(lowercased_user_id)
.await
.map(|hash| (hash, lowercased_user_id))
.map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?,
};
if hash.is_empty() {
return Err!(Request(UserDeactivated("The user has been deactivated")));
}
hash::verify_password(password, &hash)
.inspect_err(|e| debug_error!("{e}"))
.map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?;
Ok(user_id.to_owned())
}
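
The empty-hash check above doubles as the deactivation marker used elsewhere in this changeset. A minimal sketch of the verification order, assuming the hash comparison itself is done by an argon2-style verifier:

// Sketch: an empty stored hash marks a deactivated account and must be
// rejected before any password comparison happens.
fn verify_login(stored_hash: &str, password_matches: bool) -> Result<(), &'static str> {
    if stored_hash.is_empty() {
        return Err("The user has been deactivated");
    }
    if !password_matches {
        return Err("Wrong username or password.");
    }
    Ok(())
}

fn main() {
    assert!(verify_login("", true).is_err()); // deactivation wins over a "match"
    assert!(verify_login("$argon2id$...", true).is_ok());
}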
/// Authenticates the given user through the configured LDAP server.
///
/// Creates the user if they are found in the LDAP directory and do not
/// already have an account.
#[tracing::instrument(skip_all, fields(%user_id), name = "ldap")]
pub(super) async fn ldap_login(
services: &Services,
user_id: &UserId,
lowercased_user_id: &UserId,
password: &str,
) -> Result<OwnedUserId> {
let (user_dn, is_ldap_admin) = match services.config.ldap.bind_dn.as_ref() {
| Some(bind_dn) if bind_dn.contains("{username}") =>
(bind_dn.replace("{username}", lowercased_user_id.localpart()), false),
| _ => {
debug!("Searching user in LDAP");
let dns = services.users.search_ldap(user_id).await?;
if dns.len() >= 2 {
return Err!(Ldap("LDAP search returned two or more results"));
}
let Some((user_dn, is_admin)) = dns.first() else {
return password_login(services, user_id, lowercased_user_id, password).await;
};
(user_dn.clone(), *is_admin)
},
};
let user_id = services
.users
.auth_ldap(&user_dn, password)
.await
.map(|()| lowercased_user_id.to_owned())?;
	// LDAP users are automatically created on their first login attempt. This is
	// a common feature of services that use an LDAP provider for their users
	// (Synapse, Nextcloud, Jellyfin, ...).
	//
	// LDAP users are created with a dummy but non-empty password, because an
	// empty password is reserved for deactivated accounts. The conduwuit password
	// field is never read to log in an LDAP user, so this is not an issue.
if !services.users.exists(lowercased_user_id).await {
services
.users
.create(lowercased_user_id, Some("*"), Some("ldap"))
.await?;
}
let is_conduwuit_admin = services.admin.user_is_admin(lowercased_user_id).await;
if is_ldap_admin && !is_conduwuit_admin {
services.admin.make_user_admin(lowercased_user_id).await?;
} else if !is_ldap_admin && is_conduwuit_admin {
services.admin.revoke_admin(lowercased_user_id).await?;
}
Ok(user_id)
}
pub(crate) async fn handle_login(
services: &Services,
body: &Ruma<login::v3::Request>,
identifier: Option<&uiaa::UserIdentifier>,
password: &str,
user: Option<&String>,
) -> Result<OwnedUserId> {
debug!("Got password login type");
let user_id =
if let Some(uiaa::UserIdentifier::UserIdOrLocalpart(user_id)) = identifier {
UserId::parse_with_server_name(user_id, &services.config.server_name)
} else if let Some(user) = user {
UserId::parse_with_server_name(user, &services.config.server_name)
} else {
return Err!(Request(Unknown(
debug_warn!(?body.login_info, "Valid identifier or username was not provided (invalid or unsupported login type?)")
)));
}
.map_err(|e| err!(Request(InvalidUsername(warn!("Username is invalid: {e}")))))?;
let lowercased_user_id = UserId::parse_with_server_name(
user_id.localpart().to_lowercase(),
&services.config.server_name,
)?;
if !services.globals.user_is_local(&user_id)
|| !services.globals.user_is_local(&lowercased_user_id)
{
return Err!(Request(Unknown("User ID does not belong to this homeserver")));
}
if cfg!(feature = "ldap") && services.config.ldap.enable {
match Box::pin(ldap_login(services, &user_id, &lowercased_user_id, password)).await {
| Ok(user_id) => Ok(user_id),
| Err(err) if services.config.ldap.ldap_only => Err(err),
| Err(err) => {
debug_warn!("{err}");
password_login(services, &user_id, &lowercased_user_id, password).await
},
}
} else {
password_login(services, &user_id, &lowercased_user_id, password).await
}
}
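
The lowercased fallback above only normalises the localpart; the server name is untouched. A dependency-free sketch of that normalisation (illustrative helper, not the ruma API):

// Lowercase only the localpart of an @localpart:server user ID.
fn lowercase_localpart(user_id: &str) -> Option<String> {
    let (localpart, server) = user_id.strip_prefix('@')?.split_once(':')?;
    Some(format!("@{}:{}", localpart.to_lowercase(), server))
}

fn main() {
    assert_eq!(
        lowercase_localpart("@Alice:example.org").as_deref(),
        Some("@alice:example.org")
    );
}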
/// # `POST /_matrix/client/v3/login`
///
/// Authenticates the user and returns an access token it can use in subsequent
@ -229,7 +80,70 @@ pub(crate) async fn login_route(
password,
user,
..
}) => handle_login(&services, &body, identifier.as_ref(), password, user.as_ref()).await?,
}) => {
debug!("Got password login type");
let user_id =
if let Some(uiaa::UserIdentifier::UserIdOrLocalpart(user_id)) = identifier {
UserId::parse_with_server_name(user_id, &services.config.server_name)
} else if let Some(user) = user {
UserId::parse_with_server_name(user, &services.config.server_name)
} else {
return Err!(Request(Unknown(
debug_warn!(?body.login_info, "Valid identifier or username was not provided (invalid or unsupported login type?)")
)));
}
.map_err(|e| err!(Request(InvalidUsername(warn!("Username is invalid: {e}")))))?;
let lowercased_user_id = UserId::parse_with_server_name(
user_id.localpart().to_lowercase(),
&services.config.server_name,
)?;
if !services.globals.user_is_local(&user_id)
|| !services.globals.user_is_local(&lowercased_user_id)
{
return Err!(Request(Unknown("User ID does not belong to this homeserver")));
}
// first try the username as-is
let hash = services
.users
.password_hash(&user_id)
.await
.inspect_err(|e| debug!("{e}"));
match hash {
| Ok(hash) => {
if hash.is_empty() {
return Err!(Request(UserDeactivated("The user has been deactivated")));
}
hash::verify_password(password, &hash)
.inspect_err(|e| debug!("{e}"))
.map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?;
user_id
},
| Err(_e) => {
let hash_lowercased_user_id = services
.users
.password_hash(&lowercased_user_id)
.await
.inspect_err(|e| debug!("{e}"))
.map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?;
if hash_lowercased_user_id.is_empty() {
return Err!(Request(UserDeactivated("The user has been deactivated")));
}
hash::verify_password(password, &hash_lowercased_user_id)
.inspect_err(|e| debug!("{e}"))
.map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?;
lowercased_user_id
},
}
},
| login::v3::LoginInfo::Token(login::v3::Token { token }) => {
debug!("Got token login type");
if !services.server.config.login_via_existing_session {
@ -284,8 +198,8 @@ pub(crate) async fn login_route(
.clone()
.unwrap_or_else(|| utils::random_string(DEVICE_ID_LENGTH).into());
// Generate a new token for the device (ensuring no collisions)
let token = services.users.generate_unique_token().await;
// Generate a new token for the device
let token = utils::random_string(TOKEN_LENGTH);
// Determine if device_id was provided and exists in the db for this user
let device_exists = if body.device_id.is_some() {

View file

@ -45,7 +45,6 @@ use crate::{
type TodoRooms = BTreeMap<OwnedRoomId, (BTreeSet<TypeStateKey>, usize, u64)>;
const SINGLE_CONNECTION_SYNC: &str = "single_connection_sync";
#[allow(clippy::cognitive_complexity)]
/// POST `/_matrix/client/unstable/org.matrix.msc3575/sync`
///
/// Sliding Sync endpoint (future endpoint: `/_matrix/client/v4/sync`)

View file

@ -292,7 +292,7 @@ pub(crate) async fn get_timezone_key_route(
.await
{
if !services.users.exists(&body.user_id).await {
services.users.create(&body.user_id, None, None).await?;
services.users.create(&body.user_id, None)?;
}
services
@ -352,7 +352,7 @@ pub(crate) async fn get_profile_key_route(
.await
{
if !services.users.exists(&body.user_id).await {
services.users.create(&body.user_id, None, None).await?;
services.users.create(&body.user_id, None)?;
}
services

View file

@ -1,28 +0,0 @@
use conduwuit::{
Result, err,
matrix::pdu::{PduCount, ShortEventId},
};
/// Parse a pagination token, trying ShortEventId first, then falling back to
/// PduCount
pub(crate) fn parse_pagination_token(token: &str) -> Result<PduCount> {
// Try parsing as ShortEventId first
if let Ok(shorteventid) = token.parse::<ShortEventId>() {
// ShortEventId maps directly to a PduCount in our database
Ok(PduCount::Normal(shorteventid))
} else if let Ok(count) = token.parse::<u64>() {
// Fallback to PduCount for backwards compatibility
Ok(PduCount::Normal(count))
} else if let Ok(count) = token.parse::<i64>() {
// Also handle negative counts for backfilled events
Ok(PduCount::from_signed(count))
} else {
Err(err!(Request(InvalidParam("Invalid pagination token"))))
}
}
/// Convert a PduCount to a token string (using the underlying ShortEventId)
pub(crate) fn count_to_token(count: PduCount) -> String {
// The PduCount's unsigned value IS the ShortEventId
count.into_unsigned().to_string()
}
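
A runnable sketch of the parse order this helper implements, with plain integers standing in for `ShortEventId` and `PduCount`:

// ShortEventId is an unsigned id, so the first branch accepts unsigned
// integers; the i64 branch admits negative counts for backfilled events.
fn parse_token(token: &str) -> Result<i64, &'static str> {
    token
        .parse::<u64>()
        .map(|n| n as i64) // illustration only; lossy above i64::MAX
        .or_else(|_| token.parse::<i64>()) // negative => backfilled
        .map_err(|_| "Invalid pagination token")
}

fn main() {
    assert_eq!(parse_token("42"), Ok(42));
    assert_eq!(parse_token("-3"), Ok(-3)); // backfilled event
    assert!(parse_token("abc").is_err());
}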

View file

@ -5,14 +5,6 @@ use axum_extra::{
typed_header::TypedHeaderRejectionReason,
};
use conduwuit::{Err, Error, Result, debug_error, err, warn};
use futures::{
TryFutureExt,
future::{
Either::{Left, Right},
select_ok,
},
pin_mut,
};
use ruma::{
CanonicalJsonObject, CanonicalJsonValue, OwnedDeviceId, OwnedServerName, OwnedUserId, UserId,
api::{
@ -62,7 +54,17 @@ pub(super) async fn auth(
| None => request.query.access_token.as_deref(),
};
let token = find_token(services, token).await?;
let token = if let Some(token) = token {
match services.appservice.find_from_token(token).await {
| Some(reg_info) => Token::Appservice(Box::new(reg_info)),
| _ => match services.users.find_from_token(token).await {
| Ok((user_id, device_id)) => Token::User((user_id, device_id)),
| _ => Token::Invalid,
},
}
} else {
Token::None
};
if metadata.authentication == AuthScheme::None {
match metadata {
@ -340,25 +342,3 @@ async fn parse_x_matrix(request: &mut Request) -> Result<XMatrix> {
Ok(x_matrix)
}
async fn find_token(services: &Services, token: Option<&str>) -> Result<Token> {
let Some(token) = token else {
return Ok(Token::None);
};
let user_token = services.users.find_from_token(token).map_ok(Token::User);
let appservice_token = services
.appservice
.find_from_token(token)
.map_ok(Box::new)
.map_ok(Token::Appservice);
pin_mut!(user_token, appservice_token);
// Returns Ok if either token type succeeds, Err only if both fail
match select_ok([Left(user_token), Right(appservice_token)]).await {
| Err(e) if !e.is_not_found() => Err(e),
| Ok((token, _)) => Ok(token),
| _ => Ok(Token::Invalid),
}
}
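
A self-contained sketch of the `select_ok` pattern used by the `find_token` helper above: the two lookups are raced behind `Either`, the first `Ok` wins, and an error surfaces only after every candidate has failed. Names and token values are hypothetical:

use futures::future::{select_ok, Either};
use futures::pin_mut;

async fn user_lookup(token: &str) -> Result<&'static str, &'static str> {
    if token == "u_123" { Ok("user device") } else { Err("not a user token") }
}

async fn appservice_lookup(token: &str) -> Result<&'static str, &'static str> {
    if token == "as_456" { Ok("appservice registration") } else { Err("unknown token") }
}

fn main() {
    futures::executor::block_on(async {
        let user = user_lookup("as_456");
        let appservice = appservice_lookup("as_456");
        pin_mut!(user, appservice);
        // Either unifies the two future types; the first Ok wins, and an
        // error is returned only once every candidate has failed.
        let (found, _remaining) = select_ok([Either::Left(user), Either::Right(appservice)])
            .await
            .expect("one of the lookups succeeded");
        assert_eq!(found, "appservice registration");
    });
}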

View file

@ -1,4 +1,3 @@
#![allow(clippy::doc_link_with_quotes)]
pub mod check;
pub mod manager;
pub mod proxy;
@ -1948,10 +1947,6 @@ pub struct Config {
pub allow_invalid_tls_certificates_yes_i_know_what_the_fuck_i_am_doing_with_this_and_i_know_this_is_insecure:
bool,
// external structure; separate section
#[serde(default)]
pub ldap: LdapConfig,
// external structure; separate section
#[serde(default)]
pub blurhashing: BlurhashConfig,
@ -2046,114 +2041,6 @@ pub struct BlurhashConfig {
pub blurhash_max_raw_size: u64,
}
#[derive(Clone, Debug, Default, Deserialize)]
#[config_example_generator(filename = "conduwuit-example.toml", section = "global.ldap")]
pub struct LdapConfig {
/// Whether to enable LDAP login.
///
/// example: "true"
#[serde(default)]
pub enable: bool,
/// Whether to force LDAP authentication or authorize classical password
/// login.
///
/// example: "true"
#[serde(default)]
pub ldap_only: bool,
/// URI of the LDAP server.
///
/// example: "ldap://ldap.example.com:389"
///
/// default: ""
#[serde(default)]
pub uri: Option<Url>,
/// Root of the searches.
///
/// example: "ou=users,dc=example,dc=org"
///
/// default: ""
#[serde(default)]
pub base_dn: String,
/// Bind DN if anonymous search is not enabled.
///
	/// You can use the variable `{username}`, which will be replaced by the
	/// entered username. In that case, the password used to bind is the one
	/// provided at login, not the one given by `bind_password_file`. Beware:
	/// automatically granting admin rights will not work if you use this
	/// direct bind instead of an LDAP search.
///
/// example: "cn=ldap-reader,dc=example,dc=org" or
/// "cn={username},ou=users,dc=example,dc=org"
///
/// default: ""
#[serde(default)]
pub bind_dn: Option<String>,
/// Path to a file on the system that contains the password for the
/// `bind_dn`.
///
/// The server must be able to access the file, and it must not be empty.
///
/// default: ""
#[serde(default)]
pub bind_password_file: Option<PathBuf>,
/// Search filter to limit user searches.
///
/// You can use the variable `{username}` that will be replaced by the
/// entered username for more complex filters.
///
/// example: "(&(objectClass=person)(memberOf=matrix))"
///
/// default: "(objectClass=*)"
#[serde(default = "default_ldap_search_filter")]
pub filter: String,
/// Attribute to use to uniquely identify the user.
///
/// example: "uid" or "cn"
///
/// default: "uid"
#[serde(default = "default_ldap_uid_attribute")]
pub uid_attribute: String,
/// Attribute containing the display name of the user.
///
/// example: "givenName" or "sn"
///
/// default: "givenName"
#[serde(default = "default_ldap_name_attribute")]
pub name_attribute: String,
/// Root of the searches for admin users.
///
/// Defaults to `base_dn` if empty.
///
/// example: "ou=admins,dc=example,dc=org"
///
/// default: ""
#[serde(default)]
pub admin_base_dn: String,
/// The LDAP search filter to find administrative users for continuwuity.
///
/// If left blank, administrative state must be configured manually for each
/// user.
///
/// You can use the variable `{username}` that will be replaced by the
/// entered username for more complex filters.
///
/// example: "(objectClass=conduwuitAdmin)" or "(uid={username})"
///
/// default: ""
#[serde(default)]
pub admin_filter: String,
}
#[derive(Deserialize, Clone, Debug)]
#[serde(transparent)]
struct ListeningPort {
@ -2543,9 +2430,3 @@ pub(super) fn default_blurhash_x_component() -> u32 { 4 }
pub(super) fn default_blurhash_y_component() -> u32 { 3 }
// end recommended & blurhashing defaults
fn default_ldap_search_filter() -> String { "(objectClass=*)".to_owned() }
fn default_ldap_uid_attribute() -> String { String::from("uid") }
fn default_ldap_name_attribute() -> String { String::from("givenName") }
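
A small sketch of the `{username}` substitution these options document, applied to a bind DN and a search filter (values are illustrative):

// The substitution is a plain string replacement of the `{username}` marker.
fn render_template(template: &str, localpart: &str) -> String {
    template.replace("{username}", localpart)
}

fn main() {
    assert_eq!(
        render_template("cn={username},ou=users,dc=example,dc=org", "alice"),
        "cn=alice,ou=users,dc=example,dc=org"
    );
    assert_eq!(
        render_template("(&(objectClass=person)(uid={username}))", "alice"),
        "(&(objectClass=person)(uid=alice))"
    );
}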

View file

@ -100,7 +100,7 @@ pub fn trap() {
#[must_use]
pub fn panic_str(p: &Box<dyn Any + Send>) -> &'static str {
(**p).downcast_ref::<&str>().copied().unwrap_or_default()
p.downcast_ref::<&str>().copied().unwrap_or_default()
}
#[inline(always)]

View file

@ -110,8 +110,6 @@ pub enum Error {
InconsistentRoomState(&'static str, ruma::OwnedRoomId),
#[error(transparent)]
IntoHttp(#[from] ruma::api::error::IntoHttpError),
#[error("{0}")]
Ldap(Cow<'static, str>),
#[error(transparent)]
Mxc(#[from] ruma::MxcUriError),
#[error(transparent)]

View file

@ -18,7 +18,7 @@ pub const STABLE_ROOM_VERSIONS: &[RoomVersionId] = &[
/// Experimental, partially supported room versions
pub const UNSTABLE_ROOM_VERSIONS: &[RoomVersionId] =
&[RoomVersionId::V3, RoomVersionId::V4, RoomVersionId::V5];
&[RoomVersionId::V2, RoomVersionId::V3, RoomVersionId::V4, RoomVersionId::V5];
type RoomVersion = (RoomVersionId, RoomVersionStability);

View file

@ -19,7 +19,7 @@ where
S: Stream<Item = K> + Send + 'a,
K: AsRef<[u8]> + Send + Sync + 'a,
{
fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a;
fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a;
}
impl<'a, K, S> Get<'a, K, S> for S
@ -29,7 +29,7 @@ where
K: AsRef<[u8]> + Send + Sync + 'a,
{
#[inline]
fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a {
fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a {
map.get_batch(self)
}
}
@ -39,7 +39,7 @@ where
pub(crate) fn get_batch<'a, S, K>(
self: &'a Arc<Self>,
keys: S,
) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a
) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a
where
S: Stream<Item = K> + Send + 'a,
K: AsRef<[u8]> + Send + Sync + 'a,
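
A minimal sketch of the lifetime elision at play in these signatures: with a single input lifetime, `'_` in the return type resolves to it, so `Handle<'_>` and `Handle<'a>` name the same type here (simplified stand-in types):

use std::sync::Arc;

struct Map;
struct Handle<'m>(&'m Map);

// With one input lifetime, the elided '_ in the return type resolves to it,
// so this signature is equivalent to writing -> Handle<'a>.
fn get<'a>(map: &'a Arc<Map>) -> Handle<'_> {
    Handle(map)
}

fn main() {
    let map = Arc::new(Map);
    let _handle: Handle<'_> = get(&map);
}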

View file

@ -10,7 +10,7 @@ use super::stream::is_cached;
use crate::{keyval, keyval::Key, stream};
#[implement(super::Map)]
pub fn keys<'a, K>(self: &'a Arc<Self>) -> impl Stream<Item = Result<Key<'a, K>>> + Send
pub fn keys<'a, K>(self: &'a Arc<Self>) -> impl Stream<Item = Result<Key<'_, K>>> + Send
where
K: Deserialize<'a> + Send,
{

View file

@ -15,7 +15,7 @@ use crate::{
pub fn keys_from<'a, K, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P>
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -40,7 +40,7 @@ where
pub fn keys_raw_from<'a, K, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P>
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
where
P: AsRef<[u8]> + ?Sized + Debug + Sync,
K: Deserialize<'a> + Send,

View file

@ -10,7 +10,7 @@ use crate::keyval::{Key, result_deserialize_key, serialize_key};
pub fn keys_prefix<'a, K, P>(
self: &'a Arc<Self>,
prefix: &P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P>
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -37,7 +37,7 @@ where
pub fn keys_raw_prefix<'a, K, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + 'a
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
K: Deserialize<'a> + Send + 'a,
@ -50,7 +50,7 @@ where
pub fn raw_keys_prefix<'a, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<Key<'a>>> + Send + 'a
) -> impl Stream<Item = Result<Key<'_>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
{

View file

@ -17,7 +17,7 @@ where
S: Stream<Item = K> + Send + 'a,
K: Serialize + Debug,
{
fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a;
fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a;
}
impl<'a, K, S> Qry<'a, K, S> for S
@ -27,7 +27,7 @@ where
K: Serialize + Debug + 'a,
{
#[inline]
fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a {
fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a {
map.qry_batch(self)
}
}
@ -37,7 +37,7 @@ where
pub(crate) fn qry_batch<'a, S, K>(
self: &'a Arc<Self>,
keys: S,
) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a
) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a
where
S: Stream<Item = K> + Send + 'a,
K: Serialize + Debug + 'a,

View file

@ -10,7 +10,7 @@ use super::rev_stream::is_cached;
use crate::{keyval, keyval::Key, stream};
#[implement(super::Map)]
pub fn rev_keys<'a, K>(self: &'a Arc<Self>) -> impl Stream<Item = Result<Key<'a, K>>> + Send
pub fn rev_keys<'a, K>(self: &'a Arc<Self>) -> impl Stream<Item = Result<Key<'_, K>>> + Send
where
K: Deserialize<'a> + Send,
{

View file

@ -15,7 +15,7 @@ use crate::{
pub fn rev_keys_from<'a, K, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P>
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -41,7 +41,7 @@ where
pub fn rev_keys_raw_from<'a, K, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P>
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
where
P: AsRef<[u8]> + ?Sized + Debug + Sync,
K: Deserialize<'a> + Send,

View file

@ -10,7 +10,7 @@ use crate::keyval::{Key, result_deserialize_key, serialize_key};
pub fn rev_keys_prefix<'a, K, P>(
self: &'a Arc<Self>,
prefix: &P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P>
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -37,7 +37,7 @@ where
pub fn rev_keys_raw_prefix<'a, K, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + 'a
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
K: Deserialize<'a> + Send + 'a,
@ -50,7 +50,7 @@ where
pub fn rev_raw_keys_prefix<'a, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<Key<'a>>> + Send + 'a
) -> impl Stream<Item = Result<Key<'_>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
{

View file

@ -14,7 +14,7 @@ use crate::{keyval, keyval::KeyVal, stream};
#[implement(super::Map)]
pub fn rev_stream<'a, K, V>(
self: &'a Arc<Self>,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
where
K: Deserialize<'a> + Send,
V: Deserialize<'a> + Send,

View file

@ -20,7 +20,7 @@ use crate::{
pub fn rev_stream_from<'a, K, V, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P>
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -55,7 +55,7 @@ where
pub fn rev_stream_raw_from<'a, K, V, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P>
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
where
P: AsRef<[u8]> + ?Sized + Debug + Sync,
K: Deserialize<'a> + Send,

View file

@ -14,7 +14,7 @@ use crate::keyval::{KeyVal, result_deserialize, serialize_key};
pub fn rev_stream_prefix<'a, K, V, P>(
self: &'a Arc<Self>,
prefix: &P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P>
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -50,7 +50,7 @@ where
pub fn rev_stream_raw_prefix<'a, K, V, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + 'a
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
K: Deserialize<'a> + Send + 'a,
@ -68,7 +68,7 @@ where
pub fn rev_raw_stream_prefix<'a, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<KeyVal<'a>>> + Send + 'a
) -> impl Stream<Item = Result<KeyVal<'_>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
{

View file

@ -14,7 +14,7 @@ use crate::{keyval, keyval::KeyVal, stream};
#[implement(super::Map)]
pub fn stream<'a, K, V>(
self: &'a Arc<Self>,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
where
K: Deserialize<'a> + Send,
V: Deserialize<'a> + Send,

View file

@ -19,7 +19,7 @@ use crate::{
pub fn stream_from<'a, K, V, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P>
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -53,7 +53,7 @@ where
pub fn stream_raw_from<'a, K, V, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P>
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
where
P: AsRef<[u8]> + ?Sized + Debug + Sync,
K: Deserialize<'a> + Send,

View file

@ -14,7 +14,7 @@ use crate::keyval::{KeyVal, result_deserialize, serialize_key};
pub fn stream_prefix<'a, K, V, P>(
self: &'a Arc<Self>,
prefix: &P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P>
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -50,7 +50,7 @@ where
pub fn stream_raw_prefix<'a, K, V, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + 'a
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
K: Deserialize<'a> + Send + 'a,
@ -68,7 +68,7 @@ where
pub fn raw_stream_prefix<'a, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<KeyVal<'a>>> + Send + 'a
) -> impl Stream<Item = Result<KeyVal<'_>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
{

View file

@ -374,10 +374,6 @@ pub(super) static MAPS: &[Descriptor] = &[
name: "userid_masterkeyid",
..descriptor::RANDOM_SMALL
},
Descriptor {
name: "userid_origin",
..descriptor::RANDOM
},
Descriptor {
name: "userid_password",
..descriptor::RANDOM

View file

@ -443,7 +443,7 @@ pub(crate) fn into_send_seek(result: stream::State<'_>) -> stream::State<'static
unsafe { std::mem::transmute(result) }
}
fn into_recv_seek(result: stream::State<'static>) -> stream::State<'static> {
fn into_recv_seek(result: stream::State<'static>) -> stream::State<'_> {
// SAFETY: This is to receive the State from the channel; see above.
unsafe { std::mem::transmute(result) }
}

View file

@ -326,7 +326,7 @@ fn ser_array() {
}
#[test]
#[ignore = "arrayvec deserialization is not implemented (separators)"]
#[ignore]
fn de_array() {
let a: u64 = 123_456;
let b: u64 = 987_654;
@ -358,7 +358,7 @@ fn de_array() {
}
#[test]
#[ignore = "Nested sequences are not supported"]
#[ignore]
fn de_complex() {
type Key<'a> = (&'a UserId, ArrayVec<u64, 2>, &'a RoomId);

View file

@ -56,7 +56,6 @@ standard = [
"jemalloc",
"jemalloc_conf",
"journald",
"ldap",
"media_thumbnail",
"systemd",
"url_preview",
@ -64,7 +63,7 @@ standard = [
]
full = [
"standard",
# "hardened_malloc", # Conflicts with jemalloc
"hardened_malloc",
"jemalloc_prof",
"perf_measurements",
"tokio_console"
@ -115,9 +114,6 @@ jemalloc_stats = [
jemalloc_conf = [
"conduwuit-core/jemalloc_conf",
]
ldap = [
"conduwuit-api/ldap",
]
media_thumbnail = [
"conduwuit-service/media_thumbnail",
]

View file

@ -30,7 +30,7 @@ use tower::{Service, ServiceExt};
type MakeService = IntoMakeServiceWithConnectInfo<Router, net::SocketAddr>;
const NULL_ADDR: net::SocketAddr = net::SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0);
const NULL_ADDR: net::SocketAddr = net::SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0);
const FINI_POLL_INTERVAL: Duration = Duration::from_millis(750);
#[tracing::instrument(skip_all, level = "debug")]

View file

@ -53,9 +53,6 @@ jemalloc_stats = [
"conduwuit-core/jemalloc_stats",
"conduwuit-database/jemalloc_stats",
]
ldap = [
"dep:ldap3"
]
media_thumbnail = [
"dep:image",
]
@ -92,8 +89,6 @@ image.workspace = true
image.optional = true
ipaddress.workspace = true
itertools.workspace = true
ldap3.workspace = true
ldap3.optional = true
log.workspace = true
loole.workspace = true
lru-cache.workspace = true

View file

@ -38,7 +38,7 @@ pub async fn create_admin_room(services: &Services) -> Result {
// Create a user for the server
let server_user = services.globals.server_user.as_ref();
services.users.create(server_user, None, None).await?;
services.users.create(server_user, None)?;
let create_content = {
use RoomVersionId::*;

View file

@ -4,14 +4,14 @@ mod registration_info;
use std::{collections::BTreeMap, iter::IntoIterator, sync::Arc};
use async_trait::async_trait;
use conduwuit::{Err, Result, err, utils::stream::IterStream};
use conduwuit::{Result, err, utils::stream::IterStream};
use database::Map;
use futures::{Future, FutureExt, Stream, TryStreamExt};
use ruma::{RoomAliasId, RoomId, UserId, api::appservice::Registration};
use tokio::sync::{RwLock, RwLockReadGuard};
pub use self::{namespace_regex::NamespaceRegex, registration_info::RegistrationInfo};
use crate::{Dep, globals, sending, users};
use crate::{Dep, sending};
pub struct Service {
registration_info: RwLock<Registrations>,
@ -20,9 +20,7 @@ pub struct Service {
}
struct Services {
globals: Dep<globals::Service>,
sending: Dep<sending::Service>,
users: Dep<users::Service>,
}
struct Data {
@ -37,9 +35,7 @@ impl crate::Service for Service {
Ok(Arc::new(Self {
registration_info: RwLock::new(BTreeMap::new()),
services: Services {
globals: args.depend::<globals::Service>("globals"),
sending: args.depend::<sending::Service>("sending"),
users: args.depend::<users::Service>("users"),
},
db: Data {
id_appserviceregistrations: args.db["id_appserviceregistrations"].clone(),
@ -48,93 +44,23 @@ impl crate::Service for Service {
}
async fn worker(self: Arc<Self>) -> Result {
// First, collect all appservices to check for token conflicts
let appservices: Vec<(String, Registration)> = self.iter_db_ids().try_collect().await?;
// Inserting registrations into cache
self.iter_db_ids()
.try_for_each(async |appservice| {
self.registration_info
.write()
.await
.insert(appservice.0, appservice.1.try_into()?);
// Check for appservice-to-appservice token conflicts
for i in 0..appservices.len() {
for j in i.saturating_add(1)..appservices.len() {
if appservices[i].1.as_token == appservices[j].1.as_token {
return Err!(Database(error!(
"Token collision detected: Appservices '{}' and '{}' have the same token",
appservices[i].0, appservices[j].0
)));
}
}
}
// Process each appservice
for (id, registration) in appservices {
// During startup, resolve any token collisions in favour of appservices
// by logging out conflicting user devices
if let Ok((user_id, device_id)) = self
.services
.users
.find_from_token(&registration.as_token)
.await
{
conduwuit::warn!(
"Token collision detected during startup: Appservice '{}' token was also \
					 used by user '{}' device '{}'. Logging out the user device to resolve the \
					 conflict.",
id,
user_id.localpart(),
device_id
);
self.services
.users
.remove_device(&user_id, &device_id)
.await;
}
self.start_appservice(id, registration).await?;
}
Ok(())
Ok(())
})
.await
}
fn name(&self) -> &str { crate::service::make_name(std::module_path!()) }
}
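
The pairwise scan in the worker above is quadratic in the number of appservices. For illustration, a one-pass variant of the same collision check using a map (hypothetical helper, not part of the service):

use std::collections::HashMap;

// Returns the ids of the first two appservices sharing a token, if any.
fn find_token_collision<'a>(appservices: &'a [(String, String)]) -> Option<(&'a str, &'a str)> {
    let mut seen: HashMap<&str, &str> = HashMap::new();
    for (id, token) in appservices {
        if let Some(previous_id) = seen.insert(token.as_str(), id.as_str()) {
            return Some((previous_id, id.as_str()));
        }
    }
    None
}

fn main() {
    let regs = [
        ("bridge-a".to_owned(), "token-1".to_owned()),
        ("bridge-b".to_owned(), "token-1".to_owned()),
    ];
    assert_eq!(find_token_collision(&regs), Some(("bridge-a", "bridge-b")));
}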
impl Service {
/// Starts an appservice, ensuring its sender_localpart user exists and is
/// active. Creates the user if it doesn't exist, or reactivates it if it
/// was deactivated. Then registers the appservice in memory for request
/// handling.
async fn start_appservice(&self, id: String, registration: Registration) -> Result {
let appservice_user_id = UserId::parse_with_server_name(
registration.sender_localpart.as_str(),
self.services.globals.server_name(),
)?;
if !self.services.users.exists(&appservice_user_id).await {
self.services
.users
.create(&appservice_user_id, None, None)
.await?;
} else if self
.services
.users
.is_deactivated(&appservice_user_id)
.await
.unwrap_or(false)
{
// Reactivate the appservice user if it was accidentally deactivated
self.services
.users
.set_password(&appservice_user_id, None)
.await?;
}
self.registration_info
.write()
.await
.insert(id, registration.try_into()?);
Ok(())
}
/// Registers an appservice and returns the ID to the caller
pub async fn register_appservice(
&self,
@ -142,40 +68,15 @@ impl Service {
appservice_config_body: &str,
) -> Result {
//TODO: Check for collisions between exclusive appservice namespaces
// Check for token collision with other appservices (allow re-registration of
// same appservice)
if let Ok(existing) = self.find_from_token(&registration.as_token).await {
if existing.registration.id != registration.id {
return Err(err!(Request(InvalidParam(
"Cannot register appservice: Token is already used by appservice '{}'. \
Please generate a different token.",
existing.registration.id
))));
}
}
// Prevent token collision with existing user tokens
if self
.services
.users
.find_from_token(&registration.as_token)
self.registration_info
.write()
.await
.is_ok()
{
return Err(err!(Request(InvalidParam(
"Cannot register appservice: The provided token is already in use by a user \
device. Please generate a different token for the appservice."
))));
}
.insert(registration.id.clone(), registration.clone().try_into()?);
self.db
.id_appserviceregistrations
.insert(&registration.id, appservice_config_body);
self.start_appservice(registration.id.clone(), registration.clone())
.await?;
Ok(())
}
@ -212,14 +113,12 @@ impl Service {
.map(|info| info.registration)
}
/// Returns Result to match users::find_from_token for select_ok usage
pub async fn find_from_token(&self, token: &str) -> Result<RegistrationInfo> {
pub async fn find_from_token(&self, token: &str) -> Option<RegistrationInfo> {
self.read()
.await
.values()
.find(|info| info.registration.as_token == token)
.cloned()
.ok_or_else(|| err!(Request(NotFound("Appservice token not found"))))
}
/// Checks if a given user id matches any exclusive appservice regex

View file

@ -41,11 +41,6 @@ impl crate::Service for Service {
return Ok(());
}
if self.services.config.ldap.enable {
warn!("emergency password feature not available with LDAP enabled.");
return Ok(());
}
self.set_emergency_access().await.inspect_err(|e| {
error!("Could not set the configured emergency password for the server user: {e}");
})
@ -62,8 +57,7 @@ impl Service {
self.services
.users
.set_password(server_user, self.services.config.emergency_password.as_deref())
.await?;
.set_password(server_user, self.services.config.emergency_password.as_deref())?;
let (ruleset, pwd_set) = match self.services.config.emergency_password {
| Some(_) => (Ruleset::server_default(server_user), true),

View file

@ -215,8 +215,8 @@ async fn db_lt_12(services: &Services) -> Result<()> {
for username in &services
.users
.list_local_users()
.map(ToOwned::to_owned)
.collect::<Vec<OwnedUserId>>()
.map(UserId::to_owned)
.collect::<Vec<_>>()
.await
{
let user = match UserId::parse_with_server_name(username.as_str(), &services.server.name)
@ -295,8 +295,8 @@ async fn db_lt_13(services: &Services) -> Result<()> {
for username in &services
.users
.list_local_users()
.map(ToOwned::to_owned)
.collect::<Vec<OwnedUserId>>()
.map(UserId::to_owned)
.collect::<Vec<_>>()
.await
{
let user = match UserId::parse_with_server_name(username.as_str(), &services.server.name)

View file

@ -183,8 +183,8 @@ impl Service {
.services
.users
.list_local_users()
.map(ToOwned::to_owned)
.collect::<Vec<OwnedUserId>>()
.map(UserId::to_owned)
.collect::<Vec<_>>()
.await
{
let presence = self.db.get_presence(user_id).await;

View file

@ -178,7 +178,7 @@ impl Service {
pub fn get_pushkeys<'a>(
&'a self,
sender: &'a UserId,
) -> impl Stream<Item = &'a str> + Send + 'a {
) -> impl Stream<Item = &str> + Send + 'a {
let prefix = (sender, Interfix);
self.db
.senderkey_pusher

View file

@ -178,7 +178,7 @@ impl Service {
pub fn local_aliases_for_room<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &'a RoomAliasId> + Send + 'a {
) -> impl Stream<Item = &RoomAliasId> + Send + 'a {
let prefix = (room_id, Interfix);
self.db
.aliasid_alias
@ -188,9 +188,7 @@ impl Service {
}
#[tracing::instrument(skip(self), level = "debug")]
pub fn all_local_aliases<'a>(
&'a self,
) -> impl Stream<Item = (&'a RoomId, &'a str)> + Send + 'a {
pub fn all_local_aliases<'a>(&'a self) -> impl Stream<Item = (&RoomId, &str)> + Send + 'a {
self.db
.alias_roomid
.stream()

View file

@ -60,13 +60,10 @@ impl Data {
target: ShortEventId,
from: PduCount,
dir: Direction,
) -> impl Stream<Item = (PduCount, impl Event)> + Send + 'a {
		// Query from the exact position, then let the filter exclude it
		// (saturating_inc could skip events at the min/max boundaries)
let from_unsigned = from.into_unsigned();
) -> impl Stream<Item = (PduCount, impl Event)> + Send + '_ {
let mut current = ArrayVec::<u8, 16>::new();
current.extend(target.to_be_bytes());
current.extend(from_unsigned.to_be_bytes());
current.extend(from.saturating_inc(dir).into_unsigned().to_be_bytes());
let current = current.as_slice();
match dir {
| Direction::Forward => self.tofrom_relation.raw_keys_from(current).boxed(),
@ -76,17 +73,6 @@ impl Data {
.ready_take_while(move |key| key.starts_with(&target.to_be_bytes()))
.map(|to_from| u64_from_u8(&to_from[8..16]))
.map(PduCount::from_unsigned)
.ready_filter(move |count| {
if from == PduCount::min() || from == PduCount::max() {
true
} else {
let count_unsigned = count.into_unsigned();
match dir {
| Direction::Forward => count_unsigned > from_unsigned,
| Direction::Backward => count_unsigned < from_unsigned,
}
}
})
.wide_filter_map(move |shorteventid| async move {
let pdu_id: RawPduId = PduId { shortroomid, shorteventid }.into();

View file

@ -65,7 +65,7 @@ impl Data {
&'a self,
room_id: &'a RoomId,
since: u64,
) -> impl Stream<Item = ReceiptItem<'a>> + Send + 'a {
) -> impl Stream<Item = ReceiptItem<'_>> + Send + 'a {
type Key<'a> = (&'a RoomId, u64, &'a UserId);
type KeyVal<'a> = (Key<'a>, CanonicalJsonObject);

View file

@ -112,7 +112,7 @@ impl Service {
&'a self,
room_id: &'a RoomId,
since: u64,
) -> impl Stream<Item = ReceiptItem<'a>> + Send + 'a {
) -> impl Stream<Item = ReceiptItem<'_>> + Send + 'a {
self.db.readreceipts_since(room_id, since)
}

View file

@ -104,7 +104,7 @@ pub fn deindex_pdu(&self, shortroomid: ShortRoomId, pdu_id: &RawPduId, message_b
pub async fn search_pdus<'a>(
&'a self,
query: &'a RoomQuery<'a>,
) -> Result<(usize, impl Stream<Item = impl Event + use<>> + Send + 'a)> {
) -> Result<(usize, impl Stream<Item = impl Event + use<>> + Send + '_)> {
let pdu_ids: Vec<_> = self.search_pdu_ids(query).await?.collect().await;
let filter = &query.criteria.filter;
@ -137,10 +137,10 @@ pub async fn search_pdus<'a>(
// result is modeled as a stream such that callers don't have to be refactored
// though an additional async/wrap still exists for now
#[implement(Service)]
pub async fn search_pdu_ids<'a>(
&'a self,
query: &'a RoomQuery<'_>,
) -> Result<impl Stream<Item = RawPduId> + Send + 'a + use<'a>> {
pub async fn search_pdu_ids(
&self,
query: &RoomQuery<'_>,
) -> Result<impl Stream<Item = RawPduId> + Send + '_ + use<'_>> {
let shortroomid = self.services.short.get_shortroomid(query.room_id).await?;
let pdu_ids = self.search_pdu_ids_query_room(query, shortroomid).await;
@ -173,7 +173,7 @@ fn search_pdu_ids_query_words<'a>(
&'a self,
shortroomid: ShortRoomId,
word: &'a str,
) -> impl Stream<Item = RawPduId> + Send + 'a {
) -> impl Stream<Item = RawPduId> + Send + '_ {
self.search_pdu_ids_query_word(shortroomid, word)
.map(move |key| -> RawPduId {
let key = &key[prefix_len(word)..];
@ -183,11 +183,11 @@ fn search_pdu_ids_query_words<'a>(
/// Iterate over raw database results for a word
#[implement(Service)]
fn search_pdu_ids_query_word<'a>(
&'a self,
fn search_pdu_ids_query_word(
&self,
shortroomid: ShortRoomId,
word: &'a str,
) -> impl Stream<Item = Val<'a>> + Send + 'a + use<'a> {
word: &str,
) -> impl Stream<Item = Val<'_>> + Send + '_ + use<'_> {
	// rustc says const'ing this is not yet stable
let end_id: RawPduId = PduId {
shortroomid,

View file

@ -62,7 +62,7 @@ pub async fn get_or_create_shorteventid(&self, event_id: &EventId) -> ShortEvent
pub fn multi_get_or_create_shorteventid<'a, I>(
&'a self,
event_ids: I,
) -> impl Stream<Item = ShortEventId> + Send + 'a
) -> impl Stream<Item = ShortEventId> + Send + '_
where
I: Iterator<Item = &'a EventId> + Clone + Debug + Send + 'a,
{

View file

@ -388,7 +388,7 @@ impl Service {
pub fn get_forward_extremities<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &'a EventId> + Send + 'a {
) -> impl Stream<Item = &EventId> + Send + '_ {
let prefix = (room_id, Interfix);
self.db

View file

@ -144,7 +144,7 @@ pub fn clear_appservice_in_room_cache(&self) { self.appservice_in_room_cache.wri
pub fn room_servers<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &'a ServerName> + Send + 'a {
) -> impl Stream<Item = &ServerName> + Send + 'a {
let prefix = (room_id, Interfix);
self.db
.roomserverids
@ -167,7 +167,7 @@ pub async fn server_in_room<'a>(&'a self, server: &'a ServerName, room_id: &'a R
pub fn server_rooms<'a>(
&'a self,
server: &'a ServerName,
) -> impl Stream<Item = &'a RoomId> + Send + 'a {
) -> impl Stream<Item = &RoomId> + Send + 'a {
let prefix = (server, Interfix);
self.db
.serverroomids
@ -202,7 +202,7 @@ pub fn get_shared_rooms<'a>(
&'a self,
user_a: &'a UserId,
user_b: &'a UserId,
) -> impl Stream<Item = &'a RoomId> + Send + 'a {
) -> impl Stream<Item = &RoomId> + Send + 'a {
use conduwuit::utils::set;
let a = self.rooms_joined(user_a);
@ -216,7 +216,7 @@ pub fn get_shared_rooms<'a>(
pub fn room_members<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &'a UserId> + Send + 'a {
) -> impl Stream<Item = &UserId> + Send + 'a {
let prefix = (room_id, Interfix);
self.db
.roomuserid_joined
@ -239,7 +239,7 @@ pub async fn room_joined_count(&self, room_id: &RoomId) -> Result<u64> {
pub fn local_users_in_room<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &'a UserId> + Send + 'a {
) -> impl Stream<Item = &UserId> + Send + 'a {
self.room_members(room_id)
.ready_filter(|user| self.services.globals.user_is_local(user))
}
@ -251,7 +251,7 @@ pub fn local_users_in_room<'a>(
pub fn active_local_users_in_room<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &'a UserId> + Send + 'a {
) -> impl Stream<Item = &UserId> + Send + 'a {
self.local_users_in_room(room_id)
.filter(|user| self.services.users.is_active(user))
}
@ -273,7 +273,7 @@ pub async fn room_invited_count(&self, room_id: &RoomId) -> Result<u64> {
pub fn room_useroncejoined<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &'a UserId> + Send + 'a {
) -> impl Stream<Item = &UserId> + Send + 'a {
let prefix = (room_id, Interfix);
self.db
.roomuseroncejoinedids
@ -288,7 +288,7 @@ pub fn room_useroncejoined<'a>(
pub fn room_members_invited<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &'a UserId> + Send + 'a {
) -> impl Stream<Item = &UserId> + Send + 'a {
let prefix = (room_id, Interfix);
self.db
.roomuserid_invitecount
@ -303,7 +303,7 @@ pub fn room_members_invited<'a>(
pub fn room_members_knocked<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &'a UserId> + Send + 'a {
) -> impl Stream<Item = &UserId> + Send + 'a {
let prefix = (room_id, Interfix);
self.db
.roomuserid_knockedcount
@ -347,7 +347,7 @@ pub async fn get_left_count(&self, room_id: &RoomId, user_id: &UserId) -> Result
pub fn rooms_joined<'a>(
&'a self,
user_id: &'a UserId,
) -> impl Stream<Item = &'a RoomId> + Send + 'a {
) -> impl Stream<Item = &RoomId> + Send + 'a {
self.db
.userroomid_joined
.keys_raw_prefix(user_id)

View file

@ -49,7 +49,7 @@ pub async fn update_membership(
#[allow(clippy::collapsible_if)]
if !self.services.globals.user_is_local(user_id) {
if !self.services.users.exists(user_id).await {
self.services.users.create(user_id, None, None).await?;
self.services.users.create(user_id, None)?;
}
}

View file

@ -81,7 +81,7 @@ pub async fn servers_route_via(&self, room_id: &RoomId) -> Result<Vec<OwnedServe
pub fn servers_invite_via<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &'a ServerName> + Send + 'a {
) -> impl Stream<Item = &ServerName> + Send + 'a {
type KeyVal<'a> = (Ignore, Vec<&'a ServerName>);
self.db

View file

@ -165,6 +165,25 @@ pub async fn create_hash_and_sign_event(
return Err!(Request(Forbidden("Event is not authorized.")));
}
// Check with the policy server
match self
.services
.event_handler
.ask_policy_server(&pdu, room_id)
.await
{
| Ok(true) => {},
| Ok(false) => {
return Err!(Request(Forbidden(debug_warn!(
"Policy server marked this event as spam"
))));
},
| Err(e) => {
// fail open
warn!("Failed to check event with policy server: {e}");
},
}
// Hash and sign
let mut pdu_json = utils::to_canonical_object(&pdu).map_err(|e| {
err!(Request(BadJson(warn!("Failed to convert PDU to canonical JSON: {e}"))))
@ -203,25 +222,6 @@ pub async fn create_hash_and_sign_event(
pdu_json.insert("event_id".into(), CanonicalJsonValue::String(pdu.event_id.clone().into()));
// Check with the policy server
match self
.services
.event_handler
.ask_policy_server(&pdu, room_id)
.await
{
| Ok(true) => {},
| Ok(false) => {
return Err!(Request(Forbidden(debug_warn!(
"Policy server marked this event as spam"
))));
},
| Err(e) => {
// fail open
warn!("Failed to check event with policy server: {e}");
},
}
// Generate short event id
let _shorteventid = self
.services
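
The policy-server gate fails open. A compact sketch of that pattern with hypothetical types: a definite spam verdict rejects the event, but an unreachable policy server must not block local sending:

enum Verdict { Ok, Spam }

fn policy_gate(check: Result<Verdict, &'static str>) -> Result<(), &'static str> {
    match check {
        Ok(Verdict::Spam) => Err("Policy server marked this event as spam"),
        Ok(Verdict::Ok) => Ok(()),
        Err(e) => {
            // fail open: an unreachable policy server must not block sending
            eprintln!("Failed to check event with policy server: {e}");
            Ok(())
        },
    }
}

fn main() {
    assert!(policy_gate(Ok(Verdict::Spam)).is_err());
    assert!(policy_gate(Err("connection refused")).is_ok()); // fails open
}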

View file

@ -1,19 +1,11 @@
#[cfg(feature = "ldap")]
use std::collections::HashMap;
use std::{collections::BTreeMap, mem, sync::Arc};
#[cfg(feature = "ldap")]
use conduwuit::result::LogErr;
use conduwuit::{
Err, Error, Result, Server, at, debug_warn, err, is_equal_to, trace,
Err, Error, Result, Server, at, debug_warn, err, trace,
utils::{self, ReadyExt, stream::TryIgnore, string::Unquoted},
};
#[cfg(feature = "ldap")]
use conduwuit_core::{debug, error};
use database::{Deserialized, Ignore, Interfix, Json, Map};
use futures::{Stream, StreamExt, TryFutureExt};
#[cfg(feature = "ldap")]
use ldap3::{LdapConnAsync, Scope, SearchEntry};
use ruma::{
DeviceId, KeyId, MilliSecondsSinceUnixEpoch, OneTimeKeyAlgorithm, OneTimeKeyId,
OneTimeKeyName, OwnedDeviceId, OwnedKeyId, OwnedMxcUri, OwnedUserId, RoomId, UInt, UserId,
@ -27,7 +19,7 @@ use ruma::{
use serde::{Deserialize, Serialize};
use serde_json::json;
use crate::{Dep, account_data, admin, appservice, globals, rooms};
use crate::{Dep, account_data, admin, globals, rooms};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserSuspension {
@ -48,7 +40,6 @@ struct Services {
server: Arc<Server>,
account_data: Dep<account_data::Service>,
admin: Dep<admin::Service>,
appservice: Dep<appservice::Service>,
globals: Dep<globals::Service>,
state_accessor: Dep<rooms::state_accessor::Service>,
state_cache: Dep<rooms::state_cache::Service>,
@ -71,7 +62,6 @@ struct Data {
userid_displayname: Arc<Map>,
userid_lastonetimekeyupdate: Arc<Map>,
userid_masterkeyid: Arc<Map>,
userid_origin: Arc<Map>,
userid_password: Arc<Map>,
userid_suspension: Arc<Map>,
userid_selfsigningkeyid: Arc<Map>,
@ -86,7 +76,6 @@ impl crate::Service for Service {
server: args.server.clone(),
account_data: args.depend::<account_data::Service>("account_data"),
admin: args.depend::<admin::Service>("admin"),
appservice: args.depend::<appservice::Service>("appservice"),
globals: args.depend::<globals::Service>("globals"),
state_accessor: args
.depend::<rooms::state_accessor::Service>("rooms::state_accessor"),
@ -109,7 +98,6 @@ impl crate::Service for Service {
userid_displayname: args.db["userid_displayname"].clone(),
userid_lastonetimekeyupdate: args.db["userid_lastonetimekeyupdate"].clone(),
userid_masterkeyid: args.db["userid_masterkeyid"].clone(),
userid_origin: args.db["userid_origin"].clone(),
userid_password: args.db["userid_password"].clone(),
userid_suspension: args.db["userid_suspension"].clone(),
userid_selfsigningkeyid: args.db["userid_selfsigningkeyid"].clone(),
@ -146,21 +134,9 @@ impl Service {
}
/// Create a new user account on this homeserver.
///
	/// The user origin defaults to "password" (meaning the user logs in with
	/// their user_id/password). Users with other origins (currently only "ldap"
	/// is available) have special login processes.
#[inline]
pub async fn create(
&self,
user_id: &UserId,
password: Option<&str>,
origin: Option<&str>,
) -> Result<()> {
self.db
.userid_origin
.insert(user_id, origin.unwrap_or("password"));
self.set_password(user_id, password).await
pub fn create(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
self.set_password(user_id, password)
}
/// Deactivate account
@ -174,7 +150,7 @@ impl Service {
// result in an empty string, so the user will not be able to log in again.
// Systems like changing the password without logging in should check if the
// account is deactivated.
self.set_password(user_id, None).await?;
self.set_password(user_id, None)?;
// TODO: Unhook 3PID
Ok(())
@ -275,34 +251,13 @@ impl Service {
.ready_filter_map(|(u, p): (&UserId, &[u8])| (!p.is_empty()).then_some(u))
}
/// Returns the origin of the user (password/LDAP/...).
pub async fn origin(&self, user_id: &UserId) -> Result<String> {
self.db.userid_origin.get(user_id).await.deserialized()
}
/// Returns the password hash for the given user.
pub async fn password_hash(&self, user_id: &UserId) -> Result<String> {
self.db.userid_password.get(user_id).await.deserialized()
}
/// Hash and set the user's password to the Argon2 hash
pub async fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
		// Cannot change the password of an LDAP user. There are two special cases:
		// - a `None` password can be used to deactivate an LDAP user
		// - a "*" password is used as the default password of an active LDAP user
if cfg!(feature = "ldap")
&& password.is_some_and(|pwd| pwd != "*")
&& self
.db
.userid_origin
.get(user_id)
.await
.deserialized::<String>()
.is_ok_and(is_equal_to!("ldap"))
{
return Err!(Request(InvalidParam("Cannot change password of a LDAP user")));
}
pub fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
password
.map(utils::hash::password)
.transpose()
@ -422,7 +377,7 @@ impl Service {
pub fn all_device_ids<'a>(
&'a self,
user_id: &'a UserId,
) -> impl Stream<Item = &'a DeviceId> + Send + 'a {
) -> impl Stream<Item = &DeviceId> + Send + 'a {
let prefix = (user_id, Interfix);
self.db
.userdeviceid_metadata
@ -436,31 +391,6 @@ impl Service {
self.db.userdeviceid_token.qry(&key).await.deserialized()
}
/// Generate a unique access token that doesn't collide with existing tokens
pub async fn generate_unique_token(&self) -> String {
loop {
let token = utils::random_string(32);
// Check for collision with appservice tokens
if self
.services
.appservice
.find_from_token(&token)
.await
.is_ok()
{
continue;
}
// Check for collision with user tokens
if self.db.token_userdeviceid.get(&token).await.is_ok() {
continue;
}
return token;
}
}
/// Replaces the access token of one device.
pub async fn set_token(
&self,
@ -477,19 +407,6 @@ impl Service {
)));
}
// Check for token collision with appservices
if self
.services
.appservice
.find_from_token(token)
.await
.is_ok()
{
return Err!(Request(InvalidParam(
"Token conflicts with an existing appservice token"
)));
}
// Remove old token
if let Ok(old_token) = self.db.userdeviceid_token.qry(&key).await {
self.db.token_userdeviceid.remove(&old_token);
@ -770,7 +687,7 @@ impl Service {
user_id: &'a UserId,
from: u64,
to: Option<u64>,
) -> impl Stream<Item = &'a UserId> + Send + 'a {
) -> impl Stream<Item = &UserId> + Send + 'a {
self.keys_changed_user_or_room(user_id.as_str(), from, to)
.map(|(user_id, ..)| user_id)
}
@ -781,7 +698,7 @@ impl Service {
room_id: &'a RoomId,
from: u64,
to: Option<u64>,
) -> impl Stream<Item = (&'a UserId, u64)> + Send + 'a {
) -> impl Stream<Item = (&UserId, u64)> + Send + 'a {
self.keys_changed_user_or_room(room_id.as_str(), from, to)
}
@ -790,7 +707,7 @@ impl Service {
user_or_room_id: &'a str,
from: u64,
to: Option<u64>,
) -> impl Stream<Item = (&'a UserId, u64)> + Send + 'a {
) -> impl Stream<Item = (&UserId, u64)> + Send + 'a {
type KeyVal<'a> = ((&'a str, u64), &'a UserId);
let to = to.unwrap_or(u64::MAX);
@ -1175,154 +1092,6 @@ impl Service {
self.db.useridprofilekey_value.del(key);
}
}
#[cfg(not(feature = "ldap"))]
pub async fn search_ldap(&self, _user_id: &UserId) -> Result<Vec<(String, bool)>> {
Err!(FeatureDisabled("ldap"))
}
#[cfg(feature = "ldap")]
pub async fn search_ldap(&self, user_id: &UserId) -> Result<Vec<(String, bool)>> {
let localpart = user_id.localpart().to_owned();
let lowercased_localpart = localpart.to_lowercase();
let config = &self.services.server.config.ldap;
let uri = config
.uri
.as_ref()
.ok_or_else(|| err!(Ldap(error!("LDAP URI is not configured."))))?;
debug!(?uri, "LDAP creating connection...");
let (conn, mut ldap) = LdapConnAsync::new(uri.as_str())
.await
.map_err(|e| err!(Ldap(error!(?user_id, "LDAP connection setup error: {e}"))))?;
let driver = self.services.server.runtime().spawn(async move {
match conn.drive().await {
| Err(e) => error!("LDAP connection error: {e}"),
| Ok(()) => debug!("LDAP connection completed."),
}
});
// Bind with service credentials when both are configured;
// otherwise the search proceeds over an anonymous bind.
match (&config.bind_dn, &config.bind_password_file) {
| (Some(bind_dn), Some(bind_password_file)) => {
let bind_pw = String::from_utf8(std::fs::read(bind_password_file)?)?;
ldap.simple_bind(bind_dn, bind_pw.trim())
.await
.and_then(ldap3::LdapResult::success)
.map_err(|e| err!(Ldap(error!("LDAP bind error: {e}"))))?;
},
| (..) => {},
}
let attr = [&config.uid_attribute, &config.name_attribute];
let user_filter = &config.filter.replace("{username}", &lowercased_localpart);
let (entries, _result) = ldap
.search(&config.base_dn, Scope::Subtree, user_filter, &attr)
.await
.and_then(ldap3::SearchResult::success)
.inspect(|(entries, result)| trace!(?entries, ?result, "LDAP Search"))
.map_err(|e| err!(Ldap(error!(?attr, ?user_filter, "LDAP search error: {e}"))))?;
// Map of matched DN -> is_admin; plain user matches enter as `false`.
let mut dns: HashMap<String, bool> = entries
.into_iter()
.filter_map(|entry| {
let search_entry = SearchEntry::construct(entry);
debug!(?search_entry, "LDAP search entry");
search_entry
.attrs
.get(&config.uid_attribute)
.into_iter()
.chain(search_entry.attrs.get(&config.name_attribute))
.any(|ids| ids.contains(&localpart) || ids.contains(&lowercased_localpart))
.then_some((search_entry.dn, false))
})
.collect();
// A second, admin-scoped search marks matching DNs as admins,
// overwriting any non-admin entry for the same DN.
if !config.admin_filter.is_empty() {
let admin_base_dn = if config.admin_base_dn.is_empty() {
&config.base_dn
} else {
&config.admin_base_dn
};
let admin_filter = &config
.admin_filter
.replace("{username}", &lowercased_localpart);
let (admin_entries, _result) = ldap
.search(admin_base_dn, Scope::Subtree, admin_filter, &attr)
.await
.and_then(ldap3::SearchResult::success)
.inspect(|(entries, result)| trace!(?entries, ?result, "LDAP Admin Search"))
.map_err(|e| {
err!(Ldap(error!(?attr, ?admin_filter, "LDAP admin search error: {e}")))
})?;
dns.extend(admin_entries.into_iter().filter_map(|entry| {
let search_entry = SearchEntry::construct(entry);
debug!(?search_entry, "LDAP search entry");
search_entry
.attrs
.get(&config.uid_attribute)
.into_iter()
.chain(search_entry.attrs.get(&config.name_attribute))
.any(|ids| ids.contains(&localpart) || ids.contains(&lowercased_localpart))
.then_some((search_entry.dn, true))
}));
}
ldap.unbind()
.await
.map_err(|e| err!(Ldap(error!("LDAP unbind error: {e}"))))?;
driver.await.log_err().ok();
Ok(dns.drain().collect())
}
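Condensed, the search path above is: connect, spawn the connection driver, optionally bind with service credentials, run the filter, and unbind. A minimal standalone sketch of that flow with the `ldap3` crate (hypothetical parameters, error handling reduced to `?`):

use ldap3::{LdapConnAsync, Scope, SearchEntry};

// Return the DNs of all entries under `base_dn` matching `filter`.
async fn find_dns(uri: &str, base_dn: &str, filter: &str) -> ldap3::result::Result<Vec<String>> {
    let (conn, mut ldap) = LdapConnAsync::new(uri).await?;
    ldap3::drive!(conn); // drive the connection on a background task
    let (entries, _res) = ldap
        .search(base_dn, Scope::Subtree, filter, ["dn"])
        .await?
        .success()?;
    ldap.unbind().await?;
    Ok(entries
        .into_iter()
        .map(|entry| SearchEntry::construct(entry).dn)
        .collect())
}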
#[cfg(not(feature = "ldap"))]
pub async fn auth_ldap(&self, _user_dn: &str, _password: &str) -> Result {
Err!(FeatureDisabled("ldap"))
}
#[cfg(feature = "ldap")]
pub async fn auth_ldap(&self, user_dn: &str, password: &str) -> Result {
let config = &self.services.server.config.ldap;
let uri = config
.uri
.as_ref()
.ok_or_else(|| err!(Ldap(error!("LDAP URI is not configured."))))?;
debug!(?uri, "LDAP creating connection...");
let (conn, mut ldap) = LdapConnAsync::new(uri.as_str())
.await
.map_err(|e| err!(Ldap(error!(?user_dn, "LDAP connection setup error: {e}"))))?;
let driver = self.services.server.runtime().spawn(async move {
match conn.drive().await {
| Err(e) => error!("LDAP connection error: {e}"),
| Ok(()) => debug!("LDAP connection completed."),
}
});
ldap.simple_bind(user_dn, password)
.await
.and_then(ldap3::LdapResult::success)
.map_err(|e| {
err!(Request(Forbidden(debug_error!("LDAP authentication error: {e}"))))
})?;
ldap.unbind()
.await
.map_err(|e| err!(Ldap(error!("LDAP unbind error: {e}"))))?;
driver.await.log_err().ok();
Ok(())
}
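`auth_ldap` verifies credentials by binding as the user: the bind succeeding is itself the proof that the password is correct, and no entries are read at all. The same check in isolation, again assuming the `ldap3` crate:

use ldap3::LdapConnAsync;

// Returns Ok(true) when the DN/password pair binds successfully.
async fn verify_credentials(uri: &str, user_dn: &str, password: &str) -> ldap3::result::Result<bool> {
    let (conn, mut ldap) = LdapConnAsync::new(uri).await?;
    ldap3::drive!(conn);
    let outcome = ldap.simple_bind(user_dn, password).await?;
    ldap.unbind().await?;
    Ok(outcome.rc == 0) // LDAP result code 0 = success, 49 = invalidCredentials
}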
}
pub fn parse_master_key(