diff --git a/.envrc b/.envrc index bad73b75..172993c4 100644 --- a/.envrc +++ b/.envrc @@ -2,6 +2,8 @@ dotenv_if_exists -# use flake ".#${DIRENV_DEVSHELL:-default}" +if [ -f /etc/os-release ] && grep -q '^ID=nixos' /etc/os-release; then + use flake ".#${DIRENV_DEVSHELL:-default}" +fi PATH_add bin diff --git a/.forgejo/actions/detect-runner-os/action.yml b/.forgejo/actions/detect-runner-os/action.yml new file mode 100644 index 00000000..6ada1d5d --- /dev/null +++ b/.forgejo/actions/detect-runner-os/action.yml @@ -0,0 +1,39 @@ +name: detect-runner-os +description: | + Detect the actual OS name and version of the runner. + Provides separate outputs for name, version, and a combined slug. + +outputs: + name: + description: 'OS name (e.g. Ubuntu, Debian)' + value: ${{ steps.detect.outputs.name }} + version: + description: 'OS version (e.g. 22.04, 11)' + value: ${{ steps.detect.outputs.version }} + slug: + description: 'Combined OS slug (e.g. Ubuntu-22.04)' + value: ${{ steps.detect.outputs.slug }} + +runs: + using: composite + steps: + - name: Detect runner OS + id: detect + shell: bash + run: | + # Detect OS version (try lsb_release first, fall back to /etc/os-release) + OS_VERSION=$(lsb_release -rs 2>/dev/null || grep VERSION_ID /etc/os-release | cut -d'"' -f2) + + # Detect OS name and capitalise (try lsb_release first, fall back to /etc/os-release) + OS_NAME=$(lsb_release -is 2>/dev/null || grep "^ID=" /etc/os-release | cut -d'=' -f2 | tr -d '"' | sed 's/\b\(.\)/\u\1/g') + + # Create combined slug + OS_SLUG="${OS_NAME}-${OS_VERSION}" + + # Set outputs + echo "name=${OS_NAME}" >> $GITHUB_OUTPUT + echo "version=${OS_VERSION}" >> $GITHUB_OUTPUT + echo "slug=${OS_SLUG}" >> $GITHUB_OUTPUT + + # Log detection results + echo "🔍 Detected Runner OS: ${OS_NAME} ${OS_VERSION}" diff --git a/.forgejo/actions/prefligit/action.yml b/.forgejo/actions/prefligit/action.yml deleted file mode 100644 index 8cbd4500..00000000 --- a/.forgejo/actions/prefligit/action.yml +++ /dev/null @@ 
-1,27 +0,0 @@ -name: prefligit -description: | - Runs prefligit, pre-commit reimplemented in Rust. -inputs: - extra_args: - description: options to pass to pre-commit run - required: false - default: '--all-files' - -runs: - using: composite - steps: - - name: Install uv - uses: https://github.com/astral-sh/setup-uv@v6 - with: - enable-cache: true - ignore-nothing-to-cache: true - - name: Install Prefligit - shell: bash - run: | - curl --proto '=https' --tlsv1.2 -LsSf https://github.com/j178/prefligit/releases/download/v0.0.10/prefligit-installer.sh | sh - - uses: actions/cache@v3 - with: - path: ~/.cache/prefligit - key: prefligit-0|${{ hashFiles('.pre-commit-config.yaml') }} - - run: prefligit run --show-diff-on-failure --color=always -v ${{ inputs.extra_args }} - shell: bash diff --git a/.forgejo/actions/setup-llvm-with-apt/action.yml b/.forgejo/actions/setup-llvm-with-apt/action.yml new file mode 100644 index 00000000..eb421e4f --- /dev/null +++ b/.forgejo/actions/setup-llvm-with-apt/action.yml @@ -0,0 +1,167 @@ +name: setup-llvm-with-apt +description: | + Set up LLVM toolchain with APT package management and smart caching. + Supports cross-compilation architectures and additional package installation. + + Creates symlinks in /usr/bin: clang, clang++, lld, llvm-ar, llvm-ranlib + +inputs: + dpkg-arch: + description: 'Debian architecture for cross-compilation (e.g. 
arm64)' + required: false + default: '' + extra-packages: + description: 'Additional APT packages to install (space-separated)' + required: false + default: '' + llvm-version: + description: 'LLVM version to install' + required: false + default: '20' + +outputs: + llvm-version: + description: 'Installed LLVM version' + value: ${{ steps.configure.outputs.version }} + +runs: + using: composite + steps: + - name: Detect runner OS + id: runner-os + uses: ./.forgejo/actions/detect-runner-os + + - name: Configure cross-compilation architecture + if: inputs.dpkg-arch != '' + shell: bash + run: | + echo "🏗️ Adding ${{ inputs.dpkg-arch }} architecture" + sudo dpkg --add-architecture ${{ inputs.dpkg-arch }} + + # Restrict default sources to amd64 + sudo sed -i 's/^deb http/deb [arch=amd64] http/g' /etc/apt/sources.list + sudo sed -i 's/^deb https/deb [arch=amd64] https/g' /etc/apt/sources.list + + # Add ports sources for foreign architecture + sudo tee /etc/apt/sources.list.d/${{ inputs.dpkg-arch }}.list > /dev/null <> $GITHUB_OUTPUT + else + echo "📦 LLVM ${{ inputs.llvm-version }} not found or incomplete - installing..." + + echo "::group::🔧 Installing LLVM ${{ inputs.llvm-version }}" + wget -O - https://apt.llvm.org/llvm.sh | bash -s -- ${{ inputs.llvm-version }} + echo "::endgroup::" + + if [ ! 
-f "/usr/bin/clang-${{ inputs.llvm-version }}" ]; then + echo "❌ Failed to install LLVM ${{ inputs.llvm-version }}" + exit 1 + fi + + echo "✅ Installed LLVM ${{ inputs.llvm-version }}" + echo "needs-install=true" >> $GITHUB_OUTPUT + fi + + - name: Prepare for additional packages + if: inputs.extra-packages != '' + shell: bash + run: | + # Update APT if LLVM was cached (installer script already does apt-get update) + if [[ "${{ steps.llvm-setup.outputs.needs-install }}" != "true" ]]; then + echo "::group::📦 Running apt-get update (LLVM cached, extra packages needed)" + sudo apt-get update + echo "::endgroup::" + fi + echo "::group::📦 Installing additional packages" + + - name: Install additional packages + if: inputs.extra-packages != '' + uses: https://github.com/awalsh128/cache-apt-pkgs-action@latest + with: + packages: ${{ inputs.extra-packages }} + version: 1.0 + + - name: End package installation group + if: inputs.extra-packages != '' + shell: bash + run: echo "::endgroup::" + + - name: Configure LLVM environment + id: configure + shell: bash + run: | + echo "::group::🔧 Configuring LLVM ${{ inputs.llvm-version }} environment" + + # Create symlinks + sudo ln -sf "/usr/bin/clang-${{ inputs.llvm-version }}" /usr/bin/clang + sudo ln -sf "/usr/bin/clang++-${{ inputs.llvm-version }}" /usr/bin/clang++ + sudo ln -sf "/usr/bin/lld-${{ inputs.llvm-version }}" /usr/bin/lld + sudo ln -sf "/usr/bin/llvm-ar-${{ inputs.llvm-version }}" /usr/bin/llvm-ar + sudo ln -sf "/usr/bin/llvm-ranlib-${{ inputs.llvm-version }}" /usr/bin/llvm-ranlib + echo " ✓ Created symlinks" + + # Setup library paths + LLVM_LIB_PATH="/usr/lib/llvm-${{ inputs.llvm-version }}/lib" + if [ -d "$LLVM_LIB_PATH" ]; then + echo "LD_LIBRARY_PATH=${LLVM_LIB_PATH}:${LD_LIBRARY_PATH:-}" >> $GITHUB_ENV + echo "LIBCLANG_PATH=${LLVM_LIB_PATH}" >> $GITHUB_ENV + + echo "$LLVM_LIB_PATH" | sudo tee "/etc/ld.so.conf.d/llvm-${{ inputs.llvm-version }}.conf" > /dev/null + sudo ldconfig + echo " ✓ Configured library paths" + 
else + # Fallback to standard library location + if [ -d "/usr/lib/x86_64-linux-gnu" ]; then + echo "LIBCLANG_PATH=/usr/lib/x86_64-linux-gnu" >> $GITHUB_ENV + echo " ✓ Using fallback library path" + fi + fi + + # Set output + echo "version=${{ inputs.llvm-version }}" >> $GITHUB_OUTPUT + echo "::endgroup::" + echo "✅ LLVM ready: $(clang --version | head -1)" diff --git a/.forgejo/actions/setup-rust/action.yml b/.forgejo/actions/setup-rust/action.yml new file mode 100644 index 00000000..091da8c2 --- /dev/null +++ b/.forgejo/actions/setup-rust/action.yml @@ -0,0 +1,236 @@ +name: setup-rust +description: | + Set up Rust toolchain with sccache for compilation caching. + Respects rust-toolchain.toml by default or accepts explicit version override. + +inputs: + cache-key-suffix: + description: 'Optional suffix for cache keys (e.g. platform identifier)' + required: false + default: '' + rust-components: + description: 'Additional Rust components to install (space-separated)' + required: false + default: '' + rust-target: + description: 'Rust target triple (e.g. x86_64-unknown-linux-gnu)' + required: false + default: '' + rust-version: + description: 'Rust version to install (e.g. nightly). Defaults to 1.87.0' + required: false + default: '1.87.0' + sccache-cache-limit: + description: 'Maximum size limit for sccache local cache (e.g. 
2G, 500M)' + required: false + default: '2G' + github-token: + description: 'GitHub token for downloading sccache from GitHub releases' + required: false + default: '' + +outputs: + rust-version: + description: 'Installed Rust version' + value: ${{ steps.rust-setup.outputs.version }} + +runs: + using: composite + steps: + - name: Detect runner OS + id: runner-os + uses: ./.forgejo/actions/detect-runner-os + + - name: Configure Cargo environment + shell: bash + run: | + # Use workspace-relative paths for better control and consistency + echo "CARGO_HOME=${{ github.workspace }}/.cargo" >> $GITHUB_ENV + echo "CARGO_TARGET_DIR=${{ github.workspace }}/target" >> $GITHUB_ENV + echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> $GITHUB_ENV + echo "RUSTUP_HOME=${{ github.workspace }}/.rustup" >> $GITHUB_ENV + + # Limit binstall resolution timeout to avoid GitHub rate limit delays + echo "BINSTALL_MAXIMUM_RESOLUTION_TIMEOUT=10" >> $GITHUB_ENV + + # Ensure directories exist for first run + mkdir -p "${{ github.workspace }}/.cargo" + mkdir -p "${{ github.workspace }}/.sccache" + mkdir -p "${{ github.workspace }}/target" + mkdir -p "${{ github.workspace }}/.rustup" + + - name: Start cache restore group + shell: bash + run: echo "::group::📦 Restoring caches (registry, toolchain, build artifacts)" + + - name: Cache Cargo registry and git + id: registry-cache + uses: https://github.com/actions/cache@v4 + with: + path: | + .cargo/registry/index + .cargo/registry/cache + .cargo/git/db + # Registry cache saved per workflow, restored from any workflow's cache + # Each workflow maintains its own registry that accumulates its needed crates + key: cargo-registry-${{ steps.runner-os.outputs.slug }}-${{ github.workflow }} + restore-keys: | + cargo-registry-${{ steps.runner-os.outputs.slug }}- + + - name: Cache toolchain binaries + id: toolchain-cache + uses: https://github.com/actions/cache@v4 + with: + path: | + .cargo/bin + .rustup/toolchains + .rustup/update-hashes + # Shared 
toolchain cache across all Rust versions + key: toolchain-${{ steps.runner-os.outputs.slug }} + + - name: Debug GitHub token availability + shell: bash + run: | + if [ -z "${{ inputs.github-token }}" ]; then + echo "⚠️ No GitHub token provided - sccache will use fallback download method" + else + echo "✅ GitHub token provided for sccache" + fi + + - name: Setup sccache + uses: https://github.com/mozilla-actions/sccache-action@v0.0.9 + with: + token: ${{ inputs.github-token }} + + - name: Cache build artifacts + id: build-cache + uses: https://github.com/actions/cache@v4 + with: + path: | + target/**/deps + !target/**/deps/*.rlib + target/**/build + target/**/.fingerprint + target/**/incremental + target/**/*.d + /timelord/ + # Build artifacts - cache per code change, restore from deps when code changes + key: >- + build-${{ steps.runner-os.outputs.slug }}-${{ inputs.rust-version }}${{ inputs.cache-key-suffix && format('-{0}', inputs.cache-key-suffix) || '' }}-${{ hashFiles('rust-toolchain.toml', '**/Cargo.lock') }}-${{ hashFiles('**/*.rs', '**/Cargo.toml') }} + restore-keys: | + build-${{ steps.runner-os.outputs.slug }}-${{ inputs.rust-version }}${{ inputs.cache-key-suffix && format('-{0}', inputs.cache-key-suffix) || '' }}-${{ hashFiles('rust-toolchain.toml', '**/Cargo.lock') }}- + + - name: End cache restore group + shell: bash + run: echo "::endgroup::" + + - name: Setup Rust toolchain + shell: bash + run: | + # Install rustup if not already cached + if ! 
command -v rustup &> /dev/null; then + echo "::group::📦 Installing rustup" + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain none + source "$CARGO_HOME/env" + echo "::endgroup::" + else + echo "✅ rustup already available" + fi + + # Setup the appropriate Rust version + if [[ -n "${{ inputs.rust-version }}" ]]; then + echo "::group::📦 Setting up Rust ${{ inputs.rust-version }}" + # Set override first to prevent rust-toolchain.toml from auto-installing + rustup override set ${{ inputs.rust-version }} 2>/dev/null || true + + # Check if we need to install/update the toolchain + if rustup toolchain list | grep -q "^${{ inputs.rust-version }}-"; then + rustup update ${{ inputs.rust-version }} + else + rustup toolchain install ${{ inputs.rust-version }} --profile minimal -c cargo,clippy,rustfmt + fi + else + echo "::group::📦 Setting up Rust from rust-toolchain.toml" + rustup show + fi + echo "::endgroup::" + + - name: Configure PATH and install tools + shell: bash + env: + GITHUB_TOKEN: ${{ inputs.github-token }} + run: | + # Add .cargo/bin to PATH permanently for all subsequent steps + echo "${{ github.workspace }}/.cargo/bin" >> $GITHUB_PATH + + # For this step only, we need to add it to PATH since GITHUB_PATH takes effect in the next step + export PATH="${{ github.workspace }}/.cargo/bin:$PATH" + + # Install cargo-binstall for fast binary installations + if command -v cargo-binstall &> /dev/null; then + echo "✅ cargo-binstall already available" + else + echo "::group::📦 Installing cargo-binstall" + curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash + echo "::endgroup::" + fi + + if command -v prek &> /dev/null; then + echo "✅ prek already available" + else + echo "::group::📦 Installing prek" + # prek isn't regularly published to crates.io, so we use git source + cargo-binstall -y --no-symlinks --git 
https://github.com/j178/prek prek + echo "::endgroup::" + fi + + if command -v timelord &> /dev/null; then + echo "✅ timelord already available" + else + echo "::group::📦 Installing timelord" + cargo-binstall -y --no-symlinks timelord-cli + echo "::endgroup::" + fi + + - name: Configure sccache environment + shell: bash + run: | + echo "RUSTC_WRAPPER=sccache" >> $GITHUB_ENV + echo "CMAKE_C_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV + echo "CMAKE_CXX_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV + echo "CMAKE_CUDA_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV + echo "SCCACHE_GHA_ENABLED=true" >> $GITHUB_ENV + + # Configure incremental compilation GC + # If we restored from old cache (partial hit), clean up aggressively + if [[ "${{ steps.build-cache.outputs.cache-hit }}" != "true" ]]; then + echo "♻️ Partial cache hit - enabling cache cleanup" + echo "CARGO_INCREMENTAL_GC_THRESHOLD=5" >> $GITHUB_ENV + fi + + - name: Install Rust components + if: inputs.rust-components != '' + shell: bash + run: | + echo "📦 Installing components: ${{ inputs.rust-components }}" + rustup component add ${{ inputs.rust-components }} + + - name: Install Rust target + if: inputs.rust-target != '' + shell: bash + run: | + echo "📦 Installing target: ${{ inputs.rust-target }}" + rustup target add ${{ inputs.rust-target }} + + - name: Output version and summary + id: rust-setup + shell: bash + run: | + RUST_VERSION=$(rustc --version | cut -d' ' -f2) + echo "version=$RUST_VERSION" >> $GITHUB_OUTPUT + + echo "📋 Setup complete:" + echo " Rust: $(rustc --version)" + echo " Cargo: $(cargo --version)" + echo " prek: $(prek --version 2>/dev/null || echo 'installed')" + echo " timelord: $(timelord --version 2>/dev/null || echo 'installed')" diff --git a/.forgejo/workflows/prefligit-checks.yml b/.forgejo/workflows/prefligit-checks.yml deleted file mode 100644 index cc512496..00000000 --- a/.forgejo/workflows/prefligit-checks.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: Checks / Prefligit - -on: - push: - 
pull_request: -permissions: - contents: read - -jobs: - prefligit: - runs-on: ubuntu-latest - env: - FROM_REF: ${{ github.event.pull_request.base.sha || (!github.event.forced && ( github.event.before != '0000000000000000000000000000000000000000' && github.event.before || github.sha )) || format('{0}~', github.sha) }} - TO_REF: ${{ github.sha }} - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - persist-credentials: false - - uses: ./.forgejo/actions/prefligit - with: - extra_args: --all-files --hook-stage manual diff --git a/.forgejo/workflows/prek-checks.yml b/.forgejo/workflows/prek-checks.yml new file mode 100644 index 00000000..c25b9c3d --- /dev/null +++ b/.forgejo/workflows/prek-checks.yml @@ -0,0 +1,79 @@ +name: Checks / Prek + +on: + push: + +permissions: + contents: read + +jobs: + fast-checks: + name: Pre-commit & Formatting + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Setup Rust nightly + uses: ./.forgejo/actions/setup-rust + with: + rust-version: nightly + github-token: ${{ secrets.GH_PUBLIC_RO }} + + - name: Run prek + run: | + prek run \ + --all-files \ + --hook-stage manual \ + --show-diff-on-failure \ + --color=always \ + -v + + - name: Check Rust formatting + run: | + cargo +nightly fmt --all -- --check && \ + echo "✅ Formatting check passed" || \ + exit 1 + + clippy-and-tests: + name: Clippy and Cargo Tests + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Setup LLVM + uses: ./.forgejo/actions/setup-llvm-with-apt + with: + extra-packages: liburing-dev liburing2 + + - name: Setup Rust with caching + uses: ./.forgejo/actions/setup-rust + with: + github-token: ${{ secrets.GH_PUBLIC_RO }} + + - name: Run Clippy lints + run: | + cargo clippy \ + --workspace \ + --features full \ + --locked \ + --no-deps \ + --profile test \ + -- \ + -D 
warnings + + - name: Run Cargo tests + run: | + cargo test \ + --workspace \ + --features full \ + --locked \ + --profile test \ + --all-targets \ + --no-fail-fast diff --git a/.forgejo/workflows/renovate.yml b/.forgejo/workflows/renovate.yml new file mode 100644 index 00000000..e8522bec --- /dev/null +++ b/.forgejo/workflows/renovate.yml @@ -0,0 +1,62 @@ +name: Maintenance / Renovate + +on: + schedule: + # Run at 5am UTC daily to avoid late-night dev + - cron: '0 5 * * *' + + workflow_dispatch: + inputs: + dryRun: + description: 'Dry run mode' + required: false + default: null + type: choice + options: + - null + - 'extract' + - 'lookup' + - 'full' + logLevel: + description: 'Log level' + required: false + default: 'info' + type: choice + options: + - 'info' + - 'warning' + - 'critical' + + push: + branches: + - main + paths: + # Re-run when config changes + - '.forgejo/workflows/renovate.yml' + - 'renovate.json' + +jobs: + renovate: + name: Renovate + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Self-hosted Renovate + uses: https://github.com/renovatebot/github-action@v40.1.0 + env: + LOG_LEVEL: ${{ inputs.logLevel || 'info' }} + RENOVATE_AUTODISCOVER: 'false' + RENOVATE_BINARY_SOURCE: 'install' + RENOVATE_DRY_RUN: ${{ inputs.dryRun || 'false' }} + RENOVATE_ENDPOINT: ${{ github.server_url }}/api/v1 + RENOVATE_GIT_TIMEOUT: 60000 + RENOVATE_GIT_URL: 'endpoint' + RENOVATE_GITHUB_TOKEN_WARN: 'false' + RENOVATE_ONBOARDING: 'false' + RENOVATE_PLATFORM: 'forgejo' + RENOVATE_PR_COMMITS_PER_RUN_LIMIT: 3 + RENOVATE_REPOSITORIES: '["${{ github.repository }}"]' + RENOVATE_REQUIRE_CONFIG: 'required' + RENOVATE_TOKEN: ${{ secrets.RENOVATE_TOKEN }} diff --git a/.forgejo/workflows/rust-checks.yml b/.forgejo/workflows/rust-checks.yml deleted file mode 100644 index c46363a0..00000000 --- a/.forgejo/workflows/rust-checks.yml +++ /dev/null @@ -1,144 +0,0 @@ -name: Checks / Rust - -on: - push: - -jobs: - format: - name: Format - runs-on: 
ubuntu-latest - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install rust - uses: ./.forgejo/actions/rust-toolchain - with: - toolchain: "nightly" - components: "rustfmt" - - - name: Check formatting - run: | - cargo +nightly fmt --all -- --check - - clippy: - name: Clippy - runs-on: ubuntu-latest - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install rust - uses: ./.forgejo/actions/rust-toolchain - - - uses: https://github.com/actions/create-github-app-token@v2 - id: app-token - with: - app-id: ${{ vars.GH_APP_ID }} - private-key: ${{ secrets.GH_APP_PRIVATE_KEY }} - github-api-url: https://api.github.com - owner: ${{ vars.GH_APP_OWNER }} - repositories: "" - - name: Install sccache - uses: ./.forgejo/actions/sccache - with: - token: ${{ steps.app-token.outputs.token }} - - run: sudo apt-get update - - name: Install system dependencies - uses: https://github.com/awalsh128/cache-apt-pkgs-action@v1 - with: - packages: clang liburing-dev - version: 1 - - name: Cache Rust registry - uses: actions/cache@v3 - with: - path: | - ~/.cargo/git - !~/.cargo/git/checkouts - ~/.cargo/registry - !~/.cargo/registry/src - key: rust-registry-${{hashFiles('**/Cargo.lock') }} - - name: Timelord - uses: ./.forgejo/actions/timelord - with: - key: sccache-v0 - path: . 
- - name: Clippy - run: | - cargo clippy \ - --workspace \ - --features full \ - --locked \ - --no-deps \ - --profile test \ - -- \ - -D warnings - - - name: Show sccache stats - if: always() - run: sccache --show-stats - - cargo-test: - name: Cargo Test - runs-on: ubuntu-latest - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install rust - uses: ./.forgejo/actions/rust-toolchain - - - uses: https://github.com/actions/create-github-app-token@v2 - id: app-token - with: - app-id: ${{ vars.GH_APP_ID }} - private-key: ${{ secrets.GH_APP_PRIVATE_KEY }} - github-api-url: https://api.github.com - owner: ${{ vars.GH_APP_OWNER }} - repositories: "" - - name: Install sccache - uses: ./.forgejo/actions/sccache - with: - token: ${{ steps.app-token.outputs.token }} - - run: sudo apt-get update - - name: Install system dependencies - uses: https://github.com/awalsh128/cache-apt-pkgs-action@v1 - with: - packages: clang liburing-dev - version: 1 - - name: Cache Rust registry - uses: actions/cache@v3 - with: - path: | - ~/.cargo/git - !~/.cargo/git/checkouts - ~/.cargo/registry - !~/.cargo/registry/src - key: rust-registry-${{hashFiles('**/Cargo.lock') }} - - name: Timelord - uses: ./.forgejo/actions/timelord - with: - key: sccache-v0 - path: . 
- - name: Cargo Test - run: | - cargo test \ - --workspace \ - --features full \ - --locked \ - --profile test \ - --all-targets \ - --no-fail-fast - - - name: Show sccache stats - if: always() - run: sccache --show-stats diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index fcfaade5..841427b7 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1,5 +1,4 @@ -github: [JadedBlueEyes] -# Doesn't support an array, so we can only list nex -ko_fi: nexy7574 +github: [JadedBlueEyes, nexy7574] custom: + - https://ko-fi.com/nexy7574 - https://ko-fi.com/JadedBlueEyes diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 68e3a982..da594310 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v5.0.0 hooks: - - id: check-byte-order-marker + - id: fix-byte-order-marker - id: check-case-conflict - id: check-symlinks - id: destroyed-symlinks diff --git a/Cargo.lock b/Cargo.lock index 5d7192b6..2b044a1f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -126,7 +126,7 @@ checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -183,7 +183,7 @@ dependencies = [ "rustc-hash 2.1.1", "serde", "serde_derive", - "syn", + "syn 2.0.104", ] [[package]] @@ -198,6 +198,45 @@ dependencies = [ "winnow", ] +[[package]] +name = "asn1-rs" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f6fd5ddaf0351dff5b8da21b2fb4ff8e08ddd02857f0bf69c47639106c0fff0" +dependencies = [ + "asn1-rs-derive", + "asn1-rs-impl", + "displaydoc", + "nom", + "num-traits", + "rusticata-macros", + "thiserror 1.0.69", + "time", +] + +[[package]] +name = "asn1-rs-derive" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "726535892e8eae7e70657b4c8ea93d26b8553afb1ce617caee529ef96d7dee6c" +dependencies = [ + 
"proc-macro2", + "quote", + "syn 1.0.109", + "synstructure 0.12.6", +] + +[[package]] +name = "asn1-rs-impl" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "assign" version = "1.1.1" @@ -250,7 +289,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -261,7 +300,7 @@ checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -433,11 +472,11 @@ dependencies = [ "hyper", "hyper-util", "pin-project-lite", - "rustls", - "rustls-pemfile", + "rustls 0.23.29", + "rustls-pemfile 2.2.0", "rustls-pki-types", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.2", "tower-service", ] @@ -452,9 +491,9 @@ dependencies = [ "http", "http-body-util", "pin-project", - "rustls", + "rustls 0.23.29", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.2", "tokio-util", "tower-layer", "tower-service", @@ -521,7 +560,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn", + "syn 2.0.104", "which", ] @@ -540,7 +579,7 @@ dependencies = [ "regex", "rustc-hash 2.1.1", "shlex", - "syn", + "syn 2.0.104", ] [[package]] @@ -794,7 +833,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -972,7 +1011,7 @@ dependencies = [ "rand 0.8.5", "regex", "reqwest", - "ring", + "ring 0.17.14", "ruma", "sanitize-filename", "serde", @@ -1019,7 +1058,7 @@ dependencies = [ "itertools 0.14.0", "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -1044,7 +1083,7 @@ dependencies = [ "hyper-util", "log", "ruma", - "rustls", + "rustls 0.23.29", "sd-notify", "sentry", "sentry-tower", @@ -1074,6 +1113,7 @@ dependencies = [ "image", 
"ipaddress", "itertools 0.14.0", + "ldap3", "log", "loole", "lru-cache", @@ -1183,6 +1223,16 @@ dependencies = [ "crossterm", ] +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation" version = "0.10.1" @@ -1256,7 +1306,7 @@ dependencies = [ "proc-macro2", "quote", "strict", - "syn", + "syn 2.0.104", ] [[package]] @@ -1366,7 +1416,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" dependencies = [ "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -1393,7 +1443,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -1434,6 +1484,20 @@ dependencies = [ "zeroize", ] +[[package]] +name = "der-parser" +version = "8.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbd676fbbab537128ef0278adb5576cf363cff6aa22a7b24effe97347cfab61e" +dependencies = [ + "asn1-rs", + "displaydoc", + "nom", + "num-bigint", + "num-traits", + "rusticata-macros", +] + [[package]] name = "deranged" version = "0.4.0" @@ -1461,7 +1525,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -1483,7 +1547,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -1544,7 +1608,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -1564,7 +1628,7 @@ checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -1734,6 +1798,7 @@ 
checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", + "futures-executor", "futures-io", "futures-sink", "futures-task", @@ -1781,7 +1846,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -2030,7 +2095,7 @@ dependencies = [ "ipnet", "once_cell", "rand 0.9.2", - "ring", + "ring 0.17.14", "serde", "thiserror 2.0.12", "tinyvec", @@ -2122,7 +2187,7 @@ dependencies = [ "markup5ever", "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -2216,11 +2281,11 @@ dependencies = [ "http", "hyper", "hyper-util", - "rustls", - "rustls-native-certs", + "rustls 0.23.29", + "rustls-native-certs 0.8.1", "rustls-pki-types", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.2", "tower-service", "webpki-roots 1.0.2", ] @@ -2444,7 +2509,7 @@ checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -2613,7 +2678,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn", + "syn 2.0.104", ] [[package]] @@ -2628,6 +2693,43 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" +[[package]] +name = "lber" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2df7f9fd9f64cf8f59e1a4a0753fe7d575a5b38d3d7ac5758dcee9357d83ef0a" +dependencies = [ + "bytes", + "nom", +] + +[[package]] +name = "ldap3" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "166199a8207874a275144c8a94ff6eed5fcbf5c52303e4d9b4d53a0c7ac76554" +dependencies = [ + "async-trait", + "bytes", + "futures", + "futures-util", + "lazy_static", + "lber", + "log", + "nom", + "percent-encoding", + "ring 0.16.20", + "rustls 
0.21.12", + "rustls-native-certs 0.6.3", + "thiserror 1.0.69", + "tokio", + "tokio-rustls 0.24.1", + "tokio-stream", + "tokio-util", + "url", + "x509-parser", +] + [[package]] name = "lebe" version = "0.5.2" @@ -2866,7 +2968,7 @@ checksum = "a9882ef5c56df184b8ffc107fc6c61e33ee3a654b021961d790a78571bb9d67a" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -3032,7 +3134,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -3094,6 +3196,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "oid-registry" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bedf36ffb6ba96c2eb7144ef6270557b52e54b20c0a8e1eb2ff99a6c6959bff" +dependencies = [ + "asn1-rs", +] + [[package]] name = "once_cell" version = "1.21.3" @@ -3284,7 +3395,7 @@ dependencies = [ "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -3358,7 +3469,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -3458,7 +3569,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff24dfcda44452b9816fff4cd4227e1bb73ff5a2f1bc1105aa92fb8565ce44d2" dependencies = [ "proc-macro2", - "syn", + "syn 2.0.104", ] [[package]] @@ -3487,7 +3598,7 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", "version_check", "yansi", ] @@ -3508,7 +3619,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52717f9a02b6965224f95ca2a81e2e0c5c43baacd28ca057577988930b6c3d5b" dependencies = [ "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -3531,7 +3642,7 @@ dependencies = [ "itertools 0.14.0", "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] 
[[package]] @@ -3597,7 +3708,7 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash 2.1.1", - "rustls", + "rustls 0.23.29", "socket2", "thiserror 2.0.12", "tokio", @@ -3615,9 +3726,9 @@ dependencies = [ "getrandom 0.3.3", "lru-slab", "rand 0.9.2", - "ring", + "ring 0.17.14", "rustc-hash 2.1.1", - "rustls", + "rustls 0.23.29", "rustls-pki-types", "slab", "thiserror 2.0.12", @@ -3876,16 +3987,16 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls", - "rustls-native-certs", - "rustls-pemfile", + "rustls 0.23.29", + "rustls-native-certs 0.8.1", + "rustls-pemfile 2.2.0", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.2", "tokio-socks", "tokio-util", "tower 0.5.2", @@ -3909,6 +4020,21 @@ version = "0.8.52" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c6a884d2998352bb4daf0183589aec883f16a6da1f4dde84d8e2e9a5409a1ce" +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + [[package]] name = "ring" version = "0.17.14" @@ -3919,7 +4045,7 @@ dependencies = [ "cfg-if", "getrandom 0.2.16", "libc", - "untrusted", + "untrusted 0.9.0", "windows-sys 0.52.0", ] @@ -4093,7 +4219,7 @@ dependencies = [ "quote", "ruma-identifiers-validation", "serde", - "syn", + "syn 2.0.104", "toml", ] @@ -4178,6 +4304,15 @@ dependencies = [ "semver", ] +[[package]] +name = "rusticata-macros" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632" +dependencies = [ + "nom", +] + [[package]] name = "rustix" version = "0.38.44" @@ -4204,6 +4339,18 @@ dependencies = [ "windows-sys 0.60.2", ] +[[package]] +name 
= "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring 0.17.14", + "rustls-webpki 0.101.7", + "sct", +] + [[package]] name = "rustls" version = "0.23.29" @@ -4213,13 +4360,25 @@ dependencies = [ "aws-lc-rs", "log", "once_cell", - "ring", + "ring 0.17.14", "rustls-pki-types", - "rustls-webpki", + "rustls-webpki 0.103.4", "subtle", "zeroize", ] +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework 2.11.1", +] + [[package]] name = "rustls-native-certs" version = "0.8.1" @@ -4229,7 +4388,16 @@ dependencies = [ "openssl-probe", "rustls-pki-types", "schannel", - "security-framework", + "security-framework 3.2.0", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", ] [[package]] @@ -4251,6 +4419,16 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring 0.17.14", + "untrusted 0.9.0", +] + [[package]] name = "rustls-webpki" version = "0.103.4" @@ -4258,9 +4436,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc" dependencies = [ "aws-lc-rs", - "ring", + "ring 0.17.14", "rustls-pki-types", - "untrusted", + "untrusted 0.9.0", ] [[package]] @@ -4319,6 +4497,16 @@ version = "1.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.14", + "untrusted 0.9.0", +] + [[package]] name = "sd-notify" version = "0.4.5" @@ -4328,6 +4516,19 @@ dependencies = [ "libc", ] +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.9.1", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + [[package]] name = "security-framework" version = "3.2.0" @@ -4335,7 +4536,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" dependencies = [ "bitflags 2.9.1", - "core-foundation", + "core-foundation 0.10.1", "core-foundation-sys", "libc", "security-framework-sys", @@ -4365,7 +4566,7 @@ checksum = "255914a8e53822abd946e2ce8baa41d4cded6b8e938913b7f7b9da5b7ab44335" dependencies = [ "httpdate", "reqwest", - "rustls", + "rustls 0.23.29", "sentry-backtrace", "sentry-contexts", "sentry-core", @@ -4509,7 +4710,7 @@ checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -4723,6 +4924,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + [[package]] name = "spki" version = "0.7.3" @@ -4791,6 +4998,17 @@ version = "2.6.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + [[package]] name = "syn" version = "2.0.104" @@ -4811,6 +5029,18 @@ dependencies = [ "futures-core", ] +[[package]] +name = "synstructure" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", + "unicode-xid", +] + [[package]] name = "synstructure" version = "0.13.2" @@ -4819,7 +5049,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -4910,7 +5140,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -4921,7 +5151,7 @@ checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -5088,7 +5318,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -5103,13 +5333,23 @@ dependencies = [ "tokio-stream", ] +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.12", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" dependencies = [ - "rustls", + "rustls 0.23.29", "tokio", ] @@ -5307,7 +5547,7 @@ source = "git+https://forgejo.ellis.link/continuwuation/tracing?rev=1e64095a8051 dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -5461,12 +5701,24 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + [[package]] name = "unsafe-libyaml" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + [[package]] name = "untrusted" version = "0.9.0" @@ -5482,7 +5734,7 @@ dependencies = [ "base64 0.22.1", "log", "once_cell", - "rustls", + "rustls 0.23.29", "rustls-pki-types", "url", "webpki-roots 0.26.11", @@ -5617,7 +5869,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn", + "syn 2.0.104", "wasm-bindgen-shared", ] @@ -5652,7 +5904,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5832,7 +6084,7 @@ checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -5843,7 +6095,7 @@ checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] 
[[package]] @@ -6164,6 +6416,23 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" +[[package]] +name = "x509-parser" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7069fba5b66b9193bd2c5d3d4ff12b839118f6bcbef5328efafafb5395cf63da" +dependencies = [ + "asn1-rs", + "data-encoding", + "der-parser", + "lazy_static", + "nom", + "oid-registry", + "rusticata-macros", + "thiserror 1.0.69", + "time", +] + [[package]] name = "xml5ever" version = "0.18.1" @@ -6221,8 +6490,8 @@ checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", - "syn", - "synstructure", + "syn 2.0.104", + "synstructure 0.13.2", ] [[package]] @@ -6242,7 +6511,7 @@ checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] @@ -6262,8 +6531,8 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn", - "synstructure", + "syn 2.0.104", + "synstructure 0.13.2", ] [[package]] @@ -6302,7 +6571,7 @@ checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.104", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index c656e183..9452066c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -546,6 +546,11 @@ features = ["std"] [workspace.dependencies.maplit] version = "1.0.2" +[workspace.dependencies.ldap3] +version = "0.11.5" +default-features = false +features = ["sync", "tls-rustls"] + # # Patches # @@ -867,7 +872,7 @@ unused-qualifications = "warn" #unused-results = "warn" # TODO ## some sadness -elided_named_lifetimes = "allow" # TODO! +mismatched_lifetime_syntaxes = "allow" # TODO! 
let_underscore_drop = "allow" missing_docs = "allow" # cfgs cannot be limited to expected cfgs or their de facto non-transitive/opt-in use-case e.g. @@ -1006,3 +1011,6 @@ literal_string_with_formatting_args = { level = "allow", priority = 1 } needless_raw_string_hashes = "allow" + +# TODO: Enable this lint & fix all instances +collapsible_if = "allow" diff --git a/arch/conduwuit.service b/arch/conduwuit.service index f7100179..34c3995e 100644 --- a/arch/conduwuit.service +++ b/arch/conduwuit.service @@ -64,7 +64,7 @@ StateDirectory=conduwuit RuntimeDirectory=conduwuit RuntimeDirectoryMode=0750 -Environment=CONTINUWUITY_CONFIG=${CREDENTIALS_DIRECTORY}/config.toml +Environment=CONTINUWUITY_CONFIG=%d/config.toml LoadCredential=config.toml:/etc/conduwuit/conduwuit.toml BindPaths=/var/lib/private/conduwuit:/var/lib/matrix-conduit BindPaths=/var/lib/private/conduwuit:/var/lib/private/matrix-conduit diff --git a/conduwuit-example.toml b/conduwuit-example.toml index 541050b1..f0e510b4 100644 --- a/conduwuit-example.toml +++ b/conduwuit-example.toml @@ -1696,6 +1696,10 @@ # #config_reload_signal = true +# This item is undocumented. Please contribute documentation for it. +# +#ldap = false + [global.tls] # Path to a valid TLS certificate file. @@ -1774,3 +1778,91 @@ # is 33.55MB. Setting it to 0 disables blurhashing. # #blurhash_max_raw_size = 33554432 + +[global.ldap] + +# Whether to enable LDAP login. +# +# example: "true" +# +#enable = false + +# Whether to force LDAP authentication or authorize classical password +# login. +# +# example: "true" +# +#ldap_only = false + +# URI of the LDAP server. +# +# example: "ldap://ldap.example.com:389" +# +#uri = "" + +# Root of the searches. +# +# example: "ou=users,dc=example,dc=org" +# +#base_dn = "" + +# Bind DN if anonymous search is not enabled. +# +# You can use the variable `{username}` that will be replaced by the +# entered username. 
In such case, the password used to bind will be the +# one provided for the login and not the one given by +# `bind_password_file`. Beware: automatically granting admin rights will +# not work if you use this direct bind instead of a LDAP search. +# +# example: "cn=ldap-reader,dc=example,dc=org" or +# "cn={username},ou=users,dc=example,dc=org" +# +#bind_dn = "" + +# Path to a file on the system that contains the password for the +# `bind_dn`. +# +# The server must be able to access the file, and it must not be empty. +# +#bind_password_file = "" + +# Search filter to limit user searches. +# +# You can use the variable `{username}` that will be replaced by the +# entered username for more complex filters. +# +# example: "(&(objectClass=person)(memberOf=matrix))" +# +#filter = "(objectClass=*)" + +# Attribute to use to uniquely identify the user. +# +# example: "uid" or "cn" +# +#uid_attribute = "uid" + +# Attribute containing the display name of the user. +# +# example: "givenName" or "sn" +# +#name_attribute = "givenName" + +# Root of the searches for admin users. +# +# Defaults to `base_dn` if empty. +# +# example: "ou=admins,dc=example,dc=org" +# +#admin_base_dn = "" + +# The LDAP search filter to find administrative users for continuwuity. +# +# If left blank, administrative state must be configured manually for each +# user. +# +# You can use the variable `{username}` that will be replaced by the +# entered username for more complex filters. 
+# +# example: "(objectClass=conduwuitAdmin)" or "(uid={username})" +# +#admin_filter = "" diff --git a/docs/deploying/docker-compose.for-traefik.yml b/docs/deploying/docker-compose.for-traefik.yml index 547712b6..f67e603b 100644 --- a/docs/deploying/docker-compose.for-traefik.yml +++ b/docs/deploying/docker-compose.for-traefik.yml @@ -12,6 +12,15 @@ services: #- ./continuwuity.toml:/etc/continuwuity.toml networks: - proxy + labels: + - "traefik.enable=true" + - "traefik.http.routers.continuwuity.rule=(Host(`matrix.example.com`) || (Host(`example.com`) && PathPrefix(`/.well-known/matrix`)))" + - "traefik.http.routers.continuwuity.entrypoints=websecure" # your HTTPS entry point + - "traefik.http.routers.continuwuity.tls=true" + - "traefik.http.routers.continuwuity.service=continuwuity" + - "traefik.http.services.continuwuity.loadbalancer.server.port=6167" + # possibly, depending on your config: + # - "traefik.http.routers.continuwuity.tls.certresolver=letsencrypt" environment: CONTINUWUITY_SERVER_NAME: your.server.name.example # EDIT THIS CONTINUWUITY_DATABASE_PATH: /var/lib/continuwuity diff --git a/docs/deploying/docker-compose.with-traefik.yml b/docs/deploying/docker-compose.with-traefik.yml index 49b7c905..8021e034 100644 --- a/docs/deploying/docker-compose.with-traefik.yml +++ b/docs/deploying/docker-compose.with-traefik.yml @@ -12,6 +12,14 @@ services: #- ./continuwuity.toml:/etc/continuwuity.toml networks: - proxy + labels: + - "traefik.enable=true" + - "traefik.http.routers.continuwuity.rule=(Host(`matrix.example.com`) || (Host(`example.com`) && PathPrefix(`/.well-known/matrix`)))" + - "traefik.http.routers.continuwuity.entrypoints=websecure" + - "traefik.http.routers.continuwuity.tls.certresolver=letsencrypt" + - "traefik.http.services.continuwuity.loadbalancer.server.port=6167" + # Uncomment and adjust the following if you want to use middleware + # - "traefik.http.routers.continuwuity.middlewares=secureHeaders@file" environment: CONTINUWUITY_SERVER_NAME: 
your.server.name.example # EDIT THIS CONTINUWUITY_TRUSTED_SERVERS: '["matrix.org"]' diff --git a/fedora/conduwuit.service b/fedora/conduwuit.service new file mode 100644 index 00000000..6ab2af46 --- /dev/null +++ b/fedora/conduwuit.service @@ -0,0 +1,68 @@ +[Unit] +Description=Continuwuity - Matrix homeserver +Documentation=https://continuwuity.org/ +Wants=network-online.target +After=network-online.target +Alias=matrix-conduwuit.service + +[Service] +DynamicUser=yes +User=conduwuit +Group=conduwuit +Type=notify + +Environment="CONTINUWUITY_CONFIG=/etc/conduwuit/conduwuit.toml" + +Environment="CONTINUWUITY_LOG_TO_JOURNALD=true" +Environment="CONTINUWUITY_JOURNALD_IDENTIFIER=%N" + +ExecStart=/usr/bin/conduwuit + +AmbientCapabilities= +CapabilityBoundingSet= + +DevicePolicy=closed +LockPersonality=yes +MemoryDenyWriteExecute=yes +NoNewPrivileges=yes +#ProcSubset=pid +ProtectClock=yes +ProtectControlGroups=yes +ProtectHome=yes +ProtectHostname=yes +ProtectKernelLogs=yes +ProtectKernelModules=yes +ProtectKernelTunables=yes +ProtectProc=invisible +ProtectSystem=strict +PrivateDevices=yes +PrivateMounts=yes +PrivateTmp=yes +PrivateUsers=yes +PrivateIPC=yes +RemoveIPC=yes +RestrictAddressFamilies=AF_INET AF_INET6 AF_UNIX +RestrictNamespaces=yes +RestrictRealtime=yes +RestrictSUIDSGID=yes +SystemCallArchitectures=native +SystemCallFilter=@system-service @resources +SystemCallFilter=~@clock @debug @module @mount @reboot @swap @cpu-emulation @obsolete @timer @chown @setuid @privileged @keyring @ipc +SystemCallErrorNumber=EPERM + +StateDirectory=conduwuit +ConfigurationDirectory=conduwuit +RuntimeDirectory=conduwuit +RuntimeDirectoryMode=0750 + +Restart=on-failure +RestartSec=5 + +TimeoutStopSec=2m +TimeoutStartSec=2m + +StartLimitInterval=1m +StartLimitBurst=5 + +[Install] +WantedBy=multi-user.target diff --git a/fedora/continuwuity.spec.rpkg b/fedora/continuwuity.spec.rpkg new file mode 100644 index 00000000..19edf701 --- /dev/null +++ b/fedora/continuwuity.spec.rpkg @@ 
-0,0 +1,80 @@ +# This should be run using rpkg-util: https://docs.pagure.org/rpkg-util +# it requires Internet access and is not suitable for Fedora main repos +# TODO: rpkg-util is no longer maintained, find a replacement + +Name: continuwuity +Version: {{{ git_repo_version }}} +Release: 1%{?dist} +Summary: Very cool Matrix chat homeserver written in Rust + +License: Apache-2.0 AND MIT + +URL: https://continuwuity.org +VCS: {{{ git_repo_vcs }}} +Source: {{{ git_repo_pack }}} + +BuildRequires: cargo-rpm-macros >= 25 +BuildRequires: systemd-rpm-macros +# Needed to build rust-librocksdb-sys +BuildRequires: clang +BuildRequires: liburing-devel + +Requires: liburing +Requires: glibc +Requires: libstdc++ + +%global _description %{expand: +A cool hard fork of Conduit, a Matrix homeserver written in Rust} + +%description %{_description} + +%prep +{{{ git_repo_setup_macro }}} +%cargo_prep -N +# Perform an online build so Git dependencies can be retrieved +sed -i 's/^offline = true$//' .cargo/config.toml + +%build +%cargo_build + +# Here's the one legally required mystery incantation in this file. +# Some of our dependencies have source files which are (for some reason) marked as excutable. +# Files in .cargo/registry/ are copied into /usr/src/ by the debuginfo machinery +# at the end of the build step, and then the BRP shebang mangling script checks +# the entire buildroot to find executable files, and fails the build because +# it thinks Rust's file attributes are shebangs because they start with `#!`. +# So we have to clear the executable bit on all of them before that happens. 
+find .cargo/registry/ -executable -name "*.rs" -exec chmod -x {} + + +# TODO: this fails currently because it's forced to run in offline mode +# {cargo_license -- --no-dev} > LICENSE.dependencies + +%install +install -Dpm0755 target/rpm/conduwuit -t %{buildroot}%{_bindir} +install -Dpm0644 fedora/conduwuit.service -t %{buildroot}%{_unitdir} +install -Dpm0644 conduwuit-example.toml %{buildroot}%{_sysconfdir}/conduwuit/conduwuit.toml + +%files +%license LICENSE +%license src/core/matrix/state_res/LICENSE +%doc CODE_OF_CONDUCT.md +%doc CONTRIBUTING.md +%doc README.md +%doc SECURITY.md +%config %{_sysconfdir}/conduwuit/conduwuit.toml + +%{_bindir}/conduwuit +%{_unitdir}/conduwuit.service +# Do not create /var/lib/conduwuit, systemd will create it if necessary + +%post +%systemd_post conduwuit.service + +%preun +%systemd_preun conduwuit.service + +%postun +%systemd_postun_with_restart conduwuit.service + +%changelog +{{{ git_repo_changelog }}} \ No newline at end of file diff --git a/flake.lock b/flake.lock index 51a04c6c..4c2bf9fb 100644 --- a/flake.lock +++ b/flake.lock @@ -153,11 +153,11 @@ "rust-analyzer-src": "rust-analyzer-src" }, "locked": { - "lastModified": 1751525020, - "narHash": "sha256-oDO6lCYS5Bf4jUITChj9XV7k3TP38DE0Ckz5n5ORCME=", + "lastModified": 1755585599, + "narHash": "sha256-tl/0cnsqB/Yt7DbaGMel2RLa7QG5elA8lkaOXli6VdY=", "owner": "nix-community", "repo": "fenix", - "rev": "a1a5f92f47787e7df9f30e5e5ac13e679215aa1e", + "rev": "6ed03ef4c8ec36d193c18e06b9ecddde78fb7e42", "type": "github" }, "original": { @@ -516,16 +516,16 @@ "rocksdb": { "flake": false, "locked": { - "lastModified": 1741308171, - "narHash": "sha256-YdBvdQ75UJg5ffwNjxizpviCVwVDJnBkM8ZtGIduMgY=", - "ref": "v9.11.1", - "rev": "3ce04794bcfbbb0d2e6f81ae35fc4acf688b6986", - "revCount": 13177, + "lastModified": 1753385396, + "narHash": "sha256-/Hvy1yTH/0D5aa7bc+/uqFugCQq4InTdwlRw88vA5IY=", + "ref": "10.4.fb", + "rev": "28d4b7276c16ed3e28af1bd96162d6442ce25923", + "revCount": 13318, "type": 
"git", "url": "https://forgejo.ellis.link/continuwuation/rocksdb" }, "original": { - "ref": "v9.11.1", + "ref": "10.4.fb", "type": "git", "url": "https://forgejo.ellis.link/continuwuation/rocksdb" } @@ -546,11 +546,11 @@ "rust-analyzer-src": { "flake": false, "locked": { - "lastModified": 1751433876, - "narHash": "sha256-IsdwOcvLLDDlkFNwhdD5BZy20okIQL01+UQ7Kxbqh8s=", + "lastModified": 1755504847, + "narHash": "sha256-VX0B9hwhJypCGqncVVLC+SmeMVd/GAYbJZ0MiiUn2Pk=", "owner": "rust-lang", "repo": "rust-analyzer", - "rev": "11d45c881389dae90b0da5a94cde52c79d0fc7ef", + "rev": "a905e3b21b144d77e1b304e49f3264f6f8d4db75", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 564cd479..d6beb84e 100644 --- a/flake.nix +++ b/flake.nix @@ -17,7 +17,7 @@ nix-filter.url = "github:numtide/nix-filter?ref=main"; nixpkgs.url = "github:NixOS/nixpkgs?ref=nixpkgs-unstable"; rocksdb = { - url = "git+https://forgejo.ellis.link/continuwuation/rocksdb?ref=v9.11.1"; + url = "git+https://forgejo.ellis.link/continuwuation/rocksdb?ref=10.4.fb"; flake = false; }; }; @@ -31,13 +31,17 @@ inherit system; }; + fnx = inputs.fenix.packages.${system}; # The Rust toolchain to use - toolchain = inputs.fenix.packages.${system}.fromToolchainFile { - file = ./rust-toolchain.toml; + toolchain = fnx.combine [ + (fnx.fromToolchainFile { + file = ./rust-toolchain.toml; - # See also `rust-toolchain.toml` - sha256 = "sha256-KUm16pHj+cRedf8vxs/Hd2YWxpOrWZ7UOrwhILdSJBU="; - }; + # See also `rust-toolchain.toml` + sha256 = "sha256-+9FmLhAOezBZCOziO0Qct1NOrfpjNsXxc/8I0c7BdKE="; + }) + fnx.complete.rustfmt + ]; mkScope = pkgs: @@ -62,7 +66,7 @@ }).overrideAttrs (old: { src = inputs.rocksdb; - version = "v9.11.1"; + version = "v10.4.fb"; cmakeFlags = pkgs.lib.subtractLists [ # No real reason to have snappy or zlib, no one uses this diff --git a/renovate.json b/renovate.json index eecf8532..deb428af 100644 --- a/renovate.json +++ b/renovate.json @@ -13,14 +13,50 @@ "enabled": true }, "labels": [ - 
"dependencies", - "github_actions" + "Dependencies", + "Dependencies/Renovate" ], "ignoreDeps": [ - "tikv-jemllocator", + "tikv-jemallocator", "tikv-jemalloc-sys", "tikv-jemalloc-ctl", - "opentelemetry-rust", + "opentelemetry", + "opentelemetry_sdk", + "opentelemetry-jaeger", "tracing-opentelemetry" + ], + "github-actions": { + "enabled": true, + "fileMatch": [ + "(^|/)\\.forgejo/workflows/[^/]+\\.ya?ml$", + "(^|/)\\.forgejo/actions/[^/]+/action\\.ya?ml$", + "(^|/)\\.github/workflows/[^/]+\\.ya?ml$", + "(^|/)\\.github/actions/[^/]+/action\\.ya?ml$" + ] + }, + "packageRules": [ + { + "description": "Batch minor and patch GitHub Actions updates", + "matchManagers": ["github-actions"], + "matchUpdateTypes": ["minor", "patch"], + "groupName": "github-actions-non-major" + }, + { + "description": "Group Rust toolchain updates into a single PR", + "matchManagers": ["regex"], + "matchPackageNames": ["rust", "rustc", "cargo"], + "groupName": "rust-toolchain" + }, + { + "description": "Group lockfile updates into a single PR", + "matchUpdateTypes": ["lockFileMaintenance"], + "groupName": "lockfile-maintenance" + }, + { + "description": "Batch patch-level Rust dependency updates", + "matchManagers": ["cargo"], + "matchUpdateTypes": ["patch"], + "groupName": "rust-patch-updates" + } ] } diff --git a/rust-toolchain.toml b/rust-toolchain.toml index bdb608aa..63e9d9ce 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -9,13 +9,16 @@ # If you're having trouble making the relevant changes, bug a maintainer. 
[toolchain] -channel = "1.87.0" profile = "minimal" +channel = "1.89.0" components = [ # For rust-analyzer "rust-src", "rust-analyzer", # For CI and editors - "rustfmt", "clippy", + # you have to install rustfmt nightly yourself (if you're not on NixOS) + # + # The rust-toolchain.toml file doesn't provide any syntax for specifying components from different toolchains + # "rustfmt" ] diff --git a/src/admin/user/commands.rs b/src/admin/user/commands.rs index 22c5db44..f6689a38 100644 --- a/src/admin/user/commands.rs +++ b/src/admin/user/commands.rs @@ -68,7 +68,8 @@ pub(super) async fn create_user(&self, username: String, password: Option return Err!("Couldn't reset the password for user {user_id}: {e}"), | Ok(()) => { @@ -947,6 +949,6 @@ pub(super) async fn force_leave_remote_room( .boxed() .await?; - self.write_str(&format!("{user_id} has been joined to {room_id}.",)) + self.write_str(&format!("{user_id} has been removed from {room_id} via a remote server.",)) .await } diff --git a/src/api/Cargo.toml b/src/api/Cargo.toml index 15ada812..9b4ea460 100644 --- a/src/api/Cargo.toml +++ b/src/api/Cargo.toml @@ -49,6 +49,9 @@ jemalloc_stats = [ "conduwuit-core/jemalloc_stats", "conduwuit-service/jemalloc_stats", ] +ldap = [ + "conduwuit-service/ldap" +] release_max_log_level = [ "conduwuit-core/release_max_log_level", "conduwuit-service/release_max_log_level", diff --git a/src/api/client/account.rs b/src/api/client/account.rs index 0cea7bd9..67268c9f 100644 --- a/src/api/client/account.rs +++ b/src/api/client/account.rs @@ -373,7 +373,7 @@ pub(crate) async fn register_route( let password = if is_guest { None } else { body.password.as_deref() }; // Create user - services.users.create(&user_id, password)?; + services.users.create(&user_id, password, None).await?; // Default to pretty displayname let mut displayname = user_id.localpart().to_owned(); @@ -659,7 +659,8 @@ pub(crate) async fn change_password_route( services .users - .set_password(sender_user, 
Some(&body.new_password))?; + .set_password(sender_user, Some(&body.new_password)) + .await?; if body.logout_devices { // Logout all devices except the current one diff --git a/src/api/client/message.rs b/src/api/client/message.rs index f8818ebb..4d489c2f 100644 --- a/src/api/client/message.rs +++ b/src/api/client/message.rs @@ -8,7 +8,7 @@ use conduwuit::{ ref_at, utils::{ IterStream, ReadyExt, - result::{FlatOk, LogErr}, + result::LogErr, stream::{BroadbandExt, TryIgnore, WidebandExt}, }, }; @@ -35,6 +35,7 @@ use ruma::{ }; use tracing::warn; +use super::utils::{count_to_token, parse_pagination_token as parse_token}; use crate::Ruma; /// list of safe and common non-state events to ignore if the user is ignored @@ -84,14 +85,14 @@ pub(crate) async fn get_message_events_route( let from: PduCount = body .from .as_deref() - .map(str::parse) + .map(parse_token) .transpose()? .unwrap_or_else(|| match body.dir { | Direction::Forward => PduCount::min(), | Direction::Backward => PduCount::max(), }); - let to: Option = body.to.as_deref().map(str::parse).flat_ok(); + let to: Option = body.to.as_deref().map(parse_token).transpose()?; let limit: usize = body .limit @@ -180,8 +181,8 @@ pub(crate) async fn get_message_events_route( .collect(); Ok(get_message_events::v3::Response { - start: from.to_string(), - end: next_token.as_ref().map(ToString::to_string), + start: count_to_token(from), + end: next_token.map(count_to_token), chunk, state, }) diff --git a/src/api/client/mod.rs b/src/api/client/mod.rs index 4a7a0590..be335cba 100644 --- a/src/api/client/mod.rs +++ b/src/api/client/mod.rs @@ -36,6 +36,7 @@ pub(super) mod typing; pub(super) mod unstable; pub(super) mod unversioned; pub(super) mod user_directory; +pub(super) mod utils; pub(super) mod voip; pub(super) mod well_known; diff --git a/src/api/client/profile.rs b/src/api/client/profile.rs index 1882495c..eaa66e70 100644 --- a/src/api/client/profile.rs +++ b/src/api/client/profile.rs @@ -90,7 +90,7 @@ pub(crate) async fn 
get_displayname_route( .await { if !services.users.exists(&body.user_id).await { - services.users.create(&body.user_id, None)?; + services.users.create(&body.user_id, None, None).await?; } services @@ -189,7 +189,7 @@ pub(crate) async fn get_avatar_url_route( .await { if !services.users.exists(&body.user_id).await { - services.users.create(&body.user_id, None)?; + services.users.create(&body.user_id, None, None).await?; } services @@ -248,7 +248,7 @@ pub(crate) async fn get_profile_route( .await { if !services.users.exists(&body.user_id).await { - services.users.create(&body.user_id, None)?; + services.users.create(&body.user_id, None, None).await?; } services diff --git a/src/api/client/relations.rs b/src/api/client/relations.rs index 1aa34ada..f6d8fe9e 100644 --- a/src/api/client/relations.rs +++ b/src/api/client/relations.rs @@ -18,6 +18,7 @@ use ruma::{ events::{TimelineEventType, relation::RelationType}, }; +use super::utils::{count_to_token, parse_pagination_token as parse_token}; use crate::Ruma; /// # `GET /_matrix/client/r0/rooms/{roomId}/relations/{eventId}/{relType}/{eventType}` @@ -110,14 +111,14 @@ async fn paginate_relations_with_filter( dir: Direction, ) -> Result { let start: PduCount = from - .map(str::parse) + .map(parse_token) .transpose()? 
.unwrap_or_else(|| match dir { | Direction::Forward => PduCount::min(), | Direction::Backward => PduCount::max(), }); - let to: Option = to.map(str::parse).flat_ok(); + let to: Option = to.map(parse_token).transpose()?; // Use limit or else 30, with maximum 100 let limit: usize = limit @@ -129,6 +130,11 @@ async fn paginate_relations_with_filter( // Spec (v1.10) recommends depth of at least 3 let depth: u8 = if recurse { 3 } else { 1 }; + // Check if this is a thread request + let is_thread = filter_rel_type + .as_ref() + .is_some_and(|rel| *rel == RelationType::Thread); + let events: Vec<_> = services .rooms .pdu_metadata @@ -152,23 +158,58 @@ async fn paginate_relations_with_filter( .collect() .await; - let next_batch = match dir { - | Direction::Forward => events.last(), - | Direction::Backward => events.first(), + // For threads, check if we should include the root event + let mut root_event = None; + if is_thread && dir == Direction::Backward { + // Check if we've reached the beginning of the thread + // (fewer events than requested means we've exhausted the thread) + if events.len() < limit { + // Try to get the thread root event + if let Ok(root_pdu) = services.rooms.timeline.get_pdu(target).await { + // Check visibility + if services + .rooms + .state_accessor + .user_can_see_event(sender_user, room_id, target) + .await + { + // Store the root event to add to the response + root_event = Some(root_pdu); + } + } + } } - .map(at!(0)) - .as_ref() - .map(ToString::to_string); + + // Determine if there are more events to fetch + let has_more = if root_event.is_some() { + false // We've included the root, no more events + } else { + // Check if we got a full page of results (might be more) + events.len() >= limit + }; + + let next_batch = if has_more { + match dir { + | Direction::Forward => events.last(), + | Direction::Backward => events.first(), + } + .map(|(count, _)| count_to_token(*count)) + } else { + None + }; + + // Build the response chunk with thread 
root if needed + let chunk: Vec<_> = root_event + .into_iter() + .map(Event::into_format) + .chain(events.into_iter().map(at!(1)).map(Event::into_format)) + .collect(); Ok(get_relating_events::v1::Response { next_batch, prev_batch: from.map(Into::into), recursion_depth: recurse.then_some(depth.into()), - chunk: events - .into_iter() - .map(at!(1)) - .map(Event::into_format) - .collect(), + chunk, }) } diff --git a/src/api/client/session.rs b/src/api/client/session.rs index 992073c6..da7bed2c 100644 --- a/src/api/client/session.rs +++ b/src/api/client/session.rs @@ -3,13 +3,14 @@ use std::time::Duration; use axum::extract::State; use axum_client_ip::InsecureClientIp; use conduwuit::{ - Err, Error, Result, debug, err, info, utils, - utils::{ReadyExt, hash}, + Err, Error, Result, debug, err, info, + utils::{self, ReadyExt, hash}, }; -use conduwuit_service::uiaa::SESSION_ID_LENGTH; +use conduwuit_core::{debug_error, debug_warn}; +use conduwuit_service::{Services, uiaa::SESSION_ID_LENGTH}; use futures::StreamExt; use ruma::{ - UserId, + OwnedUserId, UserId, api::client::{ session::{ get_login_token, @@ -49,6 +50,154 @@ pub(crate) async fn get_login_types_route( ])) } +/// Authenticates the given user by its ID and its password. +/// +/// Returns the user ID if successful, and an error otherwise. +#[tracing::instrument(skip_all, fields(%user_id), name = "password")] +pub(crate) async fn password_login( + services: &Services, + user_id: &UserId, + lowercased_user_id: &UserId, + password: &str, +) -> Result { + // Restrict login to accounts only of type 'password', including untyped + // legacy accounts which are equivalent to 'password'. 
+ if services + .users + .origin(user_id) + .await + .is_ok_and(|origin| origin != "password") + { + return Err!(Request(Forbidden("Account does not permit password login."))); + } + + let (hash, user_id) = match services.users.password_hash(user_id).await { + | Ok(hash) => (hash, user_id), + | Err(_) => services + .users + .password_hash(lowercased_user_id) + .await + .map(|hash| (hash, lowercased_user_id)) + .map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?, + }; + + if hash.is_empty() { + return Err!(Request(UserDeactivated("The user has been deactivated"))); + } + + hash::verify_password(password, &hash) + .inspect_err(|e| debug_error!("{e}")) + .map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?; + + Ok(user_id.to_owned()) +} + +/// Authenticates the given user through the configured LDAP server. +/// +/// Creates the user if the user is found in the LDAP and do not already have an +/// account. +#[tracing::instrument(skip_all, fields(%user_id), name = "ldap")] +pub(super) async fn ldap_login( + services: &Services, + user_id: &UserId, + lowercased_user_id: &UserId, + password: &str, +) -> Result { + let (user_dn, is_ldap_admin) = match services.config.ldap.bind_dn.as_ref() { + | Some(bind_dn) if bind_dn.contains("{username}") => + (bind_dn.replace("{username}", lowercased_user_id.localpart()), false), + | _ => { + debug!("Searching user in LDAP"); + + let dns = services.users.search_ldap(user_id).await?; + if dns.len() >= 2 { + return Err!(Ldap("LDAP search returned two or more results")); + } + + let Some((user_dn, is_admin)) = dns.first() else { + return password_login(services, user_id, lowercased_user_id, password).await; + }; + + (user_dn.clone(), *is_admin) + }, + }; + + let user_id = services + .users + .auth_ldap(&user_dn, password) + .await + .map(|()| lowercased_user_id.to_owned())?; + + // LDAP users are automatically created on first login attempt. 
This is a very + common feature that can be seen on many services using a LDAP provider for + their users (synapse, Nextcloud, Jellyfin, ...). + // + // LDAP users are created with a dummy password but non-empty because an empty + password is reserved for deactivated accounts. The conduwuit password field + will never be read to login a LDAP user so it's not an issue. + if !services.users.exists(lowercased_user_id).await { + services + .users + .create(lowercased_user_id, Some("*"), Some("ldap")) + .await?; + } + + let is_conduwuit_admin = services.admin.user_is_admin(lowercased_user_id).await; + + if is_ldap_admin && !is_conduwuit_admin { + services.admin.make_user_admin(lowercased_user_id).await?; + } else if !is_ldap_admin && is_conduwuit_admin { + services.admin.revoke_admin(lowercased_user_id).await?; + } + + Ok(user_id) +} + +pub(crate) async fn handle_login( + services: &Services, + body: &Ruma, + identifier: Option<&uiaa::UserIdentifier>, + password: &str, + user: Option<&String>, +) -> Result { + debug!("Got password login type"); + let user_id = + if let Some(uiaa::UserIdentifier::UserIdOrLocalpart(user_id)) = identifier { + UserId::parse_with_server_name(user_id, &services.config.server_name) + } else if let Some(user) = user { + UserId::parse_with_server_name(user, &services.config.server_name) + } else { + return Err!(Request(Unknown( + debug_warn!(?body.login_info, "Valid identifier or username was not provided (invalid or unsupported login type?)") + ))); + } + .map_err(|e| err!(Request(InvalidUsername(warn!("Username is invalid: {e}")))))?; + + let lowercased_user_id = UserId::parse_with_server_name( + user_id.localpart().to_lowercase(), + &services.config.server_name, + )?; + + if !services.globals.user_is_local(&user_id) + || !services.globals.user_is_local(&lowercased_user_id) + { + return Err!(Request(Unknown("User ID does not belong to this homeserver"))); + } + + if cfg!(feature = "ldap") && services.config.ldap.enable { + match
Box::pin(ldap_login(services, &user_id, &lowercased_user_id, password)).await { + | Ok(user_id) => Ok(user_id), + | Err(err) if services.config.ldap.ldap_only => Err(err), + | Err(err) => { + debug_warn!("{err}"); + password_login(services, &user_id, &lowercased_user_id, password).await + }, + } + } else { + password_login(services, &user_id, &lowercased_user_id, password).await + } +} + /// # `POST /_matrix/client/v3/login` /// /// Authenticates the user and returns an access token it can use in subsequent @@ -80,70 +229,7 @@ pub(crate) async fn login_route( password, user, .. - }) => { - debug!("Got password login type"); - let user_id = - if let Some(uiaa::UserIdentifier::UserIdOrLocalpart(user_id)) = identifier { - UserId::parse_with_server_name(user_id, &services.config.server_name) - } else if let Some(user) = user { - UserId::parse_with_server_name(user, &services.config.server_name) - } else { - return Err!(Request(Unknown( - debug_warn!(?body.login_info, "Valid identifier or username was not provided (invalid or unsupported login type?)") - ))); - } - .map_err(|e| err!(Request(InvalidUsername(warn!("Username is invalid: {e}")))))?; - - let lowercased_user_id = UserId::parse_with_server_name( - user_id.localpart().to_lowercase(), - &services.config.server_name, - )?; - - if !services.globals.user_is_local(&user_id) - || !services.globals.user_is_local(&lowercased_user_id) - { - return Err!(Request(Unknown("User ID does not belong to this homeserver"))); - } - - // first try the username as-is - let hash = services - .users - .password_hash(&user_id) - .await - .inspect_err(|e| debug!("{e}")); - - match hash { - | Ok(hash) => { - if hash.is_empty() { - return Err!(Request(UserDeactivated("The user has been deactivated"))); - } - - hash::verify_password(password, &hash) - .inspect_err(|e| debug!("{e}")) - .map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?; - - user_id - }, - | Err(_e) => { - let hash_lowercased_user_id = services - .users 
- .password_hash(&lowercased_user_id) - .await - .inspect_err(|e| debug!("{e}")) - .map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?; - - if hash_lowercased_user_id.is_empty() { - return Err!(Request(UserDeactivated("The user has been deactivated"))); - } - - hash::verify_password(password, &hash_lowercased_user_id) - .inspect_err(|e| debug!("{e}")) - .map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?; - - lowercased_user_id - }, - } - }, + }) => handle_login(&services, &body, identifier.as_ref(), password, user.as_ref()).await?, | login::v3::LoginInfo::Token(login::v3::Token { token }) => { debug!("Got token login type"); if !services.server.config.login_via_existing_session { @@ -198,8 +284,8 @@ pub(crate) async fn login_route( .clone() .unwrap_or_else(|| utils::random_string(DEVICE_ID_LENGTH).into()); - // Generate a new token for the device - let token = utils::random_string(TOKEN_LENGTH); + // Generate a new token for the device (ensuring no collisions) + let token = services.users.generate_unique_token().await; // Determine if device_id was provided and exists in the db for this user let device_exists = if body.device_id.is_some() { diff --git a/src/api/client/sync/v4.rs b/src/api/client/sync/v4.rs index 14cd50d8..a16e4526 100644 --- a/src/api/client/sync/v4.rs +++ b/src/api/client/sync/v4.rs @@ -45,6 +45,7 @@ use crate::{ type TodoRooms = BTreeMap, usize, u64)>; const SINGLE_CONNECTION_SYNC: &str = "single_connection_sync"; +#[allow(clippy::cognitive_complexity)] /// POST `/_matrix/client/unstable/org.matrix.msc3575/sync` /// /// Sliding Sync endpoint (future endpoint: `/_matrix/client/v4/sync`) diff --git a/src/api/client/unstable.rs b/src/api/client/unstable.rs index 08f70975..f8703ff3 100644 --- a/src/api/client/unstable.rs +++ b/src/api/client/unstable.rs @@ -292,7 +292,7 @@ pub(crate) async fn get_timezone_key_route( .await { if !services.users.exists(&body.user_id).await { - services.users.create(&body.user_id, 
None)?; + services.users.create(&body.user_id, None, None).await?; } services @@ -352,7 +352,7 @@ pub(crate) async fn get_profile_key_route( .await { if !services.users.exists(&body.user_id).await { - services.users.create(&body.user_id, None)?; + services.users.create(&body.user_id, None, None).await?; } services diff --git a/src/api/client/utils.rs b/src/api/client/utils.rs new file mode 100644 index 00000000..cc941b95 --- /dev/null +++ b/src/api/client/utils.rs @@ -0,0 +1,28 @@ +use conduwuit::{ + Result, err, + matrix::pdu::{PduCount, ShortEventId}, +}; + +/// Parse a pagination token, trying ShortEventId first, then falling back to +/// PduCount +pub(crate) fn parse_pagination_token(token: &str) -> Result { + // Try parsing as ShortEventId first + if let Ok(shorteventid) = token.parse::() { + // ShortEventId maps directly to a PduCount in our database + Ok(PduCount::Normal(shorteventid)) + } else if let Ok(count) = token.parse::() { + // Fallback to PduCount for backwards compatibility + Ok(PduCount::Normal(count)) + } else if let Ok(count) = token.parse::() { + // Also handle negative counts for backfilled events + Ok(PduCount::from_signed(count)) + } else { + Err(err!(Request(InvalidParam("Invalid pagination token")))) + } +} + +/// Convert a PduCount to a token string (using the underlying ShortEventId) +pub(crate) fn count_to_token(count: PduCount) -> String { + // The PduCount's unsigned value IS the ShortEventId + count.into_unsigned().to_string() +} diff --git a/src/api/router/auth.rs b/src/api/router/auth.rs index 01254c32..44afc3ef 100644 --- a/src/api/router/auth.rs +++ b/src/api/router/auth.rs @@ -5,6 +5,14 @@ use axum_extra::{ typed_header::TypedHeaderRejectionReason, }; use conduwuit::{Err, Error, Result, debug_error, err, warn}; +use futures::{ + TryFutureExt, + future::{ + Either::{Left, Right}, + select_ok, + }, + pin_mut, +}; use ruma::{ CanonicalJsonObject, CanonicalJsonValue, OwnedDeviceId, OwnedServerName, OwnedUserId, UserId, api::{ @@ 
-54,17 +62,7 @@ pub(super) async fn auth( | None => request.query.access_token.as_deref(), }; - let token = if let Some(token) = token { - match services.appservice.find_from_token(token).await { - | Some(reg_info) => Token::Appservice(Box::new(reg_info)), - | _ => match services.users.find_from_token(token).await { - | Ok((user_id, device_id)) => Token::User((user_id, device_id)), - | _ => Token::Invalid, - }, - } - } else { - Token::None - }; + let token = find_token(services, token).await?; if metadata.authentication == AuthScheme::None { match metadata { @@ -342,3 +340,25 @@ async fn parse_x_matrix(request: &mut Request) -> Result { Ok(x_matrix) } + +async fn find_token(services: &Services, token: Option<&str>) -> Result { + let Some(token) = token else { + return Ok(Token::None); + }; + + let user_token = services.users.find_from_token(token).map_ok(Token::User); + + let appservice_token = services + .appservice + .find_from_token(token) + .map_ok(Box::new) + .map_ok(Token::Appservice); + + pin_mut!(user_token, appservice_token); + // Returns Ok if either token type succeeds, Err only if both fail + match select_ok([Left(user_token), Right(appservice_token)]).await { + | Err(e) if !e.is_not_found() => Err(e), + | Ok((token, _)) => Ok(token), + | _ => Ok(Token::Invalid), + } +} diff --git a/src/core/config/mod.rs b/src/core/config/mod.rs index aa021be7..e8518ed4 100644 --- a/src/core/config/mod.rs +++ b/src/core/config/mod.rs @@ -1,3 +1,4 @@ +#![allow(clippy::doc_link_with_quotes)] pub mod check; pub mod manager; pub mod proxy; @@ -1947,6 +1948,10 @@ pub struct Config { pub allow_invalid_tls_certificates_yes_i_know_what_the_fuck_i_am_doing_with_this_and_i_know_this_is_insecure: bool, + // external structure; separate section + #[serde(default)] + pub ldap: LdapConfig, + // external structure; separate section #[serde(default)] pub blurhashing: BlurhashConfig, @@ -2041,6 +2046,114 @@ pub struct BlurhashConfig { pub blurhash_max_raw_size: u64, } +#[derive(Clone, 
Debug, Default, Deserialize)] +#[config_example_generator(filename = "conduwuit-example.toml", section = "global.ldap")] +pub struct LdapConfig { + /// Whether to enable LDAP login. + /// + /// example: "true" + #[serde(default)] + pub enable: bool, + + /// Whether to force LDAP authentication or authorize classical password + /// login. + /// + /// example: "true" + #[serde(default)] + pub ldap_only: bool, + + /// URI of the LDAP server. + /// + /// example: "ldap://ldap.example.com:389" + /// + /// default: "" + #[serde(default)] + pub uri: Option, + + /// Root of the searches. + /// + /// example: "ou=users,dc=example,dc=org" + /// + /// default: "" + #[serde(default)] + pub base_dn: String, + + /// Bind DN if anonymous search is not enabled. + /// + /// You can use the variable `{username}` that will be replaced by the + /// entered username. In such case, the password used to bind will be the + /// one provided for the login and not the one given by + /// `bind_password_file`. Beware: automatically granting admin rights will + /// not work if you use this direct bind instead of a LDAP search. + /// + /// example: "cn=ldap-reader,dc=example,dc=org" or + /// "cn={username},ou=users,dc=example,dc=org" + /// + /// default: "" + #[serde(default)] + pub bind_dn: Option, + + /// Path to a file on the system that contains the password for the + /// `bind_dn`. + /// + /// The server must be able to access the file, and it must not be empty. + /// + /// default: "" + #[serde(default)] + pub bind_password_file: Option, + + /// Search filter to limit user searches. + /// + /// You can use the variable `{username}` that will be replaced by the + /// entered username for more complex filters. + /// + /// example: "(&(objectClass=person)(memberOf=matrix))" + /// + /// default: "(objectClass=*)" + #[serde(default = "default_ldap_search_filter")] + pub filter: String, + + /// Attribute to use to uniquely identify the user. 
+ /// + /// example: "uid" or "cn" + /// + /// default: "uid" + #[serde(default = "default_ldap_uid_attribute")] + pub uid_attribute: String, + + /// Attribute containing the display name of the user. + /// + /// example: "givenName" or "sn" + /// + /// default: "givenName" + #[serde(default = "default_ldap_name_attribute")] + pub name_attribute: String, + + /// Root of the searches for admin users. + /// + /// Defaults to `base_dn` if empty. + /// + /// example: "ou=admins,dc=example,dc=org" + /// + /// default: "" + #[serde(default)] + pub admin_base_dn: String, + + /// The LDAP search filter to find administrative users for continuwuity. + /// + /// If left blank, administrative state must be configured manually for each + /// user. + /// + /// You can use the variable `{username}` that will be replaced by the + /// entered username for more complex filters. + /// + /// example: "(objectClass=conduwuitAdmin)" or "(uid={username})" + /// + /// default: "" + #[serde(default)] + pub admin_filter: String, +} + #[derive(Deserialize, Clone, Debug)] #[serde(transparent)] struct ListeningPort { @@ -2430,3 +2543,9 @@ pub(super) fn default_blurhash_x_component() -> u32 { 4 } pub(super) fn default_blurhash_y_component() -> u32 { 3 } // end recommended & blurhashing defaults + +fn default_ldap_search_filter() -> String { "(objectClass=*)".to_owned() } + +fn default_ldap_uid_attribute() -> String { String::from("uid") } + +fn default_ldap_name_attribute() -> String { String::from("givenName") } diff --git a/src/core/debug.rs b/src/core/debug.rs index 21a5ada4..c728278d 100644 --- a/src/core/debug.rs +++ b/src/core/debug.rs @@ -100,7 +100,7 @@ pub fn trap() { #[must_use] pub fn panic_str(p: &Box) -> &'static str { - p.downcast_ref::<&str>().copied().unwrap_or_default() + (**p).downcast_ref::<&str>().copied().unwrap_or_default() } #[inline(always)] diff --git a/src/core/error/mod.rs b/src/core/error/mod.rs index e46edf09..541af793 100644 --- a/src/core/error/mod.rs +++ 
b/src/core/error/mod.rs @@ -110,6 +110,8 @@ pub enum Error { InconsistentRoomState(&'static str, ruma::OwnedRoomId), #[error(transparent)] IntoHttp(#[from] ruma::api::error::IntoHttpError), + #[error("{0}")] + Ldap(Cow<'static, str>), #[error(transparent)] Mxc(#[from] ruma::MxcUriError), #[error(transparent)] diff --git a/src/core/info/room_version.rs b/src/core/info/room_version.rs index 51d5d3c6..54ed8fdc 100644 --- a/src/core/info/room_version.rs +++ b/src/core/info/room_version.rs @@ -18,7 +18,7 @@ pub const STABLE_ROOM_VERSIONS: &[RoomVersionId] = &[ /// Experimental, partially supported room versions pub const UNSTABLE_ROOM_VERSIONS: &[RoomVersionId] = - &[RoomVersionId::V2, RoomVersionId::V3, RoomVersionId::V4, RoomVersionId::V5]; + &[RoomVersionId::V3, RoomVersionId::V4, RoomVersionId::V5]; type RoomVersion = (RoomVersionId, RoomVersionStability); diff --git a/src/database/map/get_batch.rs b/src/database/map/get_batch.rs index e23a8848..539f0c39 100644 --- a/src/database/map/get_batch.rs +++ b/src/database/map/get_batch.rs @@ -19,7 +19,7 @@ where S: Stream + Send + 'a, K: AsRef<[u8]> + Send + Sync + 'a, { - fn get(self, map: &'a Arc) -> impl Stream>> + Send + 'a; + fn get(self, map: &'a Arc) -> impl Stream>> + Send + 'a; } impl<'a, K, S> Get<'a, K, S> for S @@ -29,7 +29,7 @@ where K: AsRef<[u8]> + Send + Sync + 'a, { #[inline] - fn get(self, map: &'a Arc) -> impl Stream>> + Send + 'a { + fn get(self, map: &'a Arc) -> impl Stream>> + Send + 'a { map.get_batch(self) } } @@ -39,7 +39,7 @@ where pub(crate) fn get_batch<'a, S, K>( self: &'a Arc, keys: S, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where S: Stream + Send + 'a, K: AsRef<[u8]> + Send + Sync + 'a, diff --git a/src/database/map/keys.rs b/src/database/map/keys.rs index 7ca932a5..ac044e91 100644 --- a/src/database/map/keys.rs +++ b/src/database/map/keys.rs @@ -10,7 +10,7 @@ use super::stream::is_cached; use crate::{keyval, keyval::Key, stream}; #[implement(super::Map)] -pub fn 
keys<'a, K>(self: &'a Arc) -> impl Stream>> + Send +pub fn keys<'a, K>(self: &'a Arc) -> impl Stream>> + Send where K: Deserialize<'a> + Send, { diff --git a/src/database/map/keys_from.rs b/src/database/map/keys_from.rs index c9b1717a..11245f7b 100644 --- a/src/database/map/keys_from.rs +++ b/src/database/map/keys_from.rs @@ -15,7 +15,7 @@ use crate::{ pub fn keys_from<'a, K, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, P> +) -> impl Stream>> + Send + use<'a, K, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -40,7 +40,7 @@ where pub fn keys_raw_from<'a, K, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, P> +) -> impl Stream>> + Send + use<'a, K, P> where P: AsRef<[u8]> + ?Sized + Debug + Sync, K: Deserialize<'a> + Send, diff --git a/src/database/map/keys_prefix.rs b/src/database/map/keys_prefix.rs index 09dd79ac..e6a9457f 100644 --- a/src/database/map/keys_prefix.rs +++ b/src/database/map/keys_prefix.rs @@ -10,7 +10,7 @@ use crate::keyval::{Key, result_deserialize_key, serialize_key}; pub fn keys_prefix<'a, K, P>( self: &'a Arc, prefix: &P, -) -> impl Stream>> + Send + use<'a, K, P> +) -> impl Stream>> + Send + use<'a, K, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -37,7 +37,7 @@ where pub fn keys_raw_prefix<'a, K, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, K: Deserialize<'a> + Send + 'a, @@ -50,7 +50,7 @@ where pub fn raw_keys_prefix<'a, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, { diff --git a/src/database/map/qry_batch.rs b/src/database/map/qry_batch.rs index e42d3e63..9da546e6 100644 --- a/src/database/map/qry_batch.rs +++ b/src/database/map/qry_batch.rs @@ -17,7 +17,7 @@ where S: Stream + Send + 'a, K: Serialize + Debug, { - fn qry(self, map: &'a Arc) -> impl 
Stream>> + Send + 'a; + fn qry(self, map: &'a Arc) -> impl Stream>> + Send + 'a; } impl<'a, K, S> Qry<'a, K, S> for S @@ -27,7 +27,7 @@ where K: Serialize + Debug + 'a, { #[inline] - fn qry(self, map: &'a Arc) -> impl Stream>> + Send + 'a { + fn qry(self, map: &'a Arc) -> impl Stream>> + Send + 'a { map.qry_batch(self) } } @@ -37,7 +37,7 @@ where pub(crate) fn qry_batch<'a, S, K>( self: &'a Arc, keys: S, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where S: Stream + Send + 'a, K: Serialize + Debug + 'a, diff --git a/src/database/map/rev_keys.rs b/src/database/map/rev_keys.rs index c00f3e55..8f48a17e 100644 --- a/src/database/map/rev_keys.rs +++ b/src/database/map/rev_keys.rs @@ -10,7 +10,7 @@ use super::rev_stream::is_cached; use crate::{keyval, keyval::Key, stream}; #[implement(super::Map)] -pub fn rev_keys<'a, K>(self: &'a Arc) -> impl Stream>> + Send +pub fn rev_keys<'a, K>(self: &'a Arc) -> impl Stream>> + Send where K: Deserialize<'a> + Send, { diff --git a/src/database/map/rev_keys_from.rs b/src/database/map/rev_keys_from.rs index 04e457dc..021e3b92 100644 --- a/src/database/map/rev_keys_from.rs +++ b/src/database/map/rev_keys_from.rs @@ -15,7 +15,7 @@ use crate::{ pub fn rev_keys_from<'a, K, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, P> +) -> impl Stream>> + Send + use<'a, K, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -41,7 +41,7 @@ where pub fn rev_keys_raw_from<'a, K, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, P> +) -> impl Stream>> + Send + use<'a, K, P> where P: AsRef<[u8]> + ?Sized + Debug + Sync, K: Deserialize<'a> + Send, diff --git a/src/database/map/rev_keys_prefix.rs b/src/database/map/rev_keys_prefix.rs index fbe9f9ca..5b1459f5 100644 --- a/src/database/map/rev_keys_prefix.rs +++ b/src/database/map/rev_keys_prefix.rs @@ -10,7 +10,7 @@ use crate::keyval::{Key, result_deserialize_key, serialize_key}; pub fn rev_keys_prefix<'a, K, P>( self: &'a Arc, 
prefix: &P, -) -> impl Stream>> + Send + use<'a, K, P> +) -> impl Stream>> + Send + use<'a, K, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -37,7 +37,7 @@ where pub fn rev_keys_raw_prefix<'a, K, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, K: Deserialize<'a> + Send + 'a, @@ -50,7 +50,7 @@ where pub fn rev_raw_keys_prefix<'a, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, { diff --git a/src/database/map/rev_stream.rs b/src/database/map/rev_stream.rs index 789a52e8..92d7bac8 100644 --- a/src/database/map/rev_stream.rs +++ b/src/database/map/rev_stream.rs @@ -14,7 +14,7 @@ use crate::{keyval, keyval::KeyVal, stream}; #[implement(super::Map)] pub fn rev_stream<'a, K, V>( self: &'a Arc, -) -> impl Stream>> + Send +) -> impl Stream>> + Send where K: Deserialize<'a> + Send, V: Deserialize<'a> + Send, diff --git a/src/database/map/rev_stream_from.rs b/src/database/map/rev_stream_from.rs index a612d2a2..7fef1b35 100644 --- a/src/database/map/rev_stream_from.rs +++ b/src/database/map/rev_stream_from.rs @@ -20,7 +20,7 @@ use crate::{ pub fn rev_stream_from<'a, K, V, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, V, P> +) -> impl Stream>> + Send + use<'a, K, V, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -55,7 +55,7 @@ where pub fn rev_stream_raw_from<'a, K, V, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, V, P> +) -> impl Stream>> + Send + use<'a, K, V, P> where P: AsRef<[u8]> + ?Sized + Debug + Sync, K: Deserialize<'a> + Send, diff --git a/src/database/map/rev_stream_prefix.rs b/src/database/map/rev_stream_prefix.rs index 46dc9247..70d4abf7 100644 --- a/src/database/map/rev_stream_prefix.rs +++ b/src/database/map/rev_stream_prefix.rs @@ -14,7 +14,7 @@ use crate::keyval::{KeyVal, 
result_deserialize, serialize_key}; pub fn rev_stream_prefix<'a, K, V, P>( self: &'a Arc, prefix: &P, -) -> impl Stream>> + Send + use<'a, K, V, P> +) -> impl Stream>> + Send + use<'a, K, V, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -50,7 +50,7 @@ where pub fn rev_stream_raw_prefix<'a, K, V, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, K: Deserialize<'a> + Send + 'a, @@ -68,7 +68,7 @@ where pub fn rev_raw_stream_prefix<'a, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, { diff --git a/src/database/map/stream.rs b/src/database/map/stream.rs index f7371b6c..736ab268 100644 --- a/src/database/map/stream.rs +++ b/src/database/map/stream.rs @@ -14,7 +14,7 @@ use crate::{keyval, keyval::KeyVal, stream}; #[implement(super::Map)] pub fn stream<'a, K, V>( self: &'a Arc, -) -> impl Stream>> + Send +) -> impl Stream>> + Send where K: Deserialize<'a> + Send, V: Deserialize<'a> + Send, diff --git a/src/database/map/stream_from.rs b/src/database/map/stream_from.rs index ccf48db6..9acec173 100644 --- a/src/database/map/stream_from.rs +++ b/src/database/map/stream_from.rs @@ -19,7 +19,7 @@ use crate::{ pub fn stream_from<'a, K, V, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, V, P> +) -> impl Stream>> + Send + use<'a, K, V, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -53,7 +53,7 @@ where pub fn stream_raw_from<'a, K, V, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, V, P> +) -> impl Stream>> + Send + use<'a, K, V, P> where P: AsRef<[u8]> + ?Sized + Debug + Sync, K: Deserialize<'a> + Send, diff --git a/src/database/map/stream_prefix.rs b/src/database/map/stream_prefix.rs index a26478aa..8210e152 100644 --- a/src/database/map/stream_prefix.rs +++ b/src/database/map/stream_prefix.rs @@ 
-14,7 +14,7 @@ use crate::keyval::{KeyVal, result_deserialize, serialize_key}; pub fn stream_prefix<'a, K, V, P>( self: &'a Arc, prefix: &P, -) -> impl Stream>> + Send + use<'a, K, V, P> +) -> impl Stream>> + Send + use<'a, K, V, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -50,7 +50,7 @@ where pub fn stream_raw_prefix<'a, K, V, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, K: Deserialize<'a> + Send + 'a, @@ -68,7 +68,7 @@ where pub fn raw_stream_prefix<'a, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, { diff --git a/src/database/maps.rs b/src/database/maps.rs index 214dbf34..da97ef45 100644 --- a/src/database/maps.rs +++ b/src/database/maps.rs @@ -374,6 +374,10 @@ pub(super) static MAPS: &[Descriptor] = &[ name: "userid_masterkeyid", ..descriptor::RANDOM_SMALL }, + Descriptor { + name: "userid_origin", + ..descriptor::RANDOM + }, Descriptor { name: "userid_password", ..descriptor::RANDOM diff --git a/src/database/pool.rs b/src/database/pool.rs index 285aaf25..3421f779 100644 --- a/src/database/pool.rs +++ b/src/database/pool.rs @@ -443,7 +443,7 @@ pub(crate) fn into_send_seek(result: stream::State<'_>) -> stream::State<'static unsafe { std::mem::transmute(result) } } -fn into_recv_seek(result: stream::State<'static>) -> stream::State<'_> { +fn into_recv_seek(result: stream::State<'static>) -> stream::State<'static> { // SAFETY: This is to receive the State from the channel; see above. 
unsafe { std::mem::transmute(result) } } diff --git a/src/database/tests.rs b/src/database/tests.rs index c1a9f47c..30562a66 100644 --- a/src/database/tests.rs +++ b/src/database/tests.rs @@ -326,7 +326,7 @@ fn ser_array() { } #[test] -#[ignore] +#[ignore = "arrayvec deserialization is not implemented (separators)"] fn de_array() { let a: u64 = 123_456; let b: u64 = 987_654; @@ -358,7 +358,7 @@ fn de_array() { } #[test] -#[ignore] +#[ignore = "Nested sequences are not supported"] fn de_complex() { type Key<'a> = (&'a UserId, ArrayVec, &'a RoomId); diff --git a/src/main/Cargo.toml b/src/main/Cargo.toml index 0d7dd844..eafa1e48 100644 --- a/src/main/Cargo.toml +++ b/src/main/Cargo.toml @@ -56,6 +56,7 @@ standard = [ "jemalloc", "jemalloc_conf", "journald", + "ldap", "media_thumbnail", "systemd", "url_preview", @@ -63,7 +64,7 @@ standard = [ ] full = [ "standard", - "hardened_malloc", + # "hardened_malloc", # Conflicts with jemalloc "jemalloc_prof", "perf_measurements", "tokio_console" @@ -114,6 +115,9 @@ jemalloc_stats = [ jemalloc_conf = [ "conduwuit-core/jemalloc_conf", ] +ldap = [ + "conduwuit-api/ldap", +] media_thumbnail = [ "conduwuit-service/media_thumbnail", ] diff --git a/src/router/serve/unix.rs b/src/router/serve/unix.rs index 2af17274..9bb3dd6e 100644 --- a/src/router/serve/unix.rs +++ b/src/router/serve/unix.rs @@ -30,7 +30,7 @@ use tower::{Service, ServiceExt}; type MakeService = IntoMakeServiceWithConnectInfo; -const NULL_ADDR: net::SocketAddr = net::SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0); +const NULL_ADDR: net::SocketAddr = net::SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); const FINI_POLL_INTERVAL: Duration = Duration::from_millis(750); #[tracing::instrument(skip_all, level = "debug")] diff --git a/src/service/Cargo.toml b/src/service/Cargo.toml index fdebd1d7..6e538f40 100644 --- a/src/service/Cargo.toml +++ b/src/service/Cargo.toml @@ -53,6 +53,9 @@ jemalloc_stats = [ "conduwuit-core/jemalloc_stats", 
"conduwuit-database/jemalloc_stats", ] +ldap = [ + "dep:ldap3" +] media_thumbnail = [ "dep:image", ] @@ -89,6 +92,8 @@ image.workspace = true image.optional = true ipaddress.workspace = true itertools.workspace = true +ldap3.workspace = true +ldap3.optional = true log.workspace = true loole.workspace = true lru-cache.workspace = true diff --git a/src/service/admin/create.rs b/src/service/admin/create.rs index 157b4d65..755673fe 100644 --- a/src/service/admin/create.rs +++ b/src/service/admin/create.rs @@ -38,7 +38,7 @@ pub async fn create_admin_room(services: &Services) -> Result { // Create a user for the server let server_user = services.globals.server_user.as_ref(); - services.users.create(server_user, None)?; + services.users.create(server_user, None, None).await?; let create_content = { use RoomVersionId::*; diff --git a/src/service/appservice/mod.rs b/src/service/appservice/mod.rs index 7be8a471..adbf3b6e 100644 --- a/src/service/appservice/mod.rs +++ b/src/service/appservice/mod.rs @@ -4,14 +4,14 @@ mod registration_info; use std::{collections::BTreeMap, iter::IntoIterator, sync::Arc}; use async_trait::async_trait; -use conduwuit::{Result, err, utils::stream::IterStream}; +use conduwuit::{Err, Result, err, utils::stream::IterStream}; use database::Map; use futures::{Future, FutureExt, Stream, TryStreamExt}; use ruma::{RoomAliasId, RoomId, UserId, api::appservice::Registration}; use tokio::sync::{RwLock, RwLockReadGuard}; pub use self::{namespace_regex::NamespaceRegex, registration_info::RegistrationInfo}; -use crate::{Dep, sending}; +use crate::{Dep, globals, sending, users}; pub struct Service { registration_info: RwLock, @@ -20,7 +20,9 @@ pub struct Service { } struct Services { + globals: Dep, sending: Dep, + users: Dep, } struct Data { @@ -35,7 +37,9 @@ impl crate::Service for Service { Ok(Arc::new(Self { registration_info: RwLock::new(BTreeMap::new()), services: Services { + globals: args.depend::("globals"), sending: args.depend::("sending"), + users: 
args.depend::("users"), }, db: Data { id_appserviceregistrations: args.db["id_appserviceregistrations"].clone(), @@ -44,23 +48,93 @@ impl crate::Service for Service { } async fn worker(self: Arc) -> Result { - // Inserting registrations into cache - self.iter_db_ids() - .try_for_each(async |appservice| { - self.registration_info - .write() - .await - .insert(appservice.0, appservice.1.try_into()?); + // First, collect all appservices to check for token conflicts + let appservices: Vec<(String, Registration)> = self.iter_db_ids().try_collect().await?; - Ok(()) - }) - .await + // Check for appservice-to-appservice token conflicts + for i in 0..appservices.len() { + for j in i.saturating_add(1)..appservices.len() { + if appservices[i].1.as_token == appservices[j].1.as_token { + return Err!(Database(error!( + "Token collision detected: Appservices '{}' and '{}' have the same token", + appservices[i].0, appservices[j].0 + ))); + } + } + } + + // Process each appservice + for (id, registration) in appservices { + // During startup, resolve any token collisions in favour of appservices + // by logging out conflicting user devices + if let Ok((user_id, device_id)) = self + .services + .users + .find_from_token(®istration.as_token) + .await + { + conduwuit::warn!( + "Token collision detected during startup: Appservice '{}' token was also \ + used by user '{}' device '{}'. Logging out the user device to resolve \ + conflict.", + id, + user_id.localpart(), + device_id + ); + + self.services + .users + .remove_device(&user_id, &device_id) + .await; + } + + self.start_appservice(id, registration).await?; + } + + Ok(()) } fn name(&self) -> &str { crate::service::make_name(std::module_path!()) } } impl Service { + /// Starts an appservice, ensuring its sender_localpart user exists and is + /// active. Creates the user if it doesn't exist, or reactivates it if it + /// was deactivated. Then registers the appservice in memory for request + /// handling. 
+ async fn start_appservice(&self, id: String, registration: Registration) -> Result { + let appservice_user_id = UserId::parse_with_server_name( + registration.sender_localpart.as_str(), + self.services.globals.server_name(), + )?; + + if !self.services.users.exists(&appservice_user_id).await { + self.services + .users + .create(&appservice_user_id, None, None) + .await?; + } else if self + .services + .users + .is_deactivated(&appservice_user_id) + .await + .unwrap_or(false) + { + // Reactivate the appservice user if it was accidentally deactivated + self.services + .users + .set_password(&appservice_user_id, None) + .await?; + } + + self.registration_info + .write() + .await + .insert(id, registration.try_into()?); + + Ok(()) + } + /// Registers an appservice and returns the ID to the caller pub async fn register_appservice( &self, @@ -68,15 +142,40 @@ impl Service { appservice_config_body: &str, ) -> Result { //TODO: Check for collisions between exclusive appservice namespaces - self.registration_info - .write() + + // Check for token collision with other appservices (allow re-registration of + // same appservice) + if let Ok(existing) = self.find_from_token(®istration.as_token).await { + if existing.registration.id != registration.id { + return Err(err!(Request(InvalidParam( + "Cannot register appservice: Token is already used by appservice '{}'. \ + Please generate a different token.", + existing.registration.id + )))); + } + } + + // Prevent token collision with existing user tokens + if self + .services + .users + .find_from_token(®istration.as_token) .await - .insert(registration.id.clone(), registration.clone().try_into()?); + .is_ok() + { + return Err(err!(Request(InvalidParam( + "Cannot register appservice: The provided token is already in use by a user \ + device. Please generate a different token for the appservice." 
+ )))); + } self.db .id_appserviceregistrations .insert(®istration.id, appservice_config_body); + self.start_appservice(registration.id.clone(), registration.clone()) + .await?; + Ok(()) } @@ -113,12 +212,14 @@ impl Service { .map(|info| info.registration) } - pub async fn find_from_token(&self, token: &str) -> Option { + /// Returns Result to match users::find_from_token for select_ok usage + pub async fn find_from_token(&self, token: &str) -> Result { self.read() .await .values() .find(|info| info.registration.as_token == token) .cloned() + .ok_or_else(|| err!(Request(NotFound("Appservice token not found")))) } /// Checks if a given user id matches any exclusive appservice regex diff --git a/src/service/emergency/mod.rs b/src/service/emergency/mod.rs index 3a61f710..f8ecbb3e 100644 --- a/src/service/emergency/mod.rs +++ b/src/service/emergency/mod.rs @@ -41,6 +41,11 @@ impl crate::Service for Service { return Ok(()); } + if self.services.config.ldap.enable { + warn!("emergency password feature not available with LDAP enabled."); + return Ok(()); + } + self.set_emergency_access().await.inspect_err(|e| { error!("Could not set the configured emergency password for the server user: {e}"); }) @@ -57,7 +62,8 @@ impl Service { self.services .users - .set_password(server_user, self.services.config.emergency_password.as_deref())?; + .set_password(server_user, self.services.config.emergency_password.as_deref()) + .await?; let (ruleset, pwd_set) = match self.services.config.emergency_password { | Some(_) => (Ruleset::server_default(server_user), true), diff --git a/src/service/migrations.rs b/src/service/migrations.rs index cee638ba..586d6249 100644 --- a/src/service/migrations.rs +++ b/src/service/migrations.rs @@ -215,8 +215,8 @@ async fn db_lt_12(services: &Services) -> Result<()> { for username in &services .users .list_local_users() - .map(UserId::to_owned) - .collect::>() + .map(ToOwned::to_owned) + .collect::>() .await { let user = match 
UserId::parse_with_server_name(username.as_str(), &services.server.name) @@ -295,8 +295,8 @@ async fn db_lt_13(services: &Services) -> Result<()> { for username in &services .users .list_local_users() - .map(UserId::to_owned) - .collect::>() + .map(ToOwned::to_owned) + .collect::>() .await { let user = match UserId::parse_with_server_name(username.as_str(), &services.server.name) diff --git a/src/service/presence/mod.rs b/src/service/presence/mod.rs index 8f646be6..e7ce64bc 100644 --- a/src/service/presence/mod.rs +++ b/src/service/presence/mod.rs @@ -183,8 +183,8 @@ impl Service { .services .users .list_local_users() - .map(UserId::to_owned) - .collect::>() + .map(ToOwned::to_owned) + .collect::>() .await { let presence = self.db.get_presence(user_id).await; diff --git a/src/service/pusher/mod.rs b/src/service/pusher/mod.rs index baa7a72e..071bf822 100644 --- a/src/service/pusher/mod.rs +++ b/src/service/pusher/mod.rs @@ -178,7 +178,7 @@ impl Service { pub fn get_pushkeys<'a>( &'a self, sender: &'a UserId, - ) -> impl Stream + Send + 'a { + ) -> impl Stream + Send + 'a { let prefix = (sender, Interfix); self.db .senderkey_pusher diff --git a/src/service/rooms/alias/mod.rs b/src/service/rooms/alias/mod.rs index 7675efd4..c627092e 100644 --- a/src/service/rooms/alias/mod.rs +++ b/src/service/rooms/alias/mod.rs @@ -178,7 +178,7 @@ impl Service { pub fn local_aliases_for_room<'a>( &'a self, room_id: &'a RoomId, - ) -> impl Stream + Send + 'a { + ) -> impl Stream + Send + 'a { let prefix = (room_id, Interfix); self.db .aliasid_alias @@ -188,7 +188,9 @@ impl Service { } #[tracing::instrument(skip(self), level = "debug")] - pub fn all_local_aliases<'a>(&'a self) -> impl Stream + Send + 'a { + pub fn all_local_aliases<'a>( + &'a self, + ) -> impl Stream + Send + 'a { self.db .alias_roomid .stream() diff --git a/src/service/rooms/pdu_metadata/data.rs b/src/service/rooms/pdu_metadata/data.rs index c1376cb0..854c6ea0 100644 --- a/src/service/rooms/pdu_metadata/data.rs +++ 
b/src/service/rooms/pdu_metadata/data.rs @@ -60,10 +60,13 @@ impl Data { target: ShortEventId, from: PduCount, dir: Direction, - ) -> impl Stream + Send + '_ { + ) -> impl Stream + Send + 'a { + // Query from exact position then filter excludes it (saturating_inc could skip + // events at min/max boundaries) + let from_unsigned = from.into_unsigned(); let mut current = ArrayVec::::new(); current.extend(target.to_be_bytes()); - current.extend(from.saturating_inc(dir).into_unsigned().to_be_bytes()); + current.extend(from_unsigned.to_be_bytes()); let current = current.as_slice(); match dir { | Direction::Forward => self.tofrom_relation.raw_keys_from(current).boxed(), @@ -73,6 +76,17 @@ impl Data { .ready_take_while(move |key| key.starts_with(&target.to_be_bytes())) .map(|to_from| u64_from_u8(&to_from[8..16])) .map(PduCount::from_unsigned) + .ready_filter(move |count| { + if from == PduCount::min() || from == PduCount::max() { + true + } else { + let count_unsigned = count.into_unsigned(); + match dir { + | Direction::Forward => count_unsigned > from_unsigned, + | Direction::Backward => count_unsigned < from_unsigned, + } + } + }) .wide_filter_map(move |shorteventid| async move { let pdu_id: RawPduId = PduId { shortroomid, shorteventid }.into(); diff --git a/src/service/rooms/read_receipt/data.rs b/src/service/rooms/read_receipt/data.rs index 62f87948..9a2fa70c 100644 --- a/src/service/rooms/read_receipt/data.rs +++ b/src/service/rooms/read_receipt/data.rs @@ -65,7 +65,7 @@ impl Data { &'a self, room_id: &'a RoomId, since: u64, - ) -> impl Stream> + Send + 'a { + ) -> impl Stream> + Send + 'a { type Key<'a> = (&'a RoomId, u64, &'a UserId); type KeyVal<'a> = (Key<'a>, CanonicalJsonObject); diff --git a/src/service/rooms/read_receipt/mod.rs b/src/service/rooms/read_receipt/mod.rs index 68ce9b7f..64081a2c 100644 --- a/src/service/rooms/read_receipt/mod.rs +++ b/src/service/rooms/read_receipt/mod.rs @@ -112,7 +112,7 @@ impl Service { &'a self, room_id: &'a RoomId, since: 
u64, - ) -> impl Stream> + Send + 'a { + ) -> impl Stream> + Send + 'a { self.db.readreceipts_since(room_id, since) } diff --git a/src/service/rooms/search/mod.rs b/src/service/rooms/search/mod.rs index afe3061b..ea2f90af 100644 --- a/src/service/rooms/search/mod.rs +++ b/src/service/rooms/search/mod.rs @@ -104,7 +104,7 @@ pub fn deindex_pdu(&self, shortroomid: ShortRoomId, pdu_id: &RawPduId, message_b pub async fn search_pdus<'a>( &'a self, query: &'a RoomQuery<'a>, -) -> Result<(usize, impl Stream> + Send + '_)> { +) -> Result<(usize, impl Stream> + Send + 'a)> { let pdu_ids: Vec<_> = self.search_pdu_ids(query).await?.collect().await; let filter = &query.criteria.filter; @@ -137,10 +137,10 @@ pub async fn search_pdus<'a>( // result is modeled as a stream such that callers don't have to be refactored // though an additional async/wrap still exists for now #[implement(Service)] -pub async fn search_pdu_ids( - &self, - query: &RoomQuery<'_>, -) -> Result + Send + '_ + use<'_>> { +pub async fn search_pdu_ids<'a>( + &'a self, + query: &'a RoomQuery<'_>, +) -> Result + Send + 'a + use<'a>> { let shortroomid = self.services.short.get_shortroomid(query.room_id).await?; let pdu_ids = self.search_pdu_ids_query_room(query, shortroomid).await; @@ -173,7 +173,7 @@ fn search_pdu_ids_query_words<'a>( &'a self, shortroomid: ShortRoomId, word: &'a str, -) -> impl Stream + Send + '_ { +) -> impl Stream + Send + 'a { self.search_pdu_ids_query_word(shortroomid, word) .map(move |key| -> RawPduId { let key = &key[prefix_len(word)..]; @@ -183,11 +183,11 @@ fn search_pdu_ids_query_words<'a>( /// Iterate over raw database results for a word #[implement(Service)] -fn search_pdu_ids_query_word( - &self, +fn search_pdu_ids_query_word<'a>( + &'a self, shortroomid: ShortRoomId, - word: &str, -) -> impl Stream> + Send + '_ + use<'_> { + word: &'a str, +) -> impl Stream> + Send + 'a + use<'a> { // rustc says const'ing this not yet stable let end_id: RawPduId = PduId { shortroomid, diff --git 
a/src/service/rooms/short/mod.rs b/src/service/rooms/short/mod.rs index 06ff6493..660bb7de 100644 --- a/src/service/rooms/short/mod.rs +++ b/src/service/rooms/short/mod.rs @@ -62,7 +62,7 @@ pub async fn get_or_create_shorteventid(&self, event_id: &EventId) -> ShortEvent pub fn multi_get_or_create_shorteventid<'a, I>( &'a self, event_ids: I, -) -> impl Stream + Send + '_ +) -> impl Stream + Send + 'a where I: Iterator + Clone + Debug + Send + 'a, { diff --git a/src/service/rooms/state/mod.rs b/src/service/rooms/state/mod.rs index 641aa6a9..386adf9d 100644 --- a/src/service/rooms/state/mod.rs +++ b/src/service/rooms/state/mod.rs @@ -388,7 +388,7 @@ impl Service { pub fn get_forward_extremities<'a>( &'a self, room_id: &'a RoomId, - ) -> impl Stream + Send + '_ { + ) -> impl Stream + Send + 'a { let prefix = (room_id, Interfix); self.db diff --git a/src/service/rooms/state_cache/mod.rs b/src/service/rooms/state_cache/mod.rs index e9845fbf..2d8f5cc5 100644 --- a/src/service/rooms/state_cache/mod.rs +++ b/src/service/rooms/state_cache/mod.rs @@ -144,7 +144,7 @@ pub fn clear_appservice_in_room_cache(&self) { self.appservice_in_room_cache.wri pub fn room_servers<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { let prefix = (room_id, Interfix); self.db .roomserverids @@ -167,7 +167,7 @@ pub async fn server_in_room<'a>(&'a self, server: &'a ServerName, room_id: &'a R pub fn server_rooms<'a>( &'a self, server: &'a ServerName, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { let prefix = (server, Interfix); self.db .serverroomids @@ -202,7 +202,7 @@ pub fn get_shared_rooms<'a>( &'a self, user_a: &'a UserId, user_b: &'a UserId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { use conduwuit::utils::set; let a = self.rooms_joined(user_a); @@ -216,7 +216,7 @@ pub fn get_shared_rooms<'a>( pub fn room_members<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 
'a { let prefix = (room_id, Interfix); self.db .roomuserid_joined @@ -239,7 +239,7 @@ pub async fn room_joined_count(&self, room_id: &RoomId) -> Result { pub fn local_users_in_room<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { self.room_members(room_id) .ready_filter(|user| self.services.globals.user_is_local(user)) } @@ -251,7 +251,7 @@ pub fn local_users_in_room<'a>( pub fn active_local_users_in_room<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { self.local_users_in_room(room_id) .filter(|user| self.services.users.is_active(user)) } @@ -273,7 +273,7 @@ pub async fn room_invited_count(&self, room_id: &RoomId) -> Result { pub fn room_useroncejoined<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { let prefix = (room_id, Interfix); self.db .roomuseroncejoinedids @@ -288,7 +288,7 @@ pub fn room_useroncejoined<'a>( pub fn room_members_invited<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { let prefix = (room_id, Interfix); self.db .roomuserid_invitecount @@ -303,7 +303,7 @@ pub fn room_members_invited<'a>( pub fn room_members_knocked<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { let prefix = (room_id, Interfix); self.db .roomuserid_knockedcount @@ -347,7 +347,7 @@ pub async fn get_left_count(&self, room_id: &RoomId, user_id: &UserId) -> Result pub fn rooms_joined<'a>( &'a self, user_id: &'a UserId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { self.db .userroomid_joined .keys_raw_prefix(user_id) diff --git a/src/service/rooms/state_cache/update.rs b/src/service/rooms/state_cache/update.rs index 32c67947..86c1afe7 100644 --- a/src/service/rooms/state_cache/update.rs +++ b/src/service/rooms/state_cache/update.rs @@ -49,7 +49,7 @@ pub async fn update_membership( #[allow(clippy::collapsible_if)] 
if !self.services.globals.user_is_local(user_id) { if !self.services.users.exists(user_id).await { - self.services.users.create(user_id, None)?; + self.services.users.create(user_id, None, None).await?; } } diff --git a/src/service/rooms/state_cache/via.rs b/src/service/rooms/state_cache/via.rs index a818cc04..24d92a21 100644 --- a/src/service/rooms/state_cache/via.rs +++ b/src/service/rooms/state_cache/via.rs @@ -81,7 +81,7 @@ pub async fn servers_route_via(&self, room_id: &RoomId) -> Result( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { type KeyVal<'a> = (Ignore, Vec<&'a ServerName>); self.db diff --git a/src/service/rooms/timeline/create.rs b/src/service/rooms/timeline/create.rs index 66a35eca..6732cd8e 100644 --- a/src/service/rooms/timeline/create.rs +++ b/src/service/rooms/timeline/create.rs @@ -165,25 +165,6 @@ pub async fn create_hash_and_sign_event( return Err!(Request(Forbidden("Event is not authorized."))); } - // Check with the policy server - match self - .services - .event_handler - .ask_policy_server(&pdu, room_id) - .await - { - | Ok(true) => {}, - | Ok(false) => { - return Err!(Request(Forbidden(debug_warn!( - "Policy server marked this event as spam" - )))); - }, - | Err(e) => { - // fail open - warn!("Failed to check event with policy server: {e}"); - }, - } - // Hash and sign let mut pdu_json = utils::to_canonical_object(&pdu).map_err(|e| { err!(Request(BadJson(warn!("Failed to convert PDU to canonical JSON: {e}")))) @@ -222,6 +203,25 @@ pub async fn create_hash_and_sign_event( pdu_json.insert("event_id".into(), CanonicalJsonValue::String(pdu.event_id.clone().into())); + // Check with the policy server + match self + .services + .event_handler + .ask_policy_server(&pdu, room_id) + .await + { + | Ok(true) => {}, + | Ok(false) => { + return Err!(Request(Forbidden(debug_warn!( + "Policy server marked this event as spam" + )))); + }, + | Err(e) => { + // fail open + warn!("Failed to check event with 
policy server: {e}"); + }, + } + // Generate short event id let _shorteventid = self .services diff --git a/src/service/users/mod.rs b/src/service/users/mod.rs index d2dfccd9..6ddd8d79 100644 --- a/src/service/users/mod.rs +++ b/src/service/users/mod.rs @@ -1,11 +1,19 @@ +#[cfg(feature = "ldap")] +use std::collections::HashMap; use std::{collections::BTreeMap, mem, sync::Arc}; +#[cfg(feature = "ldap")] +use conduwuit::result::LogErr; use conduwuit::{ - Err, Error, Result, Server, at, debug_warn, err, trace, + Err, Error, Result, Server, at, debug_warn, err, is_equal_to, trace, utils::{self, ReadyExt, stream::TryIgnore, string::Unquoted}, }; +#[cfg(feature = "ldap")] +use conduwuit_core::{debug, error}; use database::{Deserialized, Ignore, Interfix, Json, Map}; use futures::{Stream, StreamExt, TryFutureExt}; +#[cfg(feature = "ldap")] +use ldap3::{LdapConnAsync, Scope, SearchEntry}; use ruma::{ DeviceId, KeyId, MilliSecondsSinceUnixEpoch, OneTimeKeyAlgorithm, OneTimeKeyId, OneTimeKeyName, OwnedDeviceId, OwnedKeyId, OwnedMxcUri, OwnedUserId, RoomId, UInt, UserId, @@ -19,7 +27,7 @@ use ruma::{ use serde::{Deserialize, Serialize}; use serde_json::json; -use crate::{Dep, account_data, admin, globals, rooms}; +use crate::{Dep, account_data, admin, appservice, globals, rooms}; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct UserSuspension { @@ -40,6 +48,7 @@ struct Services { server: Arc, account_data: Dep, admin: Dep, + appservice: Dep, globals: Dep, state_accessor: Dep, state_cache: Dep, @@ -62,6 +71,7 @@ struct Data { userid_displayname: Arc, userid_lastonetimekeyupdate: Arc, userid_masterkeyid: Arc, + userid_origin: Arc, userid_password: Arc, userid_suspension: Arc, userid_selfsigningkeyid: Arc, @@ -76,6 +86,7 @@ impl crate::Service for Service { server: args.server.clone(), account_data: args.depend::("account_data"), admin: args.depend::("admin"), + appservice: args.depend::("appservice"), globals: args.depend::("globals"), state_accessor: args 
.depend::("rooms::state_accessor"), @@ -98,6 +109,7 @@ impl crate::Service for Service { userid_displayname: args.db["userid_displayname"].clone(), userid_lastonetimekeyupdate: args.db["userid_lastonetimekeyupdate"].clone(), userid_masterkeyid: args.db["userid_masterkeyid"].clone(), + userid_origin: args.db["userid_origin"].clone(), userid_password: args.db["userid_password"].clone(), userid_suspension: args.db["userid_suspension"].clone(), userid_selfsigningkeyid: args.db["userid_selfsigningkeyid"].clone(), @@ -134,9 +146,21 @@ impl Service { } /// Create a new user account on this homeserver. + /// + /// User origin is by default "password" (meaning that it will login using + /// its user_id/password). Users with other origins (currently only "ldap" + /// is available) have special login processes. #[inline] - pub fn create(&self, user_id: &UserId, password: Option<&str>) -> Result<()> { - self.set_password(user_id, password) + pub async fn create( + &self, + user_id: &UserId, + password: Option<&str>, + origin: Option<&str>, + ) -> Result<()> { + self.db + .userid_origin + .insert(user_id, origin.unwrap_or("password")); + self.set_password(user_id, password).await } /// Deactivate account @@ -150,7 +174,7 @@ impl Service { // result in an empty string, so the user will not be able to log in again. // Systems like changing the password without logging in should check if the // account is deactivated. - self.set_password(user_id, None)?; + self.set_password(user_id, None).await?; // TODO: Unhook 3PID Ok(()) @@ -251,13 +275,34 @@ impl Service { .ready_filter_map(|(u, p): (&UserId, &[u8])| (!p.is_empty()).then_some(u)) } + /// Returns the origin of the user (password/LDAP/...). + pub async fn origin(&self, user_id: &UserId) -> Result { + self.db.userid_origin.get(user_id).await.deserialized() + } + /// Returns the password hash for the given user. 
pub async fn password_hash(&self, user_id: &UserId) -> Result { self.db.userid_password.get(user_id).await.deserialized() } /// Hash and set the user's password to the Argon2 hash - pub fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()> { + pub async fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()> { + // Cannot change the password of a LDAP user. There are two special cases : + // - a `None` password can be used to deactivate a LDAP user + // - a "*" password is used as the default password of an active LDAP user + if cfg!(feature = "ldap") + && password.is_some_and(|pwd| pwd != "*") + && self + .db + .userid_origin + .get(user_id) + .await + .deserialized::() + .is_ok_and(is_equal_to!("ldap")) + { + return Err!(Request(InvalidParam("Cannot change password of a LDAP user"))); + } + password .map(utils::hash::password) .transpose() @@ -377,7 +422,7 @@ impl Service { pub fn all_device_ids<'a>( &'a self, user_id: &'a UserId, - ) -> impl Stream + Send + 'a { + ) -> impl Stream + Send + 'a { let prefix = (user_id, Interfix); self.db .userdeviceid_metadata @@ -391,6 +436,31 @@ impl Service { self.db.userdeviceid_token.qry(&key).await.deserialized() } + /// Generate a unique access token that doesn't collide with existing tokens + pub async fn generate_unique_token(&self) -> String { + loop { + let token = utils::random_string(32); + + // Check for collision with appservice tokens + if self + .services + .appservice + .find_from_token(&token) + .await + .is_ok() + { + continue; + } + + // Check for collision with user tokens + if self.db.token_userdeviceid.get(&token).await.is_ok() { + continue; + } + + return token; + } + } + /// Replaces the access token of one device. 
pub async fn set_token( &self, @@ -407,6 +477,19 @@ impl Service { ))); } + // Check for token collision with appservices + if self + .services + .appservice + .find_from_token(token) + .await + .is_ok() + { + return Err!(Request(InvalidParam( + "Token conflicts with an existing appservice token" + ))); + } + // Remove old token if let Ok(old_token) = self.db.userdeviceid_token.qry(&key).await { self.db.token_userdeviceid.remove(&old_token); @@ -687,7 +770,7 @@ impl Service { user_id: &'a UserId, from: u64, to: Option, - ) -> impl Stream + Send + 'a { + ) -> impl Stream + Send + 'a { self.keys_changed_user_or_room(user_id.as_str(), from, to) .map(|(user_id, ..)| user_id) } @@ -698,7 +781,7 @@ impl Service { room_id: &'a RoomId, from: u64, to: Option, - ) -> impl Stream + Send + 'a { + ) -> impl Stream + Send + 'a { self.keys_changed_user_or_room(room_id.as_str(), from, to) } @@ -707,7 +790,7 @@ impl Service { user_or_room_id: &'a str, from: u64, to: Option, - ) -> impl Stream + Send + 'a { + ) -> impl Stream + Send + 'a { type KeyVal<'a> = ((&'a str, u64), &'a UserId); let to = to.unwrap_or(u64::MAX); @@ -1092,6 +1175,154 @@ impl Service { self.db.useridprofilekey_value.del(key); } } + + #[cfg(not(feature = "ldap"))] + pub async fn search_ldap(&self, _user_id: &UserId) -> Result> { + Err!(FeatureDisabled("ldap")) + } + + #[cfg(feature = "ldap")] + pub async fn search_ldap(&self, user_id: &UserId) -> Result> { + let localpart = user_id.localpart().to_owned(); + let lowercased_localpart = localpart.to_lowercase(); + + let config = &self.services.server.config.ldap; + let uri = config + .uri + .as_ref() + .ok_or_else(|| err!(Ldap(error!("LDAP URI is not configured."))))?; + + debug!(?uri, "LDAP creating connection..."); + let (conn, mut ldap) = LdapConnAsync::new(uri.as_str()) + .await + .map_err(|e| err!(Ldap(error!(?user_id, "LDAP connection setup error: {e}"))))?; + + let driver = self.services.server.runtime().spawn(async move { + match conn.drive().await { + | 
Err(e) => error!("LDAP connection error: {e}"), + | Ok(()) => debug!("LDAP connection completed."), + } + }); + + match (&config.bind_dn, &config.bind_password_file) { + | (Some(bind_dn), Some(bind_password_file)) => { + let bind_pw = String::from_utf8(std::fs::read(bind_password_file)?)?; + ldap.simple_bind(bind_dn, bind_pw.trim()) + .await + .and_then(ldap3::LdapResult::success) + .map_err(|e| err!(Ldap(error!("LDAP bind error: {e}"))))?; + }, + | (..) => {}, + } + + let attr = [&config.uid_attribute, &config.name_attribute]; + + let user_filter = &config.filter.replace("{username}", &lowercased_localpart); + + let (entries, _result) = ldap + .search(&config.base_dn, Scope::Subtree, user_filter, &attr) + .await + .and_then(ldap3::SearchResult::success) + .inspect(|(entries, result)| trace!(?entries, ?result, "LDAP Search")) + .map_err(|e| err!(Ldap(error!(?attr, ?user_filter, "LDAP search error: {e}"))))?; + + let mut dns: HashMap = entries + .into_iter() + .filter_map(|entry| { + let search_entry = SearchEntry::construct(entry); + debug!(?search_entry, "LDAP search entry"); + search_entry + .attrs + .get(&config.uid_attribute) + .into_iter() + .chain(search_entry.attrs.get(&config.name_attribute)) + .any(|ids| ids.contains(&localpart) || ids.contains(&lowercased_localpart)) + .then_some((search_entry.dn, false)) + }) + .collect(); + + if !config.admin_filter.is_empty() { + let admin_base_dn = if config.admin_base_dn.is_empty() { + &config.base_dn + } else { + &config.admin_base_dn + }; + + let admin_filter = &config + .admin_filter + .replace("{username}", &lowercased_localpart); + + let (admin_entries, _result) = ldap + .search(admin_base_dn, Scope::Subtree, admin_filter, &attr) + .await + .and_then(ldap3::SearchResult::success) + .inspect(|(entries, result)| trace!(?entries, ?result, "LDAP Admin Search")) + .map_err(|e| { + err!(Ldap(error!(?attr, ?admin_filter, "Ldap admin search error: {e}"))) + })?; + + 
dns.extend(admin_entries.into_iter().filter_map(|entry| { + let search_entry = SearchEntry::construct(entry); + debug!(?search_entry, "LDAP search entry"); + search_entry + .attrs + .get(&config.uid_attribute) + .into_iter() + .chain(search_entry.attrs.get(&config.name_attribute)) + .any(|ids| ids.contains(&localpart) || ids.contains(&lowercased_localpart)) + .then_some((search_entry.dn, true)) + })); + } + + ldap.unbind() + .await + .map_err(|e| err!(Ldap(error!("LDAP unbind error: {e}"))))?; + + driver.await.log_err().ok(); + + Ok(dns.drain().collect()) + } + + #[cfg(not(feature = "ldap"))] + pub async fn auth_ldap(&self, _user_dn: &str, _password: &str) -> Result { + Err!(FeatureDisabled("ldap")) + } + + #[cfg(feature = "ldap")] + pub async fn auth_ldap(&self, user_dn: &str, password: &str) -> Result { + let config = &self.services.server.config.ldap; + let uri = config + .uri + .as_ref() + .ok_or_else(|| err!(Ldap(error!("LDAP URI is not configured."))))?; + + debug!(?uri, "LDAP creating connection..."); + let (conn, mut ldap) = LdapConnAsync::new(uri.as_str()) + .await + .map_err(|e| err!(Ldap(error!(?user_dn, "LDAP connection setup error: {e}"))))?; + + let driver = self.services.server.runtime().spawn(async move { + match conn.drive().await { + | Err(e) => error!("LDAP connection error: {e}"), + | Ok(()) => debug!("LDAP connection completed."), + } + }); + + ldap.simple_bind(user_dn, password) + .await + .and_then(ldap3::LdapResult::success) + .map_err(|e| { + err!(Request(Forbidden(debug_error!("LDAP authentication error: {e}")))) + })?; + + ldap.unbind() + .await + .map_err(|e| err!(Ldap(error!("LDAP unbind error: {e}"))))?; + + driver.await.log_err().ok(); + + Ok(()) + } } pub fn parse_master_key(