Compare commits

..

1 commit

Author: Renovate Bot
SHA1: 21ea3186c7
Message: chore(deps): update rust crate ctor to 0.5.0
Date: 2025-08-18 17:33:50 +00:00
62 changed files with 408 additions and 1707 deletions

.envrc (4 lines changed)
View file

@@ -2,8 +2,6 @@
 dotenv_if_exists
-if [ -f /etc/os-release ] && grep -q '^ID=nixos' /etc/os-release; then
-	use flake ".#${DIRENV_DEVSHELL:-default}"
-fi
+# use flake ".#${DIRENV_DEVSHELL:-default}"
 PATH_add bin

View file

@@ -1,39 +0,0 @@
name: detect-runner-os
description: |
Detect the actual OS name and version of the runner.
Provides separate outputs for name, version, and a combined slug.
outputs:
name:
description: 'OS name (e.g. Ubuntu, Debian)'
value: ${{ steps.detect.outputs.name }}
version:
description: 'OS version (e.g. 22.04, 11)'
value: ${{ steps.detect.outputs.version }}
slug:
description: 'Combined OS slug (e.g. Ubuntu-22.04)'
value: ${{ steps.detect.outputs.slug }}
runs:
using: composite
steps:
- name: Detect runner OS
id: detect
shell: bash
run: |
# Detect OS version (try lsb_release first, fall back to /etc/os-release)
OS_VERSION=$(lsb_release -rs 2>/dev/null || grep VERSION_ID /etc/os-release | cut -d'"' -f2)
# Detect OS name and capitalise (try lsb_release first, fall back to /etc/os-release)
OS_NAME=$(lsb_release -is 2>/dev/null || grep "^ID=" /etc/os-release | cut -d'=' -f2 | tr -d '"' | sed 's/\b\(.\)/\u\1/g')
# Create combined slug
OS_SLUG="${OS_NAME}-${OS_VERSION}"
# Set outputs
echo "name=${OS_NAME}" >> $GITHUB_OUTPUT
echo "version=${OS_VERSION}" >> $GITHUB_OUTPUT
echo "slug=${OS_SLUG}" >> $GITHUB_OUTPUT
# Log detection results
echo "🔍 Detected Runner OS: ${OS_NAME} ${OS_VERSION}"

View file

@@ -1,167 +0,0 @@
name: setup-llvm-with-apt
description: |
Set up LLVM toolchain with APT package management and smart caching.
Supports cross-compilation architectures and additional package installation.
Creates symlinks in /usr/bin: clang, clang++, lld, llvm-ar, llvm-ranlib
inputs:
dpkg-arch:
description: 'Debian architecture for cross-compilation (e.g. arm64)'
required: false
default: ''
extra-packages:
description: 'Additional APT packages to install (space-separated)'
required: false
default: ''
llvm-version:
description: 'LLVM version to install'
required: false
default: '20'
outputs:
llvm-version:
description: 'Installed LLVM version'
value: ${{ steps.configure.outputs.version }}
runs:
using: composite
steps:
- name: Detect runner OS
id: runner-os
uses: ./.forgejo/actions/detect-runner-os
- name: Configure cross-compilation architecture
if: inputs.dpkg-arch != ''
shell: bash
run: |
echo "🏗️ Adding ${{ inputs.dpkg-arch }} architecture"
sudo dpkg --add-architecture ${{ inputs.dpkg-arch }}
# Restrict default sources to amd64
sudo sed -i 's/^deb http/deb [arch=amd64] http/g' /etc/apt/sources.list
sudo sed -i 's/^deb https/deb [arch=amd64] https/g' /etc/apt/sources.list
# Add ports sources for foreign architecture
sudo tee /etc/apt/sources.list.d/${{ inputs.dpkg-arch }}.list > /dev/null <<EOF
deb [arch=${{ inputs.dpkg-arch }}] http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe multiverse
deb [arch=${{ inputs.dpkg-arch }}] http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe multiverse
deb [arch=${{ inputs.dpkg-arch }}] http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
EOF
echo "✅ Architecture ${{ inputs.dpkg-arch }} configured"
- name: Start LLVM cache group
shell: bash
run: echo "::group::📦 Restoring LLVM cache"
- name: Check for LLVM cache
id: cache
uses: https://github.com/actions/cache@v4
with:
path: |
/usr/bin/clang-*
/usr/bin/clang++-*
/usr/bin/lld-*
/usr/bin/llvm-*
/usr/lib/llvm-*/
/usr/lib/x86_64-linux-gnu/libLLVM*.so*
/usr/lib/x86_64-linux-gnu/libclang*.so*
/etc/apt/sources.list.d/archive_uri-*
/etc/apt/trusted.gpg.d/apt.llvm.org.asc
key: llvm-${{ steps.runner-os.outputs.slug }}-v${{ inputs.llvm-version }}-v3-${{ hashFiles('**/Cargo.lock', 'rust-toolchain.toml') }}
- name: End LLVM cache group
shell: bash
run: echo "::endgroup::"
- name: Check and install LLVM if needed
id: llvm-setup
shell: bash
run: |
echo "🔍 Checking for LLVM ${{ inputs.llvm-version }}..."
# Check both binaries and libraries exist
if [ -f "/usr/bin/clang-${{ inputs.llvm-version }}" ] && \
[ -f "/usr/bin/clang++-${{ inputs.llvm-version }}" ] && \
[ -f "/usr/bin/lld-${{ inputs.llvm-version }}" ] && \
([ -f "/usr/lib/x86_64-linux-gnu/libLLVM.so.${{ inputs.llvm-version }}.1" ] || \
[ -f "/usr/lib/x86_64-linux-gnu/libLLVM-${{ inputs.llvm-version }}.so.1" ] || \
[ -f "/usr/lib/llvm-${{ inputs.llvm-version }}/lib/libLLVM.so" ]); then
echo "✅ LLVM ${{ inputs.llvm-version }} found and verified"
echo "needs-install=false" >> $GITHUB_OUTPUT
else
echo "📦 LLVM ${{ inputs.llvm-version }} not found or incomplete - installing..."
echo "::group::🔧 Installing LLVM ${{ inputs.llvm-version }}"
wget -O - https://apt.llvm.org/llvm.sh | bash -s -- ${{ inputs.llvm-version }}
echo "::endgroup::"
if [ ! -f "/usr/bin/clang-${{ inputs.llvm-version }}" ]; then
echo "❌ Failed to install LLVM ${{ inputs.llvm-version }}"
exit 1
fi
echo "✅ Installed LLVM ${{ inputs.llvm-version }}"
echo "needs-install=true" >> $GITHUB_OUTPUT
fi
- name: Prepare for additional packages
if: inputs.extra-packages != ''
shell: bash
run: |
# Update APT if LLVM was cached (installer script already does apt-get update)
if [[ "${{ steps.llvm-setup.outputs.needs-install }}" != "true" ]]; then
echo "::group::📦 Running apt-get update (LLVM cached, extra packages needed)"
sudo apt-get update
echo "::endgroup::"
fi
echo "::group::📦 Installing additional packages"
- name: Install additional packages
if: inputs.extra-packages != ''
uses: https://github.com/awalsh128/cache-apt-pkgs-action@latest
with:
packages: ${{ inputs.extra-packages }}
version: 1.0
- name: End package installation group
if: inputs.extra-packages != ''
shell: bash
run: echo "::endgroup::"
- name: Configure LLVM environment
id: configure
shell: bash
run: |
echo "::group::🔧 Configuring LLVM ${{ inputs.llvm-version }} environment"
# Create symlinks
sudo ln -sf "/usr/bin/clang-${{ inputs.llvm-version }}" /usr/bin/clang
sudo ln -sf "/usr/bin/clang++-${{ inputs.llvm-version }}" /usr/bin/clang++
sudo ln -sf "/usr/bin/lld-${{ inputs.llvm-version }}" /usr/bin/lld
sudo ln -sf "/usr/bin/llvm-ar-${{ inputs.llvm-version }}" /usr/bin/llvm-ar
sudo ln -sf "/usr/bin/llvm-ranlib-${{ inputs.llvm-version }}" /usr/bin/llvm-ranlib
echo " ✓ Created symlinks"
# Setup library paths
LLVM_LIB_PATH="/usr/lib/llvm-${{ inputs.llvm-version }}/lib"
if [ -d "$LLVM_LIB_PATH" ]; then
echo "LD_LIBRARY_PATH=${LLVM_LIB_PATH}:${LD_LIBRARY_PATH:-}" >> $GITHUB_ENV
echo "LIBCLANG_PATH=${LLVM_LIB_PATH}" >> $GITHUB_ENV
echo "$LLVM_LIB_PATH" | sudo tee "/etc/ld.so.conf.d/llvm-${{ inputs.llvm-version }}.conf" > /dev/null
sudo ldconfig
echo " ✓ Configured library paths"
else
# Fallback to standard library location
if [ -d "/usr/lib/x86_64-linux-gnu" ]; then
echo "LIBCLANG_PATH=/usr/lib/x86_64-linux-gnu" >> $GITHUB_ENV
echo " ✓ Using fallback library path"
fi
fi
# Set output
echo "version=${{ inputs.llvm-version }}" >> $GITHUB_OUTPUT
echo "::endgroup::"
echo "✅ LLVM ready: $(clang --version | head -1)"

View file

@@ -1,236 +0,0 @@
name: setup-rust
description: |
Set up Rust toolchain with sccache for compilation caching.
Respects rust-toolchain.toml by default or accepts explicit version override.
inputs:
cache-key-suffix:
description: 'Optional suffix for cache keys (e.g. platform identifier)'
required: false
default: ''
rust-components:
description: 'Additional Rust components to install (space-separated)'
required: false
default: ''
rust-target:
description: 'Rust target triple (e.g. x86_64-unknown-linux-gnu)'
required: false
default: ''
rust-version:
description: 'Rust version to install (e.g. nightly). Defaults to 1.87.0'
required: false
default: '1.87.0'
sccache-cache-limit:
description: 'Maximum size limit for sccache local cache (e.g. 2G, 500M)'
required: false
default: '2G'
github-token:
description: 'GitHub token for downloading sccache from GitHub releases'
required: false
default: ''
outputs:
rust-version:
description: 'Installed Rust version'
value: ${{ steps.rust-setup.outputs.version }}
runs:
using: composite
steps:
- name: Detect runner OS
id: runner-os
uses: ./.forgejo/actions/detect-runner-os
- name: Configure Cargo environment
shell: bash
run: |
# Use workspace-relative paths for better control and consistency
echo "CARGO_HOME=${{ github.workspace }}/.cargo" >> $GITHUB_ENV
echo "CARGO_TARGET_DIR=${{ github.workspace }}/target" >> $GITHUB_ENV
echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> $GITHUB_ENV
echo "RUSTUP_HOME=${{ github.workspace }}/.rustup" >> $GITHUB_ENV
# Limit binstall resolution timeout to avoid GitHub rate limit delays
echo "BINSTALL_MAXIMUM_RESOLUTION_TIMEOUT=10" >> $GITHUB_ENV
# Ensure directories exist for first run
mkdir -p "${{ github.workspace }}/.cargo"
mkdir -p "${{ github.workspace }}/.sccache"
mkdir -p "${{ github.workspace }}/target"
mkdir -p "${{ github.workspace }}/.rustup"
- name: Start cache restore group
shell: bash
run: echo "::group::📦 Restoring caches (registry, toolchain, build artifacts)"
- name: Cache Cargo registry and git
id: registry-cache
uses: https://github.com/actions/cache@v4
with:
path: |
.cargo/registry/index
.cargo/registry/cache
.cargo/git/db
# Registry cache saved per workflow, restored from any workflow's cache
# Each workflow maintains its own registry that accumulates its needed crates
key: cargo-registry-${{ steps.runner-os.outputs.slug }}-${{ github.workflow }}
restore-keys: |
cargo-registry-${{ steps.runner-os.outputs.slug }}-
- name: Cache toolchain binaries
id: toolchain-cache
uses: https://github.com/actions/cache@v4
with:
path: |
.cargo/bin
.rustup/toolchains
.rustup/update-hashes
# Shared toolchain cache across all Rust versions
key: toolchain-${{ steps.runner-os.outputs.slug }}
- name: Debug GitHub token availability
shell: bash
run: |
if [ -z "${{ inputs.github-token }}" ]; then
echo "⚠️ No GitHub token provided - sccache will use fallback download method"
else
echo "✅ GitHub token provided for sccache"
fi
- name: Setup sccache
uses: https://github.com/mozilla-actions/sccache-action@v0.0.9
with:
token: ${{ inputs.github-token }}
- name: Cache build artifacts
id: build-cache
uses: https://github.com/actions/cache@v4
with:
path: |
target/**/deps
!target/**/deps/*.rlib
target/**/build
target/**/.fingerprint
target/**/incremental
target/**/*.d
/timelord/
# Build artifacts - cache per code change, restore from deps when code changes
key: >-
build-${{ steps.runner-os.outputs.slug }}-${{ inputs.rust-version }}${{ inputs.cache-key-suffix && format('-{0}', inputs.cache-key-suffix) || '' }}-${{ hashFiles('rust-toolchain.toml', '**/Cargo.lock') }}-${{ hashFiles('**/*.rs', '**/Cargo.toml') }}
restore-keys: |
build-${{ steps.runner-os.outputs.slug }}-${{ inputs.rust-version }}${{ inputs.cache-key-suffix && format('-{0}', inputs.cache-key-suffix) || '' }}-${{ hashFiles('rust-toolchain.toml', '**/Cargo.lock') }}-
- name: End cache restore group
shell: bash
run: echo "::endgroup::"
- name: Setup Rust toolchain
shell: bash
run: |
# Install rustup if not already cached
if ! command -v rustup &> /dev/null; then
echo "::group::📦 Installing rustup"
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain none
source "$CARGO_HOME/env"
echo "::endgroup::"
else
echo "✅ rustup already available"
fi
# Setup the appropriate Rust version
if [[ -n "${{ inputs.rust-version }}" ]]; then
echo "::group::📦 Setting up Rust ${{ inputs.rust-version }}"
# Set override first to prevent rust-toolchain.toml from auto-installing
rustup override set ${{ inputs.rust-version }} 2>/dev/null || true
# Check if we need to install/update the toolchain
if rustup toolchain list | grep -q "^${{ inputs.rust-version }}-"; then
rustup update ${{ inputs.rust-version }}
else
rustup toolchain install ${{ inputs.rust-version }} --profile minimal -c cargo,clippy,rustfmt
fi
else
echo "::group::📦 Setting up Rust from rust-toolchain.toml"
rustup show
fi
echo "::endgroup::"
- name: Configure PATH and install tools
shell: bash
env:
GITHUB_TOKEN: ${{ inputs.github-token }}
run: |
# Add .cargo/bin to PATH permanently for all subsequent steps
echo "${{ github.workspace }}/.cargo/bin" >> $GITHUB_PATH
# For this step only, we need to add it to PATH since GITHUB_PATH takes effect in the next step
export PATH="${{ github.workspace }}/.cargo/bin:$PATH"
# Install cargo-binstall for fast binary installations
if command -v cargo-binstall &> /dev/null; then
echo "✅ cargo-binstall already available"
else
echo "::group::📦 Installing cargo-binstall"
curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash
echo "::endgroup::"
fi
if command -v prek &> /dev/null; then
echo "✅ prek already available"
else
echo "::group::📦 Installing prek"
# prek isn't regularly published to crates.io, so we use git source
cargo-binstall -y --no-symlinks --git https://github.com/j178/prek prek
echo "::endgroup::"
fi
if command -v timelord &> /dev/null; then
echo "✅ timelord already available"
else
echo "::group::📦 Installing timelord"
cargo-binstall -y --no-symlinks timelord-cli
echo "::endgroup::"
fi
- name: Configure sccache environment
shell: bash
run: |
echo "RUSTC_WRAPPER=sccache" >> $GITHUB_ENV
echo "CMAKE_C_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV
echo "CMAKE_CXX_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV
echo "CMAKE_CUDA_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV
echo "SCCACHE_GHA_ENABLED=true" >> $GITHUB_ENV
# Configure incremental compilation GC
# If we restored from old cache (partial hit), clean up aggressively
if [[ "${{ steps.build-cache.outputs.cache-hit }}" != "true" ]]; then
echo "♻️ Partial cache hit - enabling cache cleanup"
echo "CARGO_INCREMENTAL_GC_THRESHOLD=5" >> $GITHUB_ENV
fi
- name: Install Rust components
if: inputs.rust-components != ''
shell: bash
run: |
echo "📦 Installing components: ${{ inputs.rust-components }}"
rustup component add ${{ inputs.rust-components }}
- name: Install Rust target
if: inputs.rust-target != ''
shell: bash
run: |
echo "📦 Installing target: ${{ inputs.rust-target }}"
rustup target add ${{ inputs.rust-target }}
- name: Output version and summary
id: rust-setup
shell: bash
run: |
RUST_VERSION=$(rustc --version | cut -d' ' -f2)
echo "version=$RUST_VERSION" >> $GITHUB_OUTPUT
echo "📋 Setup complete:"
echo " Rust: $(rustc --version)"
echo " Cargo: $(cargo --version)"
echo " prek: $(prek --version 2>/dev/null || echo 'installed')"
echo " timelord: $(timelord --version 2>/dev/null || echo 'installed')"

View file

@@ -2,6 +2,7 @@ name: Checks / Prek
 on:
   push:
+  pull_request:
 permissions:
   contents: read
@@ -16,64 +17,18 @@ jobs:
         with:
           persist-credentials: false
-      - name: Setup Rust nightly
-        uses: ./.forgejo/actions/setup-rust
+      - name: Install uv
+        uses: https://github.com/astral-sh/setup-uv@v6
         with:
-          rust-version: nightly
-          github-token: ${{ secrets.GH_PUBLIC_RO }}
+          enable-cache: true
+          ignore-nothing-to-cache: true
+          cache-dependency-glob: ''
       - name: Run prek
         run: |
-          prek run \
+          uvx prek run \
            --all-files \
            --hook-stage manual \
            --show-diff-on-failure \
            --color=always \
            -v
-      - name: Check Rust formatting
-        run: |
-          cargo +nightly fmt --all -- --check && \
-          echo "✅ Formatting check passed" || \
-          exit 1
-
-  clippy-and-tests:
-    name: Clippy and Cargo Tests
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-      - name: Setup LLVM
-        uses: ./.forgejo/actions/setup-llvm-with-apt
-        with:
-          extra-packages: liburing-dev liburing2
-      - name: Setup Rust with caching
-        uses: ./.forgejo/actions/setup-rust
-        with:
-          github-token: ${{ secrets.GH_PUBLIC_RO }}
-      - name: Run Clippy lints
-        run: |
-          cargo clippy \
-            --workspace \
-            --features full \
-            --locked \
-            --no-deps \
-            --profile test \
-            -- \
-            -D warnings
-      - name: Run Cargo tests
-        run: |
-          cargo test \
-            --workspace \
-            --features full \
-            --locked \
-            --profile test \
-            --all-targets \
-            --no-fail-fast

View file

@@ -0,0 +1,144 @@
name: Checks / Rust
on:
push:
jobs:
format:
name: Format
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Install rust
uses: ./.forgejo/actions/rust-toolchain
with:
toolchain: "nightly"
components: "rustfmt"
- name: Check formatting
run: |
cargo +nightly fmt --all -- --check
clippy:
name: Clippy
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Install rust
uses: ./.forgejo/actions/rust-toolchain
- uses: https://github.com/actions/create-github-app-token@v2
id: app-token
with:
app-id: ${{ vars.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
github-api-url: https://api.github.com
owner: ${{ vars.GH_APP_OWNER }}
repositories: ""
- name: Install sccache
uses: ./.forgejo/actions/sccache
with:
token: ${{ steps.app-token.outputs.token }}
- run: sudo apt-get update
- name: Install system dependencies
uses: https://github.com/awalsh128/cache-apt-pkgs-action@v1
with:
packages: clang liburing-dev
version: 1
- name: Cache Rust registry
uses: actions/cache@v3
with:
path: |
~/.cargo/git
!~/.cargo/git/checkouts
~/.cargo/registry
!~/.cargo/registry/src
key: rust-registry-${{hashFiles('**/Cargo.lock') }}
- name: Timelord
uses: ./.forgejo/actions/timelord
with:
key: sccache-v0
path: .
- name: Clippy
run: |
cargo clippy \
--workspace \
--features full \
--locked \
--no-deps \
--profile test \
-- \
-D warnings
- name: Show sccache stats
if: always()
run: sccache --show-stats
cargo-test:
name: Cargo Test
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Install rust
uses: ./.forgejo/actions/rust-toolchain
- uses: https://github.com/actions/create-github-app-token@v2
id: app-token
with:
app-id: ${{ vars.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
github-api-url: https://api.github.com
owner: ${{ vars.GH_APP_OWNER }}
repositories: ""
- name: Install sccache
uses: ./.forgejo/actions/sccache
with:
token: ${{ steps.app-token.outputs.token }}
- run: sudo apt-get update
- name: Install system dependencies
uses: https://github.com/awalsh128/cache-apt-pkgs-action@v1
with:
packages: clang liburing-dev
version: 1
- name: Cache Rust registry
uses: actions/cache@v3
with:
path: |
~/.cargo/git
!~/.cargo/git/checkouts
~/.cargo/registry
!~/.cargo/registry/src
key: rust-registry-${{hashFiles('**/Cargo.lock') }}
- name: Timelord
uses: ./.forgejo/actions/timelord
with:
key: sccache-v0
path: .
- name: Cargo Test
run: |
cargo test \
--workspace \
--features full \
--locked \
--profile test \
--all-targets \
--no-fail-fast
- name: Show sccache stats
if: always()
run: sccache --show-stats

Cargo.lock (generated, 413 lines changed)
View file

@ -126,7 +126,7 @@ checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -183,7 +183,7 @@ dependencies = [
"rustc-hash 2.1.1", "rustc-hash 2.1.1",
"serde", "serde",
"serde_derive", "serde_derive",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -198,45 +198,6 @@ dependencies = [
"winnow", "winnow",
] ]
[[package]]
name = "asn1-rs"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f6fd5ddaf0351dff5b8da21b2fb4ff8e08ddd02857f0bf69c47639106c0fff0"
dependencies = [
"asn1-rs-derive",
"asn1-rs-impl",
"displaydoc",
"nom",
"num-traits",
"rusticata-macros",
"thiserror 1.0.69",
"time",
]
[[package]]
name = "asn1-rs-derive"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "726535892e8eae7e70657b4c8ea93d26b8553afb1ce617caee529ef96d7dee6c"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"synstructure 0.12.6",
]
[[package]]
name = "asn1-rs-impl"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]] [[package]]
name = "assign" name = "assign"
version = "1.1.1" version = "1.1.1"
@ -289,7 +250,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -300,7 +261,7 @@ checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -472,11 +433,11 @@ dependencies = [
"hyper", "hyper",
"hyper-util", "hyper-util",
"pin-project-lite", "pin-project-lite",
"rustls 0.23.29", "rustls",
"rustls-pemfile 2.2.0", "rustls-pemfile",
"rustls-pki-types", "rustls-pki-types",
"tokio", "tokio",
"tokio-rustls 0.26.2", "tokio-rustls",
"tower-service", "tower-service",
] ]
@ -491,9 +452,9 @@ dependencies = [
"http", "http",
"http-body-util", "http-body-util",
"pin-project", "pin-project",
"rustls 0.23.29", "rustls",
"tokio", "tokio",
"tokio-rustls 0.26.2", "tokio-rustls",
"tokio-util", "tokio-util",
"tower-layer", "tower-layer",
"tower-service", "tower-service",
@ -560,7 +521,7 @@ dependencies = [
"regex", "regex",
"rustc-hash 1.1.0", "rustc-hash 1.1.0",
"shlex", "shlex",
"syn 2.0.104", "syn",
"which", "which",
] ]
@ -579,7 +540,7 @@ dependencies = [
"regex", "regex",
"rustc-hash 2.1.1", "rustc-hash 2.1.1",
"shlex", "shlex",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -833,7 +794,7 @@ dependencies = [
"heck", "heck",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -1011,7 +972,7 @@ dependencies = [
"rand 0.8.5", "rand 0.8.5",
"regex", "regex",
"reqwest", "reqwest",
"ring 0.17.14", "ring",
"ruma", "ruma",
"sanitize-filename", "sanitize-filename",
"serde", "serde",
@ -1058,7 +1019,7 @@ dependencies = [
"itertools 0.14.0", "itertools 0.14.0",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -1083,7 +1044,7 @@ dependencies = [
"hyper-util", "hyper-util",
"log", "log",
"ruma", "ruma",
"rustls 0.23.29", "rustls",
"sd-notify", "sd-notify",
"sentry", "sentry",
"sentry-tower", "sentry-tower",
@ -1113,7 +1074,6 @@ dependencies = [
"image", "image",
"ipaddress", "ipaddress",
"itertools 0.14.0", "itertools 0.14.0",
"ldap3",
"log", "log",
"loole", "loole",
"lru-cache", "lru-cache",
@ -1223,16 +1183,6 @@ dependencies = [
"crossterm", "crossterm",
] ]
[[package]]
name = "core-foundation"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]] [[package]]
name = "core-foundation" name = "core-foundation"
version = "0.10.1" version = "0.10.1"
@ -1306,7 +1256,7 @@ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"strict", "strict",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -1416,7 +1366,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501"
dependencies = [ dependencies = [
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -1443,7 +1393,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -1484,20 +1434,6 @@ dependencies = [
"zeroize", "zeroize",
] ]
[[package]]
name = "der-parser"
version = "8.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbd676fbbab537128ef0278adb5576cf363cff6aa22a7b24effe97347cfab61e"
dependencies = [
"asn1-rs",
"displaydoc",
"nom",
"num-bigint",
"num-traits",
"rusticata-macros",
]
[[package]] [[package]]
name = "deranged" name = "deranged"
version = "0.4.0" version = "0.4.0"
@ -1525,7 +1461,7 @@ dependencies = [
"convert_case", "convert_case",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -1547,7 +1483,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -1608,7 +1544,7 @@ dependencies = [
"heck", "heck",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -1628,7 +1564,7 @@ checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -1798,7 +1734,6 @@ checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
dependencies = [ dependencies = [
"futures-channel", "futures-channel",
"futures-core", "futures-core",
"futures-executor",
"futures-io", "futures-io",
"futures-sink", "futures-sink",
"futures-task", "futures-task",
@ -1846,7 +1781,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -2095,7 +2030,7 @@ dependencies = [
"ipnet", "ipnet",
"once_cell", "once_cell",
"rand 0.9.2", "rand 0.9.2",
"ring 0.17.14", "ring",
"serde", "serde",
"thiserror 2.0.12", "thiserror 2.0.12",
"tinyvec", "tinyvec",
@ -2187,7 +2122,7 @@ dependencies = [
"markup5ever", "markup5ever",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -2281,11 +2216,11 @@ dependencies = [
"http", "http",
"hyper", "hyper",
"hyper-util", "hyper-util",
"rustls 0.23.29", "rustls",
"rustls-native-certs 0.8.1", "rustls-native-certs",
"rustls-pki-types", "rustls-pki-types",
"tokio", "tokio",
"tokio-rustls 0.26.2", "tokio-rustls",
"tower-service", "tower-service",
"webpki-roots 1.0.2", "webpki-roots 1.0.2",
] ]
@ -2509,7 +2444,7 @@ checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -2678,7 +2613,7 @@ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"regex", "regex",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -2693,43 +2628,6 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "lber"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2df7f9fd9f64cf8f59e1a4a0753fe7d575a5b38d3d7ac5758dcee9357d83ef0a"
dependencies = [
"bytes",
"nom",
]
[[package]]
name = "ldap3"
version = "0.11.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "166199a8207874a275144c8a94ff6eed5fcbf5c52303e4d9b4d53a0c7ac76554"
dependencies = [
"async-trait",
"bytes",
"futures",
"futures-util",
"lazy_static",
"lber",
"log",
"nom",
"percent-encoding",
"ring 0.16.20",
"rustls 0.21.12",
"rustls-native-certs 0.6.3",
"thiserror 1.0.69",
"tokio",
"tokio-rustls 0.24.1",
"tokio-stream",
"tokio-util",
"url",
"x509-parser",
]
[[package]] [[package]]
name = "lebe" name = "lebe"
version = "0.5.2" version = "0.5.2"
@ -2968,7 +2866,7 @@ checksum = "a9882ef5c56df184b8ffc107fc6c61e33ee3a654b021961d790a78571bb9d67a"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -3134,7 +3032,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -3196,15 +3094,6 @@ dependencies = [
"memchr", "memchr",
] ]
[[package]]
name = "oid-registry"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bedf36ffb6ba96c2eb7144ef6270557b52e54b20c0a8e1eb2ff99a6c6959bff"
dependencies = [
"asn1-rs",
]
[[package]] [[package]]
name = "once_cell" name = "once_cell"
version = "1.21.3" version = "1.21.3"
@ -3395,7 +3284,7 @@ dependencies = [
"proc-macro2", "proc-macro2",
"proc-macro2-diagnostics", "proc-macro2-diagnostics",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -3469,7 +3358,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -3569,7 +3458,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff24dfcda44452b9816fff4cd4227e1bb73ff5a2f1bc1105aa92fb8565ce44d2" checksum = "ff24dfcda44452b9816fff4cd4227e1bb73ff5a2f1bc1105aa92fb8565ce44d2"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -3598,7 +3487,7 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
"version_check", "version_check",
"yansi", "yansi",
] ]
@ -3619,7 +3508,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52717f9a02b6965224f95ca2a81e2e0c5c43baacd28ca057577988930b6c3d5b" checksum = "52717f9a02b6965224f95ca2a81e2e0c5c43baacd28ca057577988930b6c3d5b"
dependencies = [ dependencies = [
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -3642,7 +3531,7 @@ dependencies = [
"itertools 0.14.0", "itertools 0.14.0",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -3708,7 +3597,7 @@ dependencies = [
"quinn-proto", "quinn-proto",
"quinn-udp", "quinn-udp",
"rustc-hash 2.1.1", "rustc-hash 2.1.1",
"rustls 0.23.29", "rustls",
"socket2", "socket2",
"thiserror 2.0.12", "thiserror 2.0.12",
"tokio", "tokio",
@ -3726,9 +3615,9 @@ dependencies = [
"getrandom 0.3.3", "getrandom 0.3.3",
"lru-slab", "lru-slab",
"rand 0.9.2", "rand 0.9.2",
"ring 0.17.14", "ring",
"rustc-hash 2.1.1", "rustc-hash 2.1.1",
"rustls 0.23.29", "rustls",
"rustls-pki-types", "rustls-pki-types",
"slab", "slab",
"thiserror 2.0.12", "thiserror 2.0.12",
@ -3987,16 +3876,16 @@ dependencies = [
"percent-encoding", "percent-encoding",
"pin-project-lite", "pin-project-lite",
"quinn", "quinn",
"rustls 0.23.29", "rustls",
"rustls-native-certs 0.8.1", "rustls-native-certs",
"rustls-pemfile 2.2.0", "rustls-pemfile",
"rustls-pki-types", "rustls-pki-types",
"serde", "serde",
"serde_json", "serde_json",
"serde_urlencoded", "serde_urlencoded",
"sync_wrapper", "sync_wrapper",
"tokio", "tokio",
"tokio-rustls 0.26.2", "tokio-rustls",
"tokio-socks", "tokio-socks",
"tokio-util", "tokio-util",
"tower 0.5.2", "tower 0.5.2",
@ -4020,21 +3909,6 @@ version = "0.8.52"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c6a884d2998352bb4daf0183589aec883f16a6da1f4dde84d8e2e9a5409a1ce" checksum = "0c6a884d2998352bb4daf0183589aec883f16a6da1f4dde84d8e2e9a5409a1ce"
[[package]]
name = "ring"
version = "0.16.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc"
dependencies = [
"cc",
"libc",
"once_cell",
"spin",
"untrusted 0.7.1",
"web-sys",
"winapi",
]
[[package]] [[package]]
name = "ring" name = "ring"
version = "0.17.14" version = "0.17.14"
@ -4045,7 +3919,7 @@ dependencies = [
"cfg-if", "cfg-if",
"getrandom 0.2.16", "getrandom 0.2.16",
"libc", "libc",
"untrusted 0.9.0", "untrusted",
"windows-sys 0.52.0", "windows-sys 0.52.0",
] ]
@ -4219,7 +4093,7 @@ dependencies = [
"quote", "quote",
"ruma-identifiers-validation", "ruma-identifiers-validation",
"serde", "serde",
"syn 2.0.104", "syn",
"toml", "toml",
] ]
@ -4304,15 +4178,6 @@ dependencies = [
"semver", "semver",
] ]
[[package]]
name = "rusticata-macros"
version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632"
dependencies = [
"nom",
]
[[package]] [[package]]
name = "rustix" name = "rustix"
version = "0.38.44" version = "0.38.44"
@ -4339,18 +4204,6 @@ dependencies = [
"windows-sys 0.60.2", "windows-sys 0.60.2",
] ]
[[package]]
name = "rustls"
version = "0.21.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e"
dependencies = [
"log",
"ring 0.17.14",
"rustls-webpki 0.101.7",
"sct",
]
[[package]] [[package]]
name = "rustls" name = "rustls"
version = "0.23.29" version = "0.23.29"
@ -4360,25 +4213,13 @@ dependencies = [
"aws-lc-rs", "aws-lc-rs",
"log", "log",
"once_cell", "once_cell",
"ring 0.17.14", "ring",
"rustls-pki-types", "rustls-pki-types",
"rustls-webpki 0.103.4", "rustls-webpki",
"subtle", "subtle",
"zeroize", "zeroize",
] ]
[[package]]
name = "rustls-native-certs"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00"
dependencies = [
"openssl-probe",
"rustls-pemfile 1.0.4",
"schannel",
"security-framework 2.11.1",
]
[[package]] [[package]]
name = "rustls-native-certs" name = "rustls-native-certs"
version = "0.8.1" version = "0.8.1"
@ -4388,16 +4229,7 @@ dependencies = [
"openssl-probe", "openssl-probe",
"rustls-pki-types", "rustls-pki-types",
"schannel", "schannel",
"security-framework 3.2.0", "security-framework",
]
[[package]]
name = "rustls-pemfile"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c"
dependencies = [
"base64 0.21.7",
] ]
[[package]] [[package]]
@ -4419,16 +4251,6 @@ dependencies = [
"zeroize", "zeroize",
] ]
[[package]]
name = "rustls-webpki"
version = "0.101.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
dependencies = [
"ring 0.17.14",
"untrusted 0.9.0",
]
[[package]] [[package]]
name = "rustls-webpki" name = "rustls-webpki"
version = "0.103.4" version = "0.103.4"
@ -4436,9 +4258,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc" checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc"
dependencies = [ dependencies = [
"aws-lc-rs", "aws-lc-rs",
"ring 0.17.14", "ring",
"rustls-pki-types", "rustls-pki-types",
"untrusted 0.9.0", "untrusted",
] ]
[[package]] [[package]]
@ -4497,16 +4319,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "sct"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
dependencies = [
"ring 0.17.14",
"untrusted 0.9.0",
]
[[package]] [[package]]
name = "sd-notify" name = "sd-notify"
version = "0.4.5" version = "0.4.5"
@ -4516,19 +4328,6 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "security-framework"
version = "2.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
dependencies = [
"bitflags 2.9.1",
"core-foundation 0.9.4",
"core-foundation-sys",
"libc",
"security-framework-sys",
]
[[package]] [[package]]
name = "security-framework" name = "security-framework"
version = "3.2.0" version = "3.2.0"
@ -4536,7 +4335,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316"
dependencies = [ dependencies = [
"bitflags 2.9.1", "bitflags 2.9.1",
"core-foundation 0.10.1", "core-foundation",
"core-foundation-sys", "core-foundation-sys",
"libc", "libc",
"security-framework-sys", "security-framework-sys",
@ -4566,7 +4365,7 @@ checksum = "255914a8e53822abd946e2ce8baa41d4cded6b8e938913b7f7b9da5b7ab44335"
dependencies = [ dependencies = [
"httpdate", "httpdate",
"reqwest", "reqwest",
"rustls 0.23.29", "rustls",
"sentry-backtrace", "sentry-backtrace",
"sentry-contexts", "sentry-contexts",
"sentry-core", "sentry-core",
@ -4710,7 +4509,7 @@ checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -4924,12 +4723,6 @@ dependencies = [
"windows-sys 0.52.0", "windows-sys 0.52.0",
] ]
[[package]]
name = "spin"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]] [[package]]
name = "spki" name = "spki"
version = "0.7.3" version = "0.7.3"
@ -4998,17 +4791,6 @@ version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.104" version = "2.0.104"
@ -5029,18 +4811,6 @@ dependencies = [
"futures-core", "futures-core",
] ]
[[package]]
name = "synstructure"
version = "0.12.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"unicode-xid",
]
[[package]] [[package]]
name = "synstructure" name = "synstructure"
version = "0.13.2" version = "0.13.2"
@ -5049,7 +4819,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -5140,7 +4910,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -5151,7 +4921,7 @@ checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -5318,7 +5088,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -5333,23 +5103,13 @@ dependencies = [
"tokio-stream", "tokio-stream",
] ]
[[package]]
name = "tokio-rustls"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
dependencies = [
"rustls 0.21.12",
"tokio",
]
[[package]] [[package]]
name = "tokio-rustls" name = "tokio-rustls"
version = "0.26.2" version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b"
dependencies = [ dependencies = [
"rustls 0.23.29", "rustls",
"tokio", "tokio",
] ]
@ -5547,7 +5307,7 @@ source = "git+https://forgejo.ellis.link/continuwuation/tracing?rev=1e64095a8051
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -5701,24 +5461,12 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"
[[package]]
name = "unicode-xid"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
[[package]] [[package]]
name = "unsafe-libyaml" name = "unsafe-libyaml"
version = "0.2.11" version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
[[package]]
name = "untrusted"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
[[package]] [[package]]
name = "untrusted" name = "untrusted"
version = "0.9.0" version = "0.9.0"
@ -5734,7 +5482,7 @@ dependencies = [
"base64 0.22.1", "base64 0.22.1",
"log", "log",
"once_cell", "once_cell",
"rustls 0.23.29", "rustls",
"rustls-pki-types", "rustls-pki-types",
"url", "url",
"webpki-roots 0.26.11", "webpki-roots 0.26.11",
@ -5869,7 +5617,7 @@ dependencies = [
"log", "log",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
@ -5904,7 +5652,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
"wasm-bindgen-backend", "wasm-bindgen-backend",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
@ -6084,7 +5832,7 @@ checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -6095,7 +5843,7 @@ checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -6416,23 +6164,6 @@ version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
[[package]]
name = "x509-parser"
version = "0.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7069fba5b66b9193bd2c5d3d4ff12b839118f6bcbef5328efafafb5395cf63da"
dependencies = [
"asn1-rs",
"data-encoding",
"der-parser",
"lazy_static",
"nom",
"oid-registry",
"rusticata-macros",
"thiserror 1.0.69",
"time",
]
[[package]] [[package]]
name = "xml5ever" name = "xml5ever"
version = "0.18.1" version = "0.18.1"
@ -6490,8 +6221,8 @@ checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
"synstructure 0.13.2", "synstructure",
] ]
[[package]] [[package]]
@ -6511,7 +6242,7 @@ checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]
@ -6531,8 +6262,8 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
"synstructure 0.13.2", "synstructure",
] ]
[[package]] [[package]]
@ -6571,7 +6302,7 @@ checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.104", "syn",
] ]
[[package]] [[package]]

View file

@@ -546,11 +546,6 @@ features = ["std"]
 [workspace.dependencies.maplit]
 version = "1.0.2"

-[workspace.dependencies.ldap3]
-version = "0.11.5"
-default-features = false
-features = ["sync", "tls-rustls"]
-
 #
 # Patches
 #
@@ -872,7 +867,7 @@ unused-qualifications = "warn"
 #unused-results = "warn" # TODO
 ## some sadness
-mismatched_lifetime_syntaxes = "allow" # TODO!
+elided_named_lifetimes = "allow" # TODO!
 let_underscore_drop = "allow"
 missing_docs = "allow"
 # cfgs cannot be limited to expected cfgs or their de facto non-transitive/opt-in use-case e.g.
@@ -1011,6 +1006,3 @@ literal_string_with_formatting_args = { level = "allow", priority = 1 }
 needless_raw_string_hashes = "allow"
-
-# TODO: Enable this lint & fix all instances
-collapsible_if = "allow"

View file

@@ -1696,10 +1696,6 @@
 #
 #config_reload_signal = true

-# This item is undocumented. Please contribute documentation for it.
-#
-#ldap = false
-
 [global.tls]

 # Path to a valid TLS certificate file.
@@ -1778,91 +1774,3 @@
 # is 33.55MB. Setting it to 0 disables blurhashing.
 #
 #blurhash_max_raw_size = 33554432
[global.ldap]
# Whether to enable LDAP login.
#
# example: "true"
#
#enable = false
# Whether to force LDAP authentication or authorize classical password
# login.
#
# example: "true"
#
#ldap_only = false
# URI of the LDAP server.
#
# example: "ldap://ldap.example.com:389"
#
#uri = ""
# Root of the searches.
#
# example: "ou=users,dc=example,dc=org"
#
#base_dn = ""
# Bind DN if anonymous search is not enabled.
#
# You can use the variable `{username}` that will be replaced by the
# entered username. In such case, the password used to bind will be the
# one provided for the login and not the one given by
# `bind_password_file`. Beware: automatically granting admin rights will
# not work if you use this direct bind instead of a LDAP search.
#
# example: "cn=ldap-reader,dc=example,dc=org" or
# "cn={username},ou=users,dc=example,dc=org"
#
#bind_dn = ""
# Path to a file on the system that contains the password for the
# `bind_dn`.
#
# The server must be able to access the file, and it must not be empty.
#
#bind_password_file = ""
# Search filter to limit user searches.
#
# You can use the variable `{username}` that will be replaced by the
# entered username for more complex filters.
#
# example: "(&(objectClass=person)(memberOf=matrix))"
#
#filter = "(objectClass=*)"
# Attribute to use to uniquely identify the user.
#
# example: "uid" or "cn"
#
#uid_attribute = "uid"
# Attribute containing the display name of the user.
#
# example: "givenName" or "sn"
#
#name_attribute = "givenName"
# Root of the searches for admin users.
#
# Defaults to `base_dn` if empty.
#
# example: "ou=admins,dc=example,dc=org"
#
#admin_base_dn = ""
# The LDAP search filter to find administrative users for continuwuity.
#
# If left blank, administrative state must be configured manually for each
# user.
#
# You can use the variable `{username}` that will be replaced by the
# entered username for more complex filters.
#
# example: "(objectClass=conduwuitAdmin)" or "(uid={username})"
#
#admin_filter = ""

View file

@@ -1,68 +0,0 @@
[Unit]
Description=Continuwuity - Matrix homeserver
Documentation=https://continuwuity.org/
Wants=network-online.target
After=network-online.target
Alias=matrix-conduwuit.service
[Service]
DynamicUser=yes
User=conduwuit
Group=conduwuit
Type=notify
Environment="CONTINUWUITY_CONFIG=/etc/conduwuit/conduwuit.toml"
Environment="CONTINUWUITY_LOG_TO_JOURNALD=true"
Environment="CONTINUWUITY_JOURNALD_IDENTIFIER=%N"
ExecStart=/usr/bin/conduwuit
AmbientCapabilities=
CapabilityBoundingSet=
DevicePolicy=closed
LockPersonality=yes
MemoryDenyWriteExecute=yes
NoNewPrivileges=yes
#ProcSubset=pid
ProtectClock=yes
ProtectControlGroups=yes
ProtectHome=yes
ProtectHostname=yes
ProtectKernelLogs=yes
ProtectKernelModules=yes
ProtectKernelTunables=yes
ProtectProc=invisible
ProtectSystem=strict
PrivateDevices=yes
PrivateMounts=yes
PrivateTmp=yes
PrivateUsers=yes
PrivateIPC=yes
RemoveIPC=yes
RestrictAddressFamilies=AF_INET AF_INET6 AF_UNIX
RestrictNamespaces=yes
RestrictRealtime=yes
RestrictSUIDSGID=yes
SystemCallArchitectures=native
SystemCallFilter=@system-service @resources
SystemCallFilter=~@clock @debug @module @mount @reboot @swap @cpu-emulation @obsolete @timer @chown @setuid @privileged @keyring @ipc
SystemCallErrorNumber=EPERM
StateDirectory=conduwuit
ConfigurationDirectory=conduwuit
RuntimeDirectory=conduwuit
RuntimeDirectoryMode=0750
Restart=on-failure
RestartSec=5
TimeoutStopSec=2m
TimeoutStartSec=2m
StartLimitInterval=1m
StartLimitBurst=5
[Install]
WantedBy=multi-user.target

View file

@@ -1,80 +0,0 @@
# This should be run using rpkg-util: https://docs.pagure.org/rpkg-util
# it requires Internet access and is not suitable for Fedora main repos
# TODO: rpkg-util is no longer maintained, find a replacement
Name: continuwuity
Version: {{{ git_repo_version }}}
Release: 1%{?dist}
Summary: Very cool Matrix chat homeserver written in Rust
License: Apache-2.0 AND MIT
URL: https://continuwuity.org
VCS: {{{ git_repo_vcs }}}
Source: {{{ git_repo_pack }}}
BuildRequires: cargo-rpm-macros >= 25
BuildRequires: systemd-rpm-macros
# Needed to build rust-librocksdb-sys
BuildRequires: clang
BuildRequires: liburing-devel
Requires: liburing
Requires: glibc
Requires: libstdc++
%global _description %{expand:
A cool hard fork of Conduit, a Matrix homeserver written in Rust}
%description %{_description}
%prep
{{{ git_repo_setup_macro }}}
%cargo_prep -N
# Perform an online build so Git dependencies can be retrieved
sed -i 's/^offline = true$//' .cargo/config.toml
%build
%cargo_build
# Here's the one legally required mystery incantation in this file.
# Some of our dependencies have source files which are (for some reason) marked as excutable.
# Files in .cargo/registry/ are copied into /usr/src/ by the debuginfo machinery
# at the end of the build step, and then the BRP shebang mangling script checks
# the entire buildroot to find executable files, and fails the build because
# it thinks Rust's file attributes are shebangs because they start with `#!`.
# So we have to clear the executable bit on all of them before that happens.
find .cargo/registry/ -executable -name "*.rs" -exec chmod -x {} +
# TODO: this fails currently because it's forced to run in offline mode
# {cargo_license -- --no-dev} > LICENSE.dependencies
%install
install -Dpm0755 target/rpm/conduwuit -t %{buildroot}%{_bindir}
install -Dpm0644 fedora/conduwuit.service -t %{buildroot}%{_unitdir}
install -Dpm0644 conduwuit-example.toml %{buildroot}%{_sysconfdir}/conduwuit/conduwuit.toml
%files
%license LICENSE
%license src/core/matrix/state_res/LICENSE
%doc CODE_OF_CONDUCT.md
%doc CONTRIBUTING.md
%doc README.md
%doc SECURITY.md
%config %{_sysconfdir}/conduwuit/conduwuit.toml
%{_bindir}/conduwuit
%{_unitdir}/conduwuit.service
# Do not create /var/lib/conduwuit, systemd will create it if necessary
%post
%systemd_post conduwuit.service
%preun
%systemd_preun conduwuit.service
%postun
%systemd_postun_with_restart conduwuit.service
%changelog
{{{ git_repo_changelog }}}

flake.lock (generated, 24 lines changed)
View file

@@ -153,11 +153,11 @@
       "rust-analyzer-src": "rust-analyzer-src"
     },
     "locked": {
-      "lastModified": 1755585599,
-      "narHash": "sha256-tl/0cnsqB/Yt7DbaGMel2RLa7QG5elA8lkaOXli6VdY=",
+      "lastModified": 1751525020,
+      "narHash": "sha256-oDO6lCYS5Bf4jUITChj9XV7k3TP38DE0Ckz5n5ORCME=",
       "owner": "nix-community",
       "repo": "fenix",
-      "rev": "6ed03ef4c8ec36d193c18e06b9ecddde78fb7e42",
+      "rev": "a1a5f92f47787e7df9f30e5e5ac13e679215aa1e",
       "type": "github"
     },
     "original": {
@@ -516,16 +516,16 @@
     "rocksdb": {
       "flake": false,
       "locked": {
-        "lastModified": 1753385396,
-        "narHash": "sha256-/Hvy1yTH/0D5aa7bc+/uqFugCQq4InTdwlRw88vA5IY=",
-        "ref": "10.4.fb",
-        "rev": "28d4b7276c16ed3e28af1bd96162d6442ce25923",
-        "revCount": 13318,
+        "lastModified": 1741308171,
+        "narHash": "sha256-YdBvdQ75UJg5ffwNjxizpviCVwVDJnBkM8ZtGIduMgY=",
+        "ref": "v9.11.1",
+        "rev": "3ce04794bcfbbb0d2e6f81ae35fc4acf688b6986",
+        "revCount": 13177,
         "type": "git",
         "url": "https://forgejo.ellis.link/continuwuation/rocksdb"
       },
       "original": {
-        "ref": "10.4.fb",
+        "ref": "v9.11.1",
         "type": "git",
         "url": "https://forgejo.ellis.link/continuwuation/rocksdb"
       }
@@ -546,11 +546,11 @@
     "rust-analyzer-src": {
       "flake": false,
       "locked": {
-        "lastModified": 1755504847,
-        "narHash": "sha256-VX0B9hwhJypCGqncVVLC+SmeMVd/GAYbJZ0MiiUn2Pk=",
+        "lastModified": 1751433876,
+        "narHash": "sha256-IsdwOcvLLDDlkFNwhdD5BZy20okIQL01+UQ7Kxbqh8s=",
         "owner": "rust-lang",
         "repo": "rust-analyzer",
-        "rev": "a905e3b21b144d77e1b304e49f3264f6f8d4db75",
+        "rev": "11d45c881389dae90b0da5a94cde52c79d0fc7ef",
         "type": "github"
       },
       "original": {

View file

@@ -17,7 +17,7 @@
     nix-filter.url = "github:numtide/nix-filter?ref=main";
     nixpkgs.url = "github:NixOS/nixpkgs?ref=nixpkgs-unstable";
     rocksdb = {
-      url = "git+https://forgejo.ellis.link/continuwuation/rocksdb?ref=10.4.fb";
+      url = "git+https://forgejo.ellis.link/continuwuation/rocksdb?ref=v9.11.1";
      flake = false;
    };
  };
@@ -31,17 +31,13 @@
          inherit system;
        };

-        fnx = inputs.fenix.packages.${system};
        # The Rust toolchain to use
-        toolchain = fnx.combine [
-          (fnx.fromToolchainFile {
-            file = ./rust-toolchain.toml;
-            # See also `rust-toolchain.toml`
-            sha256 = "sha256-+9FmLhAOezBZCOziO0Qct1NOrfpjNsXxc/8I0c7BdKE=";
-          })
-          fnx.complete.rustfmt
-        ];
+        toolchain = inputs.fenix.packages.${system}.fromToolchainFile {
+          file = ./rust-toolchain.toml;
+          # See also `rust-toolchain.toml`
+          sha256 = "sha256-KUm16pHj+cRedf8vxs/Hd2YWxpOrWZ7UOrwhILdSJBU=";
+        };

        mkScope =
          pkgs:
@@ -66,7 +62,7 @@
          }).overrideAttrs
            (old: {
              src = inputs.rocksdb;
-              version = "v10.4.fb";
+              version = "v9.11.1";
              cmakeFlags =
                pkgs.lib.subtractLists [
                  # No real reason to have snappy or zlib, no one uses this

View file

@@ -13,8 +13,8 @@
     "enabled": true
   },
   "labels": [
-    "Dependencies",
-    "Dependencies/Renovate"
+    "dependencies",
+    "github_actions"
   ],
   "ignoreDeps": [
     "tikv-jemallocator",

View file

@@ -9,16 +9,13 @@
 # If you're having trouble making the relevant changes, bug a maintainer.

 [toolchain]
+channel = "1.87.0"
 profile = "minimal"
-channel = "1.89.0"
 components = [
     # For rust-analyzer
     "rust-src",
     "rust-analyzer",
     # For CI and editors
+    "rustfmt",
     "clippy",
-    # you have to install rustfmt nightly yourself (if you're not on NixOS)
-    #
-    # The rust-toolchain.toml file doesn't provide any syntax for specifying components from different toolchains
-    # "rustfmt"
 ]


@ -68,8 +68,7 @@ pub(super) async fn create_user(&self, username: String, password: Option<String
// Create user // Create user
self.services self.services
.users .users
.create(&user_id, Some(password.as_str()), None) .create(&user_id, Some(password.as_str()))?;
.await?;
// Default to pretty displayname // Default to pretty displayname
let mut displayname = user_id.localpart().to_owned(); let mut displayname = user_id.localpart().to_owned();
@ -285,7 +284,6 @@ pub(super) async fn reset_password(&self, username: String, password: Option<Str
.services .services
.users .users
.set_password(&user_id, Some(new_password.as_str())) .set_password(&user_id, Some(new_password.as_str()))
.await
{ {
| Err(e) => return Err!("Couldn't reset the password for user {user_id}: {e}"), | Err(e) => return Err!("Couldn't reset the password for user {user_id}: {e}"),
| Ok(()) => { | Ok(()) => {


@ -49,9 +49,6 @@ jemalloc_stats = [
"conduwuit-core/jemalloc_stats", "conduwuit-core/jemalloc_stats",
"conduwuit-service/jemalloc_stats", "conduwuit-service/jemalloc_stats",
] ]
ldap = [
"conduwuit-service/ldap"
]
release_max_log_level = [ release_max_log_level = [
"conduwuit-core/release_max_log_level", "conduwuit-core/release_max_log_level",
"conduwuit-service/release_max_log_level", "conduwuit-service/release_max_log_level",


@ -373,7 +373,7 @@ pub(crate) async fn register_route(
let password = if is_guest { None } else { body.password.as_deref() }; let password = if is_guest { None } else { body.password.as_deref() };
// Create user // Create user
services.users.create(&user_id, password, None).await?; services.users.create(&user_id, password)?;
// Default to pretty displayname // Default to pretty displayname
let mut displayname = user_id.localpart().to_owned(); let mut displayname = user_id.localpart().to_owned();
@ -659,8 +659,7 @@ pub(crate) async fn change_password_route(
services services
.users .users
.set_password(sender_user, Some(&body.new_password)) .set_password(sender_user, Some(&body.new_password))?;
.await?;
if body.logout_devices { if body.logout_devices {
// Logout all devices except the current one // Logout all devices except the current one


@ -90,7 +90,7 @@ pub(crate) async fn get_displayname_route(
.await .await
{ {
if !services.users.exists(&body.user_id).await { if !services.users.exists(&body.user_id).await {
services.users.create(&body.user_id, None, None).await?; services.users.create(&body.user_id, None)?;
} }
services services
@ -189,7 +189,7 @@ pub(crate) async fn get_avatar_url_route(
.await .await
{ {
if !services.users.exists(&body.user_id).await { if !services.users.exists(&body.user_id).await {
services.users.create(&body.user_id, None, None).await?; services.users.create(&body.user_id, None)?;
} }
services services
@ -248,7 +248,7 @@ pub(crate) async fn get_profile_route(
.await .await
{ {
if !services.users.exists(&body.user_id).await { if !services.users.exists(&body.user_id).await {
services.users.create(&body.user_id, None, None).await?; services.users.create(&body.user_id, None)?;
} }
services services


@ -3,14 +3,13 @@ use std::time::Duration;
use axum::extract::State; use axum::extract::State;
use axum_client_ip::InsecureClientIp; use axum_client_ip::InsecureClientIp;
use conduwuit::{ use conduwuit::{
Err, Error, Result, debug, err, info, Err, Error, Result, debug, err, info, utils,
utils::{self, ReadyExt, hash}, utils::{ReadyExt, hash},
}; };
use conduwuit_core::{debug_error, debug_warn}; use conduwuit_service::uiaa::SESSION_ID_LENGTH;
use conduwuit_service::{Services, uiaa::SESSION_ID_LENGTH};
use futures::StreamExt; use futures::StreamExt;
use ruma::{ use ruma::{
OwnedUserId, UserId, UserId,
api::client::{ api::client::{
session::{ session::{
get_login_token, get_login_token,
@ -50,154 +49,6 @@ pub(crate) async fn get_login_types_route(
])) ]))
} }
/// Authenticates the given user by its ID and its password.
///
/// Returns the user ID if successful, and an error otherwise.
#[tracing::instrument(skip_all, fields(%user_id), name = "password")]
pub(crate) async fn password_login(
services: &Services,
user_id: &UserId,
lowercased_user_id: &UserId,
password: &str,
) -> Result<OwnedUserId> {
	// Restrict login to accounts of type 'password' only, including untyped
// legacy accounts which are equivalent to 'password'.
if services
.users
.origin(user_id)
.await
.is_ok_and(|origin| origin != "password")
{
return Err!(Request(Forbidden("Account does not permit password login.")));
}
let (hash, user_id) = match services.users.password_hash(user_id).await {
| Ok(hash) => (hash, user_id),
| Err(_) => services
.users
.password_hash(lowercased_user_id)
.await
.map(|hash| (hash, lowercased_user_id))
.map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?,
};
if hash.is_empty() {
return Err!(Request(UserDeactivated("The user has been deactivated")));
}
hash::verify_password(password, &hash)
.inspect_err(|e| debug_error!("{e}"))
.map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?;
Ok(user_id.to_owned())
}
/// Authenticates the given user through the configured LDAP server.
///
/// Creates the user if they are found in LDAP and do not already have an
/// account.
#[tracing::instrument(skip_all, fields(%user_id), name = "ldap")]
pub(super) async fn ldap_login(
services: &Services,
user_id: &UserId,
lowercased_user_id: &UserId,
password: &str,
) -> Result<OwnedUserId> {
let (user_dn, is_ldap_admin) = match services.config.ldap.bind_dn.as_ref() {
| Some(bind_dn) if bind_dn.contains("{username}") =>
(bind_dn.replace("{username}", lowercased_user_id.localpart()), false),
| _ => {
debug!("Searching user in LDAP");
let dns = services.users.search_ldap(user_id).await?;
if dns.len() >= 2 {
return Err!(Ldap("LDAP search returned two or more results"));
}
let Some((user_dn, is_admin)) = dns.first() else {
return password_login(services, user_id, lowercased_user_id, password).await;
};
(user_dn.clone(), *is_admin)
},
};
let user_id = services
.users
.auth_ldap(&user_dn, password)
.await
.map(|()| lowercased_user_id.to_owned())?;
	// LDAP users are automatically created on their first login attempt. This is
	// a very common feature among services that use an LDAP provider for their
	// users (Synapse, Nextcloud, Jellyfin, ...).
	//
	// LDAP users are created with a dummy but non-empty password, because an
	// empty password is reserved for deactivated accounts. The conduwuit password
	// field is never read when logging in an LDAP user, so this is not an issue.
if !services.users.exists(lowercased_user_id).await {
services
.users
.create(lowercased_user_id, Some("*"), Some("ldap"))
.await?;
}
let is_conduwuit_admin = services.admin.user_is_admin(lowercased_user_id).await;
if is_ldap_admin && !is_conduwuit_admin {
services.admin.make_user_admin(lowercased_user_id).await?;
} else if !is_ldap_admin && is_conduwuit_admin {
services.admin.revoke_admin(lowercased_user_id).await?;
}
Ok(user_id)
}
pub(crate) async fn handle_login(
services: &Services,
body: &Ruma<login::v3::Request>,
identifier: Option<&uiaa::UserIdentifier>,
password: &str,
user: Option<&String>,
) -> Result<OwnedUserId> {
debug!("Got password login type");
let user_id =
if let Some(uiaa::UserIdentifier::UserIdOrLocalpart(user_id)) = identifier {
UserId::parse_with_server_name(user_id, &services.config.server_name)
} else if let Some(user) = user {
UserId::parse_with_server_name(user, &services.config.server_name)
} else {
return Err!(Request(Unknown(
debug_warn!(?body.login_info, "Valid identifier or username was not provided (invalid or unsupported login type?)")
)));
}
.map_err(|e| err!(Request(InvalidUsername(warn!("Username is invalid: {e}")))))?;
let lowercased_user_id = UserId::parse_with_server_name(
user_id.localpart().to_lowercase(),
&services.config.server_name,
)?;
if !services.globals.user_is_local(&user_id)
|| !services.globals.user_is_local(&lowercased_user_id)
{
return Err!(Request(Unknown("User ID does not belong to this homeserver")));
}
if cfg!(feature = "ldap") && services.config.ldap.enable {
match Box::pin(ldap_login(services, &user_id, &lowercased_user_id, password)).await {
| Ok(user_id) => Ok(user_id),
| Err(err) if services.config.ldap.ldap_only => Err(err),
| Err(err) => {
debug_warn!("{err}");
password_login(services, &user_id, &lowercased_user_id, password).await
},
}
} else {
password_login(services, &user_id, &lowercased_user_id, password).await
}
}
/// # `POST /_matrix/client/v3/login` /// # `POST /_matrix/client/v3/login`
/// ///
/// Authenticates the user and returns an access token it can use in subsequent /// Authenticates the user and returns an access token it can use in subsequent
@ -229,7 +80,70 @@ pub(crate) async fn login_route(
password, password,
user, user,
.. ..
}) => handle_login(&services, &body, identifier.as_ref(), password, user.as_ref()).await?, }) => {
debug!("Got password login type");
let user_id =
if let Some(uiaa::UserIdentifier::UserIdOrLocalpart(user_id)) = identifier {
UserId::parse_with_server_name(user_id, &services.config.server_name)
} else if let Some(user) = user {
UserId::parse_with_server_name(user, &services.config.server_name)
} else {
return Err!(Request(Unknown(
debug_warn!(?body.login_info, "Valid identifier or username was not provided (invalid or unsupported login type?)")
)));
}
.map_err(|e| err!(Request(InvalidUsername(warn!("Username is invalid: {e}")))))?;
let lowercased_user_id = UserId::parse_with_server_name(
user_id.localpart().to_lowercase(),
&services.config.server_name,
)?;
if !services.globals.user_is_local(&user_id)
|| !services.globals.user_is_local(&lowercased_user_id)
{
return Err!(Request(Unknown("User ID does not belong to this homeserver")));
}
// first try the username as-is
let hash = services
.users
.password_hash(&user_id)
.await
.inspect_err(|e| debug!("{e}"));
match hash {
| Ok(hash) => {
if hash.is_empty() {
return Err!(Request(UserDeactivated("The user has been deactivated")));
}
hash::verify_password(password, &hash)
.inspect_err(|e| debug!("{e}"))
.map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?;
user_id
},
| Err(_e) => {
let hash_lowercased_user_id = services
.users
.password_hash(&lowercased_user_id)
.await
.inspect_err(|e| debug!("{e}"))
.map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?;
if hash_lowercased_user_id.is_empty() {
return Err!(Request(UserDeactivated("The user has been deactivated")));
}
hash::verify_password(password, &hash_lowercased_user_id)
.inspect_err(|e| debug!("{e}"))
.map_err(|_| err!(Request(Forbidden("Wrong username or password."))))?;
lowercased_user_id
},
}
},
| login::v3::LoginInfo::Token(login::v3::Token { token }) => { | login::v3::LoginInfo::Token(login::v3::Token { token }) => {
debug!("Got token login type"); debug!("Got token login type");
if !services.server.config.login_via_existing_session { if !services.server.config.login_via_existing_session {


@ -45,7 +45,6 @@ use crate::{
type TodoRooms = BTreeMap<OwnedRoomId, (BTreeSet<TypeStateKey>, usize, u64)>; type TodoRooms = BTreeMap<OwnedRoomId, (BTreeSet<TypeStateKey>, usize, u64)>;
const SINGLE_CONNECTION_SYNC: &str = "single_connection_sync"; const SINGLE_CONNECTION_SYNC: &str = "single_connection_sync";
#[allow(clippy::cognitive_complexity)]
/// POST `/_matrix/client/unstable/org.matrix.msc3575/sync` /// POST `/_matrix/client/unstable/org.matrix.msc3575/sync`
/// ///
/// Sliding Sync endpoint (future endpoint: `/_matrix/client/v4/sync`) /// Sliding Sync endpoint (future endpoint: `/_matrix/client/v4/sync`)


@ -292,7 +292,7 @@ pub(crate) async fn get_timezone_key_route(
.await .await
{ {
if !services.users.exists(&body.user_id).await { if !services.users.exists(&body.user_id).await {
services.users.create(&body.user_id, None, None).await?; services.users.create(&body.user_id, None)?;
} }
services services
@ -352,7 +352,7 @@ pub(crate) async fn get_profile_key_route(
.await .await
{ {
if !services.users.exists(&body.user_id).await { if !services.users.exists(&body.user_id).await {
services.users.create(&body.user_id, None, None).await?; services.users.create(&body.user_id, None)?;
} }
services services


@ -1,4 +1,3 @@
#![allow(clippy::doc_link_with_quotes)]
pub mod check; pub mod check;
pub mod manager; pub mod manager;
pub mod proxy; pub mod proxy;
@ -1948,10 +1947,6 @@ pub struct Config {
pub allow_invalid_tls_certificates_yes_i_know_what_the_fuck_i_am_doing_with_this_and_i_know_this_is_insecure: pub allow_invalid_tls_certificates_yes_i_know_what_the_fuck_i_am_doing_with_this_and_i_know_this_is_insecure:
bool, bool,
// external structure; separate section
#[serde(default)]
pub ldap: LdapConfig,
// external structure; separate section // external structure; separate section
#[serde(default)] #[serde(default)]
pub blurhashing: BlurhashConfig, pub blurhashing: BlurhashConfig,
@ -2046,114 +2041,6 @@ pub struct BlurhashConfig {
pub blurhash_max_raw_size: u64, pub blurhash_max_raw_size: u64,
} }
#[derive(Clone, Debug, Default, Deserialize)]
#[config_example_generator(filename = "conduwuit-example.toml", section = "global.ldap")]
pub struct LdapConfig {
/// Whether to enable LDAP login.
///
/// example: "true"
#[serde(default)]
pub enable: bool,
/// Whether to force LDAP authentication or authorize classical password
/// login.
///
/// example: "true"
#[serde(default)]
pub ldap_only: bool,
/// URI of the LDAP server.
///
/// example: "ldap://ldap.example.com:389"
///
/// default: ""
#[serde(default)]
pub uri: Option<Url>,
/// Root of the searches.
///
/// example: "ou=users,dc=example,dc=org"
///
/// default: ""
#[serde(default)]
pub base_dn: String,
/// Bind DN if anonymous search is not enabled.
///
/// You can use the variable `{username}` that will be replaced by the
/// entered username. In such case, the password used to bind will be the
/// one provided for the login and not the one given by
/// `bind_password_file`. Beware: automatically granting admin rights will
/// not work if you use this direct bind instead of a LDAP search.
///
/// example: "cn=ldap-reader,dc=example,dc=org" or
/// "cn={username},ou=users,dc=example,dc=org"
///
/// default: ""
#[serde(default)]
pub bind_dn: Option<String>,
/// Path to a file on the system that contains the password for the
/// `bind_dn`.
///
/// The server must be able to access the file, and it must not be empty.
///
/// default: ""
#[serde(default)]
pub bind_password_file: Option<PathBuf>,
/// Search filter to limit user searches.
///
/// You can use the variable `{username}` that will be replaced by the
/// entered username for more complex filters.
///
/// example: "(&(objectClass=person)(memberOf=matrix))"
///
/// default: "(objectClass=*)"
#[serde(default = "default_ldap_search_filter")]
pub filter: String,
/// Attribute to use to uniquely identify the user.
///
/// example: "uid" or "cn"
///
/// default: "uid"
#[serde(default = "default_ldap_uid_attribute")]
pub uid_attribute: String,
/// Attribute containing the display name of the user.
///
/// example: "givenName" or "sn"
///
/// default: "givenName"
#[serde(default = "default_ldap_name_attribute")]
pub name_attribute: String,
/// Root of the searches for admin users.
///
/// Defaults to `base_dn` if empty.
///
/// example: "ou=admins,dc=example,dc=org"
///
/// default: ""
#[serde(default)]
pub admin_base_dn: String,
/// The LDAP search filter to find administrative users for continuwuity.
///
/// If left blank, administrative state must be configured manually for each
/// user.
///
/// You can use the variable `{username}` that will be replaced by the
/// entered username for more complex filters.
///
/// example: "(objectClass=conduwuitAdmin)" or "(uid={username})"
///
/// default: ""
#[serde(default)]
pub admin_filter: String,
}
#[derive(Deserialize, Clone, Debug)] #[derive(Deserialize, Clone, Debug)]
#[serde(transparent)] #[serde(transparent)]
struct ListeningPort { struct ListeningPort {
@ -2543,9 +2430,3 @@ pub(super) fn default_blurhash_x_component() -> u32 { 4 }
pub(super) fn default_blurhash_y_component() -> u32 { 3 } pub(super) fn default_blurhash_y_component() -> u32 { 3 }
// end recommended & blurhashing defaults // end recommended & blurhashing defaults
fn default_ldap_search_filter() -> String { "(objectClass=*)".to_owned() }
fn default_ldap_uid_attribute() -> String { String::from("uid") }
fn default_ldap_name_attribute() -> String { String::from("givenName") }


@ -100,7 +100,7 @@ pub fn trap() {
#[must_use] #[must_use]
pub fn panic_str(p: &Box<dyn Any + Send>) -> &'static str { pub fn panic_str(p: &Box<dyn Any + Send>) -> &'static str {
(**p).downcast_ref::<&str>().copied().unwrap_or_default() p.downcast_ref::<&str>().copied().unwrap_or_default()
} }
#[inline(always)] #[inline(always)]


@ -110,8 +110,6 @@ pub enum Error {
InconsistentRoomState(&'static str, ruma::OwnedRoomId), InconsistentRoomState(&'static str, ruma::OwnedRoomId),
#[error(transparent)] #[error(transparent)]
IntoHttp(#[from] ruma::api::error::IntoHttpError), IntoHttp(#[from] ruma::api::error::IntoHttpError),
#[error("{0}")]
Ldap(Cow<'static, str>),
#[error(transparent)] #[error(transparent)]
Mxc(#[from] ruma::MxcUriError), Mxc(#[from] ruma::MxcUriError),
#[error(transparent)] #[error(transparent)]


@ -19,7 +19,7 @@ where
S: Stream<Item = K> + Send + 'a, S: Stream<Item = K> + Send + 'a,
K: AsRef<[u8]> + Send + Sync + 'a, K: AsRef<[u8]> + Send + Sync + 'a,
{ {
fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a; fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a;
} }
impl<'a, K, S> Get<'a, K, S> for S impl<'a, K, S> Get<'a, K, S> for S
@ -29,7 +29,7 @@ where
K: AsRef<[u8]> + Send + Sync + 'a, K: AsRef<[u8]> + Send + Sync + 'a,
{ {
#[inline] #[inline]
fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a { fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a {
map.get_batch(self) map.get_batch(self)
} }
} }
@ -39,7 +39,7 @@ where
pub(crate) fn get_batch<'a, S, K>( pub(crate) fn get_batch<'a, S, K>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
keys: S, keys: S,
) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a ) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a
where where
S: Stream<Item = K> + Send + 'a, S: Stream<Item = K> + Send + 'a,
K: AsRef<[u8]> + Send + Sync + 'a, K: AsRef<[u8]> + Send + Sync + 'a,
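
Most of the churn in the database map modules in this region is the same mechanical edit: a named lifetime such as `'a` inside the returned `impl Trait` is replaced by the anonymous lifetime `'_`, which lifetime elision resolves to the same borrow. A small standalone illustration of the equivalence (the type and method names are invented for the example):

struct Store {
    items: Vec<String>,
}

impl Store {
    // Named lifetime spelled out everywhere.
    fn iter_named<'a>(&'a self) -> impl Iterator<Item = &'a str> + 'a {
        self.items.iter().map(String::as_str)
    }

    // Equivalent signature with the anonymous lifetime: given the single `&self`
    // receiver, elision ties both `'_` occurrences to the same borrow of `self`.
    fn iter_elided(&self) -> impl Iterator<Item = &'_ str> + '_ {
        self.items.iter().map(String::as_str)
    }
}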


@ -10,7 +10,7 @@ use super::stream::is_cached;
use crate::{keyval, keyval::Key, stream}; use crate::{keyval, keyval::Key, stream};
#[implement(super::Map)] #[implement(super::Map)]
pub fn keys<'a, K>(self: &'a Arc<Self>) -> impl Stream<Item = Result<Key<'a, K>>> + Send pub fn keys<'a, K>(self: &'a Arc<Self>) -> impl Stream<Item = Result<Key<'_, K>>> + Send
where where
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,
{ {


@ -15,7 +15,7 @@ use crate::{
pub fn keys_from<'a, K, P>( pub fn keys_from<'a, K, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
from: &P, from: &P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P> ) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
where where
P: Serialize + ?Sized + Debug, P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,
@ -40,7 +40,7 @@ where
pub fn keys_raw_from<'a, K, P>( pub fn keys_raw_from<'a, K, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
from: &P, from: &P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P> ) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
where where
P: AsRef<[u8]> + ?Sized + Debug + Sync, P: AsRef<[u8]> + ?Sized + Debug + Sync,
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,
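
The `+ use<'a, K, P>` bounds that keep appearing in these signatures are precise-capturing syntax (stabilised in Rust 1.82): they list exactly which generic parameters the opaque return type may capture. A generic, standalone sketch of the feature, unrelated to the project's own types, where the lifetime is deliberately left out of the capture list so the returned iterator does not borrow from the input slice:

// `use<T>` captures the type parameter but not `'a`, so the returned iterator is
// independent of the input slice's lifetime (all data is copied out up front).
fn name_lengths<'a, T>(items: &'a [T]) -> impl Iterator<Item = usize> + use<T>
where
    T: AsRef<str>,
{
    items
        .iter()
        .map(|t| t.as_ref().len())
        .collect::<Vec<_>>()
        .into_iter()
}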


@ -10,7 +10,7 @@ use crate::keyval::{Key, result_deserialize_key, serialize_key};
pub fn keys_prefix<'a, K, P>( pub fn keys_prefix<'a, K, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
prefix: &P, prefix: &P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P> ) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
where where
P: Serialize + ?Sized + Debug, P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,
@ -37,7 +37,7 @@ where
pub fn keys_raw_prefix<'a, K, P>( pub fn keys_raw_prefix<'a, K, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
prefix: &'a P, prefix: &'a P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + 'a ) -> impl Stream<Item = Result<Key<'_, K>>> + Send + 'a
where where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
K: Deserialize<'a> + Send + 'a, K: Deserialize<'a> + Send + 'a,
@ -50,7 +50,7 @@ where
pub fn raw_keys_prefix<'a, P>( pub fn raw_keys_prefix<'a, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
prefix: &'a P, prefix: &'a P,
) -> impl Stream<Item = Result<Key<'a>>> + Send + 'a ) -> impl Stream<Item = Result<Key<'_>>> + Send + 'a
where where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
{ {


@ -17,7 +17,7 @@ where
S: Stream<Item = K> + Send + 'a, S: Stream<Item = K> + Send + 'a,
K: Serialize + Debug, K: Serialize + Debug,
{ {
fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a; fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a;
} }
impl<'a, K, S> Qry<'a, K, S> for S impl<'a, K, S> Qry<'a, K, S> for S
@ -27,7 +27,7 @@ where
K: Serialize + Debug + 'a, K: Serialize + Debug + 'a,
{ {
#[inline] #[inline]
fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a { fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a {
map.qry_batch(self) map.qry_batch(self)
} }
} }
@ -37,7 +37,7 @@ where
pub(crate) fn qry_batch<'a, S, K>( pub(crate) fn qry_batch<'a, S, K>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
keys: S, keys: S,
) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a ) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a
where where
S: Stream<Item = K> + Send + 'a, S: Stream<Item = K> + Send + 'a,
K: Serialize + Debug + 'a, K: Serialize + Debug + 'a,


@ -10,7 +10,7 @@ use super::rev_stream::is_cached;
use crate::{keyval, keyval::Key, stream}; use crate::{keyval, keyval::Key, stream};
#[implement(super::Map)] #[implement(super::Map)]
pub fn rev_keys<'a, K>(self: &'a Arc<Self>) -> impl Stream<Item = Result<Key<'a, K>>> + Send pub fn rev_keys<'a, K>(self: &'a Arc<Self>) -> impl Stream<Item = Result<Key<'_, K>>> + Send
where where
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,
{ {


@ -15,7 +15,7 @@ use crate::{
pub fn rev_keys_from<'a, K, P>( pub fn rev_keys_from<'a, K, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
from: &P, from: &P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P> ) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
where where
P: Serialize + ?Sized + Debug, P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,
@ -41,7 +41,7 @@ where
pub fn rev_keys_raw_from<'a, K, P>( pub fn rev_keys_raw_from<'a, K, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
from: &P, from: &P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P> ) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
where where
P: AsRef<[u8]> + ?Sized + Debug + Sync, P: AsRef<[u8]> + ?Sized + Debug + Sync,
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,


@ -10,7 +10,7 @@ use crate::keyval::{Key, result_deserialize_key, serialize_key};
pub fn rev_keys_prefix<'a, K, P>( pub fn rev_keys_prefix<'a, K, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
prefix: &P, prefix: &P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P> ) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
where where
P: Serialize + ?Sized + Debug, P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,
@ -37,7 +37,7 @@ where
pub fn rev_keys_raw_prefix<'a, K, P>( pub fn rev_keys_raw_prefix<'a, K, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
prefix: &'a P, prefix: &'a P,
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + 'a ) -> impl Stream<Item = Result<Key<'_, K>>> + Send + 'a
where where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
K: Deserialize<'a> + Send + 'a, K: Deserialize<'a> + Send + 'a,
@ -50,7 +50,7 @@ where
pub fn rev_raw_keys_prefix<'a, P>( pub fn rev_raw_keys_prefix<'a, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
prefix: &'a P, prefix: &'a P,
) -> impl Stream<Item = Result<Key<'a>>> + Send + 'a ) -> impl Stream<Item = Result<Key<'_>>> + Send + 'a
where where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
{ {


@ -14,7 +14,7 @@ use crate::{keyval, keyval::KeyVal, stream};
#[implement(super::Map)] #[implement(super::Map)]
pub fn rev_stream<'a, K, V>( pub fn rev_stream<'a, K, V>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
where where
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,
V: Deserialize<'a> + Send, V: Deserialize<'a> + Send,


@ -20,7 +20,7 @@ use crate::{
pub fn rev_stream_from<'a, K, V, P>( pub fn rev_stream_from<'a, K, V, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
from: &P, from: &P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P> ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
where where
P: Serialize + ?Sized + Debug, P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,
@ -55,7 +55,7 @@ where
pub fn rev_stream_raw_from<'a, K, V, P>( pub fn rev_stream_raw_from<'a, K, V, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
from: &P, from: &P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P> ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
where where
P: AsRef<[u8]> + ?Sized + Debug + Sync, P: AsRef<[u8]> + ?Sized + Debug + Sync,
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,


@ -14,7 +14,7 @@ use crate::keyval::{KeyVal, result_deserialize, serialize_key};
pub fn rev_stream_prefix<'a, K, V, P>( pub fn rev_stream_prefix<'a, K, V, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
prefix: &P, prefix: &P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P> ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
where where
P: Serialize + ?Sized + Debug, P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,
@ -50,7 +50,7 @@ where
pub fn rev_stream_raw_prefix<'a, K, V, P>( pub fn rev_stream_raw_prefix<'a, K, V, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
prefix: &'a P, prefix: &'a P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + 'a ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + 'a
where where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
K: Deserialize<'a> + Send + 'a, K: Deserialize<'a> + Send + 'a,
@ -68,7 +68,7 @@ where
pub fn rev_raw_stream_prefix<'a, P>( pub fn rev_raw_stream_prefix<'a, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
prefix: &'a P, prefix: &'a P,
) -> impl Stream<Item = Result<KeyVal<'a>>> + Send + 'a ) -> impl Stream<Item = Result<KeyVal<'_>>> + Send + 'a
where where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
{ {


@ -14,7 +14,7 @@ use crate::{keyval, keyval::KeyVal, stream};
#[implement(super::Map)] #[implement(super::Map)]
pub fn stream<'a, K, V>( pub fn stream<'a, K, V>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
where where
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,
V: Deserialize<'a> + Send, V: Deserialize<'a> + Send,


@ -19,7 +19,7 @@ use crate::{
pub fn stream_from<'a, K, V, P>( pub fn stream_from<'a, K, V, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
from: &P, from: &P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P> ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
where where
P: Serialize + ?Sized + Debug, P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,
@ -53,7 +53,7 @@ where
pub fn stream_raw_from<'a, K, V, P>( pub fn stream_raw_from<'a, K, V, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
from: &P, from: &P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P> ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
where where
P: AsRef<[u8]> + ?Sized + Debug + Sync, P: AsRef<[u8]> + ?Sized + Debug + Sync,
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,


@ -14,7 +14,7 @@ use crate::keyval::{KeyVal, result_deserialize, serialize_key};
pub fn stream_prefix<'a, K, V, P>( pub fn stream_prefix<'a, K, V, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
prefix: &P, prefix: &P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P> ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
where where
P: Serialize + ?Sized + Debug, P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send, K: Deserialize<'a> + Send,
@ -50,7 +50,7 @@ where
pub fn stream_raw_prefix<'a, K, V, P>( pub fn stream_raw_prefix<'a, K, V, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
prefix: &'a P, prefix: &'a P,
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + 'a ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + 'a
where where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
K: Deserialize<'a> + Send + 'a, K: Deserialize<'a> + Send + 'a,
@ -68,7 +68,7 @@ where
pub fn raw_stream_prefix<'a, P>( pub fn raw_stream_prefix<'a, P>(
self: &'a Arc<Self>, self: &'a Arc<Self>,
prefix: &'a P, prefix: &'a P,
) -> impl Stream<Item = Result<KeyVal<'a>>> + Send + 'a ) -> impl Stream<Item = Result<KeyVal<'_>>> + Send + 'a
where where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
{ {


@ -374,10 +374,6 @@ pub(super) static MAPS: &[Descriptor] = &[
name: "userid_masterkeyid", name: "userid_masterkeyid",
..descriptor::RANDOM_SMALL ..descriptor::RANDOM_SMALL
}, },
Descriptor {
name: "userid_origin",
..descriptor::RANDOM
},
Descriptor { Descriptor {
name: "userid_password", name: "userid_password",
..descriptor::RANDOM ..descriptor::RANDOM


@ -443,7 +443,7 @@ pub(crate) fn into_send_seek(result: stream::State<'_>) -> stream::State<'static
unsafe { std::mem::transmute(result) } unsafe { std::mem::transmute(result) }
} }
fn into_recv_seek(result: stream::State<'static>) -> stream::State<'static> { fn into_recv_seek(result: stream::State<'static>) -> stream::State<'_> {
// SAFETY: This is to receive the State from the channel; see above. // SAFETY: This is to receive the State from the channel; see above.
unsafe { std::mem::transmute(result) } unsafe { std::mem::transmute(result) }
} }


@ -326,7 +326,7 @@ fn ser_array() {
} }
#[test] #[test]
#[ignore = "arrayvec deserialization is not implemented (separators)"] #[ignore]
fn de_array() { fn de_array() {
let a: u64 = 123_456; let a: u64 = 123_456;
let b: u64 = 987_654; let b: u64 = 987_654;
@ -358,7 +358,7 @@ fn de_array() {
} }
#[test] #[test]
#[ignore = "Nested sequences are not supported"] #[ignore]
fn de_complex() { fn de_complex() {
type Key<'a> = (&'a UserId, ArrayVec<u64, 2>, &'a RoomId); type Key<'a> = (&'a UserId, ArrayVec<u64, 2>, &'a RoomId);
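
On the `#[ignore]` changes above: the attribute optionally carries a reason string that the test harness can report alongside the ignored test, so adding or dropping the string only affects what gets reported, not whether the test is skipped. A trivial standalone example reusing one of the reasons from the hunk:

#[test]
#[ignore = "arrayvec deserialization is not implemented (separators)"]
fn roundtrip_example() {
    // Only executed when `cargo test -- --ignored` (or `--include-ignored`) is passed.
}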


@ -56,7 +56,6 @@ standard = [
"jemalloc", "jemalloc",
"jemalloc_conf", "jemalloc_conf",
"journald", "journald",
"ldap",
"media_thumbnail", "media_thumbnail",
"systemd", "systemd",
"url_preview", "url_preview",
@ -115,9 +114,6 @@ jemalloc_stats = [
jemalloc_conf = [ jemalloc_conf = [
"conduwuit-core/jemalloc_conf", "conduwuit-core/jemalloc_conf",
] ]
ldap = [
"conduwuit-api/ldap",
]
media_thumbnail = [ media_thumbnail = [
"conduwuit-service/media_thumbnail", "conduwuit-service/media_thumbnail",
] ]


@ -30,7 +30,7 @@ use tower::{Service, ServiceExt};
type MakeService = IntoMakeServiceWithConnectInfo<Router, net::SocketAddr>; type MakeService = IntoMakeServiceWithConnectInfo<Router, net::SocketAddr>;
const NULL_ADDR: net::SocketAddr = net::SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); const NULL_ADDR: net::SocketAddr = net::SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0);
const FINI_POLL_INTERVAL: Duration = Duration::from_millis(750); const FINI_POLL_INTERVAL: Duration = Duration::from_millis(750);
#[tracing::instrument(skip_all, level = "debug")] #[tracing::instrument(skip_all, level = "debug")]


@ -53,9 +53,6 @@ jemalloc_stats = [
"conduwuit-core/jemalloc_stats", "conduwuit-core/jemalloc_stats",
"conduwuit-database/jemalloc_stats", "conduwuit-database/jemalloc_stats",
] ]
ldap = [
"dep:ldap3"
]
media_thumbnail = [ media_thumbnail = [
"dep:image", "dep:image",
] ]
@ -92,8 +89,6 @@ image.workspace = true
image.optional = true image.optional = true
ipaddress.workspace = true ipaddress.workspace = true
itertools.workspace = true itertools.workspace = true
ldap3.workspace = true
ldap3.optional = true
log.workspace = true log.workspace = true
loole.workspace = true loole.workspace = true
lru-cache.workspace = true lru-cache.workspace = true


@ -38,7 +38,7 @@ pub async fn create_admin_room(services: &Services) -> Result {
// Create a user for the server // Create a user for the server
let server_user = services.globals.server_user.as_ref(); let server_user = services.globals.server_user.as_ref();
services.users.create(server_user, None, None).await?; services.users.create(server_user, None)?;
let create_content = { let create_content = {
use RoomVersionId::*; use RoomVersionId::*;


@ -109,10 +109,7 @@ impl Service {
)?; )?;
if !self.services.users.exists(&appservice_user_id).await { if !self.services.users.exists(&appservice_user_id).await {
self.services self.services.users.create(&appservice_user_id, None)?;
.users
.create(&appservice_user_id, None, None)
.await?;
} else if self } else if self
.services .services
.users .users
@ -123,8 +120,7 @@ impl Service {
// Reactivate the appservice user if it was accidentally deactivated // Reactivate the appservice user if it was accidentally deactivated
self.services self.services
.users .users
.set_password(&appservice_user_id, None) .set_password(&appservice_user_id, None)?;
.await?;
} }
self.registration_info self.registration_info


@ -41,11 +41,6 @@ impl crate::Service for Service {
return Ok(()); return Ok(());
} }
if self.services.config.ldap.enable {
warn!("emergency password feature not available with LDAP enabled.");
return Ok(());
}
self.set_emergency_access().await.inspect_err(|e| { self.set_emergency_access().await.inspect_err(|e| {
error!("Could not set the configured emergency password for the server user: {e}"); error!("Could not set the configured emergency password for the server user: {e}");
}) })
@ -62,8 +57,7 @@ impl Service {
self.services self.services
.users .users
.set_password(server_user, self.services.config.emergency_password.as_deref()) .set_password(server_user, self.services.config.emergency_password.as_deref())?;
.await?;
let (ruleset, pwd_set) = match self.services.config.emergency_password { let (ruleset, pwd_set) = match self.services.config.emergency_password {
| Some(_) => (Ruleset::server_default(server_user), true), | Some(_) => (Ruleset::server_default(server_user), true),


@ -215,8 +215,8 @@ async fn db_lt_12(services: &Services) -> Result<()> {
for username in &services for username in &services
.users .users
.list_local_users() .list_local_users()
.map(ToOwned::to_owned) .map(UserId::to_owned)
.collect::<Vec<OwnedUserId>>() .collect::<Vec<_>>()
.await .await
{ {
let user = match UserId::parse_with_server_name(username.as_str(), &services.server.name) let user = match UserId::parse_with_server_name(username.as_str(), &services.server.name)
@ -295,8 +295,8 @@ async fn db_lt_13(services: &Services) -> Result<()> {
for username in &services for username in &services
.users .users
.list_local_users() .list_local_users()
.map(ToOwned::to_owned) .map(UserId::to_owned)
.collect::<Vec<OwnedUserId>>() .collect::<Vec<_>>()
.await .await
{ {
let user = match UserId::parse_with_server_name(username.as_str(), &services.server.name) let user = match UserId::parse_with_server_name(username.as_str(), &services.server.name)


@ -183,8 +183,8 @@ impl Service {
.services .services
.users .users
.list_local_users() .list_local_users()
.map(ToOwned::to_owned) .map(UserId::to_owned)
.collect::<Vec<OwnedUserId>>() .collect::<Vec<_>>()
.await .await
{ {
let presence = self.db.get_presence(user_id).await; let presence = self.db.get_presence(user_id).await;


@ -178,7 +178,7 @@ impl Service {
pub fn get_pushkeys<'a>( pub fn get_pushkeys<'a>(
&'a self, &'a self,
sender: &'a UserId, sender: &'a UserId,
) -> impl Stream<Item = &'a str> + Send + 'a { ) -> impl Stream<Item = &str> + Send + 'a {
let prefix = (sender, Interfix); let prefix = (sender, Interfix);
self.db self.db
.senderkey_pusher .senderkey_pusher


@ -178,7 +178,7 @@ impl Service {
pub fn local_aliases_for_room<'a>( pub fn local_aliases_for_room<'a>(
&'a self, &'a self,
room_id: &'a RoomId, room_id: &'a RoomId,
) -> impl Stream<Item = &'a RoomAliasId> + Send + 'a { ) -> impl Stream<Item = &RoomAliasId> + Send + 'a {
let prefix = (room_id, Interfix); let prefix = (room_id, Interfix);
self.db self.db
.aliasid_alias .aliasid_alias
@ -188,9 +188,7 @@ impl Service {
} }
#[tracing::instrument(skip(self), level = "debug")] #[tracing::instrument(skip(self), level = "debug")]
pub fn all_local_aliases<'a>( pub fn all_local_aliases<'a>(&'a self) -> impl Stream<Item = (&RoomId, &str)> + Send + 'a {
&'a self,
) -> impl Stream<Item = (&'a RoomId, &'a str)> + Send + 'a {
self.db self.db
.alias_roomid .alias_roomid
.stream() .stream()


@ -60,7 +60,7 @@ impl Data {
target: ShortEventId, target: ShortEventId,
from: PduCount, from: PduCount,
dir: Direction, dir: Direction,
) -> impl Stream<Item = (PduCount, impl Event)> + Send + 'a { ) -> impl Stream<Item = (PduCount, impl Event)> + Send + '_ {
// Query from exact position then filter excludes it (saturating_inc could skip // Query from exact position then filter excludes it (saturating_inc could skip
// events at min/max boundaries) // events at min/max boundaries)
let from_unsigned = from.into_unsigned(); let from_unsigned = from.into_unsigned();


@ -65,7 +65,7 @@ impl Data {
&'a self, &'a self,
room_id: &'a RoomId, room_id: &'a RoomId,
since: u64, since: u64,
) -> impl Stream<Item = ReceiptItem<'a>> + Send + 'a { ) -> impl Stream<Item = ReceiptItem<'_>> + Send + 'a {
type Key<'a> = (&'a RoomId, u64, &'a UserId); type Key<'a> = (&'a RoomId, u64, &'a UserId);
type KeyVal<'a> = (Key<'a>, CanonicalJsonObject); type KeyVal<'a> = (Key<'a>, CanonicalJsonObject);


@ -112,7 +112,7 @@ impl Service {
&'a self, &'a self,
room_id: &'a RoomId, room_id: &'a RoomId,
since: u64, since: u64,
) -> impl Stream<Item = ReceiptItem<'a>> + Send + 'a { ) -> impl Stream<Item = ReceiptItem<'_>> + Send + 'a {
self.db.readreceipts_since(room_id, since) self.db.readreceipts_since(room_id, since)
} }


@ -104,7 +104,7 @@ pub fn deindex_pdu(&self, shortroomid: ShortRoomId, pdu_id: &RawPduId, message_b
pub async fn search_pdus<'a>( pub async fn search_pdus<'a>(
&'a self, &'a self,
query: &'a RoomQuery<'a>, query: &'a RoomQuery<'a>,
) -> Result<(usize, impl Stream<Item = impl Event + use<>> + Send + 'a)> { ) -> Result<(usize, impl Stream<Item = impl Event + use<>> + Send + '_)> {
let pdu_ids: Vec<_> = self.search_pdu_ids(query).await?.collect().await; let pdu_ids: Vec<_> = self.search_pdu_ids(query).await?.collect().await;
let filter = &query.criteria.filter; let filter = &query.criteria.filter;
@ -137,10 +137,10 @@ pub async fn search_pdus<'a>(
// result is modeled as a stream such that callers don't have to be refactored // result is modeled as a stream such that callers don't have to be refactored
// though an additional async/wrap still exists for now // though an additional async/wrap still exists for now
#[implement(Service)] #[implement(Service)]
pub async fn search_pdu_ids<'a>( pub async fn search_pdu_ids(
&'a self, &self,
query: &'a RoomQuery<'_>, query: &RoomQuery<'_>,
) -> Result<impl Stream<Item = RawPduId> + Send + 'a + use<'a>> { ) -> Result<impl Stream<Item = RawPduId> + Send + '_ + use<'_>> {
let shortroomid = self.services.short.get_shortroomid(query.room_id).await?; let shortroomid = self.services.short.get_shortroomid(query.room_id).await?;
let pdu_ids = self.search_pdu_ids_query_room(query, shortroomid).await; let pdu_ids = self.search_pdu_ids_query_room(query, shortroomid).await;
@ -173,7 +173,7 @@ fn search_pdu_ids_query_words<'a>(
&'a self, &'a self,
shortroomid: ShortRoomId, shortroomid: ShortRoomId,
word: &'a str, word: &'a str,
) -> impl Stream<Item = RawPduId> + Send + 'a { ) -> impl Stream<Item = RawPduId> + Send + '_ {
self.search_pdu_ids_query_word(shortroomid, word) self.search_pdu_ids_query_word(shortroomid, word)
.map(move |key| -> RawPduId { .map(move |key| -> RawPduId {
let key = &key[prefix_len(word)..]; let key = &key[prefix_len(word)..];
@ -183,11 +183,11 @@ fn search_pdu_ids_query_words<'a>(
/// Iterate over raw database results for a word /// Iterate over raw database results for a word
#[implement(Service)] #[implement(Service)]
fn search_pdu_ids_query_word<'a>( fn search_pdu_ids_query_word(
&'a self, &self,
shortroomid: ShortRoomId, shortroomid: ShortRoomId,
word: &'a str, word: &str,
) -> impl Stream<Item = Val<'a>> + Send + 'a + use<'a> { ) -> impl Stream<Item = Val<'_>> + Send + '_ + use<'_> {
// rustc says const'ing this not yet stable // rustc says const'ing this not yet stable
let end_id: RawPduId = PduId { let end_id: RawPduId = PduId {
shortroomid, shortroomid,


@ -62,7 +62,7 @@ pub async fn get_or_create_shorteventid(&self, event_id: &EventId) -> ShortEvent
pub fn multi_get_or_create_shorteventid<'a, I>( pub fn multi_get_or_create_shorteventid<'a, I>(
&'a self, &'a self,
event_ids: I, event_ids: I,
) -> impl Stream<Item = ShortEventId> + Send + 'a ) -> impl Stream<Item = ShortEventId> + Send + '_
where where
I: Iterator<Item = &'a EventId> + Clone + Debug + Send + 'a, I: Iterator<Item = &'a EventId> + Clone + Debug + Send + 'a,
{ {


@ -388,7 +388,7 @@ impl Service {
pub fn get_forward_extremities<'a>( pub fn get_forward_extremities<'a>(
&'a self, &'a self,
room_id: &'a RoomId, room_id: &'a RoomId,
) -> impl Stream<Item = &'a EventId> + Send + 'a { ) -> impl Stream<Item = &EventId> + Send + '_ {
let prefix = (room_id, Interfix); let prefix = (room_id, Interfix);
self.db self.db


@ -144,7 +144,7 @@ pub fn clear_appservice_in_room_cache(&self) { self.appservice_in_room_cache.wri
pub fn room_servers<'a>( pub fn room_servers<'a>(
&'a self, &'a self,
room_id: &'a RoomId, room_id: &'a RoomId,
) -> impl Stream<Item = &'a ServerName> + Send + 'a { ) -> impl Stream<Item = &ServerName> + Send + 'a {
let prefix = (room_id, Interfix); let prefix = (room_id, Interfix);
self.db self.db
.roomserverids .roomserverids
@ -167,7 +167,7 @@ pub async fn server_in_room<'a>(&'a self, server: &'a ServerName, room_id: &'a R
pub fn server_rooms<'a>( pub fn server_rooms<'a>(
&'a self, &'a self,
server: &'a ServerName, server: &'a ServerName,
) -> impl Stream<Item = &'a RoomId> + Send + 'a { ) -> impl Stream<Item = &RoomId> + Send + 'a {
let prefix = (server, Interfix); let prefix = (server, Interfix);
self.db self.db
.serverroomids .serverroomids
@ -202,7 +202,7 @@ pub fn get_shared_rooms<'a>(
&'a self, &'a self,
user_a: &'a UserId, user_a: &'a UserId,
user_b: &'a UserId, user_b: &'a UserId,
) -> impl Stream<Item = &'a RoomId> + Send + 'a { ) -> impl Stream<Item = &RoomId> + Send + 'a {
use conduwuit::utils::set; use conduwuit::utils::set;
let a = self.rooms_joined(user_a); let a = self.rooms_joined(user_a);
@ -216,7 +216,7 @@ pub fn get_shared_rooms<'a>(
pub fn room_members<'a>( pub fn room_members<'a>(
&'a self, &'a self,
room_id: &'a RoomId, room_id: &'a RoomId,
) -> impl Stream<Item = &'a UserId> + Send + 'a { ) -> impl Stream<Item = &UserId> + Send + 'a {
let prefix = (room_id, Interfix); let prefix = (room_id, Interfix);
self.db self.db
.roomuserid_joined .roomuserid_joined
@ -239,7 +239,7 @@ pub async fn room_joined_count(&self, room_id: &RoomId) -> Result<u64> {
pub fn local_users_in_room<'a>( pub fn local_users_in_room<'a>(
&'a self, &'a self,
room_id: &'a RoomId, room_id: &'a RoomId,
) -> impl Stream<Item = &'a UserId> + Send + 'a { ) -> impl Stream<Item = &UserId> + Send + 'a {
self.room_members(room_id) self.room_members(room_id)
.ready_filter(|user| self.services.globals.user_is_local(user)) .ready_filter(|user| self.services.globals.user_is_local(user))
} }
@ -251,7 +251,7 @@ pub fn local_users_in_room<'a>(
pub fn active_local_users_in_room<'a>( pub fn active_local_users_in_room<'a>(
&'a self, &'a self,
room_id: &'a RoomId, room_id: &'a RoomId,
) -> impl Stream<Item = &'a UserId> + Send + 'a { ) -> impl Stream<Item = &UserId> + Send + 'a {
self.local_users_in_room(room_id) self.local_users_in_room(room_id)
.filter(|user| self.services.users.is_active(user)) .filter(|user| self.services.users.is_active(user))
} }
@ -273,7 +273,7 @@ pub async fn room_invited_count(&self, room_id: &RoomId) -> Result<u64> {
pub fn room_useroncejoined<'a>( pub fn room_useroncejoined<'a>(
&'a self, &'a self,
room_id: &'a RoomId, room_id: &'a RoomId,
) -> impl Stream<Item = &'a UserId> + Send + 'a { ) -> impl Stream<Item = &UserId> + Send + 'a {
let prefix = (room_id, Interfix); let prefix = (room_id, Interfix);
self.db self.db
.roomuseroncejoinedids .roomuseroncejoinedids
@ -288,7 +288,7 @@ pub fn room_useroncejoined<'a>(
pub fn room_members_invited<'a>( pub fn room_members_invited<'a>(
&'a self, &'a self,
room_id: &'a RoomId, room_id: &'a RoomId,
) -> impl Stream<Item = &'a UserId> + Send + 'a { ) -> impl Stream<Item = &UserId> + Send + 'a {
let prefix = (room_id, Interfix); let prefix = (room_id, Interfix);
self.db self.db
.roomuserid_invitecount .roomuserid_invitecount
@ -303,7 +303,7 @@ pub fn room_members_invited<'a>(
pub fn room_members_knocked<'a>( pub fn room_members_knocked<'a>(
&'a self, &'a self,
room_id: &'a RoomId, room_id: &'a RoomId,
) -> impl Stream<Item = &'a UserId> + Send + 'a { ) -> impl Stream<Item = &UserId> + Send + 'a {
let prefix = (room_id, Interfix); let prefix = (room_id, Interfix);
self.db self.db
.roomuserid_knockedcount .roomuserid_knockedcount
@ -347,7 +347,7 @@ pub async fn get_left_count(&self, room_id: &RoomId, user_id: &UserId) -> Result
pub fn rooms_joined<'a>( pub fn rooms_joined<'a>(
&'a self, &'a self,
user_id: &'a UserId, user_id: &'a UserId,
) -> impl Stream<Item = &'a RoomId> + Send + 'a { ) -> impl Stream<Item = &RoomId> + Send + 'a {
self.db self.db
.userroomid_joined .userroomid_joined
.keys_raw_prefix(user_id) .keys_raw_prefix(user_id)


@ -49,7 +49,7 @@ pub async fn update_membership(
#[allow(clippy::collapsible_if)] #[allow(clippy::collapsible_if)]
if !self.services.globals.user_is_local(user_id) { if !self.services.globals.user_is_local(user_id) {
if !self.services.users.exists(user_id).await { if !self.services.users.exists(user_id).await {
self.services.users.create(user_id, None, None).await?; self.services.users.create(user_id, None)?;
} }
} }


@ -81,7 +81,7 @@ pub async fn servers_route_via(&self, room_id: &RoomId) -> Result<Vec<OwnedServe
pub fn servers_invite_via<'a>( pub fn servers_invite_via<'a>(
&'a self, &'a self,
room_id: &'a RoomId, room_id: &'a RoomId,
) -> impl Stream<Item = &'a ServerName> + Send + 'a { ) -> impl Stream<Item = &ServerName> + Send + 'a {
type KeyVal<'a> = (Ignore, Vec<&'a ServerName>); type KeyVal<'a> = (Ignore, Vec<&'a ServerName>);
self.db self.db


@ -1,19 +1,11 @@
#[cfg(feature = "ldap")]
use std::collections::HashMap;
use std::{collections::BTreeMap, mem, sync::Arc}; use std::{collections::BTreeMap, mem, sync::Arc};
#[cfg(feature = "ldap")]
use conduwuit::result::LogErr;
use conduwuit::{ use conduwuit::{
Err, Error, Result, Server, at, debug_warn, err, is_equal_to, trace, Err, Error, Result, Server, at, debug_warn, err, trace,
utils::{self, ReadyExt, stream::TryIgnore, string::Unquoted}, utils::{self, ReadyExt, stream::TryIgnore, string::Unquoted},
}; };
#[cfg(feature = "ldap")]
use conduwuit_core::{debug, error};
use database::{Deserialized, Ignore, Interfix, Json, Map}; use database::{Deserialized, Ignore, Interfix, Json, Map};
use futures::{Stream, StreamExt, TryFutureExt}; use futures::{Stream, StreamExt, TryFutureExt};
#[cfg(feature = "ldap")]
use ldap3::{LdapConnAsync, Scope, SearchEntry};
use ruma::{ use ruma::{
DeviceId, KeyId, MilliSecondsSinceUnixEpoch, OneTimeKeyAlgorithm, OneTimeKeyId, DeviceId, KeyId, MilliSecondsSinceUnixEpoch, OneTimeKeyAlgorithm, OneTimeKeyId,
OneTimeKeyName, OwnedDeviceId, OwnedKeyId, OwnedMxcUri, OwnedUserId, RoomId, UInt, UserId, OneTimeKeyName, OwnedDeviceId, OwnedKeyId, OwnedMxcUri, OwnedUserId, RoomId, UInt, UserId,
@ -71,7 +63,6 @@ struct Data {
userid_displayname: Arc<Map>, userid_displayname: Arc<Map>,
userid_lastonetimekeyupdate: Arc<Map>, userid_lastonetimekeyupdate: Arc<Map>,
userid_masterkeyid: Arc<Map>, userid_masterkeyid: Arc<Map>,
userid_origin: Arc<Map>,
userid_password: Arc<Map>, userid_password: Arc<Map>,
userid_suspension: Arc<Map>, userid_suspension: Arc<Map>,
userid_selfsigningkeyid: Arc<Map>, userid_selfsigningkeyid: Arc<Map>,
@ -109,7 +100,6 @@ impl crate::Service for Service {
userid_displayname: args.db["userid_displayname"].clone(), userid_displayname: args.db["userid_displayname"].clone(),
userid_lastonetimekeyupdate: args.db["userid_lastonetimekeyupdate"].clone(), userid_lastonetimekeyupdate: args.db["userid_lastonetimekeyupdate"].clone(),
userid_masterkeyid: args.db["userid_masterkeyid"].clone(), userid_masterkeyid: args.db["userid_masterkeyid"].clone(),
userid_origin: args.db["userid_origin"].clone(),
userid_password: args.db["userid_password"].clone(), userid_password: args.db["userid_password"].clone(),
userid_suspension: args.db["userid_suspension"].clone(), userid_suspension: args.db["userid_suspension"].clone(),
userid_selfsigningkeyid: args.db["userid_selfsigningkeyid"].clone(), userid_selfsigningkeyid: args.db["userid_selfsigningkeyid"].clone(),
@ -146,21 +136,9 @@ impl Service {
} }
/// Create a new user account on this homeserver. /// Create a new user account on this homeserver.
///
	/// The user's origin defaults to "password" (meaning the account logs in with
	/// its user_id/password). Users with other origins (currently only "ldap"
	/// is available) have special login processes.
#[inline] #[inline]
pub async fn create( pub fn create(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
&self, self.set_password(user_id, password)
user_id: &UserId,
password: Option<&str>,
origin: Option<&str>,
) -> Result<()> {
self.db
.userid_origin
.insert(user_id, origin.unwrap_or("password"));
self.set_password(user_id, password).await
} }
/// Deactivate account /// Deactivate account
@ -174,7 +152,7 @@ impl Service {
// result in an empty string, so the user will not be able to log in again. // result in an empty string, so the user will not be able to log in again.
// Systems like changing the password without logging in should check if the // Systems like changing the password without logging in should check if the
// account is deactivated. // account is deactivated.
self.set_password(user_id, None).await?; self.set_password(user_id, None)?;
// TODO: Unhook 3PID // TODO: Unhook 3PID
Ok(()) Ok(())
@ -275,34 +253,13 @@ impl Service {
.ready_filter_map(|(u, p): (&UserId, &[u8])| (!p.is_empty()).then_some(u)) .ready_filter_map(|(u, p): (&UserId, &[u8])| (!p.is_empty()).then_some(u))
} }
/// Returns the origin of the user (password/LDAP/...).
pub async fn origin(&self, user_id: &UserId) -> Result<String> {
self.db.userid_origin.get(user_id).await.deserialized()
}
/// Returns the password hash for the given user. /// Returns the password hash for the given user.
pub async fn password_hash(&self, user_id: &UserId) -> Result<String> { pub async fn password_hash(&self, user_id: &UserId) -> Result<String> {
self.db.userid_password.get(user_id).await.deserialized() self.db.userid_password.get(user_id).await.deserialized()
} }
/// Hash and set the user's password to the Argon2 hash /// Hash and set the user's password to the Argon2 hash
pub async fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()> { pub fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
		// Cannot change the password of an LDAP user. There are two special cases:
		// - a `None` password can be used to deactivate an LDAP user
		// - a "*" password is used as the default password of an active LDAP user
if cfg!(feature = "ldap")
&& password.is_some_and(|pwd| pwd != "*")
&& self
.db
.userid_origin
.get(user_id)
.await
.deserialized::<String>()
.is_ok_and(is_equal_to!("ldap"))
{
return Err!(Request(InvalidParam("Cannot change password of a LDAP user")));
}
password password
.map(utils::hash::password) .map(utils::hash::password)
.transpose() .transpose()
@ -422,7 +379,7 @@ impl Service {
pub fn all_device_ids<'a>( pub fn all_device_ids<'a>(
&'a self, &'a self,
user_id: &'a UserId, user_id: &'a UserId,
) -> impl Stream<Item = &'a DeviceId> + Send + 'a { ) -> impl Stream<Item = &DeviceId> + Send + 'a {
let prefix = (user_id, Interfix); let prefix = (user_id, Interfix);
self.db self.db
.userdeviceid_metadata .userdeviceid_metadata
@ -770,7 +727,7 @@ impl Service {
user_id: &'a UserId, user_id: &'a UserId,
from: u64, from: u64,
to: Option<u64>, to: Option<u64>,
) -> impl Stream<Item = &'a UserId> + Send + 'a { ) -> impl Stream<Item = &UserId> + Send + 'a {
self.keys_changed_user_or_room(user_id.as_str(), from, to) self.keys_changed_user_or_room(user_id.as_str(), from, to)
.map(|(user_id, ..)| user_id) .map(|(user_id, ..)| user_id)
} }
@ -781,7 +738,7 @@ impl Service {
room_id: &'a RoomId, room_id: &'a RoomId,
from: u64, from: u64,
to: Option<u64>, to: Option<u64>,
) -> impl Stream<Item = (&'a UserId, u64)> + Send + 'a { ) -> impl Stream<Item = (&UserId, u64)> + Send + 'a {
self.keys_changed_user_or_room(room_id.as_str(), from, to) self.keys_changed_user_or_room(room_id.as_str(), from, to)
} }
@ -790,7 +747,7 @@ impl Service {
user_or_room_id: &'a str, user_or_room_id: &'a str,
from: u64, from: u64,
to: Option<u64>, to: Option<u64>,
) -> impl Stream<Item = (&'a UserId, u64)> + Send + 'a { ) -> impl Stream<Item = (&UserId, u64)> + Send + 'a {
type KeyVal<'a> = ((&'a str, u64), &'a UserId); type KeyVal<'a> = ((&'a str, u64), &'a UserId);
let to = to.unwrap_or(u64::MAX); let to = to.unwrap_or(u64::MAX);
@ -1175,154 +1132,6 @@ impl Service {
self.db.useridprofilekey_value.del(key); self.db.useridprofilekey_value.del(key);
} }
} }
#[cfg(not(feature = "ldap"))]
pub async fn search_ldap(&self, _user_id: &UserId) -> Result<Vec<(String, bool)>> {
Err!(FeatureDisabled("ldap"))
}
#[cfg(feature = "ldap")]
pub async fn search_ldap(&self, user_id: &UserId) -> Result<Vec<(String, bool)>> {
let localpart = user_id.localpart().to_owned();
let lowercased_localpart = localpart.to_lowercase();
let config = &self.services.server.config.ldap;
let uri = config
.uri
.as_ref()
.ok_or_else(|| err!(Ldap(error!("LDAP URI is not configured."))))?;
debug!(?uri, "LDAP creating connection...");
let (conn, mut ldap) = LdapConnAsync::new(uri.as_str())
.await
.map_err(|e| err!(Ldap(error!(?user_id, "LDAP connection setup error: {e}"))))?;
let driver = self.services.server.runtime().spawn(async move {
match conn.drive().await {
| Err(e) => error!("LDAP connection error: {e}"),
| Ok(()) => debug!("LDAP connection completed."),
}
});
match (&config.bind_dn, &config.bind_password_file) {
| (Some(bind_dn), Some(bind_password_file)) => {
let bind_pw = String::from_utf8(std::fs::read(bind_password_file)?)?;
ldap.simple_bind(bind_dn, bind_pw.trim())
.await
.and_then(ldap3::LdapResult::success)
.map_err(|e| err!(Ldap(error!("LDAP bind error: {e}"))))?;
},
| (..) => {},
}
let attr = [&config.uid_attribute, &config.name_attribute];
let user_filter = &config.filter.replace("{username}", &lowercased_localpart);
let (entries, _result) = ldap
.search(&config.base_dn, Scope::Subtree, user_filter, &attr)
.await
.and_then(ldap3::SearchResult::success)
.inspect(|(entries, result)| trace!(?entries, ?result, "LDAP Search"))
.map_err(|e| err!(Ldap(error!(?attr, ?user_filter, "LDAP search error: {e}"))))?;
let mut dns: HashMap<String, bool> = entries
.into_iter()
.filter_map(|entry| {
let search_entry = SearchEntry::construct(entry);
debug!(?search_entry, "LDAP search entry");
search_entry
.attrs
.get(&config.uid_attribute)
.into_iter()
.chain(search_entry.attrs.get(&config.name_attribute))
.any(|ids| ids.contains(&localpart) || ids.contains(&lowercased_localpart))
.then_some((search_entry.dn, false))
})
.collect();
if !config.admin_filter.is_empty() {
let admin_base_dn = if config.admin_base_dn.is_empty() {
&config.base_dn
} else {
&config.admin_base_dn
};
let admin_filter = &config
.admin_filter
.replace("{username}", &lowercased_localpart);
let (admin_entries, _result) = ldap
.search(admin_base_dn, Scope::Subtree, admin_filter, &attr)
.await
.and_then(ldap3::SearchResult::success)
.inspect(|(entries, result)| trace!(?entries, ?result, "LDAP Admin Search"))
.map_err(|e| {
err!(Ldap(error!(?attr, ?admin_filter, "Ldap admin search error: {e}")))
})?;
dns.extend(admin_entries.into_iter().filter_map(|entry| {
let search_entry = SearchEntry::construct(entry);
debug!(?search_entry, "LDAP search entry");
search_entry
.attrs
.get(&config.uid_attribute)
.into_iter()
.chain(search_entry.attrs.get(&config.name_attribute))
.any(|ids| ids.contains(&localpart) || ids.contains(&lowercased_localpart))
.then_some((search_entry.dn, true))
}));
}
ldap.unbind()
.await
.map_err(|e| err!(Ldap(error!("LDAP unbind error: {e}"))))?;
driver.await.log_err().ok();
Ok(dns.drain().collect())
}
#[cfg(not(feature = "ldap"))]
pub async fn auth_ldap(&self, _user_dn: &str, _password: &str) -> Result {
Err!(FeatureDisabled("ldap"))
}
#[cfg(feature = "ldap")]
pub async fn auth_ldap(&self, user_dn: &str, password: &str) -> Result {
let config = &self.services.server.config.ldap;
let uri = config
.uri
.as_ref()
.ok_or_else(|| err!(Ldap(error!("LDAP URI is not configured."))))?;
debug!(?uri, "LDAP creating connection...");
let (conn, mut ldap) = LdapConnAsync::new(uri.as_str())
.await
.map_err(|e| err!(Ldap(error!(?user_dn, "LDAP connection setup error: {e}"))))?;
let driver = self.services.server.runtime().spawn(async move {
match conn.drive().await {
| Err(e) => error!("LDAP connection error: {e}"),
| Ok(()) => debug!("LDAP connection completed."),
}
});
ldap.simple_bind(user_dn, password)
.await
.and_then(ldap3::LdapResult::success)
.map_err(|e| {
err!(Request(Forbidden(debug_error!("LDAP authentication error: {e}"))))
})?;
ldap.unbind()
.await
.map_err(|e| err!(Ldap(error!("LDAP unbind error: {e}"))))?;
driver.await.log_err().ok();
Ok(())
}
} }
pub fn parse_master_key( pub fn parse_master_key(