Compare commits


11 commits

Author SHA1 Message Date
nexy7574
5e71470131 fix(hydra): Fix ruma dependency 2025-08-30 17:01:12 +01:00
nexy7574
cfd68efb99 style: Reformat and whatnot 2025-08-30 17:00:31 +01:00
nexy7574
327fa02cd9 feat(hydra): Initial public commit for v12 support
# Conflicts:
#	src/core/info/room_version.rs
#	src/service/rooms/timeline/create.rs
2025-08-30 16:55:21 +01:00
Tom Foster
609e239436 fix(fedora): Correct linting issues in RPM spec file
The Fedora RPM packaging files added in PR #950 weren't passing pre-commit
checks, causing CI failures for any branches rebased after that merge. This
applies prek linting fixes (typo correction, trailing whitespace removal,
and EOF newline) to ensure CI passes for all contributors.
2025-08-30 16:10:41 +01:00
Ginger
34417c96ae Update URL to point at the landing page 2025-08-28 21:10:46 +00:00
Ginger
f33f281edb Update long description to match deb package 2025-08-28 21:10:46 +00:00
Ginger
ddbca59193 Add spec and service files for creating an RPM package 2025-08-28 21:10:46 +00:00
Tom Foster
b5a2e49ae4 fix: Resolve Clippy CI failures from elided lifetime warnings
The latest Rust nightly compiler (2025-08-27) introduced the
elided-named-lifetimes lint, which causes Clippy CI checks to fail
when an elided lifetime ('_) resolves to a named lifetime that's
already in scope.

This commit fixes the Clippy warnings by:
- Making lifetime relationships explicit where 'a is already in scope
- Keeping elided lifetimes ('_) in functions without explicit
  lifetime parameters
- Ensuring proper lifetime handling in the database pool module

Affected files (17 total):
- Database map modules: Handle, Key, and KeyVal references in get,
  qry, keys, and stream operations
- Database pool module: into_recv_seek function

This change resolves the CI build failures without changing any
functionality, ensuring the codebase remains compatible with the
latest nightly Clippy checks.
2025-08-28 21:13:19 +01:00
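To illustrate the lint this commit works around, here is a minimal, hypothetical sketch (the types are not from this codebase): the elided `'_` in a return type resolves to the named lifetime `'a` already in scope, and the fix names it explicitly.

// Hypothetical example of the elided-named-lifetimes warning and its fix.
struct Session(String);

impl Session {
    // Warns on recent nightlies: the elided '_ in the return type silently
    // resolves to the named lifetime 'a that is already in scope.
    //fn token<'a>(&'a self) -> &'_ str { &self.0 }

    // The shape of the fix applied across the affected modules:
    fn token<'a>(&'a self) -> &'a str { &self.0 }
}

fn main() {
    let s = Session("abc".into());
    println!("{}", s.token());
}

The same pattern appears throughout the database map diffs below, where `Handle<'_>`, `Key<'_, K>`, and `KeyVal<'_, K, V>` become `Handle<'a>`, `Key<'a, K>`, and `KeyVal<'a, K, V>`.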
Jade Ellis
37248a4f68 chore: Add reasons for test skips 2025-08-28 20:10:05 +01:00
Tom Foster
dd22325ea2 refactor(ci): Consolidate Rust checks with optimised toolchain setup
Merge rust-checks.yml into prek-checks.yml for a unified workflow that
runs formatting and clippy/test checks in parallel jobs.

Add reusable composite actions:
- setup-rust: Smart Rust toolchain management with caching
  * Uses cargo-binstall for pre-built binary downloads
  * Integrates Mozilla sccache-action for compilation caching
  * Workspace-relative paths for better cache control
  * GitHub token support for improved rate limits
- setup-llvm-with-apt: LLVM installation with native dependencies
- detect-runner-os: Consistent OS detection for cache keys

Key improvements:
- Install prek via cargo-binstall --git (crates.io outdated at v0.0.1)
- Download timelord-cli from cargo-quickinstall
- Set BINSTALL_MAXIMUM_RESOLUTION_TIMEOUT=10 to avoid rate limit delays
- Default Rust version 1.87.0 with override support
- Remove redundant sccache stats (handled by Mozilla action)

Significantly reduces CI runtime through binary downloads instead of
compilation while maintaining all existing quality checks.
2025-08-28 19:20:14 +01:00
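The binary-install strategy this commit describes reduces to a couple of commands; this sketch mirrors the invocations in the setup-rust composite action shown later in this diff:

# Mirrors setup-rust below: fetch pre-built binaries instead of compiling.
# prek isn't regularly published to crates.io, so it is installed from git:
cargo-binstall -y --no-symlinks --git https://github.com/j178/prek prek
# timelord-cli resolves to a pre-built binary (per the commit, via cargo-quickinstall):
cargo-binstall -y --no-symlinks timelord-cli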
nex
30a56d5cb9 Update renovate.json 2025-08-28 17:15:32 +00:00
43 changed files with 737 additions and 237 deletions


@ -0,0 +1,39 @@
name: detect-runner-os
description: |
  Detect the actual OS name and version of the runner.
  Provides separate outputs for name, version, and a combined slug.

outputs:
  name:
    description: 'OS name (e.g. Ubuntu, Debian)'
    value: ${{ steps.detect.outputs.name }}
  version:
    description: 'OS version (e.g. 22.04, 11)'
    value: ${{ steps.detect.outputs.version }}
  slug:
    description: 'Combined OS slug (e.g. Ubuntu-22.04)'
    value: ${{ steps.detect.outputs.slug }}

runs:
  using: composite
  steps:
    - name: Detect runner OS
      id: detect
      shell: bash
      run: |
        # Detect OS version (try lsb_release first, fall back to /etc/os-release)
        OS_VERSION=$(lsb_release -rs 2>/dev/null || grep VERSION_ID /etc/os-release | cut -d'"' -f2)

        # Detect OS name and capitalise (try lsb_release first, fall back to /etc/os-release)
        OS_NAME=$(lsb_release -is 2>/dev/null || grep "^ID=" /etc/os-release | cut -d'=' -f2 | tr -d '"' | sed 's/\b\(.\)/\u\1/g')

        # Create combined slug
        OS_SLUG="${OS_NAME}-${OS_VERSION}"

        # Set outputs
        echo "name=${OS_NAME}" >> $GITHUB_OUTPUT
        echo "version=${OS_VERSION}" >> $GITHUB_OUTPUT
        echo "slug=${OS_SLUG}" >> $GITHUB_OUTPUT

        # Log detection results
        echo "🔍 Detected Runner OS: ${OS_NAME} ${OS_VERSION}"


@ -0,0 +1,167 @@
name: setup-llvm-with-apt
description: |
  Set up LLVM toolchain with APT package management and smart caching.
  Supports cross-compilation architectures and additional package installation.
  Creates symlinks in /usr/bin: clang, clang++, lld, llvm-ar, llvm-ranlib

inputs:
  dpkg-arch:
    description: 'Debian architecture for cross-compilation (e.g. arm64)'
    required: false
    default: ''
  extra-packages:
    description: 'Additional APT packages to install (space-separated)'
    required: false
    default: ''
  llvm-version:
    description: 'LLVM version to install'
    required: false
    default: '20'

outputs:
  llvm-version:
    description: 'Installed LLVM version'
    value: ${{ steps.configure.outputs.version }}

runs:
  using: composite
  steps:
    - name: Detect runner OS
      id: runner-os
      uses: ./.forgejo/actions/detect-runner-os

    - name: Configure cross-compilation architecture
      if: inputs.dpkg-arch != ''
      shell: bash
      run: |
        echo "🏗️ Adding ${{ inputs.dpkg-arch }} architecture"
        sudo dpkg --add-architecture ${{ inputs.dpkg-arch }}

        # Restrict default sources to amd64
        sudo sed -i 's/^deb http/deb [arch=amd64] http/g' /etc/apt/sources.list
        sudo sed -i 's/^deb https/deb [arch=amd64] https/g' /etc/apt/sources.list

        # Add ports sources for foreign architecture
        sudo tee /etc/apt/sources.list.d/${{ inputs.dpkg-arch }}.list > /dev/null <<EOF
        deb [arch=${{ inputs.dpkg-arch }}] http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe multiverse
        deb [arch=${{ inputs.dpkg-arch }}] http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe multiverse
        deb [arch=${{ inputs.dpkg-arch }}] http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
        EOF

        echo "✅ Architecture ${{ inputs.dpkg-arch }} configured"

    - name: Start LLVM cache group
      shell: bash
      run: echo "::group::📦 Restoring LLVM cache"

    - name: Check for LLVM cache
      id: cache
      uses: https://github.com/actions/cache@v4
      with:
        path: |
          /usr/bin/clang-*
          /usr/bin/clang++-*
          /usr/bin/lld-*
          /usr/bin/llvm-*
          /usr/lib/llvm-*/
          /usr/lib/x86_64-linux-gnu/libLLVM*.so*
          /usr/lib/x86_64-linux-gnu/libclang*.so*
          /etc/apt/sources.list.d/archive_uri-*
          /etc/apt/trusted.gpg.d/apt.llvm.org.asc
        key: llvm-${{ steps.runner-os.outputs.slug }}-v${{ inputs.llvm-version }}-v3-${{ hashFiles('**/Cargo.lock', 'rust-toolchain.toml') }}

    - name: End LLVM cache group
      shell: bash
      run: echo "::endgroup::"

    - name: Check and install LLVM if needed
      id: llvm-setup
      shell: bash
      run: |
        echo "🔍 Checking for LLVM ${{ inputs.llvm-version }}..."

        # Check both binaries and libraries exist
        if [ -f "/usr/bin/clang-${{ inputs.llvm-version }}" ] && \
           [ -f "/usr/bin/clang++-${{ inputs.llvm-version }}" ] && \
           [ -f "/usr/bin/lld-${{ inputs.llvm-version }}" ] && \
           ([ -f "/usr/lib/x86_64-linux-gnu/libLLVM.so.${{ inputs.llvm-version }}.1" ] || \
            [ -f "/usr/lib/x86_64-linux-gnu/libLLVM-${{ inputs.llvm-version }}.so.1" ] || \
            [ -f "/usr/lib/llvm-${{ inputs.llvm-version }}/lib/libLLVM.so" ]); then
          echo "✅ LLVM ${{ inputs.llvm-version }} found and verified"
          echo "needs-install=false" >> $GITHUB_OUTPUT
        else
          echo "📦 LLVM ${{ inputs.llvm-version }} not found or incomplete - installing..."

          echo "::group::🔧 Installing LLVM ${{ inputs.llvm-version }}"
          wget -O - https://apt.llvm.org/llvm.sh | bash -s -- ${{ inputs.llvm-version }}
          echo "::endgroup::"

          if [ ! -f "/usr/bin/clang-${{ inputs.llvm-version }}" ]; then
            echo "❌ Failed to install LLVM ${{ inputs.llvm-version }}"
            exit 1
          fi

          echo "✅ Installed LLVM ${{ inputs.llvm-version }}"
          echo "needs-install=true" >> $GITHUB_OUTPUT
        fi

    - name: Prepare for additional packages
      if: inputs.extra-packages != ''
      shell: bash
      run: |
        # Update APT if LLVM was cached (installer script already does apt-get update)
        if [[ "${{ steps.llvm-setup.outputs.needs-install }}" != "true" ]]; then
          echo "::group::📦 Running apt-get update (LLVM cached, extra packages needed)"
          sudo apt-get update
          echo "::endgroup::"
        fi

        echo "::group::📦 Installing additional packages"

    - name: Install additional packages
      if: inputs.extra-packages != ''
      uses: https://github.com/awalsh128/cache-apt-pkgs-action@latest
      with:
        packages: ${{ inputs.extra-packages }}
        version: 1.0

    - name: End package installation group
      if: inputs.extra-packages != ''
      shell: bash
      run: echo "::endgroup::"

    - name: Configure LLVM environment
      id: configure
      shell: bash
      run: |
        echo "::group::🔧 Configuring LLVM ${{ inputs.llvm-version }} environment"

        # Create symlinks
        sudo ln -sf "/usr/bin/clang-${{ inputs.llvm-version }}" /usr/bin/clang
        sudo ln -sf "/usr/bin/clang++-${{ inputs.llvm-version }}" /usr/bin/clang++
        sudo ln -sf "/usr/bin/lld-${{ inputs.llvm-version }}" /usr/bin/lld
        sudo ln -sf "/usr/bin/llvm-ar-${{ inputs.llvm-version }}" /usr/bin/llvm-ar
        sudo ln -sf "/usr/bin/llvm-ranlib-${{ inputs.llvm-version }}" /usr/bin/llvm-ranlib
        echo " ✓ Created symlinks"

        # Setup library paths
        LLVM_LIB_PATH="/usr/lib/llvm-${{ inputs.llvm-version }}/lib"
        if [ -d "$LLVM_LIB_PATH" ]; then
          echo "LD_LIBRARY_PATH=${LLVM_LIB_PATH}:${LD_LIBRARY_PATH:-}" >> $GITHUB_ENV
          echo "LIBCLANG_PATH=${LLVM_LIB_PATH}" >> $GITHUB_ENV
          echo "$LLVM_LIB_PATH" | sudo tee "/etc/ld.so.conf.d/llvm-${{ inputs.llvm-version }}.conf" > /dev/null
          sudo ldconfig
          echo " ✓ Configured library paths"
        else
          # Fallback to standard library location
          if [ -d "/usr/lib/x86_64-linux-gnu" ]; then
            echo "LIBCLANG_PATH=/usr/lib/x86_64-linux-gnu" >> $GITHUB_ENV
            echo " ✓ Using fallback library path"
          fi
        fi

        # Set output
        echo "version=${{ inputs.llvm-version }}" >> $GITHUB_OUTPUT

        echo "::endgroup::"
        echo "✅ LLVM ready: $(clang --version | head -1)"


@ -0,0 +1,236 @@
name: setup-rust
description: |
  Set up Rust toolchain with sccache for compilation caching.
  Respects rust-toolchain.toml by default or accepts explicit version override.

inputs:
  cache-key-suffix:
    description: 'Optional suffix for cache keys (e.g. platform identifier)'
    required: false
    default: ''
  rust-components:
    description: 'Additional Rust components to install (space-separated)'
    required: false
    default: ''
  rust-target:
    description: 'Rust target triple (e.g. x86_64-unknown-linux-gnu)'
    required: false
    default: ''
  rust-version:
    description: 'Rust version to install (e.g. nightly). Defaults to 1.87.0'
    required: false
    default: '1.87.0'
  sccache-cache-limit:
    description: 'Maximum size limit for sccache local cache (e.g. 2G, 500M)'
    required: false
    default: '2G'
  github-token:
    description: 'GitHub token for downloading sccache from GitHub releases'
    required: false
    default: ''

outputs:
  rust-version:
    description: 'Installed Rust version'
    value: ${{ steps.rust-setup.outputs.version }}

runs:
  using: composite
  steps:
    - name: Detect runner OS
      id: runner-os
      uses: ./.forgejo/actions/detect-runner-os

    - name: Configure Cargo environment
      shell: bash
      run: |
        # Use workspace-relative paths for better control and consistency
        echo "CARGO_HOME=${{ github.workspace }}/.cargo" >> $GITHUB_ENV
        echo "CARGO_TARGET_DIR=${{ github.workspace }}/target" >> $GITHUB_ENV
        echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> $GITHUB_ENV
        echo "RUSTUP_HOME=${{ github.workspace }}/.rustup" >> $GITHUB_ENV

        # Limit binstall resolution timeout to avoid GitHub rate limit delays
        echo "BINSTALL_MAXIMUM_RESOLUTION_TIMEOUT=10" >> $GITHUB_ENV

        # Ensure directories exist for first run
        mkdir -p "${{ github.workspace }}/.cargo"
        mkdir -p "${{ github.workspace }}/.sccache"
        mkdir -p "${{ github.workspace }}/target"
        mkdir -p "${{ github.workspace }}/.rustup"

    - name: Start cache restore group
      shell: bash
      run: echo "::group::📦 Restoring caches (registry, toolchain, build artifacts)"

    - name: Cache Cargo registry and git
      id: registry-cache
      uses: https://github.com/actions/cache@v4
      with:
        path: |
          .cargo/registry/index
          .cargo/registry/cache
          .cargo/git/db
        # Registry cache saved per workflow, restored from any workflow's cache
        # Each workflow maintains its own registry that accumulates its needed crates
        key: cargo-registry-${{ steps.runner-os.outputs.slug }}-${{ github.workflow }}
        restore-keys: |
          cargo-registry-${{ steps.runner-os.outputs.slug }}-

    - name: Cache toolchain binaries
      id: toolchain-cache
      uses: https://github.com/actions/cache@v4
      with:
        path: |
          .cargo/bin
          .rustup/toolchains
          .rustup/update-hashes
        # Shared toolchain cache across all Rust versions
        key: toolchain-${{ steps.runner-os.outputs.slug }}

    - name: Debug GitHub token availability
      shell: bash
      run: |
        if [ -z "${{ inputs.github-token }}" ]; then
          echo "⚠️ No GitHub token provided - sccache will use fallback download method"
        else
          echo "✅ GitHub token provided for sccache"
        fi

    - name: Setup sccache
      uses: https://github.com/mozilla-actions/sccache-action@v0.0.9
      with:
        token: ${{ inputs.github-token }}

    - name: Cache build artifacts
      id: build-cache
      uses: https://github.com/actions/cache@v4
      with:
        path: |
          target/**/deps
          !target/**/deps/*.rlib
          target/**/build
          target/**/.fingerprint
          target/**/incremental
          target/**/*.d
          /timelord/
        # Build artifacts - cache per code change, restore from deps when code changes
        key: >-
          build-${{ steps.runner-os.outputs.slug }}-${{ inputs.rust-version }}${{ inputs.cache-key-suffix && format('-{0}', inputs.cache-key-suffix) || '' }}-${{ hashFiles('rust-toolchain.toml', '**/Cargo.lock') }}-${{ hashFiles('**/*.rs', '**/Cargo.toml') }}
        restore-keys: |
          build-${{ steps.runner-os.outputs.slug }}-${{ inputs.rust-version }}${{ inputs.cache-key-suffix && format('-{0}', inputs.cache-key-suffix) || '' }}-${{ hashFiles('rust-toolchain.toml', '**/Cargo.lock') }}-

    - name: End cache restore group
      shell: bash
      run: echo "::endgroup::"

    - name: Setup Rust toolchain
      shell: bash
      run: |
        # Install rustup if not already cached
        if ! command -v rustup &> /dev/null; then
          echo "::group::📦 Installing rustup"
          curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain none
          source "$CARGO_HOME/env"
          echo "::endgroup::"
        else
          echo "✅ rustup already available"
        fi

        # Setup the appropriate Rust version
        if [[ -n "${{ inputs.rust-version }}" ]]; then
          echo "::group::📦 Setting up Rust ${{ inputs.rust-version }}"
          # Set override first to prevent rust-toolchain.toml from auto-installing
          rustup override set ${{ inputs.rust-version }} 2>/dev/null || true
          # Check if we need to install/update the toolchain
          if rustup toolchain list | grep -q "^${{ inputs.rust-version }}-"; then
            rustup update ${{ inputs.rust-version }}
          else
            rustup toolchain install ${{ inputs.rust-version }} --profile minimal -c cargo,clippy,rustfmt
          fi
        else
          echo "::group::📦 Setting up Rust from rust-toolchain.toml"
          rustup show
        fi
        echo "::endgroup::"

    - name: Configure PATH and install tools
      shell: bash
      env:
        GITHUB_TOKEN: ${{ inputs.github-token }}
      run: |
        # Add .cargo/bin to PATH permanently for all subsequent steps
        echo "${{ github.workspace }}/.cargo/bin" >> $GITHUB_PATH

        # For this step only, we need to add it to PATH since GITHUB_PATH takes effect in the next step
        export PATH="${{ github.workspace }}/.cargo/bin:$PATH"

        # Install cargo-binstall for fast binary installations
        if command -v cargo-binstall &> /dev/null; then
          echo "✅ cargo-binstall already available"
        else
          echo "::group::📦 Installing cargo-binstall"
          curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash
          echo "::endgroup::"
        fi

        if command -v prek &> /dev/null; then
          echo "✅ prek already available"
        else
          echo "::group::📦 Installing prek"
          # prek isn't regularly published to crates.io, so we use git source
          cargo-binstall -y --no-symlinks --git https://github.com/j178/prek prek
          echo "::endgroup::"
        fi

        if command -v timelord &> /dev/null; then
          echo "✅ timelord already available"
        else
          echo "::group::📦 Installing timelord"
          cargo-binstall -y --no-symlinks timelord-cli
          echo "::endgroup::"
        fi

    - name: Configure sccache environment
      shell: bash
      run: |
        echo "RUSTC_WRAPPER=sccache" >> $GITHUB_ENV
        echo "CMAKE_C_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV
        echo "CMAKE_CXX_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV
        echo "CMAKE_CUDA_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV
        echo "SCCACHE_GHA_ENABLED=true" >> $GITHUB_ENV

        # Configure incremental compilation GC
        # If we restored from old cache (partial hit), clean up aggressively
        if [[ "${{ steps.build-cache.outputs.cache-hit }}" != "true" ]]; then
          echo "♻️ Partial cache hit - enabling cache cleanup"
          echo "CARGO_INCREMENTAL_GC_THRESHOLD=5" >> $GITHUB_ENV
        fi

    - name: Install Rust components
      if: inputs.rust-components != ''
      shell: bash
      run: |
        echo "📦 Installing components: ${{ inputs.rust-components }}"
        rustup component add ${{ inputs.rust-components }}

    - name: Install Rust target
      if: inputs.rust-target != ''
      shell: bash
      run: |
        echo "📦 Installing target: ${{ inputs.rust-target }}"
        rustup target add ${{ inputs.rust-target }}

    - name: Output version and summary
      id: rust-setup
      shell: bash
      run: |
        RUST_VERSION=$(rustc --version | cut -d' ' -f2)
        echo "version=$RUST_VERSION" >> $GITHUB_OUTPUT

        echo "📋 Setup complete:"
        echo " Rust: $(rustc --version)"
        echo " Cargo: $(cargo --version)"
        echo " prek: $(prek --version 2>/dev/null || echo 'installed')"
        echo " timelord: $(timelord --version 2>/dev/null || echo 'installed')"


@ -2,7 +2,6 @@ name: Checks / Prek
on:
  push:
  pull_request:

permissions:
  contents: read
@ -17,18 +16,64 @@ jobs:
        with:
          persist-credentials: false

      - name: Install uv
        uses: https://github.com/astral-sh/setup-uv@v5
      - name: Setup Rust nightly
        uses: ./.forgejo/actions/setup-rust
        with:
          enable-cache: true
          ignore-nothing-to-cache: true
          cache-dependency-glob: ''
          rust-version: nightly
          github-token: ${{ secrets.GH_PUBLIC_RO }}

      - name: Run prek
        run: |
          uvx prek run \
          prek run \
            --all-files \
            --hook-stage manual \
            --show-diff-on-failure \
            --color=always \
            -v

      - name: Check Rust formatting
        run: |
          cargo +nightly fmt --all -- --check && \
            echo "✅ Formatting check passed" || \
            exit 1

  clippy-and-tests:
    name: Clippy and Cargo Tests
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Setup LLVM
        uses: ./.forgejo/actions/setup-llvm-with-apt
        with:
          extra-packages: liburing-dev liburing2

      - name: Setup Rust with caching
        uses: ./.forgejo/actions/setup-rust
        with:
          github-token: ${{ secrets.GH_PUBLIC_RO }}

      - name: Run Clippy lints
        run: |
          cargo clippy \
            --workspace \
            --features full \
            --locked \
            --no-deps \
            --profile test \
            -- \
            -D warnings

      - name: Run Cargo tests
        run: |
          cargo test \
            --workspace \
            --features full \
            --locked \
            --profile test \
            --all-targets \
            --no-fail-fast


@ -1,144 +0,0 @@
name: Checks / Rust

on:
  push:

jobs:
  format:
    name: Format
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Install rust
        uses: ./.forgejo/actions/rust-toolchain
        with:
          toolchain: "nightly"
          components: "rustfmt"

      - name: Check formatting
        run: |
          cargo +nightly fmt --all -- --check

  clippy:
    name: Clippy
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Install rust
        uses: ./.forgejo/actions/rust-toolchain

      - uses: https://github.com/actions/create-github-app-token@v2
        id: app-token
        with:
          app-id: ${{ vars.GH_APP_ID }}
          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
          github-api-url: https://api.github.com
          owner: ${{ vars.GH_APP_OWNER }}
          repositories: ""

      - name: Install sccache
        uses: ./.forgejo/actions/sccache
        with:
          token: ${{ steps.app-token.outputs.token }}

      - run: sudo apt-get update

      - name: Install system dependencies
        uses: https://github.com/awalsh128/cache-apt-pkgs-action@v1
        with:
          packages: clang liburing-dev
          version: 1

      - name: Cache Rust registry
        uses: actions/cache@v3
        with:
          path: |
            ~/.cargo/git
            !~/.cargo/git/checkouts
            ~/.cargo/registry
            !~/.cargo/registry/src
          key: rust-registry-${{hashFiles('**/Cargo.lock') }}

      - name: Timelord
        uses: ./.forgejo/actions/timelord
        with:
          key: sccache-v0
          path: .

      - name: Clippy
        run: |
          cargo clippy \
            --workspace \
            --features full \
            --locked \
            --no-deps \
            --profile test \
            -- \
            -D warnings

      - name: Show sccache stats
        if: always()
        run: sccache --show-stats

  cargo-test:
    name: Cargo Test
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Install rust
        uses: ./.forgejo/actions/rust-toolchain

      - uses: https://github.com/actions/create-github-app-token@v2
        id: app-token
        with:
          app-id: ${{ vars.GH_APP_ID }}
          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
          github-api-url: https://api.github.com
          owner: ${{ vars.GH_APP_OWNER }}
          repositories: ""

      - name: Install sccache
        uses: ./.forgejo/actions/sccache
        with:
          token: ${{ steps.app-token.outputs.token }}

      - run: sudo apt-get update

      - name: Install system dependencies
        uses: https://github.com/awalsh128/cache-apt-pkgs-action@v1
        with:
          packages: clang liburing-dev
          version: 1

      - name: Cache Rust registry
        uses: actions/cache@v3
        with:
          path: |
            ~/.cargo/git
            !~/.cargo/git/checkouts
            ~/.cargo/registry
            !~/.cargo/registry/src
          key: rust-registry-${{hashFiles('**/Cargo.lock') }}

      - name: Timelord
        uses: ./.forgejo/actions/timelord
        with:
          key: sccache-v0
          path: .

      - name: Cargo Test
        run: |
          cargo test \
            --workspace \
            --features full \
            --locked \
            --profile test \
            --all-targets \
            --no-fail-fast

      - name: Show sccache stats
        if: always()
        run: sccache --show-stats

Cargo.lock generated

@ -4058,6 +4058,7 @@ checksum = "88f8660c1ff60292143c98d08fc6e2f654d722db50410e3f3797d40baaf9d8f3"
[[package]]
name = "ruma"
version = "0.10.1"
source = "git+https://forgejo.ellis.link/continuwuation/ruwuma?rev=191e5c541e8339080bebb5ac6855a682330bb886#191e5c541e8339080bebb5ac6855a682330bb886"
dependencies = [
"assign",
"js_int",
@ -4077,6 +4078,7 @@ dependencies = [
[[package]]
name = "ruma-appservice-api"
version = "0.10.0"
source = "git+https://forgejo.ellis.link/continuwuation/ruwuma?rev=191e5c541e8339080bebb5ac6855a682330bb886#191e5c541e8339080bebb5ac6855a682330bb886"
dependencies = [
"js_int",
"ruma-common",
@ -4088,6 +4090,7 @@ dependencies = [
[[package]]
name = "ruma-client-api"
version = "0.18.0"
source = "git+https://forgejo.ellis.link/continuwuation/ruwuma?rev=191e5c541e8339080bebb5ac6855a682330bb886#191e5c541e8339080bebb5ac6855a682330bb886"
dependencies = [
"as_variant",
"assign",
@ -4110,6 +4113,7 @@ dependencies = [
[[package]]
name = "ruma-common"
version = "0.13.0"
source = "git+https://forgejo.ellis.link/continuwuation/ruwuma?rev=191e5c541e8339080bebb5ac6855a682330bb886#191e5c541e8339080bebb5ac6855a682330bb886"
dependencies = [
"as_variant",
"base64 0.22.1",
@ -4141,6 +4145,7 @@ dependencies = [
[[package]]
name = "ruma-events"
version = "0.28.1"
source = "git+https://forgejo.ellis.link/continuwuation/ruwuma?rev=191e5c541e8339080bebb5ac6855a682330bb886#191e5c541e8339080bebb5ac6855a682330bb886"
dependencies = [
"as_variant",
"indexmap 2.10.0",
@ -4165,6 +4170,7 @@ dependencies = [
[[package]]
name = "ruma-federation-api"
version = "0.9.0"
source = "git+https://forgejo.ellis.link/continuwuation/ruwuma?rev=191e5c541e8339080bebb5ac6855a682330bb886#191e5c541e8339080bebb5ac6855a682330bb886"
dependencies = [
"bytes",
"headers",
@ -4186,6 +4192,7 @@ dependencies = [
[[package]]
name = "ruma-identifiers-validation"
version = "0.9.5"
source = "git+https://forgejo.ellis.link/continuwuation/ruwuma?rev=191e5c541e8339080bebb5ac6855a682330bb886#191e5c541e8339080bebb5ac6855a682330bb886"
dependencies = [
"js_int",
"thiserror 2.0.12",
@ -4194,6 +4201,7 @@ dependencies = [
[[package]]
name = "ruma-identity-service-api"
version = "0.9.0"
source = "git+https://forgejo.ellis.link/continuwuation/ruwuma?rev=191e5c541e8339080bebb5ac6855a682330bb886#191e5c541e8339080bebb5ac6855a682330bb886"
dependencies = [
"js_int",
"ruma-common",
@ -4203,6 +4211,7 @@ dependencies = [
[[package]]
name = "ruma-macros"
version = "0.13.0"
source = "git+https://forgejo.ellis.link/continuwuation/ruwuma?rev=191e5c541e8339080bebb5ac6855a682330bb886#191e5c541e8339080bebb5ac6855a682330bb886"
dependencies = [
"cfg-if",
"proc-macro-crate",
@ -4217,6 +4226,7 @@ dependencies = [
[[package]]
name = "ruma-push-gateway-api"
version = "0.9.0"
source = "git+https://forgejo.ellis.link/continuwuation/ruwuma?rev=191e5c541e8339080bebb5ac6855a682330bb886#191e5c541e8339080bebb5ac6855a682330bb886"
dependencies = [
"js_int",
"ruma-common",
@ -4228,6 +4238,7 @@ dependencies = [
[[package]]
name = "ruma-signatures"
version = "0.15.0"
source = "git+https://forgejo.ellis.link/continuwuation/ruwuma?rev=191e5c541e8339080bebb5ac6855a682330bb886#191e5c541e8339080bebb5ac6855a682330bb886"
dependencies = [
"base64 0.22.1",
"ed25519-dalek",


@ -350,10 +350,9 @@ version = "0.1.2"
# Used for matrix spec type definitions and helpers
[workspace.dependencies.ruma]
#git = "https://forgejo.ellis.link/continuwuation/ruwuma"
git = "https://forgejo.ellis.link/continuwuation/ruwuma"
#branch = "conduwuit-changes"
#rev = "b753738047d1f443aca870896ef27ecaacf027da"
path = "../ruwuma/crates/ruma"
rev = "191e5c541e8339080bebb5ac6855a682330bb886"
features = [
"compat",
"rand",

fedora/conduwuit.service Normal file

@ -0,0 +1,68 @@
[Unit]
Description=Continuwuity - Matrix homeserver
Documentation=https://continuwuity.org/
Wants=network-online.target
After=network-online.target
Alias=matrix-conduwuit.service
[Service]
DynamicUser=yes
User=conduwuit
Group=conduwuit
Type=notify
Environment="CONTINUWUITY_CONFIG=/etc/conduwuit/conduwuit.toml"
Environment="CONTINUWUITY_LOG_TO_JOURNALD=true"
Environment="CONTINUWUITY_JOURNALD_IDENTIFIER=%N"
ExecStart=/usr/bin/conduwuit
AmbientCapabilities=
CapabilityBoundingSet=
DevicePolicy=closed
LockPersonality=yes
MemoryDenyWriteExecute=yes
NoNewPrivileges=yes
#ProcSubset=pid
ProtectClock=yes
ProtectControlGroups=yes
ProtectHome=yes
ProtectHostname=yes
ProtectKernelLogs=yes
ProtectKernelModules=yes
ProtectKernelTunables=yes
ProtectProc=invisible
ProtectSystem=strict
PrivateDevices=yes
PrivateMounts=yes
PrivateTmp=yes
PrivateUsers=yes
PrivateIPC=yes
RemoveIPC=yes
RestrictAddressFamilies=AF_INET AF_INET6 AF_UNIX
RestrictNamespaces=yes
RestrictRealtime=yes
RestrictSUIDSGID=yes
SystemCallArchitectures=native
SystemCallFilter=@system-service @resources
SystemCallFilter=~@clock @debug @module @mount @reboot @swap @cpu-emulation @obsolete @timer @chown @setuid @privileged @keyring @ipc
SystemCallErrorNumber=EPERM
StateDirectory=conduwuit
ConfigurationDirectory=conduwuit
RuntimeDirectory=conduwuit
RuntimeDirectoryMode=0750
Restart=on-failure
RestartSec=5
TimeoutStopSec=2m
TimeoutStartSec=2m
StartLimitInterval=1m
StartLimitBurst=5
[Install]
WantedBy=multi-user.target


@ -0,0 +1,80 @@
# This should be run using rpkg-util: https://docs.pagure.org/rpkg-util
# it requires Internet access and is not suitable for Fedora main repos
# TODO: rpkg-util is no longer maintained, find a replacement
Name: continuwuity
Version: {{{ git_repo_version }}}
Release: 1%{?dist}
Summary: Very cool Matrix chat homeserver written in Rust
License: Apache-2.0 AND MIT
URL: https://continuwuity.org
VCS: {{{ git_repo_vcs }}}
Source: {{{ git_repo_pack }}}
BuildRequires: cargo-rpm-macros >= 25
BuildRequires: systemd-rpm-macros
# Needed to build rust-librocksdb-sys
BuildRequires: clang
BuildRequires: liburing-devel
Requires: liburing
Requires: glibc
Requires: libstdc++
%global _description %{expand:
A cool hard fork of Conduit, a Matrix homeserver written in Rust}
%description %{_description}
%prep
{{{ git_repo_setup_macro }}}
%cargo_prep -N
# Perform an online build so Git dependencies can be retrieved
sed -i 's/^offline = true$//' .cargo/config.toml
%build
%cargo_build
# Here's the one legally required mystery incantation in this file.
# Some of our dependencies have source files which are (for some reason) marked as executable.
# Files in .cargo/registry/ are copied into /usr/src/ by the debuginfo machinery
# at the end of the build step, and then the BRP shebang mangling script checks
# the entire buildroot to find executable files, and fails the build because
# it thinks Rust's file attributes are shebangs because they start with `#!`.
# So we have to clear the executable bit on all of them before that happens.
find .cargo/registry/ -executable -name "*.rs" -exec chmod -x {} +
# TODO: this fails currently because it's forced to run in offline mode
# {cargo_license -- --no-dev} > LICENSE.dependencies
%install
install -Dpm0755 target/rpm/conduwuit -t %{buildroot}%{_bindir}
install -Dpm0644 fedora/conduwuit.service -t %{buildroot}%{_unitdir}
install -Dpm0644 conduwuit-example.toml %{buildroot}%{_sysconfdir}/conduwuit/conduwuit.toml
%files
%license LICENSE
%license src/core/matrix/state_res/LICENSE
%doc CODE_OF_CONDUCT.md
%doc CONTRIBUTING.md
%doc README.md
%doc SECURITY.md
%config %{_sysconfdir}/conduwuit/conduwuit.toml
%{_bindir}/conduwuit
%{_unitdir}/conduwuit.service
# Do not create /var/lib/conduwuit, systemd will create it if necessary
%post
%systemd_post conduwuit.service
%preun
%systemd_preun conduwuit.service
%postun
%systemd_postun_with_restart conduwuit.service
%changelog
{{{ git_repo_changelog }}}


@ -13,8 +13,8 @@
"enabled": true
},
"labels": [
"dependencies",
"github_actions"
"Dependencies",
"Dependencies/Renovate"
],
"ignoreDeps": [
"tikv-jemallocator",


@ -200,7 +200,7 @@ async fn is_event_report_valid(
valid"
);
if room_id != &pdu.room_id_or_hash() {
if room_id != pdu.room_id_or_hash() {
return Err!(Request(NotFound("Event ID does not belong to the reported room",)));
}


@ -91,7 +91,7 @@ pub(crate) async fn create_room_route(
// check if room ID doesn't already exist instead of erroring on auth check
if let Some(ref room_id) = room_id {
if services.rooms.short.get_shortroomid(&room_id).await.is_ok() {
if services.rooms.short.get_shortroomid(room_id).await.is_ok() {
return Err!(Request(RoomInUse("Room with that custom room ID already exists",)));
}
}


@ -289,11 +289,11 @@ where
let mut path: Vec<OwnedEventId> = Vec::new();
let mut seen: HashSet<OwnedEventId> = HashSet::new();
let next_event = |stack: &mut Vec<Vec<_>>, path: &mut Vec<_>| {
while stack.last().is_some_and(|s| s.is_empty()) {
while stack.last().is_some_and(std::vec::Vec::is_empty) {
stack.pop();
path.pop();
}
stack.last_mut().and_then(|s| s.pop())
stack.last_mut().and_then(std::vec::Vec::pop)
};
while let Some(event_id) = next_event(&mut stack, &mut path) {
path.push(event_id.clone());


@ -19,7 +19,7 @@ where
S: Stream<Item = K> + Send + 'a,
K: AsRef<[u8]> + Send + Sync + 'a,
{
fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a;
fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a;
}
impl<'a, K, S> Get<'a, K, S> for S
@ -29,7 +29,7 @@ where
K: AsRef<[u8]> + Send + Sync + 'a,
{
#[inline]
fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a {
fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a {
map.get_batch(self)
}
}
@ -39,7 +39,7 @@ where
pub(crate) fn get_batch<'a, S, K>(
self: &'a Arc<Self>,
keys: S,
) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a
) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a
where
S: Stream<Item = K> + Send + 'a,
K: AsRef<[u8]> + Send + Sync + 'a,


@ -10,7 +10,7 @@ use super::stream::is_cached;
use crate::{keyval, keyval::Key, stream};
#[implement(super::Map)]
pub fn keys<'a, K>(self: &'a Arc<Self>) -> impl Stream<Item = Result<Key<'_, K>>> + Send
pub fn keys<'a, K>(self: &'a Arc<Self>) -> impl Stream<Item = Result<Key<'a, K>>> + Send
where
K: Deserialize<'a> + Send,
{


@ -15,7 +15,7 @@ use crate::{
pub fn keys_from<'a, K, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -40,7 +40,7 @@ where
pub fn keys_raw_from<'a, K, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P>
where
P: AsRef<[u8]> + ?Sized + Debug + Sync,
K: Deserialize<'a> + Send,


@ -10,7 +10,7 @@ use crate::keyval::{Key, result_deserialize_key, serialize_key};
pub fn keys_prefix<'a, K, P>(
self: &'a Arc<Self>,
prefix: &P,
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -37,7 +37,7 @@ where
pub fn keys_raw_prefix<'a, K, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + 'a
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
K: Deserialize<'a> + Send + 'a,
@ -50,7 +50,7 @@ where
pub fn raw_keys_prefix<'a, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<Key<'_>>> + Send + 'a
) -> impl Stream<Item = Result<Key<'a>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
{


@ -17,7 +17,7 @@ where
S: Stream<Item = K> + Send + 'a,
K: Serialize + Debug,
{
fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a;
fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a;
}
impl<'a, K, S> Qry<'a, K, S> for S
@ -27,7 +27,7 @@ where
K: Serialize + Debug + 'a,
{
#[inline]
fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a {
fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a {
map.qry_batch(self)
}
}
@ -37,7 +37,7 @@ where
pub(crate) fn qry_batch<'a, S, K>(
self: &'a Arc<Self>,
keys: S,
) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a
) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a
where
S: Stream<Item = K> + Send + 'a,
K: Serialize + Debug + 'a,


@ -10,7 +10,7 @@ use super::rev_stream::is_cached;
use crate::{keyval, keyval::Key, stream};
#[implement(super::Map)]
pub fn rev_keys<'a, K>(self: &'a Arc<Self>) -> impl Stream<Item = Result<Key<'_, K>>> + Send
pub fn rev_keys<'a, K>(self: &'a Arc<Self>) -> impl Stream<Item = Result<Key<'a, K>>> + Send
where
K: Deserialize<'a> + Send,
{


@ -15,7 +15,7 @@ use crate::{
pub fn rev_keys_from<'a, K, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -41,7 +41,7 @@ where
pub fn rev_keys_raw_from<'a, K, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P>
where
P: AsRef<[u8]> + ?Sized + Debug + Sync,
K: Deserialize<'a> + Send,


@ -10,7 +10,7 @@ use crate::keyval::{Key, result_deserialize_key, serialize_key};
pub fn rev_keys_prefix<'a, K, P>(
self: &'a Arc<Self>,
prefix: &P,
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + use<'a, K, P>
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + use<'a, K, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -37,7 +37,7 @@ where
pub fn rev_keys_raw_prefix<'a, K, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<Key<'_, K>>> + Send + 'a
) -> impl Stream<Item = Result<Key<'a, K>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
K: Deserialize<'a> + Send + 'a,
@ -50,7 +50,7 @@ where
pub fn rev_raw_keys_prefix<'a, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<Key<'_>>> + Send + 'a
) -> impl Stream<Item = Result<Key<'a>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
{


@ -14,7 +14,7 @@ use crate::{keyval, keyval::KeyVal, stream};
#[implement(super::Map)]
pub fn rev_stream<'a, K, V>(
self: &'a Arc<Self>,
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send
where
K: Deserialize<'a> + Send,
V: Deserialize<'a> + Send,


@ -20,7 +20,7 @@ use crate::{
pub fn rev_stream_from<'a, K, V, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -55,7 +55,7 @@ where
pub fn rev_stream_raw_from<'a, K, V, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P>
where
P: AsRef<[u8]> + ?Sized + Debug + Sync,
K: Deserialize<'a> + Send,


@ -14,7 +14,7 @@ use crate::keyval::{KeyVal, result_deserialize, serialize_key};
pub fn rev_stream_prefix<'a, K, V, P>(
self: &'a Arc<Self>,
prefix: &P,
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -50,7 +50,7 @@ where
pub fn rev_stream_raw_prefix<'a, K, V, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + 'a
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
K: Deserialize<'a> + Send + 'a,
@ -68,7 +68,7 @@ where
pub fn rev_raw_stream_prefix<'a, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<KeyVal<'_>>> + Send + 'a
) -> impl Stream<Item = Result<KeyVal<'a>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
{


@ -14,7 +14,7 @@ use crate::{keyval, keyval::KeyVal, stream};
#[implement(super::Map)]
pub fn stream<'a, K, V>(
self: &'a Arc<Self>,
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send
where
K: Deserialize<'a> + Send,
V: Deserialize<'a> + Send,


@ -19,7 +19,7 @@ use crate::{
pub fn stream_from<'a, K, V, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -53,7 +53,7 @@ where
pub fn stream_raw_from<'a, K, V, P>(
self: &'a Arc<Self>,
from: &P,
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P>
where
P: AsRef<[u8]> + ?Sized + Debug + Sync,
K: Deserialize<'a> + Send,


@ -14,7 +14,7 @@ use crate::keyval::{KeyVal, result_deserialize, serialize_key};
pub fn stream_prefix<'a, K, V, P>(
self: &'a Arc<Self>,
prefix: &P,
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + use<'a, K, V, P>
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + use<'a, K, V, P>
where
P: Serialize + ?Sized + Debug,
K: Deserialize<'a> + Send,
@ -50,7 +50,7 @@ where
pub fn stream_raw_prefix<'a, K, V, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + 'a
) -> impl Stream<Item = Result<KeyVal<'a, K, V>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
K: Deserialize<'a> + Send + 'a,
@ -68,7 +68,7 @@ where
pub fn raw_stream_prefix<'a, P>(
self: &'a Arc<Self>,
prefix: &'a P,
) -> impl Stream<Item = Result<KeyVal<'_>>> + Send + 'a
) -> impl Stream<Item = Result<KeyVal<'a>>> + Send + 'a
where
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
{


@ -443,7 +443,7 @@ pub(crate) fn into_send_seek(result: stream::State<'_>) -> stream::State<'static
unsafe { std::mem::transmute(result) }
}
fn into_recv_seek(result: stream::State<'static>) -> stream::State<'_> {
fn into_recv_seek(result: stream::State<'static>) -> stream::State<'static> {
// SAFETY: This is to receive the State from the channel; see above.
unsafe { std::mem::transmute(result) }
}


@ -326,7 +326,7 @@ fn ser_array() {
}
#[test]
#[ignore]
#[ignore = "arrayvec deserialization is not implemented (separators)"]
fn de_array() {
let a: u64 = 123_456;
let b: u64 = 987_654;
@ -358,7 +358,7 @@ fn de_array() {
}
#[test]
#[ignore]
#[ignore = "Nested sequences are not supported"]
fn de_complex() {
type Key<'a> = (&'a UserId, ArrayVec<u64, 2>, &'a RoomId);


@ -215,8 +215,8 @@ async fn db_lt_12(services: &Services) -> Result<()> {
for username in &services
.users
.list_local_users()
.map(UserId::to_owned)
.collect::<Vec<_>>()
.map(ToOwned::to_owned)
.collect::<Vec<OwnedUserId>>()
.await
{
let user = match UserId::parse_with_server_name(username.as_str(), &services.server.name)
@ -295,8 +295,8 @@ async fn db_lt_13(services: &Services) -> Result<()> {
for username in &services
.users
.list_local_users()
.map(UserId::to_owned)
.collect::<Vec<_>>()
.map(ToOwned::to_owned)
.collect::<Vec<OwnedUserId>>()
.await
{
let user = match UserId::parse_with_server_name(username.as_str(), &services.server.name)


@ -183,8 +183,8 @@ impl Service {
.services
.users
.list_local_users()
.map(UserId::to_owned)
.collect::<Vec<_>>()
.map(ToOwned::to_owned)
.collect::<Vec<OwnedUserId>>()
.await
{
let presence = self.db.get_presence(user_id).await;


@ -178,7 +178,7 @@ impl Service {
pub fn get_pushkeys<'a>(
&'a self,
sender: &'a UserId,
) -> impl Stream<Item = &str> + Send + 'a {
) -> impl Stream<Item = &'a str> + Send + 'a {
let prefix = (sender, Interfix);
self.db
.senderkey_pusher


@ -178,7 +178,7 @@ impl Service {
pub fn local_aliases_for_room<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &RoomAliasId> + Send + 'a {
) -> impl Stream<Item = &'a RoomAliasId> + Send + 'a {
let prefix = (room_id, Interfix);
self.db
.aliasid_alias
@ -188,7 +188,9 @@ impl Service {
}
#[tracing::instrument(skip(self), level = "debug")]
pub fn all_local_aliases<'a>(&'a self) -> impl Stream<Item = (&RoomId, &str)> + Send + 'a {
pub fn all_local_aliases<'a>(
&'a self,
) -> impl Stream<Item = (&'a RoomId, &'a str)> + Send + 'a {
self.db
.alias_roomid
.stream()


@ -58,10 +58,8 @@ pub async fn handle_incoming_pdu<'a>(
value: BTreeMap<String, CanonicalJsonValue>,
is_timeline_event: bool,
) -> Result<Option<RawPduId>> {
if room_id.is_empty() {
// TODO(hydra): Room IDs should be calculated before this function is called
panic!("room ID cannot be empty");
}
// TODO(hydra): Room IDs should be calculated before this function is called
assert!(!room_id.is_empty(), "room ID cannot be empty");
// 1. Skip the PDU if we already have it as a timeline event
if let Ok(pdu_id) = self.services.timeline.get_pdu_id(event_id).await {
return Ok(Some(pdu_id));


@ -60,7 +60,7 @@ impl Data {
target: ShortEventId,
from: PduCount,
dir: Direction,
) -> impl Stream<Item = (PduCount, impl Event)> + Send + '_ {
) -> impl Stream<Item = (PduCount, impl Event)> + Send + 'a {
// Query from exact position then filter excludes it (saturating_inc could skip
// events at min/max boundaries)
let from_unsigned = from.into_unsigned();


@ -65,7 +65,7 @@ impl Data {
&'a self,
room_id: &'a RoomId,
since: u64,
) -> impl Stream<Item = ReceiptItem<'_>> + Send + 'a {
) -> impl Stream<Item = ReceiptItem<'a>> + Send + 'a {
type Key<'a> = (&'a RoomId, u64, &'a UserId);
type KeyVal<'a> = (Key<'a>, CanonicalJsonObject);


@ -112,7 +112,7 @@ impl Service {
&'a self,
room_id: &'a RoomId,
since: u64,
) -> impl Stream<Item = ReceiptItem<'_>> + Send + 'a {
) -> impl Stream<Item = ReceiptItem<'a>> + Send + 'a {
self.db.readreceipts_since(room_id, since)
}


@ -104,7 +104,7 @@ pub fn deindex_pdu(&self, shortroomid: ShortRoomId, pdu_id: &RawPduId, message_b
pub async fn search_pdus<'a>(
&'a self,
query: &'a RoomQuery<'a>,
) -> Result<(usize, impl Stream<Item = impl Event + use<>> + Send + '_)> {
) -> Result<(usize, impl Stream<Item = impl Event + use<>> + Send + 'a)> {
let pdu_ids: Vec<_> = self.search_pdu_ids(query).await?.collect().await;
let filter = &query.criteria.filter;
@ -137,10 +137,10 @@ pub async fn search_pdus<'a>(
// result is modeled as a stream such that callers don't have to be refactored
// though an additional async/wrap still exists for now
#[implement(Service)]
pub async fn search_pdu_ids(
&self,
query: &RoomQuery<'_>,
) -> Result<impl Stream<Item = RawPduId> + Send + '_ + use<'_>> {
pub async fn search_pdu_ids<'a>(
&'a self,
query: &'a RoomQuery<'_>,
) -> Result<impl Stream<Item = RawPduId> + Send + 'a + use<'a>> {
let shortroomid = self.services.short.get_shortroomid(query.room_id).await?;
let pdu_ids = self.search_pdu_ids_query_room(query, shortroomid).await;
@ -173,7 +173,7 @@ fn search_pdu_ids_query_words<'a>(
&'a self,
shortroomid: ShortRoomId,
word: &'a str,
) -> impl Stream<Item = RawPduId> + Send + '_ {
) -> impl Stream<Item = RawPduId> + Send + 'a {
self.search_pdu_ids_query_word(shortroomid, word)
.map(move |key| -> RawPduId {
let key = &key[prefix_len(word)..];
@ -183,11 +183,11 @@ fn search_pdu_ids_query_words<'a>(
/// Iterate over raw database results for a word
#[implement(Service)]
fn search_pdu_ids_query_word(
&self,
fn search_pdu_ids_query_word<'a>(
&'a self,
shortroomid: ShortRoomId,
word: &str,
) -> impl Stream<Item = Val<'_>> + Send + '_ + use<'_> {
word: &'a str,
) -> impl Stream<Item = Val<'a>> + Send + 'a + use<'a> {
// rustc says const'ing this not yet stable
let end_id: RawPduId = PduId {
shortroomid,


@ -62,7 +62,7 @@ pub async fn get_or_create_shorteventid(&self, event_id: &EventId) -> ShortEvent
pub fn multi_get_or_create_shorteventid<'a, I>(
&'a self,
event_ids: I,
) -> impl Stream<Item = ShortEventId> + Send + '_
) -> impl Stream<Item = ShortEventId> + Send + 'a
where
I: Iterator<Item = &'a EventId> + Clone + Debug + Send + 'a,
{


@ -16,7 +16,6 @@ use conduwuit_database::{Deserialized, Ignore, Interfix, Map};
use futures::{
FutureExt, Stream, StreamExt, TryFutureExt, TryStreamExt, future::join_all, pin_mut,
};
use log::trace;
use ruma::{
EventId, OwnedEventId, OwnedRoomId, RoomId, RoomVersionId, UserId,
events::{
@ -394,7 +393,7 @@ impl Service {
pub fn get_forward_extremities<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &EventId> + Send + '_ {
) -> impl Stream<Item = &'a EventId> + Send + 'a {
let prefix = (room_id, Interfix);
self.db


@ -144,7 +144,7 @@ pub fn clear_appservice_in_room_cache(&self) { self.appservice_in_room_cache.wri
pub fn room_servers<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &ServerName> + Send + 'a {
) -> impl Stream<Item = &'a ServerName> + Send + 'a {
let prefix = (room_id, Interfix);
self.db
.roomserverids
@ -167,7 +167,7 @@ pub async fn server_in_room<'a>(&'a self, server: &'a ServerName, room_id: &'a R
pub fn server_rooms<'a>(
&'a self,
server: &'a ServerName,
) -> impl Stream<Item = &RoomId> + Send + 'a {
) -> impl Stream<Item = &'a RoomId> + Send + 'a {
let prefix = (server, Interfix);
self.db
.serverroomids
@ -202,7 +202,7 @@ pub fn get_shared_rooms<'a>(
&'a self,
user_a: &'a UserId,
user_b: &'a UserId,
) -> impl Stream<Item = &RoomId> + Send + 'a {
) -> impl Stream<Item = &'a RoomId> + Send + 'a {
use conduwuit::utils::set;
let a = self.rooms_joined(user_a);
@ -216,7 +216,7 @@ pub fn get_shared_rooms<'a>(
pub fn room_members<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &UserId> + Send + 'a {
) -> impl Stream<Item = &'a UserId> + Send + 'a {
let prefix = (room_id, Interfix);
self.db
.roomuserid_joined
@ -239,7 +239,7 @@ pub async fn room_joined_count(&self, room_id: &RoomId) -> Result<u64> {
pub fn local_users_in_room<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &UserId> + Send + 'a {
) -> impl Stream<Item = &'a UserId> + Send + 'a {
self.room_members(room_id)
.ready_filter(|user| self.services.globals.user_is_local(user))
}
@ -251,7 +251,7 @@ pub fn local_users_in_room<'a>(
pub fn active_local_users_in_room<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &UserId> + Send + 'a {
) -> impl Stream<Item = &'a UserId> + Send + 'a {
self.local_users_in_room(room_id)
.filter(|user| self.services.users.is_active(user))
}
@ -273,7 +273,7 @@ pub async fn room_invited_count(&self, room_id: &RoomId) -> Result<u64> {
pub fn room_useroncejoined<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &UserId> + Send + 'a {
) -> impl Stream<Item = &'a UserId> + Send + 'a {
let prefix = (room_id, Interfix);
self.db
.roomuseroncejoinedids
@ -288,7 +288,7 @@ pub fn room_useroncejoined<'a>(
pub fn room_members_invited<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &UserId> + Send + 'a {
) -> impl Stream<Item = &'a UserId> + Send + 'a {
let prefix = (room_id, Interfix);
self.db
.roomuserid_invitecount
@ -303,7 +303,7 @@ pub fn room_members_invited<'a>(
pub fn room_members_knocked<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &UserId> + Send + 'a {
) -> impl Stream<Item = &'a UserId> + Send + 'a {
let prefix = (room_id, Interfix);
self.db
.roomuserid_knockedcount
@ -347,7 +347,7 @@ pub async fn get_left_count(&self, room_id: &RoomId, user_id: &UserId) -> Result
pub fn rooms_joined<'a>(
&'a self,
user_id: &'a UserId,
) -> impl Stream<Item = &RoomId> + Send + 'a {
) -> impl Stream<Item = &'a RoomId> + Send + 'a {
self.db
.userroomid_joined
.keys_raw_prefix(user_id)


@ -81,7 +81,7 @@ pub async fn servers_route_via(&self, room_id: &RoomId) -> Result<Vec<OwnedServe
pub fn servers_invite_via<'a>(
&'a self,
room_id: &'a RoomId,
) -> impl Stream<Item = &ServerName> + Send + 'a {
) -> impl Stream<Item = &'a ServerName> + Send + 'a {
type KeyVal<'a> = (Ignore, Vec<&'a ServerName>);
self.db


@ -422,7 +422,7 @@ impl Service {
pub fn all_device_ids<'a>(
&'a self,
user_id: &'a UserId,
) -> impl Stream<Item = &DeviceId> + Send + 'a {
) -> impl Stream<Item = &'a DeviceId> + Send + 'a {
let prefix = (user_id, Interfix);
self.db
.userdeviceid_metadata
@ -770,7 +770,7 @@ impl Service {
user_id: &'a UserId,
from: u64,
to: Option<u64>,
) -> impl Stream<Item = &UserId> + Send + 'a {
) -> impl Stream<Item = &'a UserId> + Send + 'a {
self.keys_changed_user_or_room(user_id.as_str(), from, to)
.map(|(user_id, ..)| user_id)
}
@ -781,7 +781,7 @@ impl Service {
room_id: &'a RoomId,
from: u64,
to: Option<u64>,
) -> impl Stream<Item = (&UserId, u64)> + Send + 'a {
) -> impl Stream<Item = (&'a UserId, u64)> + Send + 'a {
self.keys_changed_user_or_room(room_id.as_str(), from, to)
}
@ -790,7 +790,7 @@ impl Service {
user_or_room_id: &'a str,
from: u64,
to: Option<u64>,
) -> impl Stream<Item = (&UserId, u64)> + Send + 'a {
) -> impl Stream<Item = (&'a UserId, u64)> + Send + 'a {
type KeyVal<'a> = ((&'a str, u64), &'a UserId);
let to = to.unwrap_or(u64::MAX);