diff --git a/.forgejo/actions/detect-runner-os/action.yml b/.forgejo/actions/detect-runner-os/action.yml
new file mode 100644
index 00000000..6ada1d5d
--- /dev/null
+++ b/.forgejo/actions/detect-runner-os/action.yml
@@ -0,0 +1,39 @@
+name: detect-runner-os
+description: |
+  Detect the actual OS name and version of the runner.
+  Provides separate outputs for name, version, and a combined slug.
+
+outputs:
+  name:
+    description: 'OS name (e.g. Ubuntu, Debian)'
+    value: ${{ steps.detect.outputs.name }}
+  version:
+    description: 'OS version (e.g. 22.04, 11)'
+    value: ${{ steps.detect.outputs.version }}
+  slug:
+    description: 'Combined OS slug (e.g. Ubuntu-22.04)'
+    value: ${{ steps.detect.outputs.slug }}
+
+runs:
+  using: composite
+  steps:
+    - name: Detect runner OS
+      id: detect
+      shell: bash
+      run: |
+        # Detect OS version (try lsb_release first, fall back to /etc/os-release)
+        OS_VERSION=$(lsb_release -rs 2>/dev/null || grep VERSION_ID /etc/os-release | cut -d'"' -f2)
+
+        # Detect OS name and capitalise (try lsb_release first, fall back to /etc/os-release)
+        OS_NAME=$(lsb_release -is 2>/dev/null || grep "^ID=" /etc/os-release | cut -d'=' -f2 | tr -d '"' | sed 's/\b\(.\)/\u\1/g')
+
+        # Create combined slug
+        OS_SLUG="${OS_NAME}-${OS_VERSION}"
+
+        # Set outputs
+        echo "name=${OS_NAME}" >> $GITHUB_OUTPUT
+        echo "version=${OS_VERSION}" >> $GITHUB_OUTPUT
+        echo "slug=${OS_SLUG}" >> $GITHUB_OUTPUT
+
+        # Log detection results
+        echo "🔍 Detected Runner OS: ${OS_NAME} ${OS_VERSION}"
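The detection above can also be exercised outside of CI. A standalone sketch of the same fallback chain (lsb_release first, then /etc/os-release) — illustrative only, not part of the patch:

    #!/usr/bin/env bash
    # Sketch: print the same name/version/slug the composite action emits.
    set -euo pipefail
    OS_VERSION=$(lsb_release -rs 2>/dev/null || grep VERSION_ID /etc/os-release | cut -d'"' -f2)
    OS_NAME=$(lsb_release -is 2>/dev/null || grep "^ID=" /etc/os-release | cut -d'=' -f2 | tr -d '"' | sed 's/\b\(.\)/\u\1/g')
    echo "name=${OS_NAME} version=${OS_VERSION} slug=${OS_NAME}-${OS_VERSION}"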
-f "/usr/bin/clang-${{ inputs.llvm-version }}" ]; then + echo "❌ Failed to install LLVM ${{ inputs.llvm-version }}" + exit 1 + fi + + echo "✅ Installed LLVM ${{ inputs.llvm-version }}" + echo "needs-install=true" >> $GITHUB_OUTPUT + fi + + - name: Prepare for additional packages + if: inputs.extra-packages != '' + shell: bash + run: | + # Update APT if LLVM was cached (installer script already does apt-get update) + if [[ "${{ steps.llvm-setup.outputs.needs-install }}" != "true" ]]; then + echo "::group::📦 Running apt-get update (LLVM cached, extra packages needed)" + sudo apt-get update + echo "::endgroup::" + fi + echo "::group::📦 Installing additional packages" + + - name: Install additional packages + if: inputs.extra-packages != '' + uses: https://github.com/awalsh128/cache-apt-pkgs-action@latest + with: + packages: ${{ inputs.extra-packages }} + version: 1.0 + + - name: End package installation group + if: inputs.extra-packages != '' + shell: bash + run: echo "::endgroup::" + + - name: Configure LLVM environment + id: configure + shell: bash + run: | + echo "::group::🔧 Configuring LLVM ${{ inputs.llvm-version }} environment" + + # Create symlinks + sudo ln -sf "/usr/bin/clang-${{ inputs.llvm-version }}" /usr/bin/clang + sudo ln -sf "/usr/bin/clang++-${{ inputs.llvm-version }}" /usr/bin/clang++ + sudo ln -sf "/usr/bin/lld-${{ inputs.llvm-version }}" /usr/bin/lld + sudo ln -sf "/usr/bin/llvm-ar-${{ inputs.llvm-version }}" /usr/bin/llvm-ar + sudo ln -sf "/usr/bin/llvm-ranlib-${{ inputs.llvm-version }}" /usr/bin/llvm-ranlib + echo " ✓ Created symlinks" + + # Setup library paths + LLVM_LIB_PATH="/usr/lib/llvm-${{ inputs.llvm-version }}/lib" + if [ -d "$LLVM_LIB_PATH" ]; then + echo "LD_LIBRARY_PATH=${LLVM_LIB_PATH}:${LD_LIBRARY_PATH:-}" >> $GITHUB_ENV + echo "LIBCLANG_PATH=${LLVM_LIB_PATH}" >> $GITHUB_ENV + + echo "$LLVM_LIB_PATH" | sudo tee "/etc/ld.so.conf.d/llvm-${{ inputs.llvm-version }}.conf" > /dev/null + sudo ldconfig + echo " ✓ Configured library paths" + else + # Fallback to standard library location + if [ -d "/usr/lib/x86_64-linux-gnu" ]; then + echo "LIBCLANG_PATH=/usr/lib/x86_64-linux-gnu" >> $GITHUB_ENV + echo " ✓ Using fallback library path" + fi + fi + + # Set output + echo "version=${{ inputs.llvm-version }}" >> $GITHUB_OUTPUT + echo "::endgroup::" + echo "✅ LLVM ready: $(clang --version | head -1)" diff --git a/.forgejo/actions/setup-rust/action.yml b/.forgejo/actions/setup-rust/action.yml new file mode 100644 index 00000000..091da8c2 --- /dev/null +++ b/.forgejo/actions/setup-rust/action.yml @@ -0,0 +1,236 @@ +name: setup-rust +description: | + Set up Rust toolchain with sccache for compilation caching. + Respects rust-toolchain.toml by default or accepts explicit version override. + +inputs: + cache-key-suffix: + description: 'Optional suffix for cache keys (e.g. platform identifier)' + required: false + default: '' + rust-components: + description: 'Additional Rust components to install (space-separated)' + required: false + default: '' + rust-target: + description: 'Rust target triple (e.g. x86_64-unknown-linux-gnu)' + required: false + default: '' + rust-version: + description: 'Rust version to install (e.g. nightly). Defaults to 1.87.0' + required: false + default: '1.87.0' + sccache-cache-limit: + description: 'Maximum size limit for sccache local cache (e.g. 
diff --git a/.forgejo/actions/setup-rust/action.yml b/.forgejo/actions/setup-rust/action.yml
new file mode 100644
index 00000000..091da8c2
--- /dev/null
+++ b/.forgejo/actions/setup-rust/action.yml
@@ -0,0 +1,236 @@
+name: setup-rust
+description: |
+  Set up Rust toolchain with sccache for compilation caching.
+  Respects rust-toolchain.toml by default or accepts explicit version override.
+
+inputs:
+  cache-key-suffix:
+    description: 'Optional suffix for cache keys (e.g. platform identifier)'
+    required: false
+    default: ''
+  rust-components:
+    description: 'Additional Rust components to install (space-separated)'
+    required: false
+    default: ''
+  rust-target:
+    description: 'Rust target triple (e.g. x86_64-unknown-linux-gnu)'
+    required: false
+    default: ''
+  rust-version:
+    description: 'Rust version to install (e.g. nightly). Defaults to 1.87.0'
+    required: false
+    default: '1.87.0'
+  sccache-cache-limit:
+    description: 'Maximum size limit for sccache local cache (e.g. 2G, 500M)'
+    required: false
+    default: '2G'
+  github-token:
+    description: 'GitHub token for downloading sccache from GitHub releases'
+    required: false
+    default: ''
+
+outputs:
+  rust-version:
+    description: 'Installed Rust version'
+    value: ${{ steps.rust-setup.outputs.version }}
+
+runs:
+  using: composite
+  steps:
+    - name: Detect runner OS
+      id: runner-os
+      uses: ./.forgejo/actions/detect-runner-os
+
+    - name: Configure Cargo environment
+      shell: bash
+      run: |
+        # Use workspace-relative paths for better control and consistency
+        echo "CARGO_HOME=${{ github.workspace }}/.cargo" >> $GITHUB_ENV
+        echo "CARGO_TARGET_DIR=${{ github.workspace }}/target" >> $GITHUB_ENV
+        echo "SCCACHE_DIR=${{ github.workspace }}/.sccache" >> $GITHUB_ENV
+        echo "RUSTUP_HOME=${{ github.workspace }}/.rustup" >> $GITHUB_ENV
+
+        # Limit binstall resolution timeout to avoid GitHub rate limit delays
+        echo "BINSTALL_MAXIMUM_RESOLUTION_TIMEOUT=10" >> $GITHUB_ENV
+
+        # Ensure directories exist for first run
+        mkdir -p "${{ github.workspace }}/.cargo"
+        mkdir -p "${{ github.workspace }}/.sccache"
+        mkdir -p "${{ github.workspace }}/target"
+        mkdir -p "${{ github.workspace }}/.rustup"
+
+    - name: Start cache restore group
+      shell: bash
+      run: echo "::group::📦 Restoring caches (registry, toolchain, build artifacts)"
+
+    - name: Cache Cargo registry and git
+      id: registry-cache
+      uses: https://github.com/actions/cache@v4
+      with:
+        path: |
+          .cargo/registry/index
+          .cargo/registry/cache
+          .cargo/git/db
+        # Registry cache saved per workflow, restored from any workflow's cache
+        # Each workflow maintains its own registry that accumulates its needed crates
+        key: cargo-registry-${{ steps.runner-os.outputs.slug }}-${{ github.workflow }}
+        restore-keys: |
+          cargo-registry-${{ steps.runner-os.outputs.slug }}-
+
+    - name: Cache toolchain binaries
+      id: toolchain-cache
+      uses: https://github.com/actions/cache@v4
+      with:
+        path: |
+          .cargo/bin
+          .rustup/toolchains
+          .rustup/update-hashes
+        # Shared toolchain cache across all Rust versions
+        key: toolchain-${{ steps.runner-os.outputs.slug }}
+
+    - name: Debug GitHub token availability
+      shell: bash
+      run: |
+        if [ -z "${{ inputs.github-token }}" ]; then
+          echo "⚠️ No GitHub token provided - sccache will use fallback download method"
+        else
+          echo "✅ GitHub token provided for sccache"
+        fi
+
+    - name: Setup sccache
+      uses: https://github.com/mozilla-actions/sccache-action@v0.0.9
+      with:
+        token: ${{ inputs.github-token }}
+
+    - name: Cache build artifacts
+      id: build-cache
+      uses: https://github.com/actions/cache@v4
+      with:
+        path: |
+          target/**/deps
+          !target/**/deps/*.rlib
+          target/**/build
+          target/**/.fingerprint
+          target/**/incremental
+          target/**/*.d
+          /timelord/
+        # Build artifacts - cache per code change, restore from deps when code changes
+        key: >-
+          build-${{ steps.runner-os.outputs.slug }}-${{ inputs.rust-version }}${{ inputs.cache-key-suffix && format('-{0}', inputs.cache-key-suffix) || '' }}-${{ hashFiles('rust-toolchain.toml', '**/Cargo.lock') }}-${{ hashFiles('**/*.rs', '**/Cargo.toml') }}
+        restore-keys: |
+          build-${{ steps.runner-os.outputs.slug }}-${{ inputs.rust-version }}${{ inputs.cache-key-suffix && format('-{0}', inputs.cache-key-suffix) || '' }}-${{ hashFiles('rust-toolchain.toml', '**/Cargo.lock') }}-
+
+    - name: End cache restore group
+      shell: bash
+      run: echo "::endgroup::"
+
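The build-cache key above layers an OS/toolchain prefix, a lockfile hash, and a source hash, so restore-keys can fall back to "same dependencies, older sources". Roughly the same layering can be sketched locally to reason about when a cache entry will be reused (illustrative only; hashFiles() in CI is not byte-for-byte this script, and the slug value is an assumed example):

    # Sketch: approximate the cache-key layering used above.
    slug="Ubuntu-22.04"            # from detect-runner-os (assumed example value)
    rust="1.87.0"
    deps_hash=$(cat rust-toolchain.toml Cargo.lock 2>/dev/null | sha256sum | cut -c1-16)
    src_hash=$(find . -name '*.rs' -o -name 'Cargo.toml' | sort | xargs cat | sha256sum | cut -c1-16)
    echo "exact:    build-${slug}-${rust}-${deps_hash}-${src_hash}"
    echo "fallback: build-${slug}-${rust}-${deps_hash}-"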
+    - name: Setup Rust toolchain
+      shell: bash
+      run: |
+        # Install rustup if not already cached
+        if ! command -v rustup &> /dev/null; then
+          echo "::group::📦 Installing rustup"
+          curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain none
+          source "$CARGO_HOME/env"
+          echo "::endgroup::"
+        else
+          echo "✅ rustup already available"
+        fi
+
+        # Setup the appropriate Rust version
+        if [[ -n "${{ inputs.rust-version }}" ]]; then
+          echo "::group::📦 Setting up Rust ${{ inputs.rust-version }}"
+          # Set override first to prevent rust-toolchain.toml from auto-installing
+          rustup override set ${{ inputs.rust-version }} 2>/dev/null || true
+
+          # Check if we need to install/update the toolchain
+          if rustup toolchain list | grep -q "^${{ inputs.rust-version }}-"; then
+            rustup update ${{ inputs.rust-version }}
+          else
+            rustup toolchain install ${{ inputs.rust-version }} --profile minimal -c cargo,clippy,rustfmt
+          fi
+        else
+          echo "::group::📦 Setting up Rust from rust-toolchain.toml"
+          rustup show
+        fi
+        echo "::endgroup::"
+
+    - name: Configure PATH and install tools
+      shell: bash
+      env:
+        GITHUB_TOKEN: ${{ inputs.github-token }}
+      run: |
+        # Add .cargo/bin to PATH permanently for all subsequent steps
+        echo "${{ github.workspace }}/.cargo/bin" >> $GITHUB_PATH
+
+        # For this step only, we need to add it to PATH since GITHUB_PATH takes effect in the next step
+        export PATH="${{ github.workspace }}/.cargo/bin:$PATH"
+
+        # Install cargo-binstall for fast binary installations
+        if command -v cargo-binstall &> /dev/null; then
+          echo "✅ cargo-binstall already available"
+        else
+          echo "::group::📦 Installing cargo-binstall"
+          curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash
+          echo "::endgroup::"
+        fi
+
+        if command -v prek &> /dev/null; then
+          echo "✅ prek already available"
+        else
+          echo "::group::📦 Installing prek"
+          # prek isn't regularly published to crates.io, so we use git source
+          cargo-binstall -y --no-symlinks --git https://github.com/j178/prek prek
+          echo "::endgroup::"
+        fi
+
+        if command -v timelord &> /dev/null; then
+          echo "✅ timelord already available"
+        else
+          echo "::group::📦 Installing timelord"
+          cargo-binstall -y --no-symlinks timelord-cli
+          echo "::endgroup::"
+        fi
+
+    - name: Configure sccache environment
+      shell: bash
+      run: |
+        echo "RUSTC_WRAPPER=sccache" >> $GITHUB_ENV
+        echo "CMAKE_C_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV
+        echo "CMAKE_CXX_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV
+        echo "CMAKE_CUDA_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV
+        echo "SCCACHE_GHA_ENABLED=true" >> $GITHUB_ENV
+
+        # Configure incremental compilation GC
+        # If we restored from old cache (partial hit), clean up aggressively
+        if [[ "${{ steps.build-cache.outputs.cache-hit }}" != "true" ]]; then
+          echo "♻️ Partial cache hit - enabling cache cleanup"
+          echo "CARGO_INCREMENTAL_GC_THRESHOLD=5" >> $GITHUB_ENV
+        fi
+
+    - name: Install Rust components
+      if: inputs.rust-components != ''
+      shell: bash
+      run: |
+        echo "📦 Installing components: ${{ inputs.rust-components }}"
+        rustup component add ${{ inputs.rust-components }}
+
+    - name: Install Rust target
+      if: inputs.rust-target != ''
+      shell: bash
+      run: |
+        echo "📦 Installing target: ${{ inputs.rust-target }}"
+        rustup target add ${{ inputs.rust-target }}
+
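With RUSTC_WRAPPER exported a few steps earlier, it is easy to confirm that compiles actually go through sccache rather than plain rustc. A quick check one might run locally or in a follow-up step, assuming sccache is on PATH (not part of the patch):

    # Confirm rustc invocations are being wrapped and the cache is being exercised.
    echo "RUSTC_WRAPPER=${RUSTC_WRAPPER:-<unset>}"
    sccache --zero-stats >/dev/null
    cargo build --profile test --locked
    sccache --show-stats    # compare "Cache hits" vs "Cache misses"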
--version)" + echo " prek: $(prek --version 2>/dev/null || echo 'installed')" + echo " timelord: $(timelord --version 2>/dev/null || echo 'installed')" diff --git a/.forgejo/workflows/prek-checks.yml b/.forgejo/workflows/prek-checks.yml index ac330ca2..c25b9c3d 100644 --- a/.forgejo/workflows/prek-checks.yml +++ b/.forgejo/workflows/prek-checks.yml @@ -2,7 +2,6 @@ name: Checks / Prek on: push: - pull_request: permissions: contents: read @@ -17,18 +16,64 @@ jobs: with: persist-credentials: false - - name: Install uv - uses: https://github.com/astral-sh/setup-uv@v5 + - name: Setup Rust nightly + uses: ./.forgejo/actions/setup-rust with: - enable-cache: true - ignore-nothing-to-cache: true - cache-dependency-glob: '' + rust-version: nightly + github-token: ${{ secrets.GH_PUBLIC_RO }} - name: Run prek run: | - uvx prek run \ + prek run \ --all-files \ --hook-stage manual \ --show-diff-on-failure \ --color=always \ -v + + - name: Check Rust formatting + run: | + cargo +nightly fmt --all -- --check && \ + echo "✅ Formatting check passed" || \ + exit 1 + + clippy-and-tests: + name: Clippy and Cargo Tests + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Setup LLVM + uses: ./.forgejo/actions/setup-llvm-with-apt + with: + extra-packages: liburing-dev liburing2 + + - name: Setup Rust with caching + uses: ./.forgejo/actions/setup-rust + with: + github-token: ${{ secrets.GH_PUBLIC_RO }} + + - name: Run Clippy lints + run: | + cargo clippy \ + --workspace \ + --features full \ + --locked \ + --no-deps \ + --profile test \ + -- \ + -D warnings + + - name: Run Cargo tests + run: | + cargo test \ + --workspace \ + --features full \ + --locked \ + --profile test \ + --all-targets \ + --no-fail-fast diff --git a/.forgejo/workflows/rust-checks.yml b/.forgejo/workflows/rust-checks.yml deleted file mode 100644 index c46363a0..00000000 --- a/.forgejo/workflows/rust-checks.yml +++ /dev/null @@ -1,144 +0,0 @@ -name: Checks / Rust - -on: - push: - -jobs: - format: - name: Format - runs-on: ubuntu-latest - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install rust - uses: ./.forgejo/actions/rust-toolchain - with: - toolchain: "nightly" - components: "rustfmt" - - - name: Check formatting - run: | - cargo +nightly fmt --all -- --check - - clippy: - name: Clippy - runs-on: ubuntu-latest - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install rust - uses: ./.forgejo/actions/rust-toolchain - - - uses: https://github.com/actions/create-github-app-token@v2 - id: app-token - with: - app-id: ${{ vars.GH_APP_ID }} - private-key: ${{ secrets.GH_APP_PRIVATE_KEY }} - github-api-url: https://api.github.com - owner: ${{ vars.GH_APP_OWNER }} - repositories: "" - - name: Install sccache - uses: ./.forgejo/actions/sccache - with: - token: ${{ steps.app-token.outputs.token }} - - run: sudo apt-get update - - name: Install system dependencies - uses: https://github.com/awalsh128/cache-apt-pkgs-action@v1 - with: - packages: clang liburing-dev - version: 1 - - name: Cache Rust registry - uses: actions/cache@v3 - with: - path: | - ~/.cargo/git - !~/.cargo/git/checkouts - ~/.cargo/registry - !~/.cargo/registry/src - key: rust-registry-${{hashFiles('**/Cargo.lock') }} - - name: Timelord - uses: ./.forgejo/actions/timelord - with: - key: sccache-v0 - path: . 
diff --git a/.forgejo/workflows/rust-checks.yml b/.forgejo/workflows/rust-checks.yml
deleted file mode 100644
index c46363a0..00000000
--- a/.forgejo/workflows/rust-checks.yml
+++ /dev/null
@@ -1,144 +0,0 @@
-name: Checks / Rust
-
-on:
-  push:
-
-jobs:
-  format:
-    name: Format
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-
-      - name: Install rust
-        uses: ./.forgejo/actions/rust-toolchain
-        with:
-          toolchain: "nightly"
-          components: "rustfmt"
-
-      - name: Check formatting
-        run: |
-          cargo +nightly fmt --all -- --check
-
-  clippy:
-    name: Clippy
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-
-      - name: Install rust
-        uses: ./.forgejo/actions/rust-toolchain
-
-      - uses: https://github.com/actions/create-github-app-token@v2
-        id: app-token
-        with:
-          app-id: ${{ vars.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-          github-api-url: https://api.github.com
-          owner: ${{ vars.GH_APP_OWNER }}
-          repositories: ""
-      - name: Install sccache
-        uses: ./.forgejo/actions/sccache
-        with:
-          token: ${{ steps.app-token.outputs.token }}
-      - run: sudo apt-get update
-      - name: Install system dependencies
-        uses: https://github.com/awalsh128/cache-apt-pkgs-action@v1
-        with:
-          packages: clang liburing-dev
-          version: 1
-      - name: Cache Rust registry
-        uses: actions/cache@v3
-        with:
-          path: |
-            ~/.cargo/git
-            !~/.cargo/git/checkouts
-            ~/.cargo/registry
-            !~/.cargo/registry/src
-          key: rust-registry-${{hashFiles('**/Cargo.lock') }}
-      - name: Timelord
-        uses: ./.forgejo/actions/timelord
-        with:
-          key: sccache-v0
-          path: .
-      - name: Clippy
-        run: |
-          cargo clippy \
-            --workspace \
-            --features full \
-            --locked \
-            --no-deps \
-            --profile test \
-            -- \
-            -D warnings
-
-      - name: Show sccache stats
-        if: always()
-        run: sccache --show-stats
-
-  cargo-test:
-    name: Cargo Test
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-
-      - name: Install rust
-        uses: ./.forgejo/actions/rust-toolchain
-
-      - uses: https://github.com/actions/create-github-app-token@v2
-        id: app-token
-        with:
-          app-id: ${{ vars.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-          github-api-url: https://api.github.com
-          owner: ${{ vars.GH_APP_OWNER }}
-          repositories: ""
-      - name: Install sccache
-        uses: ./.forgejo/actions/sccache
-        with:
-          token: ${{ steps.app-token.outputs.token }}
-      - run: sudo apt-get update
-      - name: Install system dependencies
-        uses: https://github.com/awalsh128/cache-apt-pkgs-action@v1
-        with:
-          packages: clang liburing-dev
-          version: 1
-      - name: Cache Rust registry
-        uses: actions/cache@v3
-        with:
-          path: |
-            ~/.cargo/git
-            !~/.cargo/git/checkouts
-            ~/.cargo/registry
-            !~/.cargo/registry/src
-          key: rust-registry-${{hashFiles('**/Cargo.lock') }}
-      - name: Timelord
-        uses: ./.forgejo/actions/timelord
-        with:
-          key: sccache-v0
-          path: .
-      - name: Cargo Test
-        run: |
-          cargo test \
-            --workspace \
-            --features full \
-            --locked \
-            --profile test \
-            --all-targets \
-            --no-fail-fast
-
-      - name: Show sccache stats
-        if: always()
-        run: sccache --show-stats
diff --git a/Cargo.toml b/Cargo.toml
index 9452066c..9cf072d4 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -51,7 +51,7 @@ version = "0.6.2"
 version = "0.2.9"
 
 [workspace.dependencies.cargo_toml]
-version = "0.21"
+version = "0.22"
 default-features = false
 features = ["features"]
diff --git a/fedora/conduwuit.service b/fedora/conduwuit.service
new file mode 100644
index 00000000..6ab2af46
--- /dev/null
+++ b/fedora/conduwuit.service
@@ -0,0 +1,68 @@
+[Unit]
+Description=Continuwuity - Matrix homeserver
+Documentation=https://continuwuity.org/
+Wants=network-online.target
+After=network-online.target
+Alias=matrix-conduwuit.service
+
+[Service]
+DynamicUser=yes
+User=conduwuit
+Group=conduwuit
+Type=notify
+
+Environment="CONTINUWUITY_CONFIG=/etc/conduwuit/conduwuit.toml"
+
+Environment="CONTINUWUITY_LOG_TO_JOURNALD=true"
+Environment="CONTINUWUITY_JOURNALD_IDENTIFIER=%N"
+
+ExecStart=/usr/bin/conduwuit
+
+AmbientCapabilities=
+CapabilityBoundingSet=
+
+DevicePolicy=closed
+LockPersonality=yes
+MemoryDenyWriteExecute=yes
+NoNewPrivileges=yes
+#ProcSubset=pid
+ProtectClock=yes
+ProtectControlGroups=yes
+ProtectHome=yes
+ProtectHostname=yes
+ProtectKernelLogs=yes
+ProtectKernelModules=yes
+ProtectKernelTunables=yes
+ProtectProc=invisible
+ProtectSystem=strict
+PrivateDevices=yes
+PrivateMounts=yes
+PrivateTmp=yes
+PrivateUsers=yes
+PrivateIPC=yes
+RemoveIPC=yes
+RestrictAddressFamilies=AF_INET AF_INET6 AF_UNIX
+RestrictNamespaces=yes
+RestrictRealtime=yes
+RestrictSUIDSGID=yes
+SystemCallArchitectures=native
+SystemCallFilter=@system-service @resources
+SystemCallFilter=~@clock @debug @module @mount @reboot @swap @cpu-emulation @obsolete @timer @chown @setuid @privileged @keyring @ipc
+SystemCallErrorNumber=EPERM
+
+StateDirectory=conduwuit
+ConfigurationDirectory=conduwuit
+RuntimeDirectory=conduwuit
+RuntimeDirectoryMode=0750
+
+Restart=on-failure
+RestartSec=5
+
+TimeoutStopSec=2m
+TimeoutStartSec=2m
+
+StartLimitInterval=1m
+StartLimitBurst=5
+
+[Install]
+WantedBy=multi-user.target
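On a Fedora-style host the unit can be installed and inspected with the usual systemd tooling; systemd-analyze security is a convenient way to see the effect of the sandboxing directives above. A sketch, assuming the binary and unit paths used in this patch:

    sudo install -Dpm0644 fedora/conduwuit.service /usr/lib/systemd/system/conduwuit.service
    sudo systemctl daemon-reload
    sudo systemctl enable --now conduwuit.service
    systemctl status conduwuit.service
    systemd-analyze security conduwuit.service   # lower exposure score = tighter sandbox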
diff --git a/fedora/continuwuity.spec.rpkg b/fedora/continuwuity.spec.rpkg
new file mode 100644
index 00000000..f2efa383
--- /dev/null
+++ b/fedora/continuwuity.spec.rpkg
@@ -0,0 +1,80 @@
+# This should be run using rpkg-util: https://docs.pagure.org/rpkg-util
+# it requires Internet access and is not suitable for Fedora main repos
+# TODO: rpkg-util is no longer maintained, find a replacement
+
+Name: continuwuity
+Version: {{{ git_repo_version }}}
+Release: 1%{?dist}
+Summary: Very cool Matrix chat homeserver written in Rust
+
+License: Apache-2.0 AND MIT
+
+URL: https://continuwuity.org
+VCS: {{{ git_repo_vcs }}}
+Source: {{{ git_repo_pack }}}
+
+BuildRequires: cargo-rpm-macros >= 25
+BuildRequires: systemd-rpm-macros
+# Needed to build rust-librocksdb-sys
+BuildRequires: clang
+BuildRequires: liburing-devel
+
+Requires: liburing
+Requires: glibc
+Requires: libstdc++
+
+%global _description %{expand:
+A cool hard fork of Conduit, a Matrix homeserver written in Rust}
+
+%description %{_description}
+
+%prep
+{{{ git_repo_setup_macro }}}
+%cargo_prep -N
+# Perform an online build so Git dependencies can be retrieved
+sed -i 's/^offline = true$//' .cargo/config.toml
+
+%build
+%cargo_build
+
+# Here's the one legally required mystery incantation in this file.
+# Some of our dependencies have source files which are (for some reason) marked as executable.
+# Files in .cargo/registry/ are copied into /usr/src/ by the debuginfo machinery
+# at the end of the build step, and then the BRP shebang mangling script checks
+# the entire buildroot to find executable files, and fails the build because
+# it thinks Rust's file attributes are shebangs because they start with `#!`.
+# So we have to clear the executable bit on all of them before that happens.
+find .cargo/registry/ -executable -name "*.rs" -exec chmod -x {} + + +# TODO: this fails currently because it's forced to run in offline mode +# {cargo_license -- --no-dev} > LICENSE.dependencies + +%install +install -Dpm0755 target/rpm/conduwuit -t %{buildroot}%{_bindir} +install -Dpm0644 fedora/conduwuit.service -t %{buildroot}%{_unitdir} +install -Dpm0644 conduwuit-example.toml %{buildroot}%{_sysconfdir}/conduwuit/conduwuit.toml + +%files +%license LICENSE +%license src/core/matrix/state_res/LICENSE +%doc CODE_OF_CONDUCT.md +%doc CONTRIBUTING.md +%doc README.md +%doc SECURITY.md +%config %{_sysconfdir}/conduwuit/conduwuit.toml + +%{_bindir}/conduwuit +%{_unitdir}/conduwuit.service +# Do not create /var/lib/conduwuit, systemd will create it if necessary + +%post +%systemd_post conduwuit.service + +%preun +%systemd_preun conduwuit.service + +%postun +%systemd_postun_with_restart conduwuit.service + +%changelog +{{{ git_repo_changelog }}} diff --git a/src/api/client/sync/v3.rs b/src/api/client/sync/v3.rs index 01428c08..298a6e4b 100644 --- a/src/api/client/sync/v3.rs +++ b/src/api/client/sync/v3.rs @@ -430,7 +430,7 @@ async fn handle_left_room( .ok(); // Left before last sync - if Some(since) >= left_count { + if (Some(since) >= left_count && !include_leave) || Some(next_batch) < left_count { return Ok(None); } diff --git a/src/database/map/get_batch.rs b/src/database/map/get_batch.rs index e23a8848..539f0c39 100644 --- a/src/database/map/get_batch.rs +++ b/src/database/map/get_batch.rs @@ -19,7 +19,7 @@ where S: Stream + Send + 'a, K: AsRef<[u8]> + Send + Sync + 'a, { - fn get(self, map: &'a Arc) -> impl Stream>> + Send + 'a; + fn get(self, map: &'a Arc) -> impl Stream>> + Send + 'a; } impl<'a, K, S> Get<'a, K, S> for S @@ -29,7 +29,7 @@ where K: AsRef<[u8]> + Send + Sync + 'a, { #[inline] - fn get(self, map: &'a Arc) -> impl Stream>> + Send + 'a { + fn get(self, map: &'a Arc) -> impl Stream>> + Send + 'a { map.get_batch(self) } } @@ -39,7 +39,7 @@ where pub(crate) fn get_batch<'a, S, K>( self: &'a Arc, keys: S, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where S: Stream + Send + 'a, K: AsRef<[u8]> + Send + Sync + 'a, diff --git a/src/database/map/keys.rs b/src/database/map/keys.rs index 7ca932a5..ac044e91 100644 --- a/src/database/map/keys.rs +++ b/src/database/map/keys.rs @@ -10,7 +10,7 @@ use super::stream::is_cached; use crate::{keyval, keyval::Key, stream}; #[implement(super::Map)] -pub fn keys<'a, K>(self: &'a Arc) -> impl Stream>> + Send +pub fn keys<'a, K>(self: &'a Arc) -> impl Stream>> + Send where K: Deserialize<'a> + Send, { diff --git a/src/database/map/keys_from.rs b/src/database/map/keys_from.rs index c9b1717a..11245f7b 100644 --- a/src/database/map/keys_from.rs +++ b/src/database/map/keys_from.rs @@ -15,7 +15,7 @@ use crate::{ pub fn keys_from<'a, K, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, P> +) -> impl Stream>> + Send + use<'a, K, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -40,7 +40,7 @@ where pub fn keys_raw_from<'a, K, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, P> +) -> impl Stream>> + Send + use<'a, K, P> where P: AsRef<[u8]> + ?Sized + Debug + Sync, K: Deserialize<'a> + Send, diff --git a/src/database/map/keys_prefix.rs b/src/database/map/keys_prefix.rs index 09dd79ac..e6a9457f 100644 --- a/src/database/map/keys_prefix.rs +++ b/src/database/map/keys_prefix.rs @@ -10,7 +10,7 @@ use crate::keyval::{Key, result_deserialize_key, serialize_key}; pub fn keys_prefix<'a, K, 
P>( self: &'a Arc, prefix: &P, -) -> impl Stream>> + Send + use<'a, K, P> +) -> impl Stream>> + Send + use<'a, K, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -37,7 +37,7 @@ where pub fn keys_raw_prefix<'a, K, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, K: Deserialize<'a> + Send + 'a, @@ -50,7 +50,7 @@ where pub fn raw_keys_prefix<'a, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, { diff --git a/src/database/map/qry_batch.rs b/src/database/map/qry_batch.rs index e42d3e63..9da546e6 100644 --- a/src/database/map/qry_batch.rs +++ b/src/database/map/qry_batch.rs @@ -17,7 +17,7 @@ where S: Stream + Send + 'a, K: Serialize + Debug, { - fn qry(self, map: &'a Arc) -> impl Stream>> + Send + 'a; + fn qry(self, map: &'a Arc) -> impl Stream>> + Send + 'a; } impl<'a, K, S> Qry<'a, K, S> for S @@ -27,7 +27,7 @@ where K: Serialize + Debug + 'a, { #[inline] - fn qry(self, map: &'a Arc) -> impl Stream>> + Send + 'a { + fn qry(self, map: &'a Arc) -> impl Stream>> + Send + 'a { map.qry_batch(self) } } @@ -37,7 +37,7 @@ where pub(crate) fn qry_batch<'a, S, K>( self: &'a Arc, keys: S, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where S: Stream + Send + 'a, K: Serialize + Debug + 'a, diff --git a/src/database/map/rev_keys.rs b/src/database/map/rev_keys.rs index c00f3e55..8f48a17e 100644 --- a/src/database/map/rev_keys.rs +++ b/src/database/map/rev_keys.rs @@ -10,7 +10,7 @@ use super::rev_stream::is_cached; use crate::{keyval, keyval::Key, stream}; #[implement(super::Map)] -pub fn rev_keys<'a, K>(self: &'a Arc) -> impl Stream>> + Send +pub fn rev_keys<'a, K>(self: &'a Arc) -> impl Stream>> + Send where K: Deserialize<'a> + Send, { diff --git a/src/database/map/rev_keys_from.rs b/src/database/map/rev_keys_from.rs index 04e457dc..021e3b92 100644 --- a/src/database/map/rev_keys_from.rs +++ b/src/database/map/rev_keys_from.rs @@ -15,7 +15,7 @@ use crate::{ pub fn rev_keys_from<'a, K, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, P> +) -> impl Stream>> + Send + use<'a, K, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -41,7 +41,7 @@ where pub fn rev_keys_raw_from<'a, K, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, P> +) -> impl Stream>> + Send + use<'a, K, P> where P: AsRef<[u8]> + ?Sized + Debug + Sync, K: Deserialize<'a> + Send, diff --git a/src/database/map/rev_keys_prefix.rs b/src/database/map/rev_keys_prefix.rs index fbe9f9ca..5b1459f5 100644 --- a/src/database/map/rev_keys_prefix.rs +++ b/src/database/map/rev_keys_prefix.rs @@ -10,7 +10,7 @@ use crate::keyval::{Key, result_deserialize_key, serialize_key}; pub fn rev_keys_prefix<'a, K, P>( self: &'a Arc, prefix: &P, -) -> impl Stream>> + Send + use<'a, K, P> +) -> impl Stream>> + Send + use<'a, K, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -37,7 +37,7 @@ where pub fn rev_keys_raw_prefix<'a, K, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, K: Deserialize<'a> + Send + 'a, @@ -50,7 +50,7 @@ where pub fn rev_raw_keys_prefix<'a, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, { diff --git a/src/database/map/rev_stream.rs 
b/src/database/map/rev_stream.rs index 789a52e8..92d7bac8 100644 --- a/src/database/map/rev_stream.rs +++ b/src/database/map/rev_stream.rs @@ -14,7 +14,7 @@ use crate::{keyval, keyval::KeyVal, stream}; #[implement(super::Map)] pub fn rev_stream<'a, K, V>( self: &'a Arc, -) -> impl Stream>> + Send +) -> impl Stream>> + Send where K: Deserialize<'a> + Send, V: Deserialize<'a> + Send, diff --git a/src/database/map/rev_stream_from.rs b/src/database/map/rev_stream_from.rs index a612d2a2..7fef1b35 100644 --- a/src/database/map/rev_stream_from.rs +++ b/src/database/map/rev_stream_from.rs @@ -20,7 +20,7 @@ use crate::{ pub fn rev_stream_from<'a, K, V, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, V, P> +) -> impl Stream>> + Send + use<'a, K, V, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -55,7 +55,7 @@ where pub fn rev_stream_raw_from<'a, K, V, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, V, P> +) -> impl Stream>> + Send + use<'a, K, V, P> where P: AsRef<[u8]> + ?Sized + Debug + Sync, K: Deserialize<'a> + Send, diff --git a/src/database/map/rev_stream_prefix.rs b/src/database/map/rev_stream_prefix.rs index 46dc9247..70d4abf7 100644 --- a/src/database/map/rev_stream_prefix.rs +++ b/src/database/map/rev_stream_prefix.rs @@ -14,7 +14,7 @@ use crate::keyval::{KeyVal, result_deserialize, serialize_key}; pub fn rev_stream_prefix<'a, K, V, P>( self: &'a Arc, prefix: &P, -) -> impl Stream>> + Send + use<'a, K, V, P> +) -> impl Stream>> + Send + use<'a, K, V, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -50,7 +50,7 @@ where pub fn rev_stream_raw_prefix<'a, K, V, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, K: Deserialize<'a> + Send + 'a, @@ -68,7 +68,7 @@ where pub fn rev_raw_stream_prefix<'a, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, { diff --git a/src/database/map/stream.rs b/src/database/map/stream.rs index f7371b6c..736ab268 100644 --- a/src/database/map/stream.rs +++ b/src/database/map/stream.rs @@ -14,7 +14,7 @@ use crate::{keyval, keyval::KeyVal, stream}; #[implement(super::Map)] pub fn stream<'a, K, V>( self: &'a Arc, -) -> impl Stream>> + Send +) -> impl Stream>> + Send where K: Deserialize<'a> + Send, V: Deserialize<'a> + Send, diff --git a/src/database/map/stream_from.rs b/src/database/map/stream_from.rs index ccf48db6..9acec173 100644 --- a/src/database/map/stream_from.rs +++ b/src/database/map/stream_from.rs @@ -19,7 +19,7 @@ use crate::{ pub fn stream_from<'a, K, V, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, V, P> +) -> impl Stream>> + Send + use<'a, K, V, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -53,7 +53,7 @@ where pub fn stream_raw_from<'a, K, V, P>( self: &'a Arc, from: &P, -) -> impl Stream>> + Send + use<'a, K, V, P> +) -> impl Stream>> + Send + use<'a, K, V, P> where P: AsRef<[u8]> + ?Sized + Debug + Sync, K: Deserialize<'a> + Send, diff --git a/src/database/map/stream_prefix.rs b/src/database/map/stream_prefix.rs index a26478aa..8210e152 100644 --- a/src/database/map/stream_prefix.rs +++ b/src/database/map/stream_prefix.rs @@ -14,7 +14,7 @@ use crate::keyval::{KeyVal, result_deserialize, serialize_key}; pub fn stream_prefix<'a, K, V, P>( self: &'a Arc, prefix: &P, -) -> impl Stream>> + Send + use<'a, K, V, P> +) -> impl 
Stream>> + Send + use<'a, K, V, P> where P: Serialize + ?Sized + Debug, K: Deserialize<'a> + Send, @@ -50,7 +50,7 @@ where pub fn stream_raw_prefix<'a, K, V, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, K: Deserialize<'a> + Send + 'a, @@ -68,7 +68,7 @@ where pub fn raw_stream_prefix<'a, P>( self: &'a Arc, prefix: &'a P, -) -> impl Stream>> + Send + 'a +) -> impl Stream>> + Send + 'a where P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a, { diff --git a/src/database/pool.rs b/src/database/pool.rs index 285aaf25..3421f779 100644 --- a/src/database/pool.rs +++ b/src/database/pool.rs @@ -443,7 +443,7 @@ pub(crate) fn into_send_seek(result: stream::State<'_>) -> stream::State<'static unsafe { std::mem::transmute(result) } } -fn into_recv_seek(result: stream::State<'static>) -> stream::State<'_> { +fn into_recv_seek(result: stream::State<'static>) -> stream::State<'static> { // SAFETY: This is to receive the State from the channel; see above. unsafe { std::mem::transmute(result) } } diff --git a/src/database/tests.rs b/src/database/tests.rs index c1a9f47c..30562a66 100644 --- a/src/database/tests.rs +++ b/src/database/tests.rs @@ -326,7 +326,7 @@ fn ser_array() { } #[test] -#[ignore] +#[ignore = "arrayvec deserialization is not implemented (separators)"] fn de_array() { let a: u64 = 123_456; let b: u64 = 987_654; @@ -358,7 +358,7 @@ fn de_array() { } #[test] -#[ignore] +#[ignore = "Nested sequences are not supported"] fn de_complex() { type Key<'a> = (&'a UserId, ArrayVec, &'a RoomId); diff --git a/src/service/migrations.rs b/src/service/migrations.rs index cee638ba..586d6249 100644 --- a/src/service/migrations.rs +++ b/src/service/migrations.rs @@ -215,8 +215,8 @@ async fn db_lt_12(services: &Services) -> Result<()> { for username in &services .users .list_local_users() - .map(UserId::to_owned) - .collect::>() + .map(ToOwned::to_owned) + .collect::>() .await { let user = match UserId::parse_with_server_name(username.as_str(), &services.server.name) @@ -295,8 +295,8 @@ async fn db_lt_13(services: &Services) -> Result<()> { for username in &services .users .list_local_users() - .map(UserId::to_owned) - .collect::>() + .map(ToOwned::to_owned) + .collect::>() .await { let user = match UserId::parse_with_server_name(username.as_str(), &services.server.name) diff --git a/src/service/presence/mod.rs b/src/service/presence/mod.rs index 8f646be6..e7ce64bc 100644 --- a/src/service/presence/mod.rs +++ b/src/service/presence/mod.rs @@ -183,8 +183,8 @@ impl Service { .services .users .list_local_users() - .map(UserId::to_owned) - .collect::>() + .map(ToOwned::to_owned) + .collect::>() .await { let presence = self.db.get_presence(user_id).await; diff --git a/src/service/pusher/mod.rs b/src/service/pusher/mod.rs index baa7a72e..071bf822 100644 --- a/src/service/pusher/mod.rs +++ b/src/service/pusher/mod.rs @@ -178,7 +178,7 @@ impl Service { pub fn get_pushkeys<'a>( &'a self, sender: &'a UserId, - ) -> impl Stream + Send + 'a { + ) -> impl Stream + Send + 'a { let prefix = (sender, Interfix); self.db .senderkey_pusher diff --git a/src/service/rooms/alias/mod.rs b/src/service/rooms/alias/mod.rs index 7675efd4..c627092e 100644 --- a/src/service/rooms/alias/mod.rs +++ b/src/service/rooms/alias/mod.rs @@ -178,7 +178,7 @@ impl Service { pub fn local_aliases_for_room<'a>( &'a self, room_id: &'a RoomId, - ) -> impl Stream + Send + 'a { + ) -> impl Stream + Send + 'a { let prefix = (room_id, Interfix); self.db 
.aliasid_alias @@ -188,7 +188,9 @@ impl Service { } #[tracing::instrument(skip(self), level = "debug")] - pub fn all_local_aliases<'a>(&'a self) -> impl Stream + Send + 'a { + pub fn all_local_aliases<'a>( + &'a self, + ) -> impl Stream + Send + 'a { self.db .alias_roomid .stream() diff --git a/src/service/rooms/pdu_metadata/data.rs b/src/service/rooms/pdu_metadata/data.rs index a746b4cc..854c6ea0 100644 --- a/src/service/rooms/pdu_metadata/data.rs +++ b/src/service/rooms/pdu_metadata/data.rs @@ -60,7 +60,7 @@ impl Data { target: ShortEventId, from: PduCount, dir: Direction, - ) -> impl Stream + Send + '_ { + ) -> impl Stream + Send + 'a { // Query from exact position then filter excludes it (saturating_inc could skip // events at min/max boundaries) let from_unsigned = from.into_unsigned(); diff --git a/src/service/rooms/read_receipt/data.rs b/src/service/rooms/read_receipt/data.rs index 62f87948..9a2fa70c 100644 --- a/src/service/rooms/read_receipt/data.rs +++ b/src/service/rooms/read_receipt/data.rs @@ -65,7 +65,7 @@ impl Data { &'a self, room_id: &'a RoomId, since: u64, - ) -> impl Stream> + Send + 'a { + ) -> impl Stream> + Send + 'a { type Key<'a> = (&'a RoomId, u64, &'a UserId); type KeyVal<'a> = (Key<'a>, CanonicalJsonObject); diff --git a/src/service/rooms/read_receipt/mod.rs b/src/service/rooms/read_receipt/mod.rs index 68ce9b7f..64081a2c 100644 --- a/src/service/rooms/read_receipt/mod.rs +++ b/src/service/rooms/read_receipt/mod.rs @@ -112,7 +112,7 @@ impl Service { &'a self, room_id: &'a RoomId, since: u64, - ) -> impl Stream> + Send + 'a { + ) -> impl Stream> + Send + 'a { self.db.readreceipts_since(room_id, since) } diff --git a/src/service/rooms/search/mod.rs b/src/service/rooms/search/mod.rs index afe3061b..ea2f90af 100644 --- a/src/service/rooms/search/mod.rs +++ b/src/service/rooms/search/mod.rs @@ -104,7 +104,7 @@ pub fn deindex_pdu(&self, shortroomid: ShortRoomId, pdu_id: &RawPduId, message_b pub async fn search_pdus<'a>( &'a self, query: &'a RoomQuery<'a>, -) -> Result<(usize, impl Stream> + Send + '_)> { +) -> Result<(usize, impl Stream> + Send + 'a)> { let pdu_ids: Vec<_> = self.search_pdu_ids(query).await?.collect().await; let filter = &query.criteria.filter; @@ -137,10 +137,10 @@ pub async fn search_pdus<'a>( // result is modeled as a stream such that callers don't have to be refactored // though an additional async/wrap still exists for now #[implement(Service)] -pub async fn search_pdu_ids( - &self, - query: &RoomQuery<'_>, -) -> Result + Send + '_ + use<'_>> { +pub async fn search_pdu_ids<'a>( + &'a self, + query: &'a RoomQuery<'_>, +) -> Result + Send + 'a + use<'a>> { let shortroomid = self.services.short.get_shortroomid(query.room_id).await?; let pdu_ids = self.search_pdu_ids_query_room(query, shortroomid).await; @@ -173,7 +173,7 @@ fn search_pdu_ids_query_words<'a>( &'a self, shortroomid: ShortRoomId, word: &'a str, -) -> impl Stream + Send + '_ { +) -> impl Stream + Send + 'a { self.search_pdu_ids_query_word(shortroomid, word) .map(move |key| -> RawPduId { let key = &key[prefix_len(word)..]; @@ -183,11 +183,11 @@ fn search_pdu_ids_query_words<'a>( /// Iterate over raw database results for a word #[implement(Service)] -fn search_pdu_ids_query_word( - &self, +fn search_pdu_ids_query_word<'a>( + &'a self, shortroomid: ShortRoomId, - word: &str, -) -> impl Stream> + Send + '_ + use<'_> { + word: &'a str, +) -> impl Stream> + Send + 'a + use<'a> { // rustc says const'ing this not yet stable let end_id: RawPduId = PduId { shortroomid, diff --git 
a/src/service/rooms/short/mod.rs b/src/service/rooms/short/mod.rs index 06ff6493..660bb7de 100644 --- a/src/service/rooms/short/mod.rs +++ b/src/service/rooms/short/mod.rs @@ -62,7 +62,7 @@ pub async fn get_or_create_shorteventid(&self, event_id: &EventId) -> ShortEvent pub fn multi_get_or_create_shorteventid<'a, I>( &'a self, event_ids: I, -) -> impl Stream + Send + '_ +) -> impl Stream + Send + 'a where I: Iterator + Clone + Debug + Send + 'a, { diff --git a/src/service/rooms/state/mod.rs b/src/service/rooms/state/mod.rs index 641aa6a9..386adf9d 100644 --- a/src/service/rooms/state/mod.rs +++ b/src/service/rooms/state/mod.rs @@ -388,7 +388,7 @@ impl Service { pub fn get_forward_extremities<'a>( &'a self, room_id: &'a RoomId, - ) -> impl Stream + Send + '_ { + ) -> impl Stream + Send + 'a { let prefix = (room_id, Interfix); self.db diff --git a/src/service/rooms/state_cache/mod.rs b/src/service/rooms/state_cache/mod.rs index e9845fbf..2d8f5cc5 100644 --- a/src/service/rooms/state_cache/mod.rs +++ b/src/service/rooms/state_cache/mod.rs @@ -144,7 +144,7 @@ pub fn clear_appservice_in_room_cache(&self) { self.appservice_in_room_cache.wri pub fn room_servers<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { let prefix = (room_id, Interfix); self.db .roomserverids @@ -167,7 +167,7 @@ pub async fn server_in_room<'a>(&'a self, server: &'a ServerName, room_id: &'a R pub fn server_rooms<'a>( &'a self, server: &'a ServerName, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { let prefix = (server, Interfix); self.db .serverroomids @@ -202,7 +202,7 @@ pub fn get_shared_rooms<'a>( &'a self, user_a: &'a UserId, user_b: &'a UserId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { use conduwuit::utils::set; let a = self.rooms_joined(user_a); @@ -216,7 +216,7 @@ pub fn get_shared_rooms<'a>( pub fn room_members<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { let prefix = (room_id, Interfix); self.db .roomuserid_joined @@ -239,7 +239,7 @@ pub async fn room_joined_count(&self, room_id: &RoomId) -> Result { pub fn local_users_in_room<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { self.room_members(room_id) .ready_filter(|user| self.services.globals.user_is_local(user)) } @@ -251,7 +251,7 @@ pub fn local_users_in_room<'a>( pub fn active_local_users_in_room<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { self.local_users_in_room(room_id) .filter(|user| self.services.users.is_active(user)) } @@ -273,7 +273,7 @@ pub async fn room_invited_count(&self, room_id: &RoomId) -> Result { pub fn room_useroncejoined<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { let prefix = (room_id, Interfix); self.db .roomuseroncejoinedids @@ -288,7 +288,7 @@ pub fn room_useroncejoined<'a>( pub fn room_members_invited<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { let prefix = (room_id, Interfix); self.db .roomuserid_invitecount @@ -303,7 +303,7 @@ pub fn room_members_invited<'a>( pub fn room_members_knocked<'a>( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { let prefix = (room_id, Interfix); self.db .roomuserid_knockedcount @@ -347,7 +347,7 @@ pub async fn get_left_count(&self, room_id: &RoomId, user_id: &UserId) -> Result pub fn rooms_joined<'a>( &'a 
self, user_id: &'a UserId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { self.db .userroomid_joined .keys_raw_prefix(user_id) diff --git a/src/service/rooms/state_cache/via.rs b/src/service/rooms/state_cache/via.rs index a818cc04..24d92a21 100644 --- a/src/service/rooms/state_cache/via.rs +++ b/src/service/rooms/state_cache/via.rs @@ -81,7 +81,7 @@ pub async fn servers_route_via(&self, room_id: &RoomId) -> Result( &'a self, room_id: &'a RoomId, -) -> impl Stream + Send + 'a { +) -> impl Stream + Send + 'a { type KeyVal<'a> = (Ignore, Vec<&'a ServerName>); self.db diff --git a/src/service/users/mod.rs b/src/service/users/mod.rs index fff1661c..6ddd8d79 100644 --- a/src/service/users/mod.rs +++ b/src/service/users/mod.rs @@ -422,7 +422,7 @@ impl Service { pub fn all_device_ids<'a>( &'a self, user_id: &'a UserId, - ) -> impl Stream + Send + 'a { + ) -> impl Stream + Send + 'a { let prefix = (user_id, Interfix); self.db .userdeviceid_metadata @@ -770,7 +770,7 @@ impl Service { user_id: &'a UserId, from: u64, to: Option, - ) -> impl Stream + Send + 'a { + ) -> impl Stream + Send + 'a { self.keys_changed_user_or_room(user_id.as_str(), from, to) .map(|(user_id, ..)| user_id) } @@ -781,7 +781,7 @@ impl Service { room_id: &'a RoomId, from: u64, to: Option, - ) -> impl Stream + Send + 'a { + ) -> impl Stream + Send + 'a { self.keys_changed_user_or_room(room_id.as_str(), from, to) } @@ -790,7 +790,7 @@ impl Service { user_or_room_id: &'a str, from: u64, to: Option, - ) -> impl Stream + Send + 'a { + ) -> impl Stream + Send + 'a { type KeyVal<'a> = ((&'a str, u64), &'a UserId); let to = to.unwrap_or(u64::MAX);