From 60960c6e099c7ec249dd29c7396328a4907fe6ba Mon Sep 17 00:00:00 2001 From: Jade Ellis Date: Wed, 21 May 2025 20:29:53 +0100 Subject: [PATCH 01/10] feat: Automatically set well-known support contacts --- conduwuit-example.toml | 20 +++++++++---- src/api/client/well_known.rs | 54 +++++++++++++++++++++++++----------- src/core/config/mod.rs | 16 +++++++++++ 3 files changed, 69 insertions(+), 21 deletions(-) diff --git a/conduwuit-example.toml b/conduwuit-example.toml index 6934e67c..1a8be2aa 100644 --- a/conduwuit-example.toml +++ b/conduwuit-example.toml @@ -1641,19 +1641,29 @@ # #server = -# This item is undocumented. Please contribute documentation for it. +# URL to a support page for the server, which will be served as part of +# the MSC1929 server support endpoint at /.well-known/matrix/support. +# Will be included alongside any contact information # #support_page = -# This item is undocumented. Please contribute documentation for it. +# Role string for server support contacts, to be served as part of the +# MSC1929 server support endpoint at /.well-known/matrix/support. # -#support_role = +#support_role = "m.role.admin" -# This item is undocumented. Please contribute documentation for it. +# Email address for server support contacts, to be served as part of the +# MSC1929 server support endpoint. +# This will be used along with support_mxid if specified. # #support_email = -# This item is undocumented. Please contribute documentation for it. +# Matrix ID for server support contacts, to be served as part of the +# MSC1929 server support endpoint. +# This will be used along with support_email if specified. +# +# If no email or mxid is specified, all of the server's admins will be +# listed. # #support_mxid = diff --git a/src/api/client/well_known.rs b/src/api/client/well_known.rs index 35b7fc1e..4981ccb4 100644 --- a/src/api/client/well_known.rs +++ b/src/api/client/well_known.rs @@ -1,5 +1,6 @@ use axum::{Json, extract::State, response::IntoResponse}; use conduwuit::{Error, Result}; +use futures::StreamExt; use ruma::api::client::{ discovery::{ discover_homeserver::{self, HomeserverInfo, SlidingSyncProxyInfo}, @@ -33,6 +34,8 @@ pub(crate) async fn well_known_client( /// # `GET /.well-known/matrix/support` /// /// Server support contact and support page of a homeserver's domain. +/// Implements MSC1929 for server discovery. +/// If no configuration is set, uses admin users as contacts. 
 pub(crate) async fn well_known_support(
 	State(services): State,
 	_body: Ruma,
 ) -> Result {
@@ -45,32 +48,51 @@ pub(crate) async fn well_known_support(
 		.as_ref()
 		.map(ToString::to_string);
 
-	let role = services.server.config.well_known.support_role.clone();
-
-	// support page or role must be either defined for this to be valid
-	if support_page.is_none() && role.is_none() {
-		return Err(Error::BadRequest(ErrorKind::NotFound, "Not found."));
-	}
-
 	let email_address = services.server.config.well_known.support_email.clone();
 	let matrix_id = services.server.config.well_known.support_mxid.clone();
 
-	// if a role is specified, an email address or matrix id is required
-	if role.is_some() && (email_address.is_none() && matrix_id.is_none()) {
-		return Err(Error::BadRequest(ErrorKind::NotFound, "Not found."));
-	}
-
 	// TODO: support defining multiple contacts in the config
 	let mut contacts: Vec<Contact> = vec![];
 
-	if let Some(role) = role {
-		let contact = Contact { role, email_address, matrix_id };
+	let role_value = services
+		.server
+		.config
+		.well_known
+		.support_role
+		.clone()
+		.unwrap_or_else(|| "m.role.admin".to_owned().into());
 
-		contacts.push(contact);
+	// Add configured contact if at least one contact method is specified
+	if email_address.is_some() || matrix_id.is_some() {
+		contacts.push(Contact {
+			role: role_value.clone(),
+			email_address: email_address.clone(),
+			matrix_id: matrix_id.clone(),
+		});
+	}
+
+	// Try to add admin users as contacts if no contacts are configured
+	if contacts.is_empty() {
+		if let Ok(admin_room) = services.admin.get_admin_room().await {
+			let admin_users = services.rooms.state_cache.room_members(&admin_room);
+			let mut stream = admin_users;
+
+			while let Some(user_id) = stream.next().await {
+				// Skip the server's own user
+				if *user_id == services.globals.server_user {
+					continue;
+				}
+				contacts.push(Contact {
+					role: role_value.clone(),
+					email_address: None,
+					matrix_id: Some(user_id.to_owned()),
+				});
+			}
+		}
 	}
 
-	// support page or role+contacts must be either defined for this to be valid
 	if contacts.is_empty() && support_page.is_none() {
+		// No admin room, no configured contacts, and no support page
 		return Err(Error::BadRequest(ErrorKind::NotFound, "Not found."));
 	}
 
diff --git a/src/core/config/mod.rs b/src/core/config/mod.rs
index 66ed0b2e..d4a10345 100644
--- a/src/core/config/mod.rs
+++ b/src/core/config/mod.rs
@@ -1897,12 +1897,28 @@ pub struct WellKnownConfig {
 	/// example: "matrix.example.com:443"
 	pub server: Option,
 
+	/// URL to a support page for the server, which will be served as part of
+	/// the MSC1929 server support endpoint at /.well-known/matrix/support.
+	/// Will be included alongside any contact information
 	pub support_page: Option,
 
+	/// Role string for server support contacts, to be served as part of the
+	/// MSC1929 server support endpoint at /.well-known/matrix/support.
+	///
+	/// default: "m.role.admin"
 	pub support_role: Option,
 
+	/// Email address for server support contacts, to be served as part of the
+	/// MSC1929 server support endpoint.
+	/// This will be used along with support_mxid if specified.
 	pub support_email: Option,
 
+	/// Matrix ID for server support contacts, to be served as part of the
+	/// MSC1929 server support endpoint.
+	/// This will be used along with support_email if specified.
+	///
+	/// If no email or mxid is specified, all of the server's admins will be
+	/// listed.
pub support_mxid: Option, } From 2ccbd7d60b12c9c9d65bde027f4c974665022b28 Mon Sep 17 00:00:00 2001 From: Jade Ellis Date: Wed, 21 May 2025 21:06:44 +0100 Subject: [PATCH 02/10] fix: Reference config directly --- src/api/client/well_known.rs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/api/client/well_known.rs b/src/api/client/well_known.rs index 4981ccb4..fe2281ba 100644 --- a/src/api/client/well_known.rs +++ b/src/api/client/well_known.rs @@ -18,7 +18,7 @@ pub(crate) async fn well_known_client( State(services): State, _body: Ruma, ) -> Result { - let client_url = match services.server.config.well_known.client.as_ref() { + let client_url = match services.config.well_known.client.as_ref() { | Some(url) => url.to_string(), | None => return Err(Error::BadRequest(ErrorKind::NotFound, "Not found.")), }; @@ -41,21 +41,19 @@ pub(crate) async fn well_known_support( _body: Ruma, ) -> Result { let support_page = services - .server .config .well_known .support_page .as_ref() .map(ToString::to_string); - let email_address = services.server.config.well_known.support_email.clone(); - let matrix_id = services.server.config.well_known.support_mxid.clone(); + let email_address = services.config.well_known.support_email.clone(); + let matrix_id = services.config.well_known.support_mxid.clone(); // TODO: support defining multiple contacts in the config let mut contacts: Vec = vec![]; let role_value = services - .server .config .well_known .support_role @@ -106,9 +104,9 @@ pub(crate) async fn well_known_support( pub(crate) async fn syncv3_client_server_json( State(services): State, ) -> Result { - let server_url = match services.server.config.well_known.client.as_ref() { + let server_url = match services.config.well_known.client.as_ref() { | Some(url) => url.to_string(), - | None => match services.server.config.well_known.server.as_ref() { + | None => match services.config.well_known.server.as_ref() { | Some(url) => url.to_string(), | None => return Err(Error::BadRequest(ErrorKind::NotFound, "Not found.")), }, From 002c64ca88777c2e530a5ee24bdbac23555c3157 Mon Sep 17 00:00:00 2001 From: Jade Ellis Date: Wed, 21 May 2025 22:24:33 +0100 Subject: [PATCH 03/10] feat: Add admin command to delete sync tokens from a room --- src/admin/room/commands.rs | 19 +++++++++++++++++- src/admin/room/mod.rs | 9 ++++++++- src/service/rooms/user/mod.rs | 37 +++++++++++++++++++++++++++++++++++ 3 files changed, 63 insertions(+), 2 deletions(-) diff --git a/src/admin/room/commands.rs b/src/admin/room/commands.rs index 81f36f15..5b08ff2a 100644 --- a/src/admin/room/commands.rs +++ b/src/admin/room/commands.rs @@ -1,6 +1,6 @@ use conduwuit::{Err, Result}; use futures::StreamExt; -use ruma::OwnedRoomId; +use ruma::{OwnedRoomId, OwnedRoomOrAliasId}; use crate::{PAGE_SIZE, admin_command, get_room_info}; @@ -66,3 +66,20 @@ pub(super) async fn exists(&self, room_id: OwnedRoomId) -> Result { self.write_str(&format!("{result}")).await } + +#[admin_command] +pub(super) async fn purge_sync_tokens(&self, room: OwnedRoomOrAliasId) -> Result { + // Resolve the room ID from the room or alias ID + let room_id = self.services.rooms.alias.resolve(&room).await?; + + // Delete all tokens for this room using the service method + let deleted_count = match self.services.rooms.user.delete_room_tokens(&room_id).await { + | Ok(count) => count, + | Err(_) => return Err!("Failed to delete sync tokens for room {}", room_id), + }; + + self.write_str(&format!( + "Successfully deleted {deleted_count} sync tokens for room 
{room_id}" + )) + .await +} diff --git a/src/admin/room/mod.rs b/src/admin/room/mod.rs index 26d2c2d8..0eac2224 100644 --- a/src/admin/room/mod.rs +++ b/src/admin/room/mod.rs @@ -6,7 +6,7 @@ mod moderation; use clap::Subcommand; use conduwuit::Result; -use ruma::OwnedRoomId; +use ruma::{OwnedRoomId, OwnedRoomOrAliasId}; use self::{ alias::RoomAliasCommand, directory::RoomDirectoryCommand, info::RoomInfoCommand, @@ -56,4 +56,11 @@ pub(super) enum RoomCommand { Exists { room_id: OwnedRoomId, }, + + /// - Delete all sync tokens for a room + PurgeSyncTokens { + /// Room ID or alias to purge sync tokens for + #[arg(value_parser)] + room: OwnedRoomOrAliasId, + }, } diff --git a/src/service/rooms/user/mod.rs b/src/service/rooms/user/mod.rs index bd76f1f4..cc72ac97 100644 --- a/src/service/rooms/user/mod.rs +++ b/src/service/rooms/user/mod.rs @@ -127,3 +127,40 @@ pub async fn get_token_shortstatehash( .await .deserialized() } + +/// Delete all sync tokens associated with a room +/// +/// This helps clean up the database as these tokens are never otherwise removed +#[implement(Service)] +pub async fn delete_room_tokens(&self, room_id: &RoomId) -> Result { + use futures::TryStreamExt; + + let shortroomid = self.services.short.get_shortroomid(room_id).await?; + + // Create a prefix to search by - all entries for this room will start with its + // short ID + let prefix = &[shortroomid]; + + // Get all keys with this room prefix + let mut count = 0; + + // Collect all keys into a Vec first, then delete them + let keys = self + .db + .roomsynctoken_shortstatehash + .keys_prefix_raw(prefix) + .map_ok(|key| { + // Clone the key since we can't store references in the Vec + Vec::from(key) + }) + .try_collect::>() + .await?; + + // Delete each key individually + for key in &keys { + self.db.roomsynctoken_shortstatehash.del(key); + count += 1; + } + + Ok(count) +} From 9bef2972ac23e4d81d622ae961814edc868803ac Mon Sep 17 00:00:00 2001 From: Jade Ellis Date: Wed, 21 May 2025 22:44:15 +0100 Subject: [PATCH 04/10] feat: Add command to purge sync tokens for empty rooms --- src/admin/room/commands.rs | 166 ++++++++++++++++++++++++++++++++++ src/admin/room/mod.rs | 31 ++++++- src/service/rooms/user/mod.rs | 25 +++++ 3 files changed, 218 insertions(+), 4 deletions(-) diff --git a/src/admin/room/commands.rs b/src/admin/room/commands.rs index 5b08ff2a..9075389f 100644 --- a/src/admin/room/commands.rs +++ b/src/admin/room/commands.rs @@ -83,3 +83,169 @@ pub(super) async fn purge_sync_tokens(&self, room: OwnedRoomOrAliasId) -> Result )) .await } + +#[admin_command] +pub(super) async fn purge_empty_room_tokens( + &self, + yes: bool, + target_disabled: bool, + target_banned: bool, + dry_run: bool, +) -> Result { + use conduwuit::{debug, info}; + + if !yes && !dry_run { + return Err!( + "Please confirm this operation with --yes as it may delete tokens from many rooms, \ + or use --dry-run to simulate" + ); + } + + let mode = if dry_run { "Simulating" } else { "Starting" }; + + let mut total_rooms_processed = 0; + let mut empty_rooms_processed = 0; + let mut total_tokens_deleted = 0; + let mut error_count = 0; + let mut skipped_rooms = 0; + + info!("{} purge of sync tokens for rooms with no local users", mode); + + // Get all rooms in the server + let all_rooms = self + .services + .rooms + .metadata + .iter_ids() + .collect::>() + .await; + + info!("Found {} rooms total on the server", all_rooms.len()); + + // Filter rooms based on options + let mut rooms = Vec::new(); + for room_id in all_rooms { + // Filter rooms based on 
targeting options + let is_disabled = self.services.rooms.metadata.is_disabled(room_id).await; + let is_banned = self.services.rooms.metadata.is_banned(room_id).await; + + // If targeting specific types of rooms, only include matching rooms + if (target_disabled || target_banned) + && !((target_disabled && is_disabled) || (target_banned && is_banned)) + { + debug!("Skipping room {} as it doesn't match targeting criteria", room_id); + skipped_rooms += 1; + continue; + } + + rooms.push(room_id); + } + + // Total number of rooms we'll be checking + let total_rooms = rooms.len(); + info!( + "Processing {} rooms after filtering (skipped {} rooms)", + total_rooms, skipped_rooms + ); + + // Process each room + for room_id in rooms { + total_rooms_processed += 1; + + // Count local users in this room + let local_users_count = self + .services + .rooms + .state_cache + .local_users_in_room(room_id) + .count() + .await; + + // Only process rooms with no local users + if local_users_count == 0 { + empty_rooms_processed += 1; + + // In dry run mode, just count what would be deleted, don't actually delete + debug!( + "Room {} has no local users, {}", + room_id, + if dry_run { + "would purge sync tokens" + } else { + "purging sync tokens" + } + ); + + if dry_run { + // For dry run mode, count tokens without deleting + match self.services.rooms.user.count_room_tokens(room_id).await { + | Ok(count) => + if count > 0 { + debug!("Would delete {} sync tokens for room {}", count, room_id); + total_tokens_deleted += count; + } else { + debug!("No sync tokens found for room {}", room_id); + }, + | Err(e) => { + debug!("Error counting sync tokens for room {}: {:?}", room_id, e); + error_count += 1; + }, + } + } else { + // Real deletion mode + match self.services.rooms.user.delete_room_tokens(room_id).await { + | Ok(count) => + if count > 0 { + debug!("Deleted {} sync tokens for room {}", count, room_id); + total_tokens_deleted += count; + } else { + debug!("No sync tokens found for room {}", room_id); + }, + | Err(e) => { + debug!("Error purging sync tokens for room {}: {:?}", room_id, e); + error_count += 1; + }, + } + } + } else { + debug!("Room {} has {} local users, skipping", room_id, local_users_count); + } + + // Log progress periodically + if total_rooms_processed % 100 == 0 || total_rooms_processed == total_rooms { + info!( + "Progress: {}/{} rooms processed, {} empty rooms found, {} tokens {}", + total_rooms_processed, + total_rooms, + empty_rooms_processed, + total_tokens_deleted, + if dry_run { "would be deleted" } else { "deleted" } + ); + } + } + + let action = if dry_run { "would be deleted" } else { "deleted" }; + info!( + "Finished {}: processed {} empty rooms out of {} total, {} tokens {}, errors: {}", + if dry_run { + "purge simulation" + } else { + "purging sync tokens" + }, + empty_rooms_processed, + total_rooms, + total_tokens_deleted, + action, + error_count + ); + + let mode_msg = if dry_run { "DRY RUN: " } else { "" }; + self.write_str(&format!( + "{}Successfully processed {empty_rooms_processed} empty rooms (out of {total_rooms} \ + total rooms), {total_tokens_deleted} tokens {}. Skipped {skipped_rooms} rooms based on \ + filters. 
Failed for {error_count} rooms.", + mode_msg, + if dry_run { "would be deleted" } else { "deleted" } + )) + .await +} diff --git a/src/admin/room/mod.rs b/src/admin/room/mod.rs index 0eac2224..ee468e1c 100644 --- a/src/admin/room/mod.rs +++ b/src/admin/room/mod.rs @@ -22,13 +22,13 @@ pub(super) enum RoomCommand { ListRooms { page: Option, - /// Excludes rooms that we have federation disabled with + /// Only purge rooms that have federation disabled #[arg(long)] - exclude_disabled: bool, + only_disabled: bool, - /// Excludes rooms that we have banned + /// Only purge rooms that have been banned #[arg(long)] - exclude_banned: bool, + only_banned: bool, #[arg(long)] /// Whether to only output room IDs without supplementary room @@ -63,4 +63,27 @@ pub(super) enum RoomCommand { #[arg(value_parser)] room: OwnedRoomOrAliasId, }, + + /// - Delete sync tokens for all rooms that have no local users + /// + /// By default, processes all empty rooms. You can use --target-disabled + /// and/or --target-banned to exclusively process rooms matching those + /// conditions. + PurgeEmptyRoomTokens { + /// Confirm you want to delete tokens from potentially many rooms + #[arg(long)] + yes: bool, + + /// Only purge rooms that have federation disabled + #[arg(long)] + target_disabled: bool, + + /// Only purge rooms that have been banned + #[arg(long)] + target_banned: bool, + + /// Perform a dry run without actually deleting any tokens + #[arg(long)] + dry_run: bool, + }, } diff --git a/src/service/rooms/user/mod.rs b/src/service/rooms/user/mod.rs index cc72ac97..58df427b 100644 --- a/src/service/rooms/user/mod.rs +++ b/src/service/rooms/user/mod.rs @@ -128,6 +128,31 @@ pub async fn get_token_shortstatehash( .deserialized() } +/// Count how many sync tokens exist for a room without deleting them +/// +/// This is useful for dry runs to see how many tokens would be deleted +#[implement(Service)] +pub async fn count_room_tokens(&self, room_id: &RoomId) -> Result { + use futures::TryStreamExt; + + let shortroomid = self.services.short.get_shortroomid(room_id).await?; + + // Create a prefix to search by - all entries for this room will start with its + // short ID + let prefix = &[shortroomid]; + + // Collect all keys into a Vec and count them + let keys = self + .db + .roomsynctoken_shortstatehash + .keys_prefix_raw(prefix) + .map_ok(|_| ()) // We only need to count, not store the keys + .try_collect::>() + .await?; + + Ok(keys.len()) +} + /// Delete all sync tokens associated with a room /// /// This helps clean up the database as these tokens are never otherwise removed From a33179005852eec57c6ee512b865223f4a3b6d48 Mon Sep 17 00:00:00 2001 From: Jade Ellis Date: Wed, 21 May 2025 22:24:33 +0100 Subject: [PATCH 05/10] feat: Add admin command to delete sync tokens from a room --- src/admin/room/commands.rs | 19 +++++++++++++++++- src/admin/room/mod.rs | 9 ++++++++- src/service/rooms/user/mod.rs | 37 +++++++++++++++++++++++++++++++++++ 3 files changed, 63 insertions(+), 2 deletions(-) diff --git a/src/admin/room/commands.rs b/src/admin/room/commands.rs index 81f36f15..5b08ff2a 100644 --- a/src/admin/room/commands.rs +++ b/src/admin/room/commands.rs @@ -1,6 +1,6 @@ use conduwuit::{Err, Result}; use futures::StreamExt; -use ruma::OwnedRoomId; +use ruma::{OwnedRoomId, OwnedRoomOrAliasId}; use crate::{PAGE_SIZE, admin_command, get_room_info}; @@ -66,3 +66,20 @@ pub(super) async fn exists(&self, room_id: OwnedRoomId) -> Result { self.write_str(&format!("{result}")).await } + +#[admin_command] +pub(super) async fn 
purge_sync_tokens(&self, room: OwnedRoomOrAliasId) -> Result { + // Resolve the room ID from the room or alias ID + let room_id = self.services.rooms.alias.resolve(&room).await?; + + // Delete all tokens for this room using the service method + let deleted_count = match self.services.rooms.user.delete_room_tokens(&room_id).await { + | Ok(count) => count, + | Err(_) => return Err!("Failed to delete sync tokens for room {}", room_id), + }; + + self.write_str(&format!( + "Successfully deleted {deleted_count} sync tokens for room {room_id}" + )) + .await +} diff --git a/src/admin/room/mod.rs b/src/admin/room/mod.rs index 26d2c2d8..0eac2224 100644 --- a/src/admin/room/mod.rs +++ b/src/admin/room/mod.rs @@ -6,7 +6,7 @@ mod moderation; use clap::Subcommand; use conduwuit::Result; -use ruma::OwnedRoomId; +use ruma::{OwnedRoomId, OwnedRoomOrAliasId}; use self::{ alias::RoomAliasCommand, directory::RoomDirectoryCommand, info::RoomInfoCommand, @@ -56,4 +56,11 @@ pub(super) enum RoomCommand { Exists { room_id: OwnedRoomId, }, + + /// - Delete all sync tokens for a room + PurgeSyncTokens { + /// Room ID or alias to purge sync tokens for + #[arg(value_parser)] + room: OwnedRoomOrAliasId, + }, } diff --git a/src/service/rooms/user/mod.rs b/src/service/rooms/user/mod.rs index bd76f1f4..cc72ac97 100644 --- a/src/service/rooms/user/mod.rs +++ b/src/service/rooms/user/mod.rs @@ -127,3 +127,40 @@ pub async fn get_token_shortstatehash( .await .deserialized() } + +/// Delete all sync tokens associated with a room +/// +/// This helps clean up the database as these tokens are never otherwise removed +#[implement(Service)] +pub async fn delete_room_tokens(&self, room_id: &RoomId) -> Result { + use futures::TryStreamExt; + + let shortroomid = self.services.short.get_shortroomid(room_id).await?; + + // Create a prefix to search by - all entries for this room will start with its + // short ID + let prefix = &[shortroomid]; + + // Get all keys with this room prefix + let mut count = 0; + + // Collect all keys into a Vec first, then delete them + let keys = self + .db + .roomsynctoken_shortstatehash + .keys_prefix_raw(prefix) + .map_ok(|key| { + // Clone the key since we can't store references in the Vec + Vec::from(key) + }) + .try_collect::>() + .await?; + + // Delete each key individually + for key in &keys { + self.db.roomsynctoken_shortstatehash.del(key); + count += 1; + } + + Ok(count) +} From c338fd8453cd01ee176c955c79c540fcc6b54412 Mon Sep 17 00:00:00 2001 From: Jade Ellis Date: Wed, 21 May 2025 22:44:15 +0100 Subject: [PATCH 06/10] feat: Add command to purge sync tokens for empty rooms --- src/admin/room/commands.rs | 166 ++++++++++++++++++++++++++++++++++ src/admin/room/mod.rs | 31 ++++++- src/service/rooms/user/mod.rs | 25 +++++ 3 files changed, 218 insertions(+), 4 deletions(-) diff --git a/src/admin/room/commands.rs b/src/admin/room/commands.rs index 5b08ff2a..9075389f 100644 --- a/src/admin/room/commands.rs +++ b/src/admin/room/commands.rs @@ -83,3 +83,169 @@ pub(super) async fn purge_sync_tokens(&self, room: OwnedRoomOrAliasId) -> Result )) .await } + +#[admin_command] +pub(super) async fn purge_empty_room_tokens( + &self, + yes: bool, + target_disabled: bool, + target_banned: bool, + dry_run: bool, +) -> Result { + use conduwuit::{debug, info}; + + if !yes && !dry_run { + return Err!( + "Please confirm this operation with --yes as it may delete tokens from many rooms, \ + or use --dry-run to simulate" + ); + } + + let mode = if dry_run { "Simulating" } else { "Starting" }; + + let mut 
total_rooms_processed = 0; + let mut empty_rooms_processed = 0; + let mut total_tokens_deleted = 0; + let mut error_count = 0; + let mut skipped_rooms = 0; + + info!("{} purge of sync tokens for rooms with no local users", mode); + + // Get all rooms in the server + let all_rooms = self + .services + .rooms + .metadata + .iter_ids() + .collect::>() + .await; + + info!("Found {} rooms total on the server", all_rooms.len()); + + // Filter rooms based on options + let mut rooms = Vec::new(); + for room_id in all_rooms { + // Filter rooms based on targeting options + let is_disabled = self.services.rooms.metadata.is_disabled(room_id).await; + let is_banned = self.services.rooms.metadata.is_banned(room_id).await; + + // If targeting specific types of rooms, only include matching rooms + if (target_disabled || target_banned) + && !((target_disabled && is_disabled) || (target_banned && is_banned)) + { + debug!("Skipping room {} as it doesn't match targeting criteria", room_id); + skipped_rooms += 1; + continue; + } + + rooms.push(room_id); + } + + // Total number of rooms we'll be checking + let total_rooms = rooms.len(); + info!( + "Processing {} rooms after filtering (skipped {} rooms)", + total_rooms, skipped_rooms + ); + + // Process each room + for room_id in rooms { + total_rooms_processed += 1; + + // Count local users in this room + let local_users_count = self + .services + .rooms + .state_cache + .local_users_in_room(room_id) + .count() + .await; + + // Only process rooms with no local users + if local_users_count == 0 { + empty_rooms_processed += 1; + + // In dry run mode, just count what would be deleted, don't actually delete + debug!( + "Room {} has no local users, {}", + room_id, + if dry_run { + "would purge sync tokens" + } else { + "purging sync tokens" + } + ); + + if dry_run { + // For dry run mode, count tokens without deleting + match self.services.rooms.user.count_room_tokens(room_id).await { + | Ok(count) => + if count > 0 { + debug!("Would delete {} sync tokens for room {}", count, room_id); + total_tokens_deleted += count; + } else { + debug!("No sync tokens found for room {}", room_id); + }, + | Err(e) => { + debug!("Error counting sync tokens for room {}: {:?}", room_id, e); + error_count += 1; + }, + } + } else { + // Real deletion mode + match self.services.rooms.user.delete_room_tokens(room_id).await { + | Ok(count) => + if count > 0 { + debug!("Deleted {} sync tokens for room {}", count, room_id); + total_tokens_deleted += count; + } else { + debug!("No sync tokens found for room {}", room_id); + }, + | Err(e) => { + debug!("Error purging sync tokens for room {}: {:?}", room_id, e); + error_count += 1; + }, + } + } + } else { + debug!("Room {} has {} local users, skipping", room_id, local_users_count); + } + + // Log progress periodically + if total_rooms_processed % 100 == 0 || total_rooms_processed == total_rooms { + info!( + "Progress: {}/{} rooms processed, {} empty rooms found, {} tokens {}", + total_rooms_processed, + total_rooms, + empty_rooms_processed, + total_tokens_deleted, + if dry_run { "would be deleted" } else { "deleted" } + ); + } + } + + let action = if dry_run { "would be deleted" } else { "deleted" }; + info!( + "Finished {}: processed {} empty rooms out of {} total, {} tokens {}, errors: {}", + if dry_run { + "purge simulation" + } else { + "purging sync tokens" + }, + empty_rooms_processed, + total_rooms, + total_tokens_deleted, + action, + error_count + ); + + let mode_msg = if dry_run { "DRY RUN: " } else { "" }; + self.write_str(&format!( + 
"{}Successfully processed {empty_rooms_processed} empty rooms (out of {total_rooms} \ + total rooms), {total_tokens_deleted} tokens {}. Skipped {skipped_rooms} rooms based on \ + filters. Failed for {error_count} rooms.", + mode_msg, + if dry_run { "would be deleted" } else { "deleted" } + )) + .await +} diff --git a/src/admin/room/mod.rs b/src/admin/room/mod.rs index 0eac2224..ee468e1c 100644 --- a/src/admin/room/mod.rs +++ b/src/admin/room/mod.rs @@ -22,13 +22,13 @@ pub(super) enum RoomCommand { ListRooms { page: Option, - /// Excludes rooms that we have federation disabled with + /// Only purge rooms that have federation disabled #[arg(long)] - exclude_disabled: bool, + only_disabled: bool, - /// Excludes rooms that we have banned + /// Only purge rooms that have been banned #[arg(long)] - exclude_banned: bool, + only_banned: bool, #[arg(long)] /// Whether to only output room IDs without supplementary room @@ -63,4 +63,27 @@ pub(super) enum RoomCommand { #[arg(value_parser)] room: OwnedRoomOrAliasId, }, + + /// - Delete sync tokens for all rooms that have no local users + /// + /// By default, processes all empty rooms. You can use --target-disabled + /// and/or --target-banned to exclusively process rooms matching those + /// conditions. + PurgeEmptyRoomTokens { + /// Confirm you want to delete tokens from potentially many rooms + #[arg(long)] + yes: bool, + + /// Only purge rooms that have federation disabled + #[arg(long)] + target_disabled: bool, + + /// Only purge rooms that have been banned + #[arg(long)] + target_banned: bool, + + /// Perform a dry run without actually deleting any tokens + #[arg(long)] + dry_run: bool, + }, } diff --git a/src/service/rooms/user/mod.rs b/src/service/rooms/user/mod.rs index cc72ac97..58df427b 100644 --- a/src/service/rooms/user/mod.rs +++ b/src/service/rooms/user/mod.rs @@ -128,6 +128,31 @@ pub async fn get_token_shortstatehash( .deserialized() } +/// Count how many sync tokens exist for a room without deleting them +/// +/// This is useful for dry runs to see how many tokens would be deleted +#[implement(Service)] +pub async fn count_room_tokens(&self, room_id: &RoomId) -> Result { + use futures::TryStreamExt; + + let shortroomid = self.services.short.get_shortroomid(room_id).await?; + + // Create a prefix to search by - all entries for this room will start with its + // short ID + let prefix = &[shortroomid]; + + // Collect all keys into a Vec and count them + let keys = self + .db + .roomsynctoken_shortstatehash + .keys_prefix_raw(prefix) + .map_ok(|_| ()) // We only need to count, not store the keys + .try_collect::>() + .await?; + + Ok(keys.len()) +} + /// Delete all sync tokens associated with a room /// /// This helps clean up the database as these tokens are never otherwise removed From ebad3c78c6b5481871085b51b9217fb1b9d8857b Mon Sep 17 00:00:00 2001 From: Jade Ellis Date: Thu, 22 May 2025 13:29:47 +0100 Subject: [PATCH 07/10] fixup! 
feat: Add command to purge sync tokens for empty rooms --- src/admin/room/mod.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/admin/room/mod.rs b/src/admin/room/mod.rs index ee468e1c..61114b90 100644 --- a/src/admin/room/mod.rs +++ b/src/admin/room/mod.rs @@ -22,13 +22,13 @@ pub(super) enum RoomCommand { ListRooms { page: Option, - /// Only purge rooms that have federation disabled + /// Excludes rooms that we have federation disabled with #[arg(long)] - only_disabled: bool, + exclude_disabled: bool, - /// Only purge rooms that have been banned + /// Excludes rooms that we have banned #[arg(long)] - only_banned: bool, + exclude_banned: bool, #[arg(long)] /// Whether to only output room IDs without supplementary room From d1cb893db180747f1412f03252d8475bac6ec773 Mon Sep 17 00:00:00 2001 From: Jade Ellis Date: Thu, 22 May 2025 13:49:22 +0100 Subject: [PATCH 08/10] chore: Fix more complicated clippy warnings --- src/admin/room/commands.rs | 73 ++++++++++++++++++++++------------- src/admin/room/mod.rs | 15 +++---- src/service/rooms/user/mod.rs | 6 +-- 3 files changed, 53 insertions(+), 41 deletions(-) diff --git a/src/admin/room/commands.rs b/src/admin/room/commands.rs index 9075389f..5e25ec7a 100644 --- a/src/admin/room/commands.rs +++ b/src/admin/room/commands.rs @@ -73,9 +73,8 @@ pub(super) async fn purge_sync_tokens(&self, room: OwnedRoomOrAliasId) -> Result let room_id = self.services.rooms.alias.resolve(&room).await?; // Delete all tokens for this room using the service method - let deleted_count = match self.services.rooms.user.delete_room_tokens(&room_id).await { - | Ok(count) => count, - | Err(_) => return Err!("Failed to delete sync tokens for room {}", room_id), + let Ok(deleted_count) = self.services.rooms.user.delete_room_tokens(&room_id).await else { + return Err!("Failed to delete sync tokens for room {}", room_id); }; self.write_str(&format!( @@ -84,12 +83,23 @@ pub(super) async fn purge_sync_tokens(&self, room: OwnedRoomOrAliasId) -> Result .await } +/// Target options for room purging +#[derive(Default, Debug, clap::ValueEnum, Clone)] +pub(crate) enum RoomTargetOption { + #[default] + /// Target all rooms + All, + /// Target only disabled rooms + DisabledOnly, + /// Target only banned rooms + BannedOnly, +} + #[admin_command] pub(super) async fn purge_empty_room_tokens( &self, yes: bool, - target_disabled: bool, - target_banned: bool, + target_option: Option, dry_run: bool, ) -> Result { use conduwuit::{debug, info}; @@ -103,11 +113,13 @@ pub(super) async fn purge_empty_room_tokens( let mode = if dry_run { "Simulating" } else { "Starting" }; - let mut total_rooms_processed = 0; - let mut empty_rooms_processed = 0; - let mut total_tokens_deleted = 0; - let mut error_count = 0; - let mut skipped_rooms = 0; + // strictly, we should check if these reach the max value after the loop and + // warn the user that the count is too large + let mut total_rooms_processed: usize = 0; + let mut empty_rooms_processed: u32 = 0; + let mut total_tokens_deleted: usize = 0; + let mut error_count: u32 = 0; + let mut skipped_rooms: u32 = 0; info!("{} purge of sync tokens for rooms with no local users", mode); @@ -125,17 +137,24 @@ pub(super) async fn purge_empty_room_tokens( // Filter rooms based on options let mut rooms = Vec::new(); for room_id in all_rooms { - // Filter rooms based on targeting options - let is_disabled = self.services.rooms.metadata.is_disabled(room_id).await; - let is_banned = self.services.rooms.metadata.is_banned(room_id).await; - - // If 
targeting specific types of rooms, only include matching rooms - if (target_disabled || target_banned) - && !((target_disabled && is_disabled) || (target_banned && is_banned)) - { - debug!("Skipping room {} as it doesn't match targeting criteria", room_id); - skipped_rooms += 1; - continue; + if let Some(target) = &target_option { + match target { + | RoomTargetOption::DisabledOnly => { + if !self.services.rooms.metadata.is_disabled(room_id).await { + debug!("Skipping room {} as it's not disabled", room_id); + skipped_rooms = skipped_rooms.saturating_add(1); + continue; + } + }, + | RoomTargetOption::BannedOnly => { + if !self.services.rooms.metadata.is_banned(room_id).await { + debug!("Skipping room {} as it's not banned", room_id); + skipped_rooms = skipped_rooms.saturating_add(1); + continue; + } + }, + | RoomTargetOption::All => {}, + } } rooms.push(room_id); @@ -150,7 +169,7 @@ pub(super) async fn purge_empty_room_tokens( // Process each room for room_id in rooms { - total_rooms_processed += 1; + total_rooms_processed = total_rooms_processed.saturating_add(1); // Count local users in this room let local_users_count = self @@ -163,7 +182,7 @@ pub(super) async fn purge_empty_room_tokens( // Only process rooms with no local users if local_users_count == 0 { - empty_rooms_processed += 1; + empty_rooms_processed = empty_rooms_processed.saturating_add(1); // In dry run mode, just count what would be deleted, don't actually delete debug!( @@ -182,13 +201,13 @@ pub(super) async fn purge_empty_room_tokens( | Ok(count) => if count > 0 { debug!("Would delete {} sync tokens for room {}", count, room_id); - total_tokens_deleted += count; + total_tokens_deleted = total_tokens_deleted.saturating_add(count); } else { debug!("No sync tokens found for room {}", room_id); }, | Err(e) => { debug!("Error counting sync tokens for room {}: {:?}", room_id, e); - error_count += 1; + error_count = error_count.saturating_add(1); }, } } else { @@ -197,13 +216,13 @@ pub(super) async fn purge_empty_room_tokens( | Ok(count) => if count > 0 { debug!("Deleted {} sync tokens for room {}", count, room_id); - total_tokens_deleted += count; + total_tokens_deleted = total_tokens_deleted.saturating_add(count); } else { debug!("No sync tokens found for room {}", room_id); }, | Err(e) => { debug!("Error purging sync tokens for room {}: {:?}", room_id, e); - error_count += 1; + error_count = error_count.saturating_add(1); }, } } diff --git a/src/admin/room/mod.rs b/src/admin/room/mod.rs index 61114b90..1b4650c3 100644 --- a/src/admin/room/mod.rs +++ b/src/admin/room/mod.rs @@ -5,6 +5,7 @@ mod info; mod moderation; use clap::Subcommand; +use commands::RoomTargetOption; use conduwuit::Result; use ruma::{OwnedRoomId, OwnedRoomOrAliasId}; @@ -66,21 +67,15 @@ pub(super) enum RoomCommand { /// - Delete sync tokens for all rooms that have no local users /// - /// By default, processes all empty rooms. You can use --target-disabled - /// and/or --target-banned to exclusively process rooms matching those - /// conditions. + /// By default, processes all empty rooms. 
PurgeEmptyRoomTokens { /// Confirm you want to delete tokens from potentially many rooms #[arg(long)] yes: bool, - /// Only purge rooms that have federation disabled - #[arg(long)] - target_disabled: bool, - - /// Only purge rooms that have been banned - #[arg(long)] - target_banned: bool, + /// Target specific room types + #[arg(long, value_enum)] + target_option: Option, /// Perform a dry run without actually deleting any tokens #[arg(long)] diff --git a/src/service/rooms/user/mod.rs b/src/service/rooms/user/mod.rs index 58df427b..aaf735c1 100644 --- a/src/service/rooms/user/mod.rs +++ b/src/service/rooms/user/mod.rs @@ -166,9 +166,6 @@ pub async fn delete_room_tokens(&self, room_id: &RoomId) -> Result { // short ID let prefix = &[shortroomid]; - // Get all keys with this room prefix - let mut count = 0; - // Collect all keys into a Vec first, then delete them let keys = self .db @@ -184,8 +181,9 @@ pub async fn delete_room_tokens(&self, room_id: &RoomId) -> Result { // Delete each key individually for key in &keys { self.db.roomsynctoken_shortstatehash.del(key); - count += 1; } + let count = keys.len(); + Ok(count) } From f7dd4c692898872859083b3b18d8c50ee7af9e00 Mon Sep 17 00:00:00 2001 From: Jade Ellis Date: Thu, 22 May 2025 14:01:16 +0100 Subject: [PATCH 09/10] ci: Don't install rustup if it's already there --- .forgejo/actions/rust-toolchain/action.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.forgejo/actions/rust-toolchain/action.yml b/.forgejo/actions/rust-toolchain/action.yml index 71fb96f5..ae5cfcee 100644 --- a/.forgejo/actions/rust-toolchain/action.yml +++ b/.forgejo/actions/rust-toolchain/action.yml @@ -19,11 +19,20 @@ outputs: rustc_version: description: The rustc version installed value: ${{ steps.rustc-version.outputs.version }} + rustup_version: + description: The rustup version installed + value: ${{ steps.rustup-version.outputs.version }} runs: using: composite steps: + - name: Check if rustup is already installed + shell: bash + id: rustup-version + run: | + echo "version=$(rustup --version)" >> $GITHUB_OUTPUT - name: Cache rustup toolchains + if: steps.rustup-version.outputs.version == '' uses: actions/cache@v3 with: path: | @@ -33,6 +42,7 @@ runs: # Requires repo to be cloned if toolchain is not specified key: ${{ runner.os }}-rustup-${{ inputs.toolchain || hashFiles('**/rust-toolchain.toml') }} - name: Install Rust toolchain + if: steps.rustup-version.outputs.version == '' shell: bash run: | if ! command -v rustup &> /dev/null ; then From f62d8a42d4c9327033405e07775bef3ee1531bfb Mon Sep 17 00:00:00 2001 From: Jade Ellis Date: Thu, 22 May 2025 14:07:22 +0100 Subject: [PATCH 10/10] ci: Don't specify container for image builder --- .forgejo/workflows/release-image.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.forgejo/workflows/release-image.yml b/.forgejo/workflows/release-image.yml index ec466c58..ec05fb45 100644 --- a/.forgejo/workflows/release-image.yml +++ b/.forgejo/workflows/release-image.yml @@ -57,7 +57,6 @@ jobs: build-image: runs-on: dind - container: ghcr.io/catthehacker/ubuntu:act-latest needs: define-variables permissions: contents: read @@ -211,7 +210,6 @@ jobs: merge: runs-on: dind - container: ghcr.io/catthehacker/ubuntu:act-latest needs: [define-variables, build-image] steps: - name: Download digests