feat(hydra): Initial public commit for v12 support

# Conflicts:
#	src/core/info/room_version.rs
#	src/service/rooms/timeline/create.rs
nexy7574 2025-07-25 11:17:29 +01:00
commit 327fa02cd9
59 changed files with 950 additions and 394 deletions
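
For context, the core MSC4291 change this commit threads through the codebase: in v12 rooms the room ID is not stored on events but derived from the m.room.create event's ID by swapping the leading "$" sigil for "!". A minimal sketch of that derivation, mirroring the replace('$', "!") logic used in room_id_or_hash() and create_room_route below (the helper name is illustrative, not from this diff):

    // Hedged sketch of the MSC4291 room ID derivation; the helper name is hypothetical.
    fn room_id_from_create_event_id(create_event_id: &str) -> String {
        debug_assert!(create_event_id.starts_with('$'));
        // Reuse the event ID's hash verbatim; only the sigil changes.
        format!("!{}", &create_event_id[1..])
    }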

View file

@ -756,7 +756,7 @@ pub(super) async fn force_demote(&self, user_id: String, room_id: OwnedRoomOrAli
.build_and_append_pdu(
PduBuilder::state(String::new(), &power_levels_content),
&user_id,
&room_id,
Some(&room_id),
&state_lock,
)
.await?;
@ -901,7 +901,13 @@ pub(super) async fn redact_event(&self, event_id: OwnedEventId) -> Result {
);
let redaction_event_id = {
let state_lock = self.services.rooms.state.mutex.lock(event.room_id()).await;
let state_lock = self
.services
.rooms
.state
.mutex
.lock(&event.room_id_or_hash())
.await;
self.services
.rooms
@ -915,7 +921,7 @@ pub(super) async fn redact_event(&self, event_id: OwnedEventId) -> Result {
})
},
event.sender(),
event.room_id(),
Some(&event.room_id_or_hash()),
&state_lock,
)
.await?

View file

@ -929,7 +929,7 @@ pub async fn full_user_deactivate(
.build_and_append_pdu(
PduBuilder::state(String::new(), &power_levels_content),
user_id,
room_id,
Some(room_id),
&state_lock,
)
.await

View file

@ -69,7 +69,7 @@ pub(crate) async fn get_context_route(
let (base_id, base_pdu, visible) = try_join3(base_id, base_pdu, visible).await?;
if base_pdu.room_id != *room_id || base_pdu.event_id != *event_id {
if base_pdu.room_id_or_hash() != *room_id || base_pdu.event_id != *event_id {
return Err!(Request(NotFound("Base event not found.")));
}

View file

@ -49,7 +49,7 @@ pub(crate) async fn ban_user_route(
..current_member_content
}),
sender_user,
&body.room_id,
Some(&body.room_id),
&state_lock,
)
.await?;

View file

@ -128,12 +128,12 @@ pub(crate) async fn invite_helper(
.create_hash_and_sign_event(
PduBuilder::state(user_id.to_string(), &content),
sender_user,
room_id,
Some(room_id),
&state_lock,
)
.await?;
let invite_room_state = services.rooms.state.summary_stripped(&pdu).await;
let invite_room_state = services.rooms.state.summary_stripped(&pdu, room_id).await;
drop(state_lock);
@ -227,7 +227,7 @@ pub(crate) async fn invite_helper(
.build_and_append_pdu(
PduBuilder::state(user_id.to_string(), &content),
sender_user,
room_id,
Some(room_id),
&state_lock,
)
.await?;

View file

@ -18,7 +18,7 @@ use conduwuit::{
},
warn,
};
use futures::{FutureExt, StreamExt};
use futures::{FutureExt, StreamExt, TryFutureExt};
use ruma::{
CanonicalJsonObject, CanonicalJsonValue, OwnedRoomId, OwnedServerName, OwnedUserId, RoomId,
RoomVersionId, UserId,
@ -550,12 +550,20 @@ async fn join_room_by_id_helper_remote(
.iter()
.stream()
.then(|pdu| {
debug!(?pdu, "Validating send_join response room_state event");
services
.server_keys
.validate_and_add_event_id_no_fetch(pdu, &room_version_id)
.inspect_err(|e| {
debug_warn!(
"Could not validate send_join response room_state event: {e:?}"
);
})
.inspect(|_| debug!("Completed validating send_join response room_state event"))
})
.ready_filter_map(Result::ok)
.fold(HashMap::new(), |mut state, (event_id, value)| async move {
debug!(?event_id, "Processing send_join response room_state event");
let pdu = match PduEvent::from_id_val(&event_id, value.clone()) {
| Ok(pdu) => pdu,
| Err(e) => {
@ -563,9 +571,10 @@ async fn join_room_by_id_helper_remote(
return state;
},
};
debug!(event_id = ?event_id.clone(), "Adding PDU outlier for send_join response room_state event");
services.rooms.outlier.add_pdu_outlier(&event_id, &value);
if let Some(state_key) = &pdu.state_key {
debug!(?state_key, "Creating shortstatekey for state event in send_join response");
let shortstatekey = services
.rooms
.short
@ -574,7 +583,7 @@ async fn join_room_by_id_helper_remote(
state.insert(shortstatekey, pdu.event_id.clone());
}
debug!("Completed send_join response");
state
})
.await;
@ -615,6 +624,9 @@ async fn join_room_by_id_helper_remote(
&parsed_join_pdu,
None, // TODO: third party invite
|k, s| state_fetch(k.clone(), s.into()),
&state_fetch(StateEventType::RoomCreate, "".into())
.await
.expect("create event is missing from send_join auth"),
)
.await
.map_err(|e| err!(Request(Forbidden(warn!("Auth check failed: {e:?}")))))?;
@ -662,7 +674,7 @@ async fn join_room_by_id_helper_remote(
let statehash_after_join = services
.rooms
.state
.append_to_state(&parsed_join_pdu)
.append_to_state(&parsed_join_pdu, room_id)
.await?;
info!("Appending new room join event");
@ -674,6 +686,7 @@ async fn join_room_by_id_helper_remote(
join_event,
once(parsed_join_pdu.event_id.borrow()),
&state_lock,
room_id,
)
.await?;
@ -773,7 +786,7 @@ async fn join_room_by_id_helper_local(
.build_and_append_pdu(
PduBuilder::state(sender_user.to_string(), &content),
sender_user,
room_id,
Some(room_id),
&state_lock,
)
.await

View file

@ -54,7 +54,7 @@ pub(crate) async fn kick_user_route(
..event
}),
sender_user,
&body.room_id,
Some(&body.room_id),
&state_lock,
)
.await?;

View file

@ -373,7 +373,7 @@ async fn knock_room_helper_local(
.build_and_append_pdu(
PduBuilder::state(sender_user.to_string(), &content),
sender_user,
room_id,
Some(room_id),
&state_lock,
)
.await
@ -502,6 +502,7 @@ async fn knock_room_helper_local(
knock_event,
once(parsed_knock_pdu.event_id.borrow()),
&state_lock,
room_id,
)
.await?;
@ -672,7 +673,7 @@ async fn knock_room_helper_remote(
let statehash_after_knock = services
.rooms
.state
.append_to_state(&parsed_knock_pdu)
.append_to_state(&parsed_knock_pdu, room_id)
.await?;
info!("Updating membership locally to knock state with provided stripped state events");
@ -701,6 +702,7 @@ async fn knock_room_helper_remote(
knock_event,
once(parsed_knock_pdu.event_id.borrow()),
&state_lock,
room_id,
)
.await?;

View file

@ -206,7 +206,7 @@ pub async fn leave_room(
..event
}),
user_id,
room_id,
Some(room_id),
&state_lock,
)
.await?;

View file

@ -70,9 +70,10 @@ pub(crate) async fn banned_room_check(
if let Some(room_id) = room_id {
if services.rooms.metadata.is_banned(room_id).await
|| services
.moderation
.is_remote_server_forbidden(room_id.server_name().expect("legacy room mxid"))
|| (room_id.server_name().is_some()
&& services
.moderation
.is_remote_server_forbidden(room_id.server_name().expect("legacy room mxid")))
{
warn!(
"User {user_id} who is not an admin attempted to send an invite for or \

View file

@ -47,7 +47,7 @@ pub(crate) async fn unban_user_route(
..current_member_content
}),
sender_user,
&body.room_id,
Some(&body.room_id),
&state_lock,
)
.await?;

View file

@ -310,7 +310,7 @@ pub(crate) async fn visibility_filter(
services
.rooms
.state_accessor
.user_can_see_event(user_id, pdu.room_id(), pdu.event_id())
.user_can_see_event(user_id, &pdu.room_id_or_hash(), pdu.event_id())
.await
.then_some(item)
}

View file

@ -423,7 +423,7 @@ pub async fn update_all_rooms(
if let Err(e) = services
.rooms
.timeline
.build_and_append_pdu(pdu_builder, user_id, room_id, &state_lock)
.build_and_append_pdu(pdu_builder, user_id, Some(room_id), &state_lock)
.await
{
warn!(%user_id, %room_id, "Failed to update/send new profile join membership update in room: {e}");

View file

@ -36,7 +36,7 @@ pub(crate) async fn redact_event_route(
})
},
sender_user,
&body.room_id,
Some(&body.room_id),
&state_lock,
)
.await?;

View file

@ -223,7 +223,7 @@ async fn visibility_filter<Pdu: Event + Send + Sync>(
services
.rooms
.state_accessor
.user_can_see_event(sender_user, pdu.room_id(), pdu.event_id())
.user_can_see_event(sender_user, &pdu.room_id_or_hash(), pdu.event_id())
.await
.then_some(item)
}

View file

@ -2,7 +2,7 @@ use std::{fmt::Write as _, ops::Mul, time::Duration};
use axum::extract::State;
use axum_client_ip::InsecureClientIp;
use conduwuit::{Err, Result, debug_info, info, matrix::pdu::PduEvent, utils::ReadyExt};
use conduwuit::{Err, Event, Result, debug_info, info, matrix::pdu::PduEvent, utils::ReadyExt};
use conduwuit_service::Services;
use rand::Rng;
use ruma::{
@ -200,7 +200,7 @@ async fn is_event_report_valid(
valid"
);
if room_id != pdu.room_id {
if room_id != &pdu.room_id_or_hash() {
return Err!(Request(NotFound("Event ID does not belong to the reported room",)));
}

View file

@ -2,7 +2,7 @@ use std::collections::BTreeMap;
use axum::extract::State;
use conduwuit::{
Err, Result, debug_info, debug_warn, err, info,
Err, Result, RoomVersion, debug, debug_info, debug_warn, err, info,
matrix::{StateKey, pdu::PduBuilder},
warn,
};
@ -68,51 +68,6 @@ pub(crate) async fn create_room_route(
return Err!(Request(UserSuspended("You cannot perform this action while suspended.")));
}
let room_id: OwnedRoomId = match &body.room_id {
| Some(custom_room_id) => custom_room_id_check(&services, custom_room_id)?,
| _ => RoomId::new(&services.server.name),
};
// check if room ID doesn't already exist instead of erroring on auth check
if services.rooms.short.get_shortroomid(&room_id).await.is_ok() {
return Err!(Request(RoomInUse("Room with that custom room ID already exists",)));
}
if body.visibility == room::Visibility::Public
&& services.server.config.lockdown_public_room_directory
&& !services.users.is_admin(sender_user).await
&& body.appservice_info.is_none()
{
warn!(
"Non-admin user {sender_user} tried to publish {room_id} to the room directory \
while \"lockdown_public_room_directory\" is enabled"
);
if services.server.config.admin_room_notices {
services
.admin
.notice(&format!(
"Non-admin user {sender_user} tried to publish {room_id} to the room \
directory while \"lockdown_public_room_directory\" is enabled"
))
.await;
}
return Err!(Request(Forbidden("Publishing rooms to the room directory is not allowed")));
}
let _short_id = services
.rooms
.short
.get_or_create_shortroomid(&room_id)
.await;
let state_lock = services.rooms.state.mutex.lock(&room_id).await;
let alias: Option<OwnedRoomAliasId> = match body.room_alias_name.as_ref() {
| Some(alias) =>
Some(room_alias_check(&services, alias, body.appservice_info.as_ref()).await?),
| _ => None,
};
let room_version = match body.room_version.clone() {
| Some(room_version) =>
if services.server.supported_room_version(&room_version) {
@ -124,6 +79,51 @@ pub(crate) async fn create_room_route(
},
| None => services.server.config.default_room_version.clone(),
};
let room_features = RoomVersion::new(&room_version)?;
let room_id: Option<OwnedRoomId> = match room_features.room_ids_as_hashes {
| true => None,
| false => match &body.room_id {
| Some(custom_room_id) => Some(custom_room_id_check(&services, custom_room_id)?),
| None => Some(RoomId::new(services.globals.server_name())),
},
};
// check if room ID doesn't already exist instead of erroring on auth check
if let Some(ref room_id) = room_id {
if services.rooms.short.get_shortroomid(&room_id).await.is_ok() {
return Err!(Request(RoomInUse("Room with that custom room ID already exists",)));
}
}
if body.visibility == room::Visibility::Public
&& services.server.config.lockdown_public_room_directory
&& !services.users.is_admin(sender_user).await
&& body.appservice_info.is_none()
{
warn!(
"Non-admin user {sender_user} tried to publish {room_id:?} to the room directory \
while \"lockdown_public_room_directory\" is enabled"
);
if services.server.config.admin_room_notices {
services
.admin
.notice(&format!(
"Non-admin user {sender_user} tried to publish {room_id:?} to the room \
directory while \"lockdown_public_room_directory\" is enabled"
))
.await;
}
return Err!(Request(Forbidden("Publishing rooms to the room directory is not allowed")));
}
let alias: Option<OwnedRoomAliasId> = match body.room_alias_name.as_ref() {
| Some(alias) =>
Some(room_alias_check(&services, alias, body.appservice_info.as_ref()).await?),
| _ => None,
};
let create_content = match &body.creation_content {
| Some(content) => {
@ -156,6 +156,10 @@ pub(crate) async fn create_room_route(
.try_into()
.map_err(|e| err!(Request(BadJson("Invalid creation content: {e}"))))?,
);
if room_version == V12 {
// TODO(hydra): v12 rooms cannot be federated until they are stable.
content.insert("m.federate".into(), false.into());
}
content
},
| None => {
@ -164,18 +168,32 @@ pub(crate) async fn create_room_route(
let content = match room_version {
| V1 | V2 | V3 | V4 | V5 | V6 | V7 | V8 | V9 | V10 =>
RoomCreateEventContent::new_v1(sender_user.to_owned()),
| _ => RoomCreateEventContent::new_v11(),
| V11 => RoomCreateEventContent::new_v11(),
| _ => RoomCreateEventContent::new_v12(),
};
let mut content =
serde_json::from_str::<CanonicalJsonObject>(to_raw_value(&content)?.get())
.unwrap();
serde_json::from_str::<CanonicalJsonObject>(to_raw_value(&content)?.get())?;
content.insert("room_version".into(), json!(room_version.as_str()).try_into()?);
if room_version == V12 {
// TODO(hydra): v12 rooms cannot be federated until they are stable.
content.insert("m.federate".into(), false.into());
}
content
},
};
let state_lock = match room_id.clone() {
| Some(room_id) => services.rooms.state.mutex.lock(&room_id).await,
| None => {
let temp_room_id = RoomId::new(services.globals.server_name());
debug_info!("Locking temporary room state mutex for {temp_room_id}");
services.rooms.state.mutex.lock(&temp_room_id).await
},
};
// 1. The room create event
services
debug!("Creating room create event for {sender_user} in room {room_id:?}");
let create_event_id = services
.rooms
.timeline
.build_and_append_pdu(
@ -186,13 +204,26 @@ pub(crate) async fn create_room_route(
..Default::default()
},
sender_user,
&room_id,
None,
&state_lock,
)
.boxed()
.await?;
debug!("Created room create event with ID {}", create_event_id);
let room_id = match room_id {
| Some(room_id) => room_id,
| None => {
let as_room_id = create_event_id.as_str().replace('$', "!");
debug_info!("Creating room with v12 room ID {as_room_id}");
RoomId::parse(&as_room_id)?.to_owned()
},
};
drop(state_lock);
debug!("Room created with ID {room_id}");
let state_lock = services.rooms.state.mutex.lock(&room_id).await;
// 2. Let the room creator join
debug_info!("Joining {sender_user} to room {room_id}");
services
.rooms
.timeline
@ -205,7 +236,7 @@ pub(crate) async fn create_room_route(
..RoomMemberEventContent::new(MembershipState::Join)
}),
sender_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -235,10 +266,28 @@ pub(crate) async fn create_room_route(
}
}
let mut creators: Vec<OwnedUserId> = vec![sender_user.to_owned()];
if let Some(additional_creators) = create_content.get("additional_creators") {
if let Some(additional_creators) = additional_creators.as_array() {
for creator in additional_creators {
if let Some(creator) = creator.as_str() {
if let Ok(creator) = OwnedUserId::parse(creator) {
creators.push(creator.clone());
users.insert(creator.clone(), int!(100));
}
}
}
}
}
if !(RoomVersion::new(&room_version)?).explicitly_privilege_room_creators {
creators.clear();
}
let power_levels_content = default_power_levels_content(
body.power_level_content_override.as_ref(),
&body.visibility,
users,
creators,
)?;
services
@ -252,7 +301,7 @@ pub(crate) async fn create_room_route(
..Default::default()
},
sender_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -269,7 +318,7 @@ pub(crate) async fn create_room_route(
alt_aliases: vec![],
}),
sender_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -292,7 +341,7 @@ pub(crate) async fn create_room_route(
}),
),
sender_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -308,7 +357,7 @@ pub(crate) async fn create_room_route(
&RoomHistoryVisibilityEventContent::new(HistoryVisibility::Shared),
),
sender_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -327,7 +376,7 @@ pub(crate) async fn create_room_route(
}),
),
sender_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -363,7 +412,7 @@ pub(crate) async fn create_room_route(
services
.rooms
.timeline
.build_and_append_pdu(pdu_builder, sender_user, &room_id, &state_lock)
.build_and_append_pdu(pdu_builder, sender_user, Some(&room_id), &state_lock)
.boxed()
.await?;
}
@ -376,7 +425,7 @@ pub(crate) async fn create_room_route(
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomNameEventContent::new(name.clone())),
sender_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -390,7 +439,7 @@ pub(crate) async fn create_room_route(
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomTopicEventContent { topic: topic.clone() }),
sender_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -450,6 +499,7 @@ fn default_power_levels_content(
power_level_content_override: Option<&Raw<RoomPowerLevelsEventContent>>,
visibility: &room::Visibility,
users: BTreeMap<OwnedUserId, Int>,
creators: Vec<OwnedUserId>,
) -> Result<serde_json::Value> {
let mut power_levels_content =
serde_json::to_value(RoomPowerLevelsEventContent { users, ..Default::default() })
@ -499,6 +549,19 @@ fn default_power_levels_content(
}
}
if !creators.is_empty() {
// Raise the default power level of tombstone to 150
power_levels_content["events"]["m.room.tombstone"] =
serde_json::to_value(150).expect("150 is valid Value");
for creator in creators {
// Omit creators from the power level list altogether
power_levels_content["users"]
.as_object_mut()
.expect("users is an object")
.remove(creator.as_str());
}
}
Ok(power_levels_content)
}
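
A hedged illustration of the power-levels content the new creators handling aims to produce for a v12 room with a single creator: the creator is stripped from the users map (their authority comes from MSC4289 during auth, not from an assigned level) and m.room.tombstone is raised to 150 so that in practice only creators clear it. The shape below is assumed from ruma's RoomPowerLevelsEventContent, with other default fields omitted:

    use serde_json::json;

    // Hedged sketch; assumes creators = [the sender] and no power_level_content_override.
    fn expected_v12_power_levels() -> serde_json::Value {
        json!({
            // the creator was inserted at 100 earlier, then removed by the creators branch above
            "users": {},
            "events": {
                // only creators (Int::MAX during auth) exceed this level
                "m.room.tombstone": 150
            }
        })
    }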

View file

@ -34,7 +34,7 @@ pub(crate) async fn get_room_event_route(
}
debug_assert!(
event.event_id() == event_id && event.room_id() == room_id,
event.event_id() == event_id && event.room_id_or_hash() == *room_id,
"Fetched PDU must match requested"
);

View file

@ -91,7 +91,7 @@ pub(crate) async fn upgrade_room_route(
replacement_room: replacement_room.clone(),
}),
sender_user,
&body.room_id,
Some(&body.room_id),
&state_lock,
)
.await?;
@ -173,7 +173,7 @@ pub(crate) async fn upgrade_room_route(
timestamp: None,
},
sender_user,
&replacement_room,
Some(&replacement_room),
&state_lock,
)
.boxed()
@ -204,7 +204,7 @@ pub(crate) async fn upgrade_room_route(
timestamp: None,
},
sender_user,
&replacement_room,
Some(&replacement_room),
&state_lock,
)
.boxed()
@ -243,7 +243,7 @@ pub(crate) async fn upgrade_room_route(
..Default::default()
},
sender_user,
&replacement_room,
Some(&replacement_room),
&state_lock,
)
.boxed()
@ -302,7 +302,7 @@ pub(crate) async fn upgrade_room_route(
..power_levels_event_content
}),
sender_user,
&body.room_id,
Some(&body.room_id),
&state_lock,
)
.boxed()
@ -352,7 +352,7 @@ pub(crate) async fn upgrade_room_route(
..Default::default()
},
sender_user,
space_id,
Some(space_id),
&state_lock,
)
.boxed()
@ -376,7 +376,7 @@ pub(crate) async fn upgrade_room_route(
..Default::default()
},
sender_user,
space_id,
Some(space_id),
&state_lock,
)
.boxed()

View file

@ -80,7 +80,7 @@ pub(crate) async fn send_message_event_route(
..Default::default()
},
sender_user,
&body.room_id,
Some(&body.room_id),
&state_lock,
)
.await?;

View file

@ -201,7 +201,7 @@ async fn send_state_event_for_key_helper(
..Default::default()
},
sender,
room_id,
Some(room_id),
&state_lock,
)
.await?;

View file

@ -457,7 +457,7 @@ async fn handle_left_room(
state_key: Some(sender_user.as_str().into()),
unsigned: None,
// The following keys are dropped on conversion
room_id: room_id.clone(),
room_id: Some(room_id.clone()),
prev_events: vec![],
depth: uint!(1),
auth_events: vec![],

View file

@ -2,7 +2,7 @@ use std::cmp;
use axum::extract::State;
use conduwuit::{
PduCount, Result,
Event, PduCount, Result,
utils::{IterStream, ReadyExt, stream::TryTools},
};
use futures::{FutureExt, StreamExt, TryStreamExt};
@ -68,7 +68,7 @@ pub(crate) async fn get_backfill_route(
Ok(services
.rooms
.state_accessor
.server_can_see_event(body.origin(), &pdu.room_id, &pdu.event_id)
.server_can_see_event(body.origin(), &pdu.room_id_or_hash(), &pdu.event_id)
.await
.then_some(pdu))
})

View file

@ -122,7 +122,7 @@ pub(crate) async fn create_join_event_template_route(
..RoomMemberEventContent::new(MembershipState::Join)
}),
&body.user_id,
&body.room_id,
Some(&body.room_id),
&state_lock,
)
.await?;

View file

@ -95,7 +95,7 @@ pub(crate) async fn create_knock_event_template_route(
&RoomMemberEventContent::new(MembershipState::Knock),
),
&body.user_id,
&body.room_id,
Some(&body.room_id),
&state_lock,
)
.await?;

View file

@ -45,7 +45,7 @@ pub(crate) async fn create_leave_event_template_route(
&RoomMemberEventContent::new(MembershipState::Leave),
),
&body.user_id,
&body.room_id,
Some(&body.room_id),
&state_lock,
)
.await?;

View file

@ -138,6 +138,7 @@ async fn handle(
pdus: impl Stream<Item = Pdu> + Send,
edus: impl Stream<Item = Edu> + Send,
) -> Result<ResolvedMap> {
// TODO(hydra): Does having no room ID break this?
// group pdus by room
let pdus = pdus
.collect()
@ -186,6 +187,7 @@ async fn handle_room(
.lock(&room_id)
.await;
// TODO(hydra): We might be missing a room ID
let room_id = &room_id;
pdus.try_stream()
.and_then(|(_, event_id, value)| async move {

View file

@ -175,7 +175,11 @@ pub(crate) async fn create_knock_event_v1_route(
.send_pdu_room(&body.room_id, &pdu_id)
.await?;
let knock_room_state = services.rooms.state.summary_stripped(&pdu).await;
let knock_room_state = services
.rooms
.state
.summary_stripped(&pdu, &body.room_id)
.await;
Ok(send_knock::v1::Response { knock_room_state })
}

View file

@ -17,8 +17,12 @@ pub const STABLE_ROOM_VERSIONS: &[RoomVersionId] = &[
];
/// Experimental, partially supported room versions
pub const UNSTABLE_ROOM_VERSIONS: &[RoomVersionId] =
&[RoomVersionId::V3, RoomVersionId::V4, RoomVersionId::V5];
pub const UNSTABLE_ROOM_VERSIONS: &[RoomVersionId] = &[
RoomVersionId::V3,
RoomVersionId::V4,
RoomVersionId::V5,
RoomVersionId::V12,
];
type RoomVersion = (RoomVersionId, RoomVersionStability);

View file

@ -10,7 +10,7 @@ mod unsigned;
use std::fmt::Debug;
use ruma::{
CanonicalJsonObject, EventId, MilliSecondsSinceUnixEpoch, OwnedEventId, RoomId,
CanonicalJsonObject, EventId, MilliSecondsSinceUnixEpoch, OwnedEventId, OwnedRoomId, RoomId,
RoomVersionId, UserId, events::TimelineEventType,
};
use serde::Deserialize;
@ -168,7 +168,12 @@ pub trait Event: Clone + Debug {
fn redacts(&self) -> Option<&EventId>;
/// The `RoomId` of this event.
fn room_id(&self) -> &RoomId;
fn room_id(&self) -> Option<&RoomId>;
/// The `RoomId` or hash of this event.
/// This should only be preferred over room_id() if the event is a v12
/// create event.
fn room_id_or_hash(&self) -> OwnedRoomId;
/// The `UserId` of this event.
fn sender(&self) -> &UserId;

View file

@ -32,12 +32,16 @@ impl<E: Event> Matches<E> for &RoomEventFilter {
}
fn matches_room<E: Event>(event: &E, filter: &RoomEventFilter) -> bool {
if filter.not_rooms.iter().any(is_equal_to!(event.room_id())) {
if filter
.not_rooms
.iter()
.any(is_equal_to!(event.room_id().unwrap()))
{
return false;
}
if let Some(rooms) = filter.rooms.as_ref() {
if !rooms.iter().any(is_equal_to!(event.room_id())) {
if !rooms.iter().any(is_equal_to!(event.room_id().unwrap())) {
return false;
}
}

View file

@ -31,7 +31,7 @@ use crate::Result;
pub struct Pdu {
pub event_id: OwnedEventId,
pub room_id: OwnedRoomId,
pub room_id: Option<OwnedRoomId>,
pub sender: OwnedUserId,
@ -110,7 +110,19 @@ impl Event for Pdu {
fn redacts(&self) -> Option<&EventId> { self.redacts.as_deref() }
#[inline]
fn room_id(&self) -> &RoomId { &self.room_id }
fn room_id(&self) -> Option<&RoomId> { self.room_id.as_deref() }
#[inline]
fn room_id_or_hash(&self) -> OwnedRoomId {
if let Some(room_id) = &self.room_id {
room_id.clone()
} else {
let constructed_hash = "!".to_owned() + &self.event_id.as_str()[1..];
RoomId::parse(&constructed_hash)
.expect("event ID can be indexed")
.to_owned()
}
}
#[inline]
fn sender(&self) -> &UserId { &self.sender }
@ -163,7 +175,19 @@ impl Event for &Pdu {
fn redacts(&self) -> Option<&EventId> { self.redacts.as_deref() }
#[inline]
fn room_id(&self) -> &RoomId { &self.room_id }
fn room_id(&self) -> Option<&RoomId> { self.room_id.as_ref().map(AsRef::as_ref) }
#[inline]
fn room_id_or_hash(&self) -> OwnedRoomId {
if let Some(room_id) = &self.room_id {
room_id.clone()
} else {
let constructed_hash = "!".to_owned() + &self.event_id.as_str()[1..];
RoomId::parse(&constructed_hash)
.expect("event ID can be indexed")
.to_owned()
}
}
#[inline]
fn sender(&self) -> &UserId { &self.sender }
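
A hedged sketch of the call-site pattern this trait change implies: room_id() is now an Option that is None only for v12 create events, and room_id_or_hash() reconstructs the "!"-sigil room ID from the "$"-sigil event ID. The helper below is illustrative, not from this diff:

    // Hedged sketch; the function name is hypothetical.
    fn effective_room_id(pdu: &Pdu) -> OwnedRoomId {
        pdu.room_id()
            .map(ToOwned::to_owned) // v1-v11: stored room ID
            .unwrap_or_else(|| pdu.room_id_or_hash()) // v12 create: "!<hash>" from "$<hash>"
    }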

View file

@ -406,7 +406,7 @@ where
Pdu {
event_id: id.try_into().unwrap(),
room_id: room_id().to_owned(),
room_id: Some(room_id().to_owned()),
sender: sender.to_owned(),
origin_server_ts: ts.try_into().unwrap(),
state_key: state_key.map(Into::into),

View file

@ -2,10 +2,10 @@ use std::{borrow::Borrow, collections::BTreeSet};
use futures::{
Future,
future::{OptionFuture, join3},
future::{OptionFuture, join, join3},
};
use ruma::{
Int, OwnedUserId, RoomVersionId, UserId,
Int, OwnedRoomId, OwnedUserId, RoomVersionId, UserId,
events::room::{
create::RoomCreateEventContent,
join_rules::{JoinRule, RoomJoinRulesEventContent},
@ -44,6 +44,15 @@ struct RoomMemberContentFields {
join_authorised_via_users_server: Option<Raw<OwnedUserId>>,
}
#[derive(Deserialize)]
struct RoomCreateContentFields {
room_version: Option<Raw<RoomVersionId>>,
creator: Option<Raw<IgnoredAny>>,
additional_creators: Option<Vec<Raw<OwnedUserId>>>,
#[serde(rename = "m.federate", default = "ruma::serde::default_true")]
federate: bool,
}
/// For the given event `kind` what are the relevant auth events that are needed
/// to authenticate this `content`.
///
@ -56,16 +65,24 @@ pub fn auth_types_for_event(
sender: &UserId,
state_key: Option<&str>,
content: &RawJsonValue,
room_version: &RoomVersion,
) -> serde_json::Result<Vec<(StateEventType, StateKey)>> {
if kind == &TimelineEventType::RoomCreate {
return Ok(vec![]);
}
let mut auth_types = vec![
(StateEventType::RoomPowerLevels, StateKey::new()),
(StateEventType::RoomMember, sender.as_str().into()),
(StateEventType::RoomCreate, StateKey::new()),
];
let mut auth_types = if room_version.room_ids_as_hashes {
vec![
(StateEventType::RoomPowerLevels, StateKey::new()),
(StateEventType::RoomMember, sender.as_str().into()),
]
} else {
vec![
(StateEventType::RoomPowerLevels, StateKey::new()),
(StateEventType::RoomMember, sender.as_str().into()),
(StateEventType::RoomCreate, StateKey::new()),
]
};
if kind == &TimelineEventType::RoomMember {
#[derive(Deserialize)]
@ -141,6 +158,7 @@ pub async fn auth_check<E, F, Fut>(
incoming_event: &E,
current_third_party_invite: Option<&E>,
fetch_state: F,
create_event: &E,
) -> Result<bool, Error>
where
F: Fn(&StateEventType, &str) -> Fut + Send,
@ -169,12 +187,6 @@ where
//
// 1. If type is m.room.create:
if *incoming_event.event_type() == TimelineEventType::RoomCreate {
#[derive(Deserialize)]
struct RoomCreateContentFields {
room_version: Option<Raw<RoomVersionId>>,
creator: Option<Raw<IgnoredAny>>,
}
debug!("start m.room.create check");
// If it has any previous events, reject
@ -184,14 +196,16 @@ where
}
// If the domain of the room_id does not match the domain of the sender, reject
let Some(room_id_server_name) = incoming_event.room_id().server_name() else {
warn!("room ID has no servername");
return Ok(false);
};
if room_id_server_name != sender.server_name() {
warn!("servername of room ID does not match servername of sender");
return Ok(false);
if incoming_event.room_id().is_some() {
let Some(room_id_server_name) = incoming_event.room_id().unwrap().server_name()
else {
warn!("room ID has no servername");
return Ok(false);
};
if room_id_server_name != sender.server_name() {
warn!("servername of room ID does not match servername of sender");
return Ok(false);
}
}
// If content.room_version is present and is not a recognized version, reject
@ -204,7 +218,15 @@ where
return Ok(false);
}
if !room_version.use_room_create_sender {
// TODO(hydra): If the create event has a room_id, reject
if room_version.room_ids_as_hashes && incoming_event.room_id().is_some() {
warn!("this room version does not support room IDs in m.room.create");
return Ok(false);
}
if !room_version.use_room_create_sender
&& !room_version.explicitly_privilege_room_creators
{
// If content has no creator field, reject
if content.creator.is_none() {
warn!("no creator field found in m.room.create content");
@ -216,6 +238,8 @@ where
return Ok(true);
}
// NOTE(hydra): We must have an event ID from this point forward.
/*
// TODO: In the past this code was commented as it caused problems with Synapse. This is no
// longer the case. This needs to be implemented.
@ -242,54 +266,102 @@ where
}
*/
let (room_create_event, power_levels_event, sender_member_event) = join3(
fetch_state(&StateEventType::RoomCreate, ""),
let (power_levels_event, sender_member_event) = join(
// fetch_state(&StateEventType::RoomCreate, ""),
fetch_state(&StateEventType::RoomPowerLevels, ""),
fetch_state(&StateEventType::RoomMember, sender.as_str()),
)
.await;
let room_create_event = match room_create_event {
| None => {
warn!("no m.room.create event in auth chain");
return Ok(false);
},
| Some(e) => e,
// TODO(hydra): Re-enable <v12 checks
// let room_create_event = match room_create_event {
// | None => {
// // Room was either v11 with no create event, or v12+ room
// if incoming_event.room_id().is_some() {
// // invalid v11
// warn!("no m.room.create event found in claimed state");
// return Ok(false);
// }
// // v12 room
// debug!("no m.room.create event found, assuming v12 room");
// create_event.clone()
// },
// | Some(e) => e,
// };
let room_create_event = create_event.clone();
// Get the content of the room create event, used later.
let room_create_content: RoomCreateContentFields =
from_json_str(room_create_event.content().get())?;
if room_create_content
.room_version
.is_some_and(|v| v.deserialize().is_err())
{
warn!("invalid room version found in m.room.create event");
return Ok(false);
}
let expected_room_id = match room_version.room_ids_as_hashes {
// If the room version uses hashes, we replace the create event's event ID leading sigil
// with !
| true => OwnedRoomId::try_from(room_create_event.event_id().as_str().replace('$', "!"))
.expect("Failed to convert event ID to room ID")
.clone(),
| false => room_create_event.room_id().unwrap().to_owned(),
};
if incoming_event.room_id() != room_create_event.room_id() {
warn!("room_id of incoming event does not match room_id of m.room.create event");
if incoming_event.room_id().unwrap() != expected_room_id {
warn!(
expected = %expected_room_id,
received = %incoming_event.room_id().unwrap(),
"room_id of incoming event ({}) does not match room_id of m.room.create event ({})",
incoming_event.room_id().unwrap(),
expected_room_id,
);
return Ok(false);
}
// If the create event is referenced in the event's auth events, and this is a
// v12 room, reject
let claims_create_event = incoming_event
.auth_events()
.any(|id| id == room_create_event.event_id());
if room_version.room_ids_as_hashes && claims_create_event {
warn!("m.room.create event incorrectly found in auth events");
return Ok(false);
} else if !room_version.room_ids_as_hashes && !claims_create_event {
// If the create event is not referenced in the event's auth events, and this is
// a v11 room, reject
warn!("no m.room.create event found in auth events");
return Ok(false);
}
if let Some(ref pe) = power_levels_event {
if pe.room_id() != room_create_event.room_id() {
warn!("room_id of power levels event does not match room_id of m.room.create event");
if *pe.room_id().unwrap() != expected_room_id {
warn!(
expected = %expected_room_id,
received = %pe.room_id().unwrap(),
"room_id of power levels event does not match room_id of m.room.create event"
);
return Ok(false);
}
}
// 3. If event does not have m.room.create in auth_events reject
if !incoming_event
.auth_events()
.any(|id| id == room_create_event.event_id())
{
warn!("no m.room.create event in auth events");
return Ok(false);
}
// removed as part of Hydra.
// TODO: reintroduce this for <v12 lol
// if !incoming_event
// .auth_events()
// .any(|id| id == room_create_event.event_id())
// {
// warn!("no m.room.create event in auth events");
// return Ok(false);
// }
// If the create event content has the field m.federate set to false and the
// sender domain of the event does not match the sender domain of the create
// event, reject.
#[derive(Deserialize)]
#[allow(clippy::items_after_statements)]
struct RoomCreateContentFederate {
#[serde(rename = "m.federate", default = "ruma::serde::default_true")]
federate: bool,
}
let room_create_content: RoomCreateContentFederate =
from_json_str(room_create_event.content().get())?;
if !room_create_content.federate
if !room_version.room_ids_as_hashes
&& !room_create_content.federate
&& room_create_event.sender().server_name() != incoming_event.sender().server_name()
{
warn!(
@ -321,7 +393,7 @@ where
debug!("starting m.room.member check");
let state_key = match incoming_event.state_key() {
| None => {
warn!("no statekey in member event");
warn!("no state key in member event");
return Ok(false);
},
| Some(s) => s,
@ -377,6 +449,7 @@ where
&user_for_join_auth_membership,
&room_create_event,
)? {
warn!("membership change not valid for some reason");
return Ok(false);
}
@ -394,8 +467,16 @@ where
},
};
if sender_member_event.room_id() != room_create_event.room_id() {
warn!("room_id of incoming event does not match room_id of m.room.create event");
if sender_member_event
.room_id()
.expect("we have a room ID for non create events")
!= room_create_event.room_id_or_hash()
{
warn!(
"room_id of incoming event ({}) does not match room_id of m.room.create event ({})",
sender_member_event.room_id_or_hash(),
room_create_event.room_id_or_hash()
);
return Ok(false);
}
@ -417,7 +498,7 @@ where
}
// If type is m.room.third_party_invite
let sender_power_level = match &power_levels_event {
let mut sender_power_level = match &power_levels_event {
| Some(pl) => {
let content =
deserialize_power_levels_content_fields(pl.content().get(), room_version)?;
@ -439,6 +520,24 @@ where
if is_creator { int!(100) } else { int!(0) }
},
};
if room_version.explicitly_privilege_room_creators {
// If the user sent the create event, or is listed in additional_creators, just
// give them Int::MAX
if sender == room_create_event.sender()
|| room_create_content
.additional_creators
.as_ref()
.is_some_and(|creators| {
creators
.iter()
.any(|c| c.deserialize().is_ok_and(|c| c == *sender))
}) {
trace!("privileging room creator or additional creator");
// This user is the room creator or an additional creator, give them max power
// level
sender_power_level = Int::MAX;
}
}
// Allow if and only if sender's current power level is greater than
// or equal to the invite level
@ -554,6 +653,7 @@ where
struct GetThirdPartyInvite {
third_party_invite: Option<Raw<ThirdPartyInvite>>,
}
let create_content = from_json_str::<RoomCreateContentFields>(create_room.content().get())?;
let content = current_event.content();
let target_membership = from_json_str::<GetMembership>(content.get())?.membership;
@ -576,15 +676,36 @@ where
| None => RoomPowerLevelsEventContent::default(),
};
let sender_power = power_levels
let mut sender_power = power_levels
.users
.get(sender)
.or_else(|| sender_is_joined.then_some(&power_levels.users_default));
let target_power = power_levels.users.get(target_user).or_else(|| {
let mut target_power = power_levels.users.get(target_user).or_else(|| {
(target_membership == MembershipState::Join).then_some(&power_levels.users_default)
});
let mut creators = BTreeSet::new();
creators.insert(create_room.sender().to_owned());
if room_version.explicitly_privilege_room_creators {
// Explicitly privilege room creators
// If the sender sent the create event, or in additional_creators, give them
// Int::MAX. Same case for target.
if let Some(additional_creators) = &create_content.additional_creators {
for c in additional_creators {
if let Ok(c) = c.deserialize() {
creators.insert(c);
}
}
}
if creators.contains(sender) {
sender_power = Some(&Int::MAX);
}
if creators.contains(target_user) {
target_power = Some(&Int::MAX);
}
}
let mut join_rules = JoinRule::Invite;
if let Some(jr) = &join_rules_event {
join_rules = from_json_str::<RoomJoinRulesEventContent>(jr.content().get())?.join_rule;
@ -597,7 +718,7 @@ where
let user_for_join_auth_is_valid = if let Some(user_for_join_auth) = user_for_join_auth {
// Is the authorised user allowed to invite users into this room
let (auth_user_pl, invite_level) = if let Some(pl) = &power_levels_event {
let (mut auth_user_pl, invite_level) = if let Some(pl) = &power_levels_event {
// TODO Refactor all powerlevel parsing
let invite =
deserialize_power_levels_content_invite(pl.content().get(), room_version)?.invite;
@ -613,6 +734,9 @@ where
} else {
(int!(0), int!(0))
};
if creators.contains(user_for_join_auth) {
auth_user_pl = Int::MAX;
}
(user_for_join_auth_membership == &MembershipState::Join)
&& (auth_user_pl >= invite_level)
} else {
@ -622,6 +746,7 @@ where
Ok(match target_membership {
| MembershipState::Join => {
debug!("starting target_membership=join check");
// 1. If the only previous event is an m.room.create and the state_key is the
// creator,
// allow
@ -633,7 +758,10 @@ where
let no_more_prev_events = prev_events.next().is_none();
if prev_event_is_create_event && no_more_prev_events {
let is_creator = if room_version.use_room_create_sender {
debug!("checking if sender is a room creator for initial membership event");
let is_creator = if room_version.explicitly_privilege_room_creators {
creators.contains(target_user) && creators.contains(sender)
} else if room_version.use_room_create_sender {
let creator = create_room.sender();
creator == sender && creator == target_user
@ -647,10 +775,15 @@ where
};
if is_creator {
debug!("sender is room creator, allowing join");
return Ok(true);
}
debug!("sender is not room creator, proceeding with normal auth checks");
}
let membership_allows_join = matches!(
target_user_current_membership,
MembershipState::Join | MembershipState::Invite
);
if sender != target_user {
// If the sender does not match state_key, reject.
warn!("Can't make other user join");
@ -659,39 +792,48 @@ where
// If the sender is banned, reject.
warn!(?target_user_membership_event_id, "Banned user can't join");
false
} else if (join_rules == JoinRule::Invite
|| room_version.allow_knocking && (join_rules == JoinRule::Knock || matches!(join_rules, JoinRule::KnockRestricted(_))))
// If the join_rule is invite then allow if membership state is invite or join
&& (target_user_current_membership == MembershipState::Join
|| target_user_current_membership == MembershipState::Invite)
{
true
} else if room_version.restricted_join_rules
&& matches!(join_rules, JoinRule::Restricted(_))
|| room_version.knock_restricted_join_rule
&& matches!(join_rules, JoinRule::KnockRestricted(_))
{
// If the join_rule is restricted or knock_restricted
if matches!(
target_user_current_membership,
MembershipState::Invite | MembershipState::Join
) {
// If membership state is join or invite, allow.
true
} else {
// If the join_authorised_via_users_server key in content is not a user with
// sufficient permission to invite other users, reject.
// Otherwise, allow.
user_for_join_auth_is_valid
}
} else {
// If the join_rule is public, allow.
// Otherwise, reject.
join_rules == JoinRule::Public
match join_rules {
| JoinRule::Invite if !membership_allows_join => {
warn!("Join rule is invite but membership does not allow join");
false
},
| JoinRule::Knock if !room_version.allow_knocking => {
warn!("Join rule is knock but room version does not allow knocking");
false
},
| JoinRule::Knock if !membership_allows_join => {
warn!("Join rule is knock but membership does not allow join");
false
},
| JoinRule::KnockRestricted(_) if !room_version.knock_restricted_join_rule =>
{
warn!(
"Join rule is knock_restricted but room version does not support it"
);
false
},
| JoinRule::KnockRestricted(_) if !membership_allows_join => {
warn!("Join rule is knock_restricted but membership does not allow join");
false
},
| JoinRule::Restricted(_) | JoinRule::KnockRestricted(_) =>
if !user_for_join_auth_is_valid {
warn!(
"Join rule is a restricted one but no valid authorising user \
was given"
);
false
} else {
true
},
| _ => true,
}
}
},
| MembershipState::Invite => {
// If content has third_party_invite key
debug!("starting target_membership=invite check");
match third_party_invite.and_then(|i| i.deserialize().ok()) {
| Some(tp_id) =>
if target_user_current_membership == MembershipState::Ban {
@ -849,6 +991,7 @@ fn can_send_event(event: &impl Event, ple: Option<&impl Event>, user_level: Int)
required_level = i64::from(event_type_power_level),
user_level = i64::from(user_level),
state_key = ?event.state_key(),
power_level_event_id = ?ple.map(|e| e.event_id().as_str()),
"permissions factors",
);
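
A hedged sketch of how the MSC4289 creator privileging above collapses to a power-level lookup. Here creators stands for the create event sender plus any valid additional_creators entries, as assembled in the hunks above; the function and parameter names are illustrative, not from this diff:

    use std::collections::BTreeSet;

    use ruma::{Int, OwnedUserId, UserId};

    // Hedged sketch; names are hypothetical.
    fn effective_power_level(
        user: &UserId,
        creators: &BTreeSet<OwnedUserId>,
        assigned: Option<Int>,
        users_default: Int,
        explicitly_privilege_room_creators: bool,
    ) -> Int {
        if explicitly_privilege_room_creators && creators.contains(user) {
            return Int::MAX; // creators are superusers in v12 rooms
        }
        assigned.unwrap_or(users_default)
    }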

View file

@ -92,6 +92,11 @@ where
Pdu: Event + Clone + Send + Sync,
for<'b> &'b Pdu: Event + Send,
{
use RoomVersionId::*;
let stateres_version = match room_version {
| V2 | V3 | V4 | V5 | V6 | V7 | V8 | V9 | V10 | V11 => 2.0,
| _ => 2.1,
};
debug!("State resolution starting");
// Split non-conflicting and conflicting state
@ -108,13 +113,27 @@ where
debug!(count = conflicting.len(), "conflicting events");
trace!(map = ?conflicting, "conflicting events");
let conflicted_state_subgraph: HashSet<_> = if stateres_version >= 2.1 {
calculate_conflicted_subgraph(&conflicting, event_fetch)
.await
.ok_or_else(|| {
Error::InvalidPdu("Failed to calculate conflicted subgraph".to_owned())
})?
} else {
HashSet::new()
};
debug!(count = conflicted_state_subgraph.len(), "conflicted subgraph");
trace!(set = ?conflicted_state_subgraph, "conflicted subgraph");
let conflicting_values = conflicting.into_values().flatten().stream();
// `all_conflicted` contains unique items
// synapse says `full_set = {eid for eid in full_conflicted_set if eid in
// event_map}`
// Hydra: Also consider the conflicted state subgraph
let all_conflicted: HashSet<_> = get_auth_chain_diff(auth_chain_sets)
.chain(conflicting_values)
.chain(conflicted_state_subgraph.into_iter().stream())
.broad_filter_map(async |id| event_exists(id.clone()).await.then_some(id))
.collect()
.await;
@ -150,6 +169,7 @@ where
// Sequentially auth check each control event.
let resolved_control = iterative_auth_check(
&room_version,
stateres_version,
sorted_control_levels.iter().stream().map(AsRef::as_ref),
clean.clone(),
&event_fetch,
@ -183,10 +203,11 @@ where
let sorted_left_events =
mainline_sort(&events_to_resolve, power_event.cloned(), &event_fetch).await?;
trace!(list = ?sorted_left_events, "events left, sorted");
trace!(list = ?sorted_left_events, "events left, sorted, running iterative auth check");
let mut resolved_state = iterative_auth_check(
&room_version,
stateres_version,
sorted_left_events.iter().stream().map(AsRef::as_ref),
resolved_control, // The control events are added to the final resolved state
&event_fetch,
@ -198,6 +219,7 @@ where
resolved_state.extend(clean);
debug!("state resolution finished");
trace!( map = ?resolved_state, "final resolved state" );
Ok(resolved_state)
}
@ -250,6 +272,52 @@ where
(unconflicted_state, conflicted_state)
}
/// Calculate the conflicted subgraph
async fn calculate_conflicted_subgraph<F, Fut, E>(
conflicted: &StateMap<Vec<OwnedEventId>>,
fetch_event: &F,
) -> Option<HashSet<OwnedEventId>>
where
F: Fn(OwnedEventId) -> Fut + Sync,
Fut: Future<Output = Option<E>> + Send,
E: Event + Send + Sync,
{
let conflicted_events: HashSet<_> = conflicted.values().flatten().cloned().collect();
let mut subgraph: HashSet<OwnedEventId> = HashSet::new();
let mut stack: Vec<Vec<OwnedEventId>> =
vec![conflicted_events.iter().cloned().collect::<Vec<_>>()];
let mut path: Vec<OwnedEventId> = Vec::new();
let mut seen: HashSet<OwnedEventId> = HashSet::new();
let next_event = |stack: &mut Vec<Vec<_>>, path: &mut Vec<_>| {
while stack.last().is_some_and(|s| s.is_empty()) {
stack.pop();
path.pop();
}
stack.last_mut().and_then(|s| s.pop())
};
while let Some(event_id) = next_event(&mut stack, &mut path) {
path.push(event_id.clone());
if subgraph.contains(&event_id) {
if path.len() > 1 {
subgraph.extend(path.iter().cloned());
}
path.pop();
continue;
}
if conflicted_events.contains(&event_id) && path.len() > 1 {
subgraph.extend(path.iter().cloned());
}
if seen.contains(&event_id) {
path.pop();
continue;
}
let evt = fetch_event(event_id.clone()).await?;
stack.push(evt.auth_events().map(ToOwned::to_owned).collect());
seen.insert(event_id);
}
Some(subgraph)
}
/// Returns a Vec of deduped EventIds that appear in some chains but not others.
#[allow(clippy::arithmetic_side_effects)]
fn get_auth_chain_diff<Id, Hasher>(
@ -513,8 +581,10 @@ where
/// For each `events_to_check` event we gather the events needed to auth it from
/// the `fetch_event` closure and verify each event using the
/// `event_auth::auth_check` function.
#[tracing::instrument(level = "trace", skip_all)]
async fn iterative_auth_check<'a, E, F, Fut, S>(
room_version: &RoomVersion,
stateres_version: f32,
events_to_check: S,
unconflicted_state: StateMap<OwnedEventId>,
fetch_event: &F,
@ -538,12 +608,15 @@ where
.try_collect()
.boxed()
.await?;
trace!(list = ?events_to_check, "events to check");
let auth_event_ids: HashSet<OwnedEventId> = events_to_check
.iter()
.flat_map(|event: &E| event.auth_events().map(ToOwned::to_owned))
.collect();
trace!(set = ?auth_event_ids, "auth event IDs to fetch");
let auth_events: HashMap<OwnedEventId, E> = auth_event_ids
.into_iter()
.stream()
@ -553,9 +626,15 @@ where
.boxed()
.await;
trace!(map = ?auth_events.keys().collect::<Vec<_>>(), "fetched auth events");
let auth_events = &auth_events;
let mut resolved_state = unconflicted_state;
let mut resolved_state = match stateres_version {
| 2.1 => StateMap::new(),
| _ => unconflicted_state,
};
for event in events_to_check {
trace!(event_id = event.event_id().as_str(), "checking event");
let state_key = event
.state_key()
.ok_or_else(|| Error::InvalidPdu("State event had no state key".to_owned()))?;
@ -565,13 +644,29 @@ where
event.sender(),
Some(state_key),
event.content(),
room_version,
)?;
trace!(list = ?auth_types, event_id = event.event_id().as_str(), "auth types for event");
let mut auth_state = StateMap::new();
if room_version.room_ids_as_hashes {
trace!("room version uses hashed IDs, manually fetching create event");
let create_event_id_raw = event.room_id_or_hash().as_str().replace('!', "$");
let create_event_id = EventId::parse(&create_event_id_raw).map_err(|e| {
Error::InvalidPdu(format!(
"Failed to parse create event ID from room ID/hash: {e}"
))
})?;
let create_event = fetch_event(create_event_id.into())
.await
.ok_or_else(|| Error::NotFound("Failed to find create event".into()))?;
auth_state.insert(create_event.event_type().with_state_key(""), create_event);
}
for aid in event.auth_events() {
if let Some(ev) = auth_events.get(aid) {
//TODO: synapse checks "rejected_reason" which is most likely related to
// soft-failing
trace!(event_id = aid.as_str(), "found auth event");
auth_state.insert(
ev.event_type()
.with_state_key(ev.state_key().ok_or_else(|| {
@ -600,8 +695,9 @@ where
auth_state.insert(key.to_owned(), event);
})
.await;
trace!(map = ?auth_state.keys().collect::<Vec<_>>(), event_id = event.event_id().as_str(), "auth state for event");
debug!("event to check {:?}", event.event_id());
debug!(event_id = event.event_id().as_str(), "Running auth checks");
// The key for this is (eventType + a state_key of the signed token not sender)
// so search for it
@ -617,16 +713,29 @@ where
)
};
let auth_result =
auth_check(room_version, &event, current_third_party, fetch_state).await;
let auth_result = auth_check(
room_version,
&event,
current_third_party,
fetch_state,
&fetch_state(&StateEventType::RoomCreate, "")
.await
.expect("create event must exist"),
)
.await;
match auth_result {
| Ok(true) => {
// add event to resolved state map
trace!(
event_id = event.event_id().as_str(),
"event passed the authentication check, adding to resolved state"
);
resolved_state.insert(
event.event_type().with_state_key(state_key),
event.event_id().to_owned(),
);
trace!(map = ?resolved_state, "new resolved state");
},
| Ok(false) => {
// synapse passes here on AuthError. We do not add this event to resolved_state.
@ -638,7 +747,8 @@ where
},
}
}
trace!(map = ?resolved_state, "final resolved state from iterative auth check");
debug!("iterative auth check finished");
Ok(resolved_state)
}
@ -909,6 +1019,7 @@ mod tests {
let resolved_power = super::iterative_auth_check(
&RoomVersion::V6,
2.1,
sorted_power_events.iter().map(AsRef::as_ref).stream(),
HashMap::new(), // unconflicted events
&fetcher,

View file

@ -61,25 +61,34 @@ pub struct RoomVersion {
pub extra_redaction_checks: bool,
/// Allow knocking in event authentication.
///
/// See [room v7 specification](https://spec.matrix.org/latest/rooms/v7/) for more information.
/// See [room v7 specification](https://spec.matrix.org/latest/rooms/v7/)
pub allow_knocking: bool,
/// Adds support for the restricted join rule.
///
/// See: [MSC3289](https://github.com/matrix-org/matrix-spec-proposals/pull/3289) for more information.
/// See: [MSC3289](https://github.com/matrix-org/matrix-spec-proposals/pull/3289)
pub restricted_join_rules: bool,
/// Adds support for the knock_restricted join rule.
///
/// See: [MSC3787](https://github.com/matrix-org/matrix-spec-proposals/pull/3787) for more information.
/// See: [MSC3787](https://github.com/matrix-org/matrix-spec-proposals/pull/3787)
pub knock_restricted_join_rule: bool,
/// Enforces integer power levels.
///
/// See: [MSC3667](https://github.com/matrix-org/matrix-spec-proposals/pull/3667) for more information.
/// See: [MSC3667](https://github.com/matrix-org/matrix-spec-proposals/pull/3667)
pub integer_power_levels: bool,
/// Determine the room creator using the `m.room.create` event's `sender`,
/// instead of the event content's `creator` field.
///
/// See: [MSC2175](https://github.com/matrix-org/matrix-spec-proposals/pull/2175) for more information.
/// See: [MSC2175](https://github.com/matrix-org/matrix-spec-proposals/pull/2175)
pub use_room_create_sender: bool,
/// Whether the room creators are considered superusers.
/// A superuser will always have infinite power levels in the room.
///
/// See: [MSC4289](https://github.com/matrix-org/matrix-spec-proposals/pull/4289)
pub explicitly_privilege_room_creators: bool,
/// Whether the room's m.room.create event ID is itself the room ID.
///
/// See: [MSC4291](https://github.com/matrix-org/matrix-spec-proposals/pull/4291)
pub room_ids_as_hashes: bool,
}
impl RoomVersion {
@ -97,6 +106,8 @@ impl RoomVersion {
knock_restricted_join_rule: false,
integer_power_levels: false,
use_room_create_sender: false,
explicitly_privilege_room_creators: false,
room_ids_as_hashes: false,
};
pub const V10: Self = Self {
knock_restricted_join_rule: true,
@ -107,6 +118,11 @@ impl RoomVersion {
use_room_create_sender: true,
..Self::V10
};
pub const V12: Self = Self {
explicitly_privilege_room_creators: true,
room_ids_as_hashes: true,
..Self::V11
};
pub const V2: Self = Self {
state_res: StateResolutionVersion::V2,
..Self::V1
@ -144,6 +160,7 @@ impl RoomVersion {
| RoomVersionId::V9 => Self::V9,
| RoomVersionId::V10 => Self::V10,
| RoomVersionId::V11 => Self::V11,
| RoomVersionId::V12 => Self::V12,
| ver => return Err(Error::Unsupported(format!("found version `{ver}`"))),
})
}
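
A hedged usage sketch of the new capability flags; the conduwuit::RoomVersion path is taken from the client create_room imports earlier in this commit and assumed to be the same re-exported type:

    use conduwuit::RoomVersion; // assumed re-export path

    fn main() {
        assert!(RoomVersion::V12.room_ids_as_hashes);
        assert!(RoomVersion::V12.explicitly_privilege_room_creators);
        // V11 keeps the old behaviour: stored room IDs, no superuser creators.
        assert!(!RoomVersion::V11.room_ids_as_hashes);
        assert!(!RoomVersion::V11.explicitly_privilege_room_creators);
    }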

View file

@ -24,7 +24,7 @@ use serde_json::{
use super::auth_types_for_event;
use crate::{
Result, info,
Result, RoomVersion, info,
matrix::{Event, EventTypeExt, Pdu, StateMap, pdu::EventHash},
};
@ -154,6 +154,7 @@ pub(crate) async fn do_check(
fake_event.sender(),
fake_event.state_key(),
fake_event.content(),
&RoomVersion::V6,
)
.unwrap();
@ -398,7 +399,7 @@ pub(crate) fn to_init_pdu_event(
Pdu {
event_id: id.try_into().unwrap(),
room_id: room_id().to_owned(),
room_id: Some(room_id().to_owned()),
sender: sender.to_owned(),
origin_server_ts: ts.try_into().unwrap(),
state_key: state_key.map(Into::into),
@ -446,7 +447,7 @@ where
Pdu {
event_id: id.try_into().unwrap(),
room_id: room_id().to_owned(),
room_id: Some(room_id().to_owned()),
sender: sender.to_owned(),
origin_server_ts: ts.try_into().unwrap(),
state_key: state_key.map(Into::into),

View file

@ -1,6 +1,6 @@
use std::collections::BTreeMap;
use conduwuit::{Result, pdu::PduBuilder};
use conduwuit::{Result, info, pdu::PduBuilder};
use futures::FutureExt;
use ruma::{
RoomId, RoomVersionId,
@ -26,7 +26,7 @@ use crate::Services;
/// used to issue admin commands by talking to the server user inside it.
pub async fn create_admin_room(services: &Services) -> Result {
let room_id = RoomId::new(services.globals.server_name());
let room_version = &services.config.default_room_version;
let room_version = &RoomVersionId::V11;
let _short_id = services
.rooms
@ -45,10 +45,13 @@ pub async fn create_admin_room(services: &Services) -> Result {
match room_version {
| V1 | V2 | V3 | V4 | V5 | V6 | V7 | V8 | V9 | V10 =>
RoomCreateEventContent::new_v1(server_user.into()),
| _ => RoomCreateEventContent::new_v11(),
| V11 => RoomCreateEventContent::new_v11(),
| _ => RoomCreateEventContent::new_v12(),
}
};
info!("Creating admin room {} with version {}", room_id, room_version);
// 1. The room create event
services
.rooms
@ -61,7 +64,7 @@ pub async fn create_admin_room(services: &Services) -> Result {
..create_content
}),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -77,7 +80,7 @@ pub async fn create_admin_room(services: &Services) -> Result {
&RoomMemberEventContent::new(MembershipState::Join),
),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -95,7 +98,7 @@ pub async fn create_admin_room(services: &Services) -> Result {
..Default::default()
}),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -108,7 +111,7 @@ pub async fn create_admin_room(services: &Services) -> Result {
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomJoinRulesEventContent::new(JoinRule::Invite)),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -124,7 +127,7 @@ pub async fn create_admin_room(services: &Services) -> Result {
&RoomHistoryVisibilityEventContent::new(HistoryVisibility::Shared),
),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -140,7 +143,7 @@ pub async fn create_admin_room(services: &Services) -> Result {
&RoomGuestAccessEventContent::new(GuestAccess::Forbidden),
),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -154,7 +157,7 @@ pub async fn create_admin_room(services: &Services) -> Result {
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomNameEventContent::new(room_name)),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -168,7 +171,7 @@ pub async fn create_admin_room(services: &Services) -> Result {
topic: format!("Manage {} | Run commands prefixed with `!admin` | Run `!admin -h` for help | Documentation: https://continuwuity.org/", services.config.server_name),
}),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -186,7 +189,7 @@ pub async fn create_admin_room(services: &Services) -> Result {
alt_aliases: Vec::new(),
}),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()
@ -204,7 +207,7 @@ pub async fn create_admin_room(services: &Services) -> Result {
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomPreviewUrlsEventContent { disabled: true }),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.boxed()

View file

@ -55,7 +55,7 @@ pub async fn make_user_admin(&self, user_id: &UserId) -> Result {
&RoomMemberEventContent::new(MembershipState::Invite),
),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.await?;
@ -69,7 +69,7 @@ pub async fn make_user_admin(&self, user_id: &UserId) -> Result {
&RoomMemberEventContent::new(MembershipState::Join),
),
user_id,
&room_id,
Some(&room_id),
&state_lock,
)
.await?;
@ -83,7 +83,7 @@ pub async fn make_user_admin(&self, user_id: &UserId) -> Result {
&RoomMemberEventContent::new(MembershipState::Invite),
),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.await?;
@ -111,7 +111,7 @@ pub async fn make_user_admin(&self, user_id: &UserId) -> Result {
.build_and_append_pdu(
PduBuilder::state(String::new(), &room_power_levels),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.await?;
@ -135,7 +135,7 @@ pub async fn make_user_admin(&self, user_id: &UserId) -> Result {
.build_and_append_pdu(
PduBuilder::timeline(&RoomMessageEventContent::text_markdown(welcome_message)),
server_user,
&room_id,
Some(&room_id),
&state_lock,
)
.await?;
@ -218,7 +218,7 @@ pub async fn revoke_admin(&self, user_id: &UserId) -> Result {
..event
}),
self.services.globals.server_user.as_ref(),
&room_id,
Some(&room_id),
&state_lock,
)
.await

View file

@ -393,13 +393,13 @@ impl Service {
return Ok(());
};
let response_sender = if self.is_admin_room(pdu.room_id()).await {
let response_sender = if self.is_admin_room(pdu.room_id().unwrap()).await {
&self.services.globals.server_user
} else {
pdu.sender()
};
self.respond_to_room(content, pdu.room_id(), response_sender)
self.respond_to_room(content, pdu.room_id().unwrap(), response_sender)
.boxed()
.await
}
@ -419,7 +419,7 @@ impl Service {
.build_and_append_pdu(
PduBuilder::timeline(&self.text_or_file(content).await),
user_id,
room_id,
Some(room_id),
&state_lock,
)
.await
@ -447,7 +447,12 @@ impl Service {
self.services
.timeline
.build_and_append_pdu(PduBuilder::timeline(&content), user_id, room_id, state_lock)
.build_and_append_pdu(
PduBuilder::timeline(&content),
user_id,
Some(room_id),
state_lock,
)
.await?;
Ok(())
@ -484,7 +489,10 @@ impl Service {
}
// Prevent unescaped !admin from being used outside of the admin room
if is_public_prefix && !self.is_admin_room(event.room_id()).await {
if event.room_id().is_some()
&& is_public_prefix
&& !self.is_admin_room(event.room_id().unwrap()).await
{
return false;
}
@ -497,7 +505,7 @@ impl Service {
// the administrator can execute commands as the server user
let emergency_password_set = self.services.server.config.emergency_password.is_some();
let from_server = event.sender() == server_user && !emergency_password_set;
if from_server && self.is_admin_room(event.room_id()).await {
if from_server && self.is_admin_room(event.room_id().unwrap()).await {
return false;
}
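
Event::room_id() now returns an Option, and these hunks bridge the gap with unwrap(). Where a panic would be unwelcome, a let-else guard keeps the same control flow; below is a sketch under that assumption, with toy types standing in for the real Event trait.

struct Event {
    room_id: Option<String>,
}

impl Event {
    fn room_id(&self) -> Option<&str> {
        self.room_id.as_deref()
    }
}

fn is_admin_room(room_id: &str) -> bool {
    room_id == "!admin:example.org"
}

// Mirrors the guard above: a public-prefixed command outside the admin room is
// rejected, while an event with no room ID falls through to the later checks.
fn allow_public_prefix(event: &Event) -> bool {
    let Some(room_id) = event.room_id() else {
        return true;
    };
    is_admin_room(room_id)
}

fn main() {
    let outside = Event { room_id: Some("!other:example.org".to_owned()) };
    assert!(!allow_public_prefix(&outside));
}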

View file

@ -287,18 +287,22 @@ impl Service {
{
let mut notify = None;
let mut tweaks = Vec::new();
if event.room_id().is_none() {
// TODO(hydra): does this matter?
return Ok(());
}
let power_levels: RoomPowerLevelsEventContent = self
.services
.state_accessor
.room_state_get(event.room_id(), &StateEventType::RoomPowerLevels, "")
.room_state_get(event.room_id().unwrap(), &StateEventType::RoomPowerLevels, "")
.await
.and_then(|event| event.get_content())
.unwrap_or_default();
let serialized = event.to_format();
for action in self
.get_actions(user, &ruleset, &power_levels, &serialized, event.room_id())
.get_actions(user, &ruleset, &power_levels, &serialized, event.room_id().unwrap())
.await
{
let n = match action {
@ -426,7 +430,7 @@ impl Service {
let mut notifi = Notification::new(d);
notifi.event_id = Some(event.event_id().to_owned());
notifi.room_id = Some(event.room_id().to_owned());
notifi.room_id = Some(event.room_id().unwrap().to_owned());
if http
.data
.get("org.matrix.msc4076.disable_badge_count")
@ -470,14 +474,14 @@ impl Service {
notifi.room_name = self
.services
.state_accessor
.get_name(event.room_id())
.get_name(event.room_id().unwrap())
.await
.ok();
notifi.room_alias = self
.services
.state_accessor
.get_canonical_alias(event.room_id())
.get_canonical_alias(event.room_id().unwrap())
.await
.ok();

View file

@ -195,11 +195,11 @@ async fn get_auth_chain_inner(
debug_error!(?event_id, ?e, "Could not find pdu mentioned in auth events");
},
| Ok(pdu) => {
if pdu.room_id != room_id {
if pdu.room_id.is_some() && pdu.room_id != Some(room_id.to_owned()) {
return Err!(Request(Forbidden(error!(
?event_id,
?room_id,
wrong_room_id = ?pdu.room_id,
wrong_room_id = ?pdu.room_id.unwrap(),
"auth event for incorrect room"
))));
}

View file

@ -58,6 +58,10 @@ pub async fn handle_incoming_pdu<'a>(
value: BTreeMap<String, CanonicalJsonValue>,
is_timeline_event: bool,
) -> Result<Option<RawPduId>> {
if room_id.is_empty() {
// TODO(hydra): Room IDs should be calculated before this function is called
panic!("room ID cannot be empty");
}
// 1. Skip the PDU if we already have it as a timeline event
if let Ok(pdu_id) = self.services.timeline.get_pdu_id(event_id).await {
return Ok(Some(pdu_id));
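
The new guard above panics on an empty room ID until the TODO is resolved. A hedged alternative is to reject the PDU instead of aborting the process; the sketch below uses plain std types rather than the crate's Result and error macros.

fn check_room_id(room_id: &str) -> Result<(), String> {
    if room_id.is_empty() {
        // TODO(hydra): room IDs should be derived before this point; until
        // then, reject the PDU instead of bringing the whole server down.
        return Err("room ID cannot be empty".to_owned());
    }
    Ok(())
}

fn main() {
    assert!(check_room_id("").is_err());
    assert!(check_room_id("!room:example.org").is_ok());
}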

View file

@ -139,6 +139,7 @@ where
&pdu_event,
None, // TODO: third party invite
state_fetch,
create_event.as_pdu(),
)
.await
.map_err(|e| err!(Request(Forbidden("Auth check failed: {e:?}"))))?;
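
The extra create_event.as_pdu() argument reflects that in hash-based room versions the auth check can no longer look the create event up by room ID alone, so the caller supplies it. The following is a toy illustration of that shape only, not the real state_res::auth_check signature.

struct Pdu {
    kind: &'static str,
    room_version: &'static str,
}

// Toy auth check: the only point illustrated is that the room version (and any
// create-event content the rules need) now comes from the explicit argument.
fn auth_check(event: &Pdu, create_event: &Pdu) -> Result<bool, String> {
    if create_event.kind != "m.room.create" {
        return Err("create_event must be an m.room.create".to_owned());
    }
    let _version = create_event.room_version;
    Ok(event.kind != "m.room.create" || std::ptr::eq(event, create_event))
}

fn main() {
    let create = Pdu { kind: "m.room.create", room_version: "12" };
    let message = Pdu { kind: "m.room.message", room_version: "12" };
    assert_eq!(auth_check(&message, &create), Ok(true));
}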

View file

@ -99,7 +99,10 @@ impl Service {
}
fn check_room_id<Pdu: Event>(room_id: &RoomId, pdu: &Pdu) -> Result {
if pdu.room_id() != room_id {
if pdu
.room_id()
.is_some_and(|claimed_room_id| claimed_room_id != room_id)
{
return Err!(Request(InvalidParam(error!(
pdu_event_id = ?pdu.event_id(),
pdu_room_id = ?pdu.room_id(),

View file

@ -102,6 +102,7 @@ where
&incoming_pdu,
None, // TODO: third party invite
|ty, sk| state_fetch(ty.clone(), sk.into()),
create_event.as_pdu(),
)
.await
.map_err(|e| err!(Request(Forbidden("Auth check failed: {e:?}"))))?;
@ -123,6 +124,7 @@ where
incoming_pdu.sender(),
incoming_pdu.state_key(),
incoming_pdu.content(),
&room_version,
)
.await?;
@ -140,6 +142,7 @@ where
&incoming_pdu,
None, // third-party invite
state_fetch,
create_event.as_pdu(),
)
.await
.map_err(|e| err!(Request(Forbidden("Auth check failed: {e:?}"))))?;
@ -156,7 +159,7 @@ where
!self
.services
.state_accessor
.user_can_redact(&redact_id, incoming_pdu.sender(), incoming_pdu.room_id(), true)
.user_can_redact(&redact_id, incoming_pdu.sender(), room_id, true)
.await?,
};
@ -313,6 +316,7 @@ where
state_ids_compressed,
soft_fail,
&state_lock,
room_id,
)
.await?;
@ -347,6 +351,7 @@ where
state_ids_compressed,
soft_fail,
&state_lock,
room_id,
)
.await?;

View file

@ -124,7 +124,7 @@ pub async fn search_pdus<'a>(
.wide_filter_map(move |pdu| async move {
self.services
.state_accessor
.user_can_see_event(query.user_id?, pdu.room_id(), pdu.event_id())
.user_can_see_event(query.user_id?, pdu.room_id().unwrap(), pdu.event_id())
.await
.then_some(pdu)
})

View file

@ -1,6 +1,7 @@
use std::{collections::HashMap, fmt::Write, iter::once, sync::Arc};
use async_trait::async_trait;
use conduwuit::{RoomVersion, debug};
use conduwuit_core::{
Event, PduEvent, Result, err,
result::FlatOk,
@ -15,6 +16,7 @@ use conduwuit_database::{Deserialized, Ignore, Interfix, Map};
use futures::{
FutureExt, Stream, StreamExt, TryFutureExt, TryStreamExt, future::join_all, pin_mut,
};
use log::trace;
use ruma::{
EventId, OwnedEventId, OwnedRoomId, RoomId, RoomVersionId, UserId,
events::{
@ -148,7 +150,7 @@ impl Service {
.roomid_spacehierarchy_cache
.lock()
.await
.remove(&pdu.room_id);
.remove(room_id);
},
| _ => continue,
}
@ -239,7 +241,7 @@ impl Service {
/// This adds all current state events (not including the incoming event)
/// to `stateid_pduid` and adds the incoming event to `eventid_statehash`.
#[tracing::instrument(skip(self, new_pdu), level = "debug")]
pub async fn append_to_state(&self, new_pdu: &PduEvent) -> Result<u64> {
pub async fn append_to_state(&self, new_pdu: &PduEvent, room_id: &RoomId) -> Result<u64> {
const BUFSIZE: usize = size_of::<u64>();
let shorteventid = self
@ -248,7 +250,7 @@ impl Service {
.get_or_create_shorteventid(&new_pdu.event_id)
.await;
let previous_shortstatehash = self.get_room_shortstatehash(&new_pdu.room_id).await;
let previous_shortstatehash = self.get_room_shortstatehash(room_id).await;
if let Ok(p) = previous_shortstatehash {
self.db
@ -319,7 +321,11 @@ impl Service {
}
#[tracing::instrument(skip_all, level = "debug")]
pub async fn summary_stripped<'a, E>(&self, event: &'a E) -> Vec<Raw<AnyStrippedStateEvent>>
pub async fn summary_stripped<'a, E>(
&self,
event: &'a E,
room_id: &RoomId,
) -> Vec<Raw<AnyStrippedStateEvent>>
where
E: Event + Send + Sync,
&'a E: Event + Send,
@ -338,7 +344,7 @@ impl Service {
let fetches = cells.into_iter().map(|(event_type, state_key)| {
self.services
.state_accessor
.room_state_get(event.room_id(), event_type, state_key)
.room_state_get(room_id, event_type, state_key)
});
join_all(fetches)
@ -421,7 +427,7 @@ impl Service {
}
/// This fetches auth events from the current state.
#[tracing::instrument(skip(self, content), level = "debug")]
#[tracing::instrument(skip(self, content, room_version), level = "trace")]
pub async fn get_auth_events(
&self,
room_id: &RoomId,
@ -429,13 +435,15 @@ impl Service {
sender: &UserId,
state_key: Option<&str>,
content: &serde_json::value::RawValue,
room_version: &RoomVersion,
) -> Result<StateMap<PduEvent>> {
let Ok(shortstatehash) = self.get_room_shortstatehash(room_id).await else {
return Ok(HashMap::new());
};
let auth_types = state_res::auth_types_for_event(kind, sender, state_key, content)?;
let auth_types =
state_res::auth_types_for_event(kind, sender, state_key, content, room_version)?;
debug!(?auth_types, "Auth types for event");
let sauthevents: HashMap<_, _> = auth_types
.iter()
.stream()
@ -448,6 +456,7 @@ impl Service {
})
.collect()
.await;
debug!(?sauthevents, "Auth events to fetch");
let (state_keys, event_ids): (Vec<_>, Vec<_>) = self
.services
@ -461,7 +470,7 @@ impl Service {
})
.unzip()
.await;
debug!(?state_keys, ?event_ids, "Auth events found in state");
self.services
.short
.multi_get_eventid_from_short(event_ids.into_iter().stream())
@ -473,6 +482,7 @@ impl Service {
.get_pdu(&event_id)
.await
.map(move |pdu| (((*ty).clone(), (*sk).clone()), pdu))
.inspect_err(|e| warn!("Failed to get auth event {event_id}: {e:?}"))
.ok()
})
.collect()
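
get_auth_events now forwards the room version into auth_types_for_event because the set of auth events is version-dependent. Below is a toy, self-contained illustration; the assumption that hash-based room versions drop the create event from auth events is mine, and the authoritative rules live in state_res.

#[derive(Debug, PartialEq, Eq)]
enum AuthType {
    Create,
    PowerLevels,
    Member,
}

// Assumption: in room versions where the room ID is the create-event hash, the
// create event no longer needs to be listed among an event's auth events.
fn auth_types_for_event(kind: &str, room_ids_as_hashes: bool) -> Vec<AuthType> {
    let mut types = Vec::new();
    if !room_ids_as_hashes {
        types.push(AuthType::Create);
    }
    types.push(AuthType::PowerLevels);
    if kind == "m.room.member" {
        types.push(AuthType::Member);
    }
    types
}

fn main() {
    let v12 = auth_types_for_event("m.room.message", true);
    assert!(!v12.contains(&AuthType::Create));
}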

View file

@ -161,7 +161,7 @@ pub async fn user_can_invite(
&RoomMemberEventContent::new(MembershipState::Invite),
),
sender,
room_id,
Some(room_id),
state_lock,
)
.await

View file

@ -42,6 +42,7 @@ pub async fn append_incoming_pdu<'a, Leaves>(
state_ids_compressed: Arc<CompressedState>,
soft_fail: bool,
state_lock: &'a RoomMutexGuard,
room_id: &'a ruma::RoomId,
) -> Result<Option<RawPduId>>
where
Leaves: Iterator<Item = &'a EventId> + Send + 'a,
@ -51,24 +52,24 @@ where
// fail.
self.services
.state
.set_event_state(&pdu.event_id, &pdu.room_id, state_ids_compressed)
.set_event_state(&pdu.event_id, room_id, state_ids_compressed)
.await?;
if soft_fail {
self.services
.pdu_metadata
.mark_as_referenced(&pdu.room_id, pdu.prev_events.iter().map(AsRef::as_ref));
.mark_as_referenced(room_id, pdu.prev_events.iter().map(AsRef::as_ref));
self.services
.state
.set_forward_extremities(&pdu.room_id, new_room_leaves, state_lock)
.set_forward_extremities(room_id, new_room_leaves, state_lock)
.await;
return Ok(None);
}
let pdu_id = self
.append_pdu(pdu, pdu_json, new_room_leaves, state_lock)
.append_pdu(pdu, pdu_json, new_room_leaves, state_lock, room_id)
.await?;
Ok(Some(pdu_id))
@ -88,6 +89,7 @@ pub async fn append_pdu<'a, Leaves>(
mut pdu_json: CanonicalJsonObject,
leaves: Leaves,
state_lock: &'a RoomMutexGuard,
room_id: &'a ruma::RoomId,
) -> Result<RawPduId>
where
Leaves: Iterator<Item = &'a EventId> + Send + 'a,
@ -98,7 +100,7 @@ where
let shortroomid = self
.services
.short
.get_shortroomid(pdu.room_id())
.get_shortroomid(room_id)
.await
.map_err(|_| err!(Database("Room does not exist")))?;
@ -151,14 +153,14 @@ where
// We must keep track of all events that have been referenced.
self.services
.pdu_metadata
.mark_as_referenced(pdu.room_id(), pdu.prev_events().map(AsRef::as_ref));
.mark_as_referenced(room_id, pdu.prev_events().map(AsRef::as_ref));
self.services
.state
.set_forward_extremities(pdu.room_id(), leaves, state_lock)
.set_forward_extremities(room_id, leaves, state_lock)
.await;
let insert_lock = self.mutex_insert.lock(pdu.room_id()).await;
let insert_lock = self.mutex_insert.lock(room_id).await;
let count1 = self.services.globals.next_count().unwrap();
@ -166,11 +168,11 @@ where
// appending fails
self.services
.read_receipt
.private_read_set(pdu.room_id(), pdu.sender(), count1);
.private_read_set(room_id, pdu.sender(), count1);
self.services
.user
.reset_notification_counts(pdu.sender(), pdu.room_id());
.reset_notification_counts(pdu.sender(), room_id);
let count2 = PduCount::Normal(self.services.globals.next_count().unwrap());
let pdu_id: RawPduId = PduId { shortroomid, shorteventid: count2 }.into();
@ -184,14 +186,14 @@ where
let power_levels: RoomPowerLevelsEventContent = self
.services
.state_accessor
.room_state_get_content(pdu.room_id(), &StateEventType::RoomPowerLevels, "")
.room_state_get_content(room_id, &StateEventType::RoomPowerLevels, "")
.await
.unwrap_or_default();
let mut push_target: HashSet<_> = self
.services
.state_cache
.active_local_users_in_room(pdu.room_id())
.active_local_users_in_room(room_id)
.map(ToOwned::to_owned)
// Don't notify the sender of their own events, and don't send from ignored users
.ready_filter(|user| *user != pdu.sender())
@ -230,7 +232,7 @@ where
for action in self
.services
.pusher
.get_actions(user, &rules_for_user, &power_levels, &serialized, pdu.room_id())
.get_actions(user, &rules_for_user, &power_levels, &serialized, room_id)
.await
{
match action {
@ -268,20 +270,20 @@ where
}
self.db
.increment_notification_counts(pdu.room_id(), notifies, highlights);
.increment_notification_counts(room_id, notifies, highlights);
match *pdu.kind() {
| TimelineEventType::RoomRedaction => {
use RoomVersionId::*;
let room_version_id = self.services.state.get_room_version(pdu.room_id()).await?;
let room_version_id = self.services.state.get_room_version(room_id).await?;
match room_version_id {
| V1 | V2 | V3 | V4 | V5 | V6 | V7 | V8 | V9 | V10 => {
if let Some(redact_id) = pdu.redacts() {
if self
.services
.state_accessor
.user_can_redact(redact_id, pdu.sender(), pdu.room_id(), false)
.user_can_redact(redact_id, pdu.sender(), room_id, false)
.await?
{
self.redact_pdu(redact_id, pdu, shortroomid).await?;
@ -294,7 +296,7 @@ where
if self
.services
.state_accessor
.user_can_redact(redact_id, pdu.sender(), pdu.room_id(), false)
.user_can_redact(redact_id, pdu.sender(), room_id, false)
.await?
{
self.redact_pdu(redact_id, pdu, shortroomid).await?;
@ -310,7 +312,7 @@ where
.roomid_spacehierarchy_cache
.lock()
.await
.remove(pdu.room_id());
.remove(room_id);
},
| TimelineEventType::RoomMember => {
if let Some(state_key) = pdu.state_key() {
@ -320,8 +322,12 @@ where
let content: RoomMemberEventContent = pdu.get_content()?;
let stripped_state = match content.membership {
| MembershipState::Invite | MembershipState::Knock =>
self.services.state.summary_stripped(pdu).await.into(),
| MembershipState::Invite | MembershipState::Knock => self
.services
.state
.summary_stripped(pdu, room_id)
.await
.into(),
| _ => None,
};
@ -331,7 +337,7 @@ where
self.services
.state_cache
.update_membership(
pdu.room_id(),
room_id,
target_user_id,
content,
pdu.sender(),
@ -392,7 +398,7 @@ where
if self
.services
.state_cache
.appservice_in_room(pdu.room_id(), appservice)
.appservice_in_room(room_id, appservice)
.await
{
self.services
@ -430,12 +436,12 @@ where
let matching_aliases = |aliases: NamespaceRegex| {
self.services
.alias
.local_aliases_for_room(pdu.room_id())
.local_aliases_for_room(room_id)
.ready_any(move |room_alias| aliases.is_match(room_alias.as_str()))
};
if matching_aliases(appservice.aliases.clone()).await
|| appservice.rooms.is_match(pdu.room_id().as_str())
|| appservice.rooms.is_match(room_id.as_str())
|| matching_users(&appservice.users)
{
self.services
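
Throughout this file the room ID is threaded in as an explicit parameter instead of being read from pdu.room_id(), which may now be absent. Here is a small self-contained sketch of the soft-fail branch under that convention, with placeholder types in place of the real services.

struct Pdu {
    event_id: String,
    // Deliberately unused: the append path no longer consults the PDU's own
    // (possibly absent) room ID.
    #[allow(dead_code)]
    room_id: Option<String>,
    prev_events: Vec<String>,
}

struct PduMetadata;

impl PduMetadata {
    // Referencing prev_events is keyed on the caller-supplied room ID, so it
    // also works for PDUs that do not carry one themselves.
    fn mark_as_referenced<'a>(&self, room_id: &str, events: impl Iterator<Item = &'a str>) {
        for event_id in events {
            println!("{room_id} references {event_id}");
        }
    }
}

fn append_incoming_pdu(
    metadata: &PduMetadata,
    pdu: &Pdu,
    room_id: &str,
    soft_fail: bool,
) -> Option<String> {
    if soft_fail {
        metadata.mark_as_referenced(room_id, pdu.prev_events.iter().map(String::as_str));
        // Soft-failed events are referenced but never appended to the timeline.
        return None;
    }
    Some(pdu.event_id.clone())
}

fn main() {
    let pdu = Pdu {
        event_id: "$evt".to_owned(),
        room_id: None,
        prev_events: vec!["$prev".to_owned()],
    };
    assert_eq!(append_incoming_pdu(&PduMetadata, &pdu, "!room:example.org", true), None);
}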

View file

@ -1,5 +1,6 @@
use std::{collections::HashSet, iter::once};
use conduwuit::{RoomVersion, debug_warn, trace};
use conduwuit_core::{
Err, Result, implement,
matrix::{event::Event, pdu::PduBuilder},
@ -11,6 +12,7 @@ use ruma::{
events::{
TimelineEventType,
room::{
create::RoomCreateEventContent,
member::{MembershipState, RoomMemberEventContent},
redaction::RoomRedactionEventContent,
},
@ -23,32 +25,36 @@ use super::RoomMutexGuard;
/// takes a roomid_mutex_state, meaning that only this function is able to
/// mutate the room state.
#[implement(super::Service)]
#[tracing::instrument(skip(self, state_lock), level = "debug")]
#[tracing::instrument(skip(self, state_lock), level = "trace")]
pub async fn build_and_append_pdu(
&self,
pdu_builder: PduBuilder,
sender: &UserId,
room_id: &RoomId,
room_id: Option<&RoomId>,
state_lock: &RoomMutexGuard,
) -> Result<OwnedEventId> {
let (pdu, pdu_json) = self
.create_hash_and_sign_event(pdu_builder, sender, room_id, state_lock)
.await?;
if self.services.admin.is_admin_room(pdu.room_id()).await {
let room_id = pdu.room_id_or_hash();
trace!("Checking if room {room_id} is an admin room");
if self.services.admin.is_admin_room(&room_id).await {
trace!("Room {room_id} is an admin room, checking PDU for admin room restrictions");
self.check_pdu_for_admin_room(&pdu, sender).boxed().await?;
}
// If redaction event is not authorized, do not append it to the timeline
if *pdu.kind() == TimelineEventType::RoomRedaction {
use RoomVersionId::*;
match self.services.state.get_room_version(pdu.room_id()).await? {
trace!("Running redaction checks for room {room_id}");
match self.services.state.get_room_version(&room_id).await? {
| V1 | V2 | V3 | V4 | V5 | V6 | V7 | V8 | V9 | V10 => {
if let Some(redact_id) = pdu.redacts() {
if !self
.services
.state_accessor
.user_can_redact(redact_id, pdu.sender(), pdu.room_id(), false)
.user_can_redact(redact_id, pdu.sender(), &room_id, false)
.await?
{
return Err!(Request(Forbidden("User cannot redact this event.")));
@ -61,7 +67,7 @@ pub async fn build_and_append_pdu(
if !self
.services
.state_accessor
.user_can_redact(redact_id, pdu.sender(), pdu.room_id(), false)
.user_can_redact(redact_id, pdu.sender(), &room_id, false)
.await?
{
return Err!(Request(Forbidden("User cannot redact this event.")));
@ -72,6 +78,7 @@ pub async fn build_and_append_pdu(
}
if *pdu.kind() == TimelineEventType::RoomMember {
trace!("Running room member checks for room {room_id}");
let content: RoomMemberEventContent = pdu.get_content()?;
if content.join_authorized_via_users_server.is_some()
@ -93,12 +100,27 @@ pub async fn build_and_append_pdu(
)));
}
}
if *pdu.kind() == TimelineEventType::RoomCreate {
trace!("Running room create checks for room {room_id}");
let content: RoomCreateEventContent = pdu.get_content()?;
let room_features = RoomVersion::new(&content.room_version)?;
if room_features.room_ids_as_hashes {
// bootstrap shortid for room
debug_warn!(%room_id, "Bootstrapping shortid for room");
self.services
.short
.get_or_create_shortroomid(&room_id)
.await;
}
}
// We append to state before appending the pdu, so we don't have a moment in
// time with the pdu without its state. This is okay because append_pdu can't
// fail.
let statehashid = self.services.state.append_to_state(&pdu).await?;
trace!("Appending {} state for room {room_id}", pdu.event_id());
let statehashid = self.services.state.append_to_state(&pdu, &room_id).await?;
trace!("Generating raw ID for PDU {}", pdu.event_id());
let pdu_id = self
.append_pdu(
&pdu,
@ -107,20 +129,22 @@ pub async fn build_and_append_pdu(
// of the room
once(pdu.event_id()),
state_lock,
&room_id,
)
.boxed()
.await?;
// We set the room state after inserting the pdu, so that we never have a moment
// in time where events in the current room state do not exist
trace!("Setting room state for room {room_id}");
self.services
.state
.set_room_state(pdu.room_id(), statehashid, state_lock);
.set_room_state(&room_id, statehashid, state_lock);
let mut servers: HashSet<OwnedServerName> = self
.services
.state_cache
.room_servers(pdu.room_id())
.room_servers(&room_id)
.map(ToOwned::to_owned)
.collect()
.await;
@ -141,11 +165,13 @@ pub async fn build_and_append_pdu(
// room_servers() and/or the if statement above
servers.remove(self.services.globals.server_name());
trace!("Sending PDU {} to {} servers", pdu.event_id(), servers.len());
self.services
.sending
.send_pdu_servers(servers.iter().map(AsRef::as_ref).stream(), &pdu_id)
.await?;
trace!("Event {} in room {:?} has been appended", pdu.event_id(), room_id);
Ok(pdu.event_id().to_owned())
}
@ -179,7 +205,7 @@ where
let count = self
.services
.state_cache
.room_members(pdu.room_id())
.room_members(&pdu.room_id_or_hash())
.ready_filter(|user| self.services.globals.user_is_local(user))
.ready_filter(|user| *user != target)
.boxed()
@ -203,7 +229,7 @@ where
let count = self
.services
.state_cache
.room_members(pdu.room_id())
.room_members(&pdu.room_id_or_hash())
.ready_filter(|user| self.services.globals.user_is_local(user))
.ready_filter(|user| *user != target)
.boxed()
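
room_id_or_hash() does the heavy lifting in this file. Below is a sketch of the idea behind it, under the assumption that in hash-based room versions the room ID is the create event's event ID with the sigil swapped from $ to !; the real method lives on the PDU type.

struct Pdu {
    event_id: String,        // e.g. "$abc123"
    room_id: Option<String>, // None only before a create event is appended
}

impl Pdu {
    // Assumption: for hash-based room versions the derived room ID reuses the
    // create event's hash with a '!' sigil.
    fn room_id_or_hash(&self) -> String {
        match &self.room_id {
            Some(id) => id.clone(),
            None => format!("!{}", self.event_id.trim_start_matches('$')),
        }
    }
}

fn main() {
    let create = Pdu { event_id: "$abc123".to_owned(), room_id: None };
    assert_eq!(create.room_id_or_hash(), "!abc123");
}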

View file

@ -1,5 +1,6 @@
use std::cmp;
use std::{cmp, collections::HashMap};
use conduwuit::{smallstr::SmallString, trace};
use conduwuit_core::{
Err, Error, Result, err, implement,
matrix::{
@ -11,12 +12,13 @@ use conduwuit_core::{
};
use futures::{StreamExt, TryStreamExt, future, future::ready};
use ruma::{
CanonicalJsonObject, CanonicalJsonValue, OwnedEventId, RoomId, RoomVersionId, UserId,
CanonicalJsonObject, CanonicalJsonValue, OwnedEventId, OwnedRoomId, RoomId, RoomVersionId,
UserId,
canonical_json::to_canonical_value,
events::{StateEventType, TimelineEventType, room::create::RoomCreateEventContent},
uint,
};
use serde_json::value::to_raw_value;
use serde_json::value::{RawValue, to_raw_value};
use tracing::warn;
use super::RoomMutexGuard;
@ -26,10 +28,25 @@ pub async fn create_hash_and_sign_event(
&self,
pdu_builder: PduBuilder,
sender: &UserId,
room_id: &RoomId,
room_id: Option<&RoomId>,
_mutex_lock: &RoomMutexGuard, /* Take mutex guard to make sure users get the room
* state mutex */
) -> Result<(PduEvent, CanonicalJsonObject)> {
fn from_evt(
room_id: OwnedRoomId,
event_type: TimelineEventType,
content: Box<RawValue>,
) -> Result<RoomVersionId> {
if event_type == TimelineEventType::RoomCreate {
let content: RoomCreateEventContent = serde_json::from_str(content.get())?;
Ok(content.room_version)
} else {
Err(Error::InconsistentRoomState(
"non-create event for room of unknown version",
room_id,
))
}
}
let PduBuilder {
event_type,
content,
@ -38,67 +55,84 @@ pub async fn create_hash_and_sign_event(
redacts,
timestamp,
} = pdu_builder;
let prev_events: Vec<OwnedEventId> = self
.services
.state
.get_forward_extremities(room_id)
.take(20)
.map(Into::into)
.collect()
.await;
// If there was no create event yet, assume we are creating a room
let room_version_id = self
.services
.state
.get_room_version(room_id)
.await
.or_else(|_| {
if event_type == TimelineEventType::RoomCreate {
let content: RoomCreateEventContent = serde_json::from_str(content.get())?;
Ok(content.room_version)
} else {
Err(Error::InconsistentRoomState(
"non-create event for room of unknown version",
room_id.to_owned(),
))
}
})?;
let room_version_id = match room_id {
| Some(room_id) => self
.services
.state
.get_room_version(room_id)
.await
.or_else(|_| from_evt(room_id.to_owned(), event_type.clone(), content.clone()))?,
| None => from_evt(
RoomId::new(self.services.globals.server_name()),
event_type.clone(),
content.clone(),
)?,
};
let room_version = RoomVersion::new(&room_version_id).expect("room version is supported");
// TODO(hydra): Only create events can lack a room ID.
let auth_events = self
.services
.state
.get_auth_events(room_id, &event_type, sender, state_key.as_deref(), &content)
.await?;
let prev_events: Vec<OwnedEventId> = match room_id {
| Some(room_id) =>
self.services
.state
.get_forward_extremities(room_id)
.take(20)
.map(Into::into)
.collect()
.await,
| None => Vec::new(),
};
let auth_events: HashMap<(StateEventType, SmallString<[u8; 48]>), PduEvent> = match room_id {
| Some(room_id) =>
self.services
.state
.get_auth_events(
room_id,
&event_type,
sender,
state_key.as_deref(),
&content,
&room_version,
)
.await?,
| None => HashMap::new(),
};
// Our depth is the maximum depth of prev_events + 1
let depth = prev_events
.iter()
.stream()
.map(Ok)
.and_then(|event_id| self.get_pdu(event_id))
.and_then(|pdu| future::ok(pdu.depth))
.ignore_err()
.ready_fold(uint!(0), cmp::max)
.await
.saturating_add(uint!(1));
let depth = match room_id {
| Some(_) => prev_events
.iter()
.stream()
.map(Ok)
.and_then(|event_id| self.get_pdu(event_id))
.and_then(|pdu| future::ok(pdu.depth))
.ignore_err()
.ready_fold(uint!(0), cmp::max)
.await
.saturating_add(uint!(1)),
| None => uint!(1),
};
let mut unsigned = unsigned.unwrap_or_default();
if let Some(state_key) = &state_key {
if let Ok(prev_pdu) = self
.services
.state_accessor
.room_state_get(room_id, &event_type.to_string().into(), state_key)
.await
{
unsigned.insert("prev_content".to_owned(), prev_pdu.get_content_as_value());
unsigned.insert("prev_sender".to_owned(), serde_json::to_value(prev_pdu.sender())?);
unsigned
.insert("replaces_state".to_owned(), serde_json::to_value(prev_pdu.event_id())?);
if let Some(room_id) = room_id {
if let Some(state_key) = &state_key {
if let Ok(prev_pdu) = self
.services
.state_accessor
.room_state_get(room_id, &event_type.clone().to_string().into(), state_key)
.await
{
unsigned.insert("prev_content".to_owned(), prev_pdu.get_content_as_value());
unsigned
.insert("prev_sender".to_owned(), serde_json::to_value(prev_pdu.sender())?);
unsigned.insert(
"replaces_state".to_owned(),
serde_json::to_value(prev_pdu.event_id())?,
);
}
}
}
@ -109,15 +143,15 @@ pub async fn create_hash_and_sign_event(
// The first two events in a room are always m.room.create and m.room.member,
// so any other events with that same depth are illegal.
warn!(
"Had unsafe depth {depth} when creating non-state event in {room_id}. Cowardly \
aborting"
"Had unsafe depth {depth} when creating non-state event in {}. Cowardly aborting",
room_id.expect("room_id is Some here").as_str()
);
return Err!(Request(Unknown("Unsafe depth for non-state event.")));
}
let mut pdu = PduEvent {
event_id: ruma::event_id!("$thiswillbefilledinlater").into(),
room_id: room_id.to_owned(),
room_id: room_id.map(ToOwned::to_owned),
sender: sender.to_owned(),
origin: None,
origin_server_ts: timestamp.map_or_else(
@ -152,11 +186,30 @@ pub async fn create_hash_and_sign_event(
ready(auth_events.get(&key).map(ToOwned::to_owned))
};
let room_id_or_hash = pdu.room_id_or_hash();
let create_pdu = match &pdu.kind {
| TimelineEventType::RoomCreate => None,
| _ => Some(
self.services
.state_accessor
.room_state_get(&room_id_or_hash, &StateEventType::RoomCreate, "")
.await
.map_err(|e| {
err!(Request(Forbidden(warn!("Failed to fetch room create event: {e}"))))
})?,
),
};
let create_event = match &pdu.kind {
| TimelineEventType::RoomCreate => &pdu,
| _ => create_pdu.as_ref().unwrap().as_pdu(),
};
let auth_check = state_res::auth_check(
&room_version,
&pdu,
None, // TODO: third_party_invite
auth_fetch,
create_event,
)
.await
.map_err(|e| err!(Request(Forbidden(warn!("Auth check failed: {e:?}")))))?;
@ -164,6 +217,11 @@ pub async fn create_hash_and_sign_event(
if !auth_check {
return Err!(Request(Forbidden("Event is not authorized.")));
}
trace!(
"Event {} in room {} is authorized",
pdu.event_id,
pdu.room_id.as_ref().map_or("None", |id| id.as_str())
);
// Hash and sign
let mut pdu_json = utils::to_canonical_object(&pdu).map_err(|e| {
@ -178,13 +236,13 @@ pub async fn create_hash_and_sign_event(
},
}
// Add origin because synapse likes that (and it's required in the spec)
pdu_json.insert(
"origin".to_owned(),
to_canonical_value(self.services.globals.server_name())
.expect("server name is a valid CanonicalJsonValue"),
);
trace!("hashing and signing event {}", pdu.event_id);
if let Err(e) = self
.services
.server_keys
@ -204,30 +262,45 @@ pub async fn create_hash_and_sign_event(
pdu_json.insert("event_id".into(), CanonicalJsonValue::String(pdu.event_id.clone().into()));
// Check with the policy server
match self
.services
.event_handler
.ask_policy_server(&pdu, room_id)
.await
{
| Ok(true) => {},
| Ok(false) => {
return Err!(Request(Forbidden(debug_warn!(
"Policy server marked this event as spam"
))));
},
| Err(e) => {
// fail open
warn!("Failed to check event with policy server: {e}");
},
// TODO(hydra): Skip this check for create events (why didn't we do this
// already?)
if room_id.is_some() {
trace!(
"Checking event {} in room {} with policy server",
pdu.event_id,
pdu.room_id.as_ref().map_or("None", |id| id.as_str())
);
match self
.services
.event_handler
.ask_policy_server(&pdu, &pdu.room_id_or_hash())
.await
{
| Ok(true) => {},
| Ok(false) => {
return Err!(Request(Forbidden(debug_warn!(
"Policy server marked this event as spam"
))));
},
| Err(e) => {
// fail open
warn!("Failed to check event with policy server: {e}");
},
}
}
// Generate short event id
trace!(
"Generating short event ID for {} in room {}",
pdu.event_id,
pdu.room_id.as_ref().map_or("None", |id| id.as_str())
);
let _shorteventid = self
.services
.short
.get_or_create_shorteventid(&pdu.event_id)
.await;
trace!("New PDU created: {pdu:?}");
Ok((pdu, pdu_json))
}
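
The from_evt fallback above derives the room version from the create event's own content when no room (or no stored state) exists yet. Here is a self-contained sketch of that fallback; serde_json is the only crate assumed, and the helper name is illustrative.

use serde_json::Value;

fn room_version_from_create(event_type: &str, content: &str) -> Result<String, String> {
    if event_type != "m.room.create" {
        return Err("non-create event for room of unknown version".to_owned());
    }
    let content: Value = serde_json::from_str(content).map_err(|e| e.to_string())?;
    Ok(content
        .get("room_version")
        .and_then(Value::as_str)
        // The spec treats a missing room_version as "1".
        .unwrap_or("1")
        .to_owned())
}

fn main() {
    let version = room_version_from_create("m.room.create", r#"{"room_version":"12"}"#).unwrap();
    assert_eq!(version, "12");
}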

View file

@ -39,7 +39,11 @@ pub async fn redact_pdu<Pdu: Event + Send + Sync>(
}
}
let room_version_id = self.services.state.get_room_version(pdu.room_id()).await?;
let room_version_id = self
.services
.state
.get_room_version(&pdu.room_id_or_hash())
.await?;
pdu.redact(&room_version_id, reason.to_value())?;

View file

@ -798,7 +798,7 @@ impl Service {
let unread: UInt = self
.services
.user
.notification_count(&user_id, pdu.room_id())
.notification_count(&user_id, &pdu.room_id_or_hash())
.await
.try_into()
.expect("notification count can't go that high");

View file

@ -1,6 +1,6 @@
use std::borrow::Borrow;
use conduwuit::{Err, Result, implement};
use conduwuit::{Err, Result, debug, debug_error, implement};
use ruma::{
CanonicalJsonObject, RoomVersionId, ServerName, ServerSigningKeyId,
api::federation::discovery::VerifyKey,
@ -19,9 +19,11 @@ pub async fn get_event_keys(
let required = match required_keys(object, version) {
| Ok(required) => required,
| Err(e) => {
debug_error!("Failed to determine keys required to verify: {e}");
return Err!(BadServerResponse("Failed to determine keys required to verify: {e}"));
},
};
debug!(?required, "Keys required to verify event");
let batch = required
.iter()
@ -61,6 +63,7 @@ where
}
#[implement(super::Service)]
#[tracing::instrument(skip(self))]
pub async fn get_verify_key(
&self,
origin: &ServerName,
@ -70,6 +73,7 @@ pub async fn get_verify_key(
let notary_only = self.services.server.config.only_query_trusted_key_servers;
if let Some(result) = self.verify_keys_for(origin).await.remove(key_id) {
debug!("Found key in cache");
return Ok(result);
}

View file

@ -8,7 +8,7 @@ mod verify;
use std::{collections::BTreeMap, sync::Arc, time::Duration};
use conduwuit::{
Result, Server, implement,
Result, Server, debug, debug_error, debug_warn, implement,
utils::{IterStream, timepoint_from_now},
};
use database::{Deserialized, Json, Map};
@ -112,6 +112,7 @@ async fn add_signing_keys(&self, new_keys: ServerSigningKeys) {
}
#[implement(Service)]
#[tracing::instrument(skip(self, object))]
pub async fn required_keys_exist(
&self,
object: &CanonicalJsonObject,
@ -119,10 +120,12 @@ pub async fn required_keys_exist(
) -> bool {
use ruma::signatures::required_keys;
debug!(?object, "Checking required keys exist");
let Ok(required_keys) = required_keys(object, version) else {
debug_error!("Failed to determine required keys");
return false;
};
debug!(?required_keys, "Required keys to verify event");
required_keys
.iter()
.flat_map(|(server, key_ids)| key_ids.iter().map(move |key_id| (server, key_id)))
@ -132,6 +135,7 @@ pub async fn required_keys_exist(
}
#[implement(Service)]
#[tracing::instrument(skip(self))]
pub async fn verify_key_exists(&self, origin: &ServerName, key_id: &ServerSigningKeyId) -> bool {
type KeysMap<'a> = BTreeMap<&'a ServerSigningKeyId, &'a RawJsonValue>;
@ -142,6 +146,7 @@ pub async fn verify_key_exists(&self, origin: &ServerName, key_id: &ServerSignin
.await
.deserialized::<Raw<ServerSigningKeys>>()
else {
debug_warn!("No known signing keys found for {origin}");
return false;
};
@ -157,6 +162,7 @@ pub async fn verify_key_exists(&self, origin: &ServerName, key_id: &ServerSignin
}
}
debug_warn!("Key {key_id} not found for {origin}");
false
}
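
The added logging instruments required_keys_exist, which checks that every (server, key ID) pair demanded by the event's signatures is already known locally before verification is attempted. A simplified sketch follows, with a plain map standing in for the key store and notary lookups.

use std::collections::{BTreeMap, BTreeSet};

// server name -> key IDs (e.g. "ed25519:abc")
type KeyMap = BTreeMap<String, BTreeSet<String>>;

fn required_keys_exist(required: &KeyMap, known: &KeyMap) -> bool {
    required.iter().all(|(server, key_ids)| {
        key_ids
            .iter()
            .all(|key_id| known.get(server).is_some_and(|keys| keys.contains(key_id)))
    })
}

fn main() {
    let mut required = KeyMap::new();
    required.insert(
        "example.org".to_owned(),
        BTreeSet::from(["ed25519:a".to_owned()]),
    );
    let known = required.clone();
    assert!(required_keys_exist(&required, &known));
}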

View file

@ -1,4 +1,6 @@
use conduwuit::{Err, Result, implement, matrix::event::gen_event_id_canonical_json};
use conduwuit::{
Err, Result, debug, debug_warn, implement, matrix::event::gen_event_id_canonical_json,
};
use ruma::{
CanonicalJsonObject, CanonicalJsonValue, OwnedEventId, RoomVersionId, signatures::Verified,
};
@ -28,18 +30,25 @@ pub async fn validate_and_add_event_id_no_fetch(
pdu: &RawJsonValue,
room_version: &RoomVersionId,
) -> Result<(OwnedEventId, CanonicalJsonObject)> {
debug!(?pdu, "Validating PDU without fetching keys");
let (event_id, mut value) = gen_event_id_canonical_json(pdu, room_version)?;
debug!(event_id = event_id.as_str(), "Generated event ID, checking required keys");
if !self.required_keys_exist(&value, room_version).await {
debug_warn!(
"Event {event_id} is missing required keys, cannot verify without fetching keys"
);
return Err!(BadServerResponse(debug_warn!(
"Event {event_id} cannot be verified: missing keys."
)));
}
debug!("All required keys exist, verifying event");
if let Err(e) = self.verify_event(&value, Some(room_version)).await {
debug_warn!("Event verification failed");
return Err!(BadServerResponse(debug_error!(
"Event {event_id} failed verification: {e:?}"
)));
}
debug!("Event verified successfully");
value.insert("event_id".into(), CanonicalJsonValue::String(event_id.as_str().into()));
@ -52,7 +61,7 @@ pub async fn verify_event(
event: &CanonicalJsonObject,
room_version: Option<&RoomVersionId>,
) -> Result<Verified> {
let room_version = room_version.unwrap_or(&RoomVersionId::V11);
let room_version = room_version.unwrap_or(&RoomVersionId::V12);
let keys = self.get_event_keys(event, room_version).await?;
ruma::signatures::verify_event(&keys, event, room_version).map_err(Into::into)
}
@ -63,7 +72,7 @@ pub async fn verify_json(
event: &CanonicalJsonObject,
room_version: Option<&RoomVersionId>,
) -> Result {
let room_version = room_version.unwrap_or(&RoomVersionId::V11);
let room_version = room_version.unwrap_or(&RoomVersionId::V12);
let keys = self.get_event_keys(event, room_version).await?;
ruma::signatures::verify_json(&keys, event.clone()).map_err(Into::into)
}
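
verify_event and verify_json now fall back to room version 12 when the caller does not specify one. A trivial sketch of that default selection, with the version handled as a plain string:

fn effective_room_version(requested: Option<&str>) -> &str {
    requested.unwrap_or("12")
}

fn main() {
    assert_eq!(effective_room_version(None), "12");
    assert_eq!(effective_room_version(Some("10")), "10");
}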