use std::{
	collections::{hash_map, HashMap},
	mem::size_of,
	sync::{Arc, Mutex},
};

use conduit::{checked, error, utils, Error, PduCount, PduEvent, Result};
use database::{Database, Map};
use ruma::{api::client::error::ErrorKind, CanonicalJsonObject, EventId, OwnedRoomId, OwnedUserId, RoomId, UserId};

use crate::{rooms, Dep};

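/// Raw database access for the timeline: column handles for pdus keyed by
/// pdu id, the event-id -> pdu-id index, outlier pdus, per-user notification
/// and highlight counters, plus an in-memory cache of the latest timeline
/// count per room.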
pub(super) struct Data {
	eventid_outlierpdu: Arc<Map>,
	eventid_pduid: Arc<Map>,
	pduid_pdu: Arc<Map>,
	userroomid_highlightcount: Arc<Map>,
	userroomid_notificationcount: Arc<Map>,
	pub(super) lasttimelinecount_cache: LastTimelineCountCache,
	pub(super) db: Arc<Database>,
	services: Services,
}

struct Services {
	short: Dep<rooms::short::Service>,
}

type PdusIterItem = Result<(PduCount, PduEvent)>;
type PdusIterator<'a> = Box<dyn Iterator<Item = PdusIterItem> + 'a>;
type LastTimelineCountCache = Mutex<HashMap<OwnedRoomId, PduCount>>;

impl Data {
	pub(super) fn new(args: &crate::Args<'_>) -> Self {
		let db = &args.db;
		Self {
			eventid_outlierpdu: db["eventid_outlierpdu"].clone(),
			eventid_pduid: db["eventid_pduid"].clone(),
			pduid_pdu: db["pduid_pdu"].clone(),
			userroomid_highlightcount: db["userroomid_highlightcount"].clone(),
			userroomid_notificationcount: db["userroomid_notificationcount"].clone(),
			lasttimelinecount_cache: Mutex::new(HashMap::new()),
			db: args.db.clone(),
			services: Services {
				short: args.depend::<rooms::short::Service>("rooms::short"),
			},
		}
	}

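	/// Returns the count of the most recent pdu in `room_id`'s timeline,
	/// filling `lasttimelinecount_cache` on a miss; `PduCount::Normal(0)` is
	/// returned for a room with no valid timeline events.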
	pub(super) fn last_timeline_count(&self, sender_user: &UserId, room_id: &RoomId) -> Result<PduCount> {
		match self
			.lasttimelinecount_cache
			.lock()
			.expect("locked")
			.entry(room_id.to_owned())
		{
			hash_map::Entry::Vacant(v) => {
				if let Some(last_count) = self
					.pdus_until(sender_user, room_id, PduCount::max())?
					.find_map(|r| {
						// Filter out buggy events
						if r.is_err() {
							error!("Bad pdu in pdus_until: {:?}", r);
						}
						r.ok()
					}) {
					Ok(*v.insert(last_count.0))
				} else {
					Ok(PduCount::Normal(0))
				}
			},
			hash_map::Entry::Occupied(o) => Ok(*o.get()),
		}
	}

	/// Returns the `count` of this pdu's id.
	pub(super) fn get_pdu_count(&self, event_id: &EventId) -> Result<Option<PduCount>> {
		self.eventid_pduid
			.get(event_id.as_bytes())?
			.map(|pdu_id| pdu_count(&pdu_id))
			.transpose()
	}

	/// Returns the json of a pdu.
	///
	/// Checks the `eventid_outlierpdu` Tree if not found in the timeline.
	pub(super) fn get_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> {
		self.get_non_outlier_pdu_json(event_id)?.map_or_else(
			|| {
				self.eventid_outlierpdu
					.get(event_id.as_bytes())?
					.map(|pdu| serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db.")))
					.transpose()
			},
			|x| Ok(Some(x)),
		)
	}

	/// Returns the json of a non-outlier pdu.
	pub(super) fn get_non_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> {
		self.eventid_pduid
			.get(event_id.as_bytes())?
			.map(|pduid| {
				self.pduid_pdu
					.get(&pduid)?
					.ok_or_else(|| Error::bad_database("Invalid pduid in eventid_pduid."))
			})
			.transpose()?
			.map(|pdu| serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db.")))
			.transpose()
	}

	/// Returns the pdu's id.
	#[inline]
	pub(super) fn get_pdu_id(&self, event_id: &EventId) -> Result<Option<database::Handle<'_>>> {
		self.eventid_pduid.get(event_id.as_bytes())
	}

	/// Returns the pdu.
	///
	/// This does __NOT__ check the outliers `Tree`.
	pub(super) fn get_non_outlier_pdu(&self, event_id: &EventId) -> Result<Option<PduEvent>> {
		self.eventid_pduid
			.get(event_id.as_bytes())?
			.map(|pduid| {
				self.pduid_pdu
					.get(&pduid)?
					.ok_or_else(|| Error::bad_database("Invalid pduid in eventid_pduid."))
			})
			.transpose()?
			.map(|pdu| serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db.")))
			.transpose()
	}

	/// Returns the pdu.
	///
	/// Checks the `eventid_outlierpdu` Tree if not found in the timeline.
	pub(super) fn get_pdu(&self, event_id: &EventId) -> Result<Option<Arc<PduEvent>>> {
		if let Some(pdu) = self
			.get_non_outlier_pdu(event_id)?
			.map_or_else(
				|| {
					self.eventid_outlierpdu
						.get(event_id.as_bytes())?
						.map(|pdu| serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db.")))
						.transpose()
				},
				|x| Ok(Some(x)),
			)?
			.map(Arc::new)
		{
			Ok(Some(pdu))
		} else {
			Ok(None)
		}
	}

	/// Returns the pdu.
	///
	/// This does __NOT__ check the outliers `Tree`.
	pub(super) fn get_pdu_from_id(&self, pdu_id: &[u8]) -> Result<Option<PduEvent>> {
		self.pduid_pdu.get(pdu_id)?.map_or(Ok(None), |pdu| {
			Ok(Some(
				serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db."))?,
			))
		})
	}

	/// Returns the pdu as a `BTreeMap<String, CanonicalJsonValue>`.
	pub(super) fn get_pdu_json_from_id(&self, pdu_id: &[u8]) -> Result<Option<CanonicalJsonObject>> {
		self.pduid_pdu.get(pdu_id)?.map_or(Ok(None), |pdu| {
			Ok(Some(
				serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db."))?,
			))
		})
	}

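	/// Appends a pdu to the room's timeline at `pdu_id`: stores the canonical
	/// json under `pdu_id`, updates the last-timeline-count cache for the
	/// room, indexes the event id, and drops any outlier copy of the event.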
	pub(super) fn append_pdu(
		&self, pdu_id: &[u8], pdu: &PduEvent, json: &CanonicalJsonObject, count: u64,
	) -> Result<()> {
		self.pduid_pdu.insert(
			pdu_id,
			&serde_json::to_vec(json).expect("CanonicalJsonObject is always valid JSON"),
		)?;

		self.lasttimelinecount_cache
			.lock()
			.expect("locked")
			.insert(pdu.room_id.clone(), PduCount::Normal(count));

		self.eventid_pduid.insert(pdu.event_id.as_bytes(), pdu_id)?;
		self.eventid_outlierpdu.remove(pdu.event_id.as_bytes())?;

		Ok(())
	}

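	/// Like `append_pdu`, but for events inserted behind the current timeline
	/// during backfill; it does not touch the last-timeline-count cache.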
	pub(super) fn prepend_backfill_pdu(
		&self, pdu_id: &[u8], event_id: &EventId, json: &CanonicalJsonObject,
	) -> Result<()> {
		self.pduid_pdu.insert(
			pdu_id,
			&serde_json::to_vec(json).expect("CanonicalJsonObject is always valid JSON"),
		)?;

		self.eventid_pduid.insert(event_id.as_bytes(), pdu_id)?;
		self.eventid_outlierpdu.remove(event_id.as_bytes())?;

		Ok(())
	}

	/// Replaces the pdu stored under an existing `pdu_id` with new json.
	pub(super) fn replace_pdu(&self, pdu_id: &[u8], pdu_json: &CanonicalJsonObject, _pdu: &PduEvent) -> Result<()> {
		if self.pduid_pdu.get(pdu_id)?.is_some() {
			self.pduid_pdu.insert(
				pdu_id,
				&serde_json::to_vec(pdu_json).expect("CanonicalJsonObject is always valid JSON"),
			)?;
		} else {
			return Err(Error::BadRequest(ErrorKind::NotFound, "PDU does not exist."));
		}

		Ok(())
	}

	/// Returns an iterator over all events and their tokens in a room that
	/// happened before the event with id `until` in reverse-chronological
	/// order.
	pub(super) fn pdus_until(&self, user_id: &UserId, room_id: &RoomId, until: PduCount) -> Result<PdusIterator<'_>> {
		let (prefix, current) = self.count_to_id(room_id, until, 1, true)?;

		let user_id = user_id.to_owned();

		Ok(Box::new(
			self.pduid_pdu
				.iter_from(&current, true)
				.take_while(move |(k, _)| k.starts_with(&prefix))
				.map(move |(pdu_id, v)| {
					let mut pdu = serde_json::from_slice::<PduEvent>(&v)
						.map_err(|_| Error::bad_database("PDU in db is invalid."))?;
					if pdu.sender != user_id {
						pdu.remove_transaction_id()?;
					}
					pdu.add_age()?;
					let count = pdu_count(&pdu_id)?;
					Ok((count, pdu))
				}),
		))
	}

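	/// Returns an iterator over all events and their tokens in a room that
	/// happened after the event with count `from`, in chronological order.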
	pub(super) fn pdus_after(&self, user_id: &UserId, room_id: &RoomId, from: PduCount) -> Result<PdusIterator<'_>> {
		let (prefix, current) = self.count_to_id(room_id, from, 1, false)?;

		let user_id = user_id.to_owned();

		Ok(Box::new(
			self.pduid_pdu
				.iter_from(&current, false)
				.take_while(move |(k, _)| k.starts_with(&prefix))
				.map(move |(pdu_id, v)| {
					let mut pdu = serde_json::from_slice::<PduEvent>(&v)
						.map_err(|_| Error::bad_database("PDU in db is invalid."))?;
					if pdu.sender != user_id {
						pdu.remove_transaction_id()?;
					}
					pdu.add_age()?;
					let count = pdu_count(&pdu_id)?;
					Ok((count, pdu))
				}),
		))
	}

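	/// Bumps the unread notification and highlight counters for the given
	/// users in `room_id`; keys are built as `user_id ++ 0xFF ++ room_id`.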
	pub(super) fn increment_notification_counts(
		&self, room_id: &RoomId, notifies: Vec<OwnedUserId>, highlights: Vec<OwnedUserId>,
	) -> Result<()> {
		let mut notifies_batch = Vec::new();
		let mut highlights_batch = Vec::new();
		for user in notifies {
			let mut userroom_id = user.as_bytes().to_vec();
			userroom_id.push(0xFF);
			userroom_id.extend_from_slice(room_id.as_bytes());
			notifies_batch.push(userroom_id);
		}
		for user in highlights {
			let mut userroom_id = user.as_bytes().to_vec();
			userroom_id.push(0xFF);
			userroom_id.extend_from_slice(room_id.as_bytes());
			highlights_batch.push(userroom_id);
		}

		self.userroomid_notificationcount
			.increment_batch(notifies_batch.iter().map(Vec::as_slice))?;
		self.userroomid_highlightcount
			.increment_batch(highlights_batch.iter().map(Vec::as_slice))?;
		Ok(())
	}

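	/// Resolves a `PduCount` to the `(prefix, pdu_id)` pair used as the
	/// starting key for timeline iteration: the prefix is the room's
	/// big-endian shortroomid, and the count is offset by `offset` (with
	/// `subtract` giving the direction) so the base event itself is skipped.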
	pub(super) fn count_to_id(
		&self, room_id: &RoomId, count: PduCount, offset: u64, subtract: bool,
	) -> Result<(Vec<u8>, Vec<u8>)> {
		let prefix = self
			.services
			.short
			.get_shortroomid(room_id)?
			.ok_or_else(|| Error::bad_database("Looked for bad shortroomid in timeline"))?
			.to_be_bytes()
			.to_vec();
		let mut pdu_id = prefix.clone();
		// Offset by `offset` (callers pass 1) so we don't send the base event
		let count_raw = match count {
			PduCount::Normal(x) => {
				if subtract {
					x.saturating_sub(offset)
				} else {
					x.saturating_add(offset)
				}
			},
			PduCount::Backfilled(x) => {
				pdu_id.extend_from_slice(&0_u64.to_be_bytes());
				let num = u64::MAX.saturating_sub(x);
				if subtract {
					num.saturating_sub(offset)
				} else {
					num.saturating_add(offset)
				}
			},
		};
		pdu_id.extend_from_slice(&count_raw.to_be_bytes());

		Ok((prefix, pdu_id))
	}
}

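// A pdu_id ends with the count as 8 big-endian bytes. Backfilled pdus carry
// an extra 0_u64 marker before the count and store it as `u64::MAX - count`,
// which keeps the keys sorted in chronological order.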
/// Returns the `count` of this pdu's id.
pub(super) fn pdu_count(pdu_id: &[u8]) -> Result<PduCount> {
	let stride = size_of::<u64>();
	let pdu_id_len = pdu_id.len();
	let last_u64 = utils::u64_from_bytes(&pdu_id[checked!(pdu_id_len - stride)?..])
		.map_err(|_| Error::bad_database("PDU has invalid count bytes."))?;
	let second_last_u64 =
		utils::u64_from_bytes(&pdu_id[checked!(pdu_id_len - 2 * stride)?..checked!(pdu_id_len - stride)?]);

	if matches!(second_last_u64, Ok(0)) {
		Ok(PduCount::Backfilled(u64::MAX.saturating_sub(last_u64)))
	} else {
		Ok(PduCount::Normal(last_u64))
	}
}