use std::{
	cmp::{self, Ordering},
	collections::{hash_map::Entry, BTreeMap, BTreeSet, HashMap, HashSet},
	time::Duration,
};

use axum::extract::State;
use conduit::{
	debug, error,
	utils::math::{ruma_from_u64, ruma_from_usize, usize_from_ruma, usize_from_u64_truncated},
	warn, Err, PduCount,
};
use ruma::{
	api::client::{
		error::ErrorKind,
		filter::{FilterDefinition, LazyLoadOptions},
		sync::sync_events::{
			self,
			v3::{
				Ephemeral, Filter, GlobalAccountData, InviteState, InvitedRoom, JoinedRoom, LeftRoom, Presence,
				RoomAccountData, RoomSummary, Rooms, State as RoomState, Timeline, ToDevice,
			},
			v4::{SlidingOp, SlidingSyncRoomHero},
			DeviceLists, UnreadNotificationsCount,
		},
		uiaa::UiaaResponse,
	},
	events::{
		presence::PresenceEvent,
		room::member::{MembershipState, RoomMemberEventContent},
		AnyRawAccountDataEvent, StateEventType, TimelineEventType,
	},
	room::RoomType,
	serde::Raw,
	state_res::Event,
	uint, DeviceId, EventId, MilliSecondsSinceUnixEpoch, OwnedRoomId, OwnedUserId, RoomId, UInt, UserId,
};
use tracing::{Instrument as _, Span};
use crate::{
	service::{pdu::EventHash, Services},
	utils, Error, PduEvent, Result, Ruma, RumaResponse,
};
const SINGLE_CONNECTION_SYNC: &str = "single_connection_sync";
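
/// Event types that count as a "bump" for sliding sync room ordering; used
/// below when picking the activity timestamp of a sliding sync room.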
const DEFAULT_BUMP_TYPES: &[TimelineEventType] = &[
	TimelineEventType::Message,
	TimelineEventType::Encrypted,
	TimelineEventType::Sticker,
	TimelineEventType::CallInvite,
	TimelineEventType::PollStart,
	TimelineEventType::Beacon,
];
macro_rules! extract_variant {
	($e:expr, $variant:path) => {
		match $e {
			$variant(value) => Some(value),
			_ => None,
		}
	};
}
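
// Example: `extract_variant!(ev, AnyRawAccountDataEvent::Global)` yields
// `Some(inner)` when `ev` is the `Global` variant and `None` otherwise; it is
// used below to split global from per-room account data events.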

/// # `GET /_matrix/client/r0/sync`
///
/// Synchronize the client's state with the latest state on the server.
///
/// - This endpoint takes a `since` parameter which should be the `next_batch`
///   value from a previous request for incremental syncs.
///
/// Calling this endpoint without a `since` parameter returns:
/// - Some of the most recent events of each timeline
/// - Notification counts for each room
/// - Joined and invited member counts, heroes
/// - All state events
///
/// Calling this endpoint with a `since` parameter from a previous `next_batch`
/// returns: For joined rooms:
/// - Some of the most recent events of each timeline that happened after `since`
/// - If the user joined the room after `since`: All state events (unless lazy loading
///   is activated) and all device list updates in that room
/// - If the user was already in the room: A list of all events that are in the
///   state now, but were not in the state at `since`
/// - If the state we send contains a member event: Joined and invited member
///   counts, heroes
/// - Device list updates that happened after `since`
/// - If there are events in the timeline we send or the user sent an updated
///   read marker: Notification counts
/// - EDUs that are active now (read receipts, typing updates, presence)
/// - TODO: Allow multiple sync streams to support Pantalaimon
///
/// For invited rooms:
/// - If the user was invited after `since`: A subset of the state of the room
///   at the point of the invite
///
/// For left rooms:
/// - If the user left after `since`: `prev_batch` token, empty state (TODO:
///   subset of the state at the point of the leave)
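///
/// A minimal example request, for illustration (the `since` token here is
/// hypothetical; real values come from a previous response's `next_batch`):
///
/// ```text
/// GET /_matrix/client/r0/sync?since=s72594_4483_1934&timeout=30000
/// ```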
pub(crate) async fn sync_events_route(
	State(services): State<crate::State>, body: Ruma<sync_events::v3::Request>,
) -> Result<sync_events::v3::Response, RumaResponse<UiaaResponse>> {
	let sender_user = body.sender_user.expect("user is authenticated");
	let sender_device = body.sender_device.expect("user is authenticated");
	let body = body.body;

	// Presence update
	if services.globals.allow_local_presence() {
		services
			.presence
			.ping_presence(&sender_user, &body.set_presence)?;
	}

	// Set up watchers so that, if there's no response, we can wait for them
	let watcher = services.globals.watch(&sender_user, &sender_device);
	let next_batch = services.globals.current_count()?;
	let next_batchcount = PduCount::Normal(next_batch);
	let next_batch_string = next_batch.to_string();

	// Load filter
	let filter = match body.filter {
		None => FilterDefinition::default(),
		Some(Filter::FilterDefinition(filter)) => filter,
		Some(Filter::FilterId(filter_id)) => services
			.users
			.get_filter(&sender_user, &filter_id)?
			.unwrap_or_default(),
	};

	// Some clients, at least Element, seem to require knowledge of redundant
	// members for "inline" profiles on the timeline to work properly
	let (lazy_load_enabled, lazy_load_send_redundant) = match filter.room.state.lazy_load_options {
		LazyLoadOptions::Enabled {
			include_redundant_members,
		} => (true, include_redundant_members),
		LazyLoadOptions::Disabled => (false, cfg!(feature = "element_hacks")),
	};

	let full_state = body.full_state;

	let mut joined_rooms = BTreeMap::new();
	let since = body
		.since
		.as_ref()
		.and_then(|string| string.parse().ok())
		.unwrap_or(0);
	let sincecount = PduCount::Normal(since);

	let mut presence_updates = HashMap::new();
	let mut left_encrypted_users = HashSet::new(); // Users that have left any encrypted rooms the sender was in
	let mut device_list_updates = HashSet::new();
	let mut device_list_left = HashSet::new();

	// Look for device list updates of this account
	device_list_updates.extend(
		services
			.users
			.keys_changed(sender_user.as_ref(), since, None)
			.filter_map(Result::ok),
	);

	if services.globals.allow_local_presence() {
		process_presence_updates(&services, &mut presence_updates, since, &sender_user).await?;
	}

	let all_joined_rooms = services
		.rooms
		.state_cache
		.rooms_joined(&sender_user)
		.collect::<Vec<_>>();

	// Coalesce database writes for the remainder of this scope.
	let _cork = services.db.cork_and_flush();
	for room_id in all_joined_rooms {
		let room_id = room_id?;
		if let Ok(joined_room) = load_joined_room(
			&services,
			&sender_user,
			&sender_device,
			&room_id,
			since,
			sincecount,
			next_batch,
			next_batchcount,
			lazy_load_enabled,
			lazy_load_send_redundant,
			full_state,
			&mut device_list_updates,
			&mut left_encrypted_users,
		)
		.await
		{
			if !joined_room.is_empty() {
				joined_rooms.insert(room_id.clone(), joined_room);
			}
		}
	}

	let mut left_rooms = BTreeMap::new();
	let all_left_rooms: Vec<_> = services
		.rooms
		.state_cache
		.rooms_left(&sender_user)
		.collect();
	for result in all_left_rooms {
		handle_left_room(
			&services,
			since,
			&result?.0,
			&sender_user,
			&mut left_rooms,
			&next_batch_string,
			full_state,
			lazy_load_enabled,
		)
		.instrument(Span::current())
		.await?;
	}

	let mut invited_rooms = BTreeMap::new();
	let all_invited_rooms: Vec<_> = services
		.rooms
		.state_cache
		.rooms_invited(&sender_user)
		.collect();
	for result in all_invited_rooms {
		let (room_id, invite_state_events) = result?;

		// Get and drop the lock to wait for remaining operations to finish
		let insert_lock = services.rooms.timeline.mutex_insert.lock(&room_id).await;
		drop(insert_lock);
		let invite_count = services
			.rooms
			.state_cache
			.get_invite_count(&room_id, &sender_user)?;

		// Invited before last sync
		if Some(since) >= invite_count {
			continue;
		}

		invited_rooms.insert(
			room_id.clone(),
			InvitedRoom {
				invite_state: InviteState {
					events: invite_state_events,
				},
			},
		);
	}

	for user_id in left_encrypted_users {
		let dont_share_encrypted_room = services
			.rooms
			.user
			.get_shared_rooms(vec![sender_user.clone(), user_id.clone()])?
			.filter_map(|other_room_id| {
				Some(
					services
						.rooms
						.state_accessor
						.room_state_get(&other_room_id, &StateEventType::RoomEncryption, "")
						.ok()?
						.is_some(),
				)
			})
			.all(|encrypted| !encrypted);
		// If the user doesn't share an encrypted room with the target anymore, we need
		// to tell them
		if dont_share_encrypted_room {
			device_list_left.insert(user_id);
		}
	}

	// Remove all to-device events the device received *last time*
	services
		.users
		.remove_to_device_events(&sender_user, &sender_device, since)?;

	let response = sync_events::v3::Response {
		next_batch: next_batch_string,
		rooms: Rooms {
			leave: left_rooms,
			join: joined_rooms,
			invite: invited_rooms,
			knock: BTreeMap::new(), // TODO
		},
		presence: Presence {
			events: presence_updates
				.into_values()
				.map(|v| Raw::new(&v).expect("PresenceEvent always serializes successfully"))
				.collect(),
		},
		account_data: GlobalAccountData {
			events: services
				.account_data
				.changes_since(None, &sender_user, since)?
				.into_iter()
				.filter_map(|e| extract_variant!(e, AnyRawAccountDataEvent::Global))
				.collect(),
		},
		device_lists: DeviceLists {
			changed: device_list_updates.into_iter().collect(),
			left: device_list_left.into_iter().collect(),
		},
		device_one_time_keys_count: services
			.users
			.count_one_time_keys(&sender_user, &sender_device)?,
		to_device: ToDevice {
			events: services
				.users
				.get_to_device_events(&sender_user, &sender_device)?,
		},
		// Fallback keys are not yet supported
		device_unused_fallback_key_types: None,
	};

	// TODO: Retry the endpoint instead of returning
	if !full_state
		&& response.rooms.is_empty()
		&& response.presence.is_empty()
		&& response.account_data.is_empty()
		&& response.device_lists.is_empty()
		&& response.to_device.is_empty()
	{
		// Hang a few seconds so requests are not spammed
		// Stop hanging if new info arrives
		let default = Duration::from_secs(30);
		let duration = cmp::min(body.timeout.unwrap_or(default), default);
		_ = tokio::time::timeout(duration, watcher).await;
	}

	Ok(response)
}

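/// Builds the `LeftRoom` entry for a room the user left after `since`; if the
/// room is not known locally (e.g. a rejected invite over federation), a
/// synthetic leave event is generated instead.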
#[allow(clippy::too_many_arguments)]
#[tracing::instrument(skip_all, fields(user_id = %sender_user, room_id = %room_id), name = "left_room")]
async fn handle_left_room(
	services: &Services, since: u64, room_id: &RoomId, sender_user: &UserId,
	left_rooms: &mut BTreeMap<OwnedRoomId, LeftRoom>, next_batch_string: &str, full_state: bool,
	lazy_load_enabled: bool,
) -> Result<()> {
	// Get and drop the lock to wait for remaining operations to finish
	let insert_lock = services.rooms.timeline.mutex_insert.lock(room_id).await;
	drop(insert_lock);
	let left_count = services
		.rooms
		.state_cache
		.get_left_count(room_id, sender_user)?;

	// Left before last sync
	if Some(since) >= left_count {
		return Ok(());
	}

	if !services.rooms.metadata.exists(room_id)? {
		// This is just a rejected invite, not a room we know
		// Insert a leave event anyways
		let event = PduEvent {
			event_id: EventId::new(services.globals.server_name()).into(),
			sender: sender_user.to_owned(),
			origin: None,
			origin_server_ts: utils::millis_since_unix_epoch()
				.try_into()
				.expect("Timestamp is valid js_int value"),
			kind: TimelineEventType::RoomMember,
			content: serde_json::from_str(r#"{"membership":"leave"}"#).expect("this is valid JSON"),
			state_key: Some(sender_user.to_string()),
			unsigned: None,
			// The following keys are dropped on conversion
			room_id: room_id.to_owned(),
			prev_events: vec![],
			depth: uint!(1),
			auth_events: vec![],
			redacts: None,
			hashes: EventHash {
				sha256: String::new(),
			},
			signatures: None,
		};

		left_rooms.insert(
			room_id.to_owned(),
			LeftRoom {
				account_data: RoomAccountData {
					events: Vec::new(),
				},
				timeline: Timeline {
					limited: false,
					prev_batch: Some(next_batch_string.to_owned()),
					events: Vec::new(),
				},
				state: RoomState {
					events: vec![event.to_sync_state_event()],
				},
			},
		);
		return Ok(());
	}

	let mut left_state_events = Vec::new();

	let since_shortstatehash = services
		.rooms
		.user
		.get_token_shortstatehash(room_id, since)?;

	let since_state_ids = match since_shortstatehash {
		Some(s) => services.rooms.state_accessor.state_full_ids(s).await?,
		None => HashMap::new(),
	};

	let Some(left_event_id) =
		services
			.rooms
			.state_accessor
			.room_state_get_id(room_id, &StateEventType::RoomMember, sender_user.as_str())?
	else {
		error!("Left room but no left state event");
		return Ok(());
	};

	let Some(left_shortstatehash) = services
		.rooms
		.state_accessor
		.pdu_shortstatehash(&left_event_id)?
	else {
		error!(event_id = %left_event_id, "Leave event has no state");
		return Ok(());
	};

	let mut left_state_ids = services
		.rooms
		.state_accessor
		.state_full_ids(left_shortstatehash)
		.await?;

	let leave_shortstatekey = services
		.rooms
		.short
		.get_or_create_shortstatekey(&StateEventType::RoomMember, sender_user.as_str())?;

	left_state_ids.insert(leave_shortstatekey, left_event_id);

	let mut i: usize = 0;
	for (key, id) in left_state_ids {
		if full_state || since_state_ids.get(&key) != Some(&id) {
			let (event_type, state_key) = services.rooms.short.get_statekey_from_short(key)?;

			if !lazy_load_enabled
                    || event_type != StateEventType::RoomMember
                    || full_state
                    // TODO: Delete the following line when this is resolved: https://github.com/vector-im/element-web/issues/22565
                    || (cfg!(feature = "element_hacks") && *sender_user == state_key)
			{
				let Some(pdu) = services.rooms.timeline.get_pdu(&id)? else {
					error!("Pdu in state not found: {}", id);
					continue;
				};

				left_state_events.push(pdu.to_sync_state_event());

				i = i.wrapping_add(1);
				if i % 100 == 0 {
					tokio::task::yield_now().await;
				}
			}
		}
	}

	left_rooms.insert(
		room_id.to_owned(),
		LeftRoom {
			account_data: RoomAccountData {
				events: Vec::new(),
			},
			timeline: Timeline {
				limited: false,
				prev_batch: Some(next_batch_string.to_owned()),
				events: Vec::new(),
			},
			state: RoomState {
				events: left_state_events,
			},
		},
	);
	Ok(())
}

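/// Merges all presence EDUs since `since` that `syncing_user` is allowed to
/// see into `presence_updates`, folding newer fields over older ones per user.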
async fn process_presence_updates(
	services: &Services, presence_updates: &mut HashMap<OwnedUserId, PresenceEvent>, since: u64, syncing_user: &UserId,
) -> Result<()> {
	// Take presence updates
	for (user_id, _, presence_bytes) in services.presence.presence_since(since) {
		if !services
			.rooms
			.state_cache
			.user_sees_user(syncing_user, &user_id)?
		{
			continue;
		}

		let presence_event = services
			.presence
			.from_json_bytes_to_event(&presence_bytes, &user_id)?;
		match presence_updates.entry(user_id) {
			Entry::Vacant(slot) => {
				slot.insert(presence_event);
			},
			Entry::Occupied(mut slot) => {
				let curr_event = slot.get_mut();
				let curr_content = &mut curr_event.content;
				let new_content = presence_event.content;

				// Update existing presence event with more info
				curr_content.presence = new_content.presence;
				curr_content.status_msg = new_content
					.status_msg
					.or_else(|| curr_content.status_msg.take());
				curr_content.last_active_ago = new_content.last_active_ago.or(curr_content.last_active_ago);
				curr_content.displayname = new_content
					.displayname
					.or_else(|| curr_content.displayname.take());
				curr_content.avatar_url = new_content
					.avatar_url
					.or_else(|| curr_content.avatar_url.take());
				curr_content.currently_active = new_content
					.currently_active
					.or(curr_content.currently_active);
			},
		}
	}

	Ok(())
}

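/// Builds the `JoinedRoom` entry for one room: timeline PDUs since `since`,
/// full or delta state (with lazy-loaded members), heroes and member counts,
/// notification counts, and device list updates implied by membership and
/// encryption changes.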
#[allow(clippy::too_many_arguments)]
async fn load_joined_room(
	services: &Services, sender_user: &UserId, sender_device: &DeviceId, room_id: &RoomId, since: u64,
	sincecount: PduCount, next_batch: u64, next_batchcount: PduCount, lazy_load_enabled: bool,
	lazy_load_send_redundant: bool, full_state: bool, device_list_updates: &mut HashSet<OwnedUserId>,
	left_encrypted_users: &mut HashSet<OwnedUserId>,
) -> Result<JoinedRoom> {
	// Get and drop the lock to wait for remaining operations to finish
	// This will make sure that we have all events until next_batch
	let insert_lock = services.rooms.timeline.mutex_insert.lock(room_id).await;
	drop(insert_lock);
	let (timeline_pdus, limited) = load_timeline(services, sender_user, room_id, sincecount, 10)?;
	let send_notification_counts = !timeline_pdus.is_empty()
		|| services
			.rooms
			.user
			.last_notification_read(sender_user, room_id)?
			> since;

	let mut timeline_users = HashSet::new();
	for (_, event) in &timeline_pdus {
		timeline_users.insert(event.sender.as_str().to_owned());
	}

	services
		.rooms
		.lazy_loading
		.lazy_load_confirm_delivery(sender_user, sender_device, room_id, sincecount)
		.await?;

	// Database queries:

	let Some(current_shortstatehash) = services.rooms.state.get_room_shortstatehash(room_id)? else {
		return Err!(Database(error!("Room {room_id} has no state")));
	};

	let since_shortstatehash = services
		.rooms
		.user
		.get_token_shortstatehash(room_id, since)?;
	let (heroes, joined_member_count, invited_member_count, joined_since_last_sync, state_events) =
		if timeline_pdus.is_empty() && since_shortstatehash == Some(current_shortstatehash) {
			// No state changes
			(Vec::new(), None, None, false, Vec::new())
		} else {
			// Calculates joined_member_count, invited_member_count and heroes
			let calculate_counts = || {
				let joined_member_count = services
					.rooms
					.state_cache
					.room_joined_count(room_id)?
					.unwrap_or(0);
				let invited_member_count = services
					.rooms
					.state_cache
					.room_invited_count(room_id)?
					.unwrap_or(0);
				// Recalculate heroes (first 5 members)
				let mut heroes: Vec<OwnedUserId> = Vec::with_capacity(5);
				if joined_member_count.saturating_add(invited_member_count) <= 5 {
					// Go through all PDUs and for each member event, check if the user is still
					// joined or invited until we have 5 or we reach the end
					for hero in services
					.rooms
					.timeline
					.all_pdus(sender_user, room_id)?
					.filter_map(Result::ok) // Ignore all broken pdus
					.filter(|(_, pdu)| pdu.kind == TimelineEventType::RoomMember)
					.map(|(_, pdu)| {
						let content: RoomMemberEventContent = serde_json::from_str(pdu.content.get())
							.map_err(|_| Error::bad_database("Invalid member event in database."))?;

						if let Some(state_key) = &pdu.state_key {
							let user_id = UserId::parse(state_key.clone())
								.map_err(|_| Error::bad_database("Invalid UserId in member PDU."))?;

							// The membership was and still is invite or join
							if matches!(content.membership, MembershipState::Join | MembershipState::Invite)
								&& (services.rooms.state_cache.is_joined(&user_id, room_id)?
									|| services.rooms.state_cache.is_invited(&user_id, room_id)?)
							{
								Ok::<_, Error>(Some(user_id))
							} else {
								Ok(None)
							}
						} else {
							Ok(None)
						}
					})
					.filter_map(Result::ok)
					// Filter for possible heroes
					.flatten()
					{
						if heroes.contains(&hero) || hero == sender_user {
							continue;
						}
						heroes.push(hero);
					}
				}

				Ok::<_, Error>((Some(joined_member_count), Some(invited_member_count), heroes))
			};
			let since_sender_member: Option<RoomMemberEventContent> = since_shortstatehash
				.and_then(|shortstatehash| {
					services
						.rooms
						.state_accessor
						.state_get(shortstatehash, &StateEventType::RoomMember, sender_user.as_str())
						.transpose()
				})
				.transpose()?
				.and_then(|pdu| {
					serde_json::from_str(pdu.content.get())
						.map_err(|_| Error::bad_database("Invalid PDU in database."))
						.ok()
				});
			let joined_since_last_sync =
				since_sender_member.map_or(true, |member| member.membership != MembershipState::Join);
			if since_shortstatehash.is_none() || joined_since_last_sync {
				// Probably since = 0, we will do an initial sync
				let (joined_member_count, invited_member_count, heroes) = calculate_counts()?;
				let current_state_ids = services
					.rooms
					.state_accessor
					.state_full_ids(current_shortstatehash)
					.await?;
				let mut state_events = Vec::new();
				let mut lazy_loaded = HashSet::new();
				let mut i: usize = 0;
				for (shortstatekey, id) in current_state_ids {
					let (event_type, state_key) = services
						.rooms
						.short
						.get_statekey_from_short(shortstatekey)?;
					if event_type != StateEventType::RoomMember {
						let Some(pdu) = services.rooms.timeline.get_pdu(&id)? else {
							error!("Pdu in state not found: {}", id);
							continue;
						};
						state_events.push(pdu);
						i = i.wrapping_add(1);
						if i % 100 == 0 {
							tokio::task::yield_now().await;
						}
					} else if !lazy_load_enabled
                || full_state
                || timeline_users.contains(&state_key)
                // TODO: Delete the following line when this is resolved: https://github.com/vector-im/element-web/issues/22565
                || (cfg!(feature = "element_hacks") && *sender_user == state_key)
					{
						let Some(pdu) = services.rooms.timeline.get_pdu(&id)? else {
							error!("Pdu in state not found: {}", id);
							continue;
						};
						// This check is in case a bad user ID made it into the database
						if let Ok(uid) = UserId::parse(&state_key) {
							lazy_loaded.insert(uid);
						}
						state_events.push(pdu);
						i = i.wrapping_add(1);
						if i % 100 == 0 {
							tokio::task::yield_now().await;
						}
					}
				}

				// Reset lazy loading because this is an initial sync
				services
					.rooms
					.lazy_loading
					.lazy_load_reset(sender_user, sender_device, room_id)?;
				// The state_events above should contain all timeline_users, let's mark them as
				// lazy loaded.
				services
					.rooms
					.lazy_loading
					.lazy_load_mark_sent(sender_user, sender_device, room_id, lazy_loaded, next_batchcount)
					.await;
				(heroes, joined_member_count, invited_member_count, true, state_events)
			} else {
				// Incremental /sync
				let since_shortstatehash = since_shortstatehash.unwrap();
				let mut delta_state_events = Vec::new();
				if since_shortstatehash != current_shortstatehash {
					let current_state_ids = services
						.rooms
						.state_accessor
						.state_full_ids(current_shortstatehash)
						.await?;
					let since_state_ids = services
						.rooms
						.state_accessor
						.state_full_ids(since_shortstatehash)
						.await?;
					for (key, id) in current_state_ids {
						if full_state || since_state_ids.get(&key) != Some(&id) {
							let Some(pdu) = services.rooms.timeline.get_pdu(&id)? else {
								error!("Pdu in state not found: {}", id);
								continue;
							};

							delta_state_events.push(pdu);
							tokio::task::yield_now().await;
						}
					}
				}

				let encrypted_room = services
					.rooms
					.state_accessor
					.state_get(current_shortstatehash, &StateEventType::RoomEncryption, "")?
					.is_some();
				let since_encryption = services.rooms.state_accessor.state_get(
					since_shortstatehash,
					&StateEventType::RoomEncryption,
					"",
				)?;
				// Calculations:
				let new_encrypted_room = encrypted_room && since_encryption.is_none();
				let send_member_count = delta_state_events
					.iter()
					.any(|event| event.kind == TimelineEventType::RoomMember);
				if encrypted_room {
					for state_event in &delta_state_events {
						if state_event.kind != TimelineEventType::RoomMember {
							continue;
						}

						if let Some(state_key) = &state_event.state_key {
							let user_id = UserId::parse(state_key.clone())
								.map_err(|_| Error::bad_database("Invalid UserId in member PDU."))?;
							if user_id == sender_user {
								continue;
							}

							let new_membership =
								serde_json::from_str::<RoomMemberEventContent>(state_event.content.get())
									.map_err(|_| Error::bad_database("Invalid PDU in database."))?
									.membership;

							match new_membership {
								MembershipState::Join => {
									// A new user joined an encrypted room
									if !share_encrypted_room(services, sender_user, &user_id, room_id)? {
										device_list_updates.insert(user_id);
									}
								},
								MembershipState::Leave => {
									// Write down users that have left encrypted rooms we are in
									left_encrypted_users.insert(user_id);
								},
								_ => {},
							}
						}
					}
				}

				if joined_since_last_sync && encrypted_room || new_encrypted_room {
					// If the user is in a new encrypted room, give them all joined users
					device_list_updates.extend(
						services
							.rooms
							.state_cache
							.room_members(room_id)
							.flatten()
							.filter(|user_id| {
								// Don't send key updates from the sender to the sender
								sender_user != user_id
							})
							.filter(|user_id| {
								// Only send keys if the sender doesn't share an encrypted room with the target
								// already
								!share_encrypted_room(services, sender_user, user_id, room_id).unwrap_or(false)
							}),
					);
				}
				let (joined_member_count, invited_member_count, heroes) = if send_member_count {
					calculate_counts()?
				} else {
					(None, None, Vec::new())
				};

				let mut state_events = delta_state_events;
				let mut lazy_loaded = HashSet::new();

				// Mark all member events we're returning as lazy-loaded
				for pdu in &state_events {
					if pdu.kind == TimelineEventType::RoomMember {
						match UserId::parse(
							pdu.state_key
								.as_ref()
								.expect("State event has state key")
								.clone(),
						) {
							Ok(state_key_userid) => {
								lazy_loaded.insert(state_key_userid);
							},
							Err(e) => error!("Invalid state key for member event: {}", e),
						}
					}
				}

				// Fetch contextual member state events for events from the timeline, and
				// mark them as lazy-loaded as well.
				for (_, event) in &timeline_pdus {
					if lazy_loaded.contains(&event.sender) {
						continue;
					}

					if !services.rooms.lazy_loading.lazy_load_was_sent_before(
						sender_user,
						sender_device,
						room_id,
						&event.sender,
					)? || lazy_load_send_redundant
					{
						if let Some(member_event) = services.rooms.state_accessor.room_state_get(
							room_id,
							&StateEventType::RoomMember,
							event.sender.as_str(),
						)? {
							lazy_loaded.insert(event.sender.clone());
							state_events.push(member_event);
						}
					}
				}

				services
					.rooms
					.lazy_loading
					.lazy_load_mark_sent(sender_user, sender_device, room_id, lazy_loaded, next_batchcount)
					.await;

				(
					heroes,
					joined_member_count,
					invited_member_count,
					joined_since_last_sync,
					state_events,
				)
			}
		};

	// Look for device list updates in this room
	device_list_updates.extend(
		services
			.users
			.keys_changed(room_id.as_ref(), since, None)
			.filter_map(Result::ok),
	);

	let notification_count = if send_notification_counts {
		Some(
			services
				.rooms
				.user
				.notification_count(sender_user, room_id)?
				.try_into()
				.expect("notification count can't go that high"),
		)
	} else {
		None
	};

	let highlight_count = if send_notification_counts {
		Some(
			services
				.rooms
				.user
				.highlight_count(sender_user, room_id)?
				.try_into()
				.expect("highlight count can't go that high"),
		)
	} else {
		None
	};

	let prev_batch = timeline_pdus
		.first()
		.map_or(Ok::<_, Error>(None), |(pdu_count, _)| {
			Ok(Some(match pdu_count {
				PduCount::Backfilled(_) => {
					error!("timeline in backfill state?!");
					"0".to_owned()
				},
				PduCount::Normal(c) => c.to_string(),
			}))
		})?;
	let room_events: Vec<_> = timeline_pdus
		.iter()
		.map(|(_, pdu)| pdu.to_sync_room_event())
		.collect();
	let mut edus: Vec<_> = services
		.rooms
		.read_receipt
		.readreceipts_since(room_id, since)
		.filter_map(Result::ok) // Filter out buggy events
		.map(|(_, _, v)| v)
		.collect();

	if services.rooms.typing.last_typing_update(room_id).await? > since {
		edus.push(
			serde_json::from_str(
				&serde_json::to_string(&services.rooms.typing.typings_all(room_id).await?)
					.expect("event is valid, we just created it"),
			)
			.expect("event is valid, we just created it"),
		);
	}

	// Save the state after this sync so we can send the correct state diff next
	// sync
	services
		.rooms
		.user
		.associate_token_shortstatehash(room_id, next_batch, current_shortstatehash)?;

	Ok(JoinedRoom {
		account_data: RoomAccountData {
			events: services
				.account_data
				.changes_since(Some(room_id), sender_user, since)?
				.into_iter()
				.filter_map(|e| extract_variant!(e, AnyRawAccountDataEvent::Room))
				.collect(),
		},
		summary: RoomSummary {
			heroes,
			joined_member_count: joined_member_count.map(ruma_from_u64),
			invited_member_count: invited_member_count.map(ruma_from_u64),
		},
		unread_notifications: UnreadNotificationsCount {
			highlight_count,
			notification_count,
		},
		timeline: Timeline {
			limited: limited || joined_since_last_sync,
			prev_batch,
			events: room_events,
		},
		state: RoomState {
			events: state_events
				.iter()
				.map(|pdu| pdu.to_sync_state_event())
				.collect(),
		},
		ephemeral: Ephemeral {
			events: edus,
		},
		unread_thread_notifications: BTreeMap::new(),
	})
}

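/// Loads up to `limit` of the newest timeline PDUs after `roomsincecount`,
/// returned in chronological order, plus whether older events were cut off
/// (`limited`).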
fn load_timeline(
	services: &Services, sender_user: &UserId, room_id: &RoomId, roomsincecount: PduCount, limit: u64,
) -> Result<(Vec<(PduCount, PduEvent)>, bool), Error> {
	let timeline_pdus;
	let limited = if services
		.rooms
		.timeline
		.last_timeline_count(sender_user, room_id)?
		> roomsincecount
	{
		let mut non_timeline_pdus = services
			.rooms
			.timeline
			.pdus_until(sender_user, room_id, PduCount::max())?
			.filter_map(|r| {
				// Filter out buggy events
				if r.is_err() {
					error!("Bad pdu in pdus_since: {:?}", r);
				}
				r.ok()
			})
			.take_while(|(pducount, _)| pducount > &roomsincecount);

		// Take the last events for the timeline
		timeline_pdus = non_timeline_pdus
			.by_ref()
			.take(usize_from_u64_truncated(limit))
			.collect::<Vec<_>>()
			.into_iter()
			.rev()
			.collect::<Vec<_>>();

		// The /sync response doesn't always return all messages, so we mark the
		// output as limited when there are events remaining in non_timeline_pdus
		non_timeline_pdus.next().is_some()
	} else {
		timeline_pdus = Vec::new();
		false
	};
	Ok((timeline_pdus, limited))
}

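/// Returns true when `sender_user` and `user_id` share at least one encrypted
/// room other than `ignore_room`.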
fn share_encrypted_room(
	services: &Services, sender_user: &UserId, user_id: &UserId, ignore_room: &RoomId,
) -> Result<bool> {
	Ok(services
		.rooms
		.user
		.get_shared_rooms(vec![sender_user.to_owned(), user_id.to_owned()])?
		.filter(|room_id| room_id != ignore_room)
		.filter_map(|other_room_id| {
			Some(
				services
					.rooms
					.state_accessor
					.room_state_get(&other_room_id, &StateEventType::RoomEncryption, "")
					.ok()?
					.is_some(),
			)
		})
		.any(|encrypted| encrypted))
}

/// POST `/_matrix/client/unstable/org.matrix.msc3575/sync`
///
/// Sliding Sync endpoint (future endpoint: `/_matrix/client/v4/sync`)
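///
/// An illustrative request body sketch (field names follow MSC3575; the list
/// name and values here are hypothetical):
///
/// ```json
/// {
///   "lists": {
///     "all_rooms": {
///       "ranges": [[0, 9]],
///       "required_state": [["m.room.name", ""]],
///       "timeline_limit": 10
///     }
///   }
/// }
/// ```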
pub(crate) async fn sync_events_v4_route(
	State(services): State<crate::State>, body: Ruma<sync_events::v4::Request>,
) -> Result<sync_events::v4::Response> {
	let sender_user = body.sender_user.expect("user is authenticated");
	let sender_device = body.sender_device.expect("user is authenticated");
	let mut body = body.body;

	// Set up watchers so that, if there's no response, we can wait for them
	let watcher = services.globals.watch(&sender_user, &sender_device);
	let next_batch = services.globals.next_count()?;
	let conn_id = body
		.conn_id
		.clone()
		.unwrap_or_else(|| SINGLE_CONNECTION_SYNC.to_owned());

	let globalsince = body
		.pos
		.as_ref()
		.and_then(|string| string.parse().ok())
		.unwrap_or(0);
	if globalsince != 0
		&& !services
			.users
			.remembered(sender_user.clone(), sender_device.clone(), conn_id.clone())
	{
		debug!("Restarting sync stream because it was gone from the database");
		return Err(Error::Request(
			ErrorKind::UnknownPos,
			"Connection data lost since last time".into(),
			http::StatusCode::BAD_REQUEST,
		));
	}

	if globalsince == 0 {
		services
			.users
			.forget_sync_request_connection(sender_user.clone(), sender_device.clone(), conn_id.clone());
	}

	// Get sticky parameters from cache
	let known_rooms =
		services
			.users
			.update_sync_request_with_cache(sender_user.clone(), sender_device.clone(), &mut body);
	let all_joined_rooms = services
		.rooms
		.state_cache
		.rooms_joined(&sender_user)
		.filter_map(Result::ok)
		.collect::<Vec<_>>();
	let all_invited_rooms = services
		.rooms
		.state_cache
		.rooms_invited(&sender_user)
		.filter_map(Result::ok)
		.map(|r| r.0)
		.collect::<Vec<_>>();

	let all_rooms = all_joined_rooms
		.iter()
		.cloned()
		.chain(all_invited_rooms.clone())
		.collect::<Vec<OwnedRoomId>>();

	if body.extensions.to_device.enabled.unwrap_or(false) {
		services
			.users
			.remove_to_device_events(&sender_user, &sender_device, globalsince)?;
	}

	let mut left_encrypted_users = HashSet::new(); // Users that have left any encrypted rooms the sender was in
	let mut device_list_changes = HashSet::new();
	let mut device_list_left = HashSet::new();

	let mut account_data = sync_events::v4::AccountData {
		global: Vec::new(),
		rooms: BTreeMap::new(),
	};
	if body.extensions.account_data.enabled.unwrap_or(false) {
		account_data.global = services
			.account_data
			.changes_since(None, &sender_user, globalsince)?
			.into_iter()
			.filter_map(|e| extract_variant!(e, AnyRawAccountDataEvent::Global))
			.collect();

		if let Some(rooms) = body.extensions.account_data.rooms {
			for room in rooms {
				account_data.rooms.insert(
					room.clone(),
					services
						.account_data
						.changes_since(Some(&room), &sender_user, globalsince)?
						.into_iter()
						.filter_map(|e| extract_variant!(e, AnyRawAccountDataEvent::Room))
						.collect(),
				);
			}
		}
	}

	if body.extensions.e2ee.enabled.unwrap_or(false) {
		// Look for device list updates of this account
		device_list_changes.extend(
			services
				.users
				.keys_changed(sender_user.as_ref(), globalsince, None)
				.filter_map(Result::ok),
		);

		for room_id in &all_joined_rooms {
			let Some(current_shortstatehash) = services.rooms.state.get_room_shortstatehash(room_id)? else {
				error!("Room {} has no state", room_id);
				continue;
			};

			let since_shortstatehash = services
				.rooms
				.user
				.get_token_shortstatehash(room_id, globalsince)?;

			let since_sender_member: Option<RoomMemberEventContent> = since_shortstatehash
				.and_then(|shortstatehash| {
					services
						.rooms
						.state_accessor
						.state_get(shortstatehash, &StateEventType::RoomMember, sender_user.as_str())
						.transpose()
				})
				.transpose()?
				.and_then(|pdu| {
					serde_json::from_str(pdu.content.get())
						.map_err(|_| Error::bad_database("Invalid PDU in database."))
						.ok()
				});

			let encrypted_room = services
				.rooms
				.state_accessor
				.state_get(current_shortstatehash, &StateEventType::RoomEncryption, "")?
				.is_some();

			if let Some(since_shortstatehash) = since_shortstatehash {
				// Skip if there are only timeline changes
				if since_shortstatehash == current_shortstatehash {
					continue;
				}

				let since_encryption = services.rooms.state_accessor.state_get(
					since_shortstatehash,
					&StateEventType::RoomEncryption,
					"",
				)?;

				let joined_since_last_sync =
					since_sender_member.map_or(true, |member| member.membership != MembershipState::Join);

				let new_encrypted_room = encrypted_room && since_encryption.is_none();
				if encrypted_room {
					let current_state_ids = services
						.rooms
						.state_accessor
						.state_full_ids(current_shortstatehash)
						.await?;
					let since_state_ids = services
						.rooms
						.state_accessor
						.state_full_ids(since_shortstatehash)
						.await?;

					for (key, id) in current_state_ids {
						if since_state_ids.get(&key) != Some(&id) {
							let Some(pdu) = services.rooms.timeline.get_pdu(&id)? else {
								error!("Pdu in state not found: {}", id);
								continue;
							};
							if pdu.kind == TimelineEventType::RoomMember {
								if let Some(state_key) = &pdu.state_key {
									let user_id = UserId::parse(state_key.clone())
										.map_err(|_| Error::bad_database("Invalid UserId in member PDU."))?;

									if user_id == sender_user {
										continue;
									}

									let new_membership =
										serde_json::from_str::<RoomMemberEventContent>(pdu.content.get())
											.map_err(|_| Error::bad_database("Invalid PDU in database."))?
											.membership;

									match new_membership {
										MembershipState::Join => {
											// A new user joined an encrypted room
											if !share_encrypted_room(&services, &sender_user, &user_id, room_id)? {
												device_list_changes.insert(user_id);
											}
										},
										MembershipState::Leave => {
											// Write down users that have left encrypted rooms we are in
											left_encrypted_users.insert(user_id);
										},
										_ => {},
									}
								}
							}
						}
					}
					if joined_since_last_sync || new_encrypted_room {
						// If the user is in a new encrypted room, give them all joined users
						device_list_changes.extend(
							services
								.rooms
								.state_cache
								.room_members(room_id)
								.flatten()
								.filter(|user_id| {
									// Don't send key updates from the sender to the sender
									&sender_user != user_id
								})
								.filter(|user_id| {
									// Only send keys if the sender doesn't share an encrypted room with the target
									// already
									!share_encrypted_room(&services, &sender_user, user_id, room_id).unwrap_or(false)
								}),
						);
					}
				}
			}
			// Look for device list updates in this room
			device_list_changes.extend(
				services
					.users
					.keys_changed(room_id.as_ref(), globalsince, None)
					.filter_map(Result::ok),
			);
		}
		for user_id in left_encrypted_users {
			let dont_share_encrypted_room = services
				.rooms
				.user
				.get_shared_rooms(vec![sender_user.clone(), user_id.clone()])?
				.filter_map(|other_room_id| {
					Some(
						services
							.rooms
							.state_accessor
							.room_state_get(&other_room_id, &StateEventType::RoomEncryption, "")
							.ok()?
							.is_some(),
					)
				})
				.all(|encrypted| !encrypted);
			// If the user doesn't share an encrypted room with the target anymore, we need
			// to tell them
			if dont_share_encrypted_room {
				device_list_left.insert(user_id);
			}
		}
	}

	let mut lists = BTreeMap::new();
	let mut todo_rooms = BTreeMap::new(); // and required state

	for (list_id, list) in body.lists {
		let active_rooms = match list.filters.clone().and_then(|f| f.is_invite) {
			Some(true) => &all_invited_rooms,
			Some(false) => &all_joined_rooms,
			None => &all_rooms,
		};

		let active_rooms = match list.filters.clone().map(|f| f.not_room_types) {
			Some(filter) if filter.is_empty() => active_rooms.clone(),
			Some(value) => filter_rooms(active_rooms, State(services), &value, true),
			None => active_rooms.clone(),
		};

		let active_rooms = match list.filters.clone().map(|f| f.room_types) {
			Some(filter) if filter.is_empty() => active_rooms.clone(),
			Some(value) => filter_rooms(&active_rooms, State(services), &value, false),
			None => active_rooms,
		};

		let mut new_known_rooms = BTreeSet::new();

		lists.insert(
			list_id.clone(),
			sync_events::v4::SyncList {
				ops: list
					.ranges
					.into_iter()
					.map(|mut r| {
						r.0 = r.0.clamp(
							uint!(0),
							UInt::try_from(active_rooms.len().saturating_sub(1)).unwrap_or(UInt::MAX),
						);
						r.1 =
							r.1.clamp(r.0, UInt::try_from(active_rooms.len().saturating_sub(1)).unwrap_or(UInt::MAX));

						let room_ids = if !active_rooms.is_empty() {
							active_rooms[usize_from_ruma(r.0)..=usize_from_ruma(r.1)].to_vec()
						} else {
							Vec::new()
						};

						new_known_rooms.extend(room_ids.iter().cloned());
						for room_id in &room_ids {
							let todo_room = todo_rooms
								.entry(room_id.clone())
								.or_insert((BTreeSet::new(), 0, u64::MAX));
							let limit = list
								.room_details
								.timeline_limit
								.map_or(10, u64::from)
								.min(100);
							todo_room
								.0
								.extend(list.room_details.required_state.iter().cloned());
							todo_room.1 = todo_room.1.max(limit);
							// 0 means unknown because it got out of date
							todo_room.2 = todo_room.2.min(
								known_rooms
									.get(&list_id)
									.and_then(|k| k.get(room_id))
									.copied()
									.unwrap_or(0),
							);
						}
						sync_events::v4::SyncOp {
							op: SlidingOp::Sync,
							range: Some(r),
							index: None,
							room_ids,
							room_id: None,
						}
					})
					.collect(),
				count: ruma_from_usize(active_rooms.len()),
			},
		);

		if let Some(conn_id) = &body.conn_id {
			services.users.update_sync_known_rooms(
				sender_user.clone(),
				sender_device.clone(),
				conn_id.clone(),
				list_id,
				new_known_rooms,
				globalsince,
			);
		}
	}

	let mut known_subscription_rooms = BTreeSet::new();
	for (room_id, room) in &body.room_subscriptions {
		if !services.rooms.metadata.exists(room_id)? {
			continue;
		}

		let todo_room = todo_rooms
			.entry(room_id.clone())
			.or_insert((BTreeSet::new(), 0, u64::MAX));
		let limit = room.timeline_limit.map_or(10, u64::from).min(100);
		todo_room.0.extend(room.required_state.iter().cloned());
		todo_room.1 = todo_room.1.max(limit);
		// 0 means unknown because it got out of date
		todo_room.2 = todo_room.2.min(
			known_rooms
				.get("subscriptions")
				.and_then(|k| k.get(room_id))
				.copied()
				.unwrap_or(0),
		);
		known_subscription_rooms.insert(room_id.clone());
	}

	for r in body.unsubscribe_rooms {
		known_subscription_rooms.remove(&r);
		body.room_subscriptions.remove(&r);
	}

	if let Some(conn_id) = &body.conn_id {
		services.users.update_sync_known_rooms(
			sender_user.clone(),
			sender_device.clone(),
			conn_id.clone(),
			"subscriptions".to_owned(),
			known_subscription_rooms,
			globalsince,
		);
	}

	if let Some(conn_id) = &body.conn_id {
		services.users.update_sync_subscriptions(
			sender_user.clone(),
			sender_device.clone(),
			conn_id.clone(),
			body.room_subscriptions,
		);
	}

	let mut rooms = BTreeMap::new();
	for (room_id, (required_state_request, timeline_limit, roomsince)) in &todo_rooms {
		let roomsincecount = PduCount::Normal(*roomsince);

		let (timeline_pdus, limited) =
			load_timeline(&services, &sender_user, room_id, roomsincecount, *timeline_limit)?;
		account_data.rooms.insert(
			room_id.clone(),
			services
				.account_data
				.changes_since(Some(room_id), &sender_user, *roomsince)?
				.into_iter()
				.filter_map(|e| extract_variant!(e, AnyRawAccountDataEvent::Room))
				.collect(),
		);

		if roomsince != &0 && timeline_pdus.is_empty() && account_data.rooms.get(room_id).is_some_and(Vec::is_empty) {
			continue;
		}

		let prev_batch = timeline_pdus
			.first()
			.map_or(Ok::<_, Error>(None), |(pdu_count, _)| {
				Ok(Some(match pdu_count {
					PduCount::Backfilled(_) => {
						error!("timeline in backfill state?!");
						"0".to_owned()
					},
					PduCount::Normal(c) => c.to_string(),
				}))
			})?
			.or_else(|| {
				if roomsince != &0 {
					Some(roomsince.to_string())
				} else {
					None
				}
			});

		let room_events: Vec<_> = timeline_pdus
			.iter()
			.map(|(_, pdu)| pdu.to_sync_room_event())
			.collect();
		let invite_state = if all_invited_rooms.contains(room_id) {
			services
				.rooms
				.state_cache
				.invite_state(&sender_user, room_id)
				.unwrap_or(None)
		} else {
			None
		};

		let mut timestamp: Option<_> = None;
		for (_, pdu) in timeline_pdus {
			timestamp = Some(MilliSecondsSinceUnixEpoch(pdu.origin_server_ts));
			if DEFAULT_BUMP_TYPES.contains(pdu.event_type()) {
				break;
			}
		}

		let required_state = required_state_request
			.iter()
			.map(|state| {
				services
					.rooms
					.state_accessor
					.room_state_get(room_id, &state.0, &state.1)
			})
			.filter_map(Result::ok)
			.flatten()
			.map(|state| state.to_sync_state_event())
			.collect();

		// Heroes
		let heroes = services
			.rooms
			.state_cache
			.room_members(room_id)
			.filter_map(Result::ok)
			.filter(|member| member != &sender_user)
			.map(|member| {
				Ok::<_, Error>(
					services
						.rooms
						.state_accessor
						.get_member(room_id, &member)?
						.map(|memberevent| SlidingSyncRoomHero {
							user_id: member,
							name: memberevent.displayname,
							avatar: memberevent.avatar_url,
						}),
				)
			})
			.filter_map(Result::ok)
			.flatten()
			.take(5)
			.collect::<Vec<_>>();
		let name = match heroes.len().cmp(&(1_usize)) {
			Ordering::Greater => {
				let firsts = heroes[1..]
					.iter()
					.map(|h| h.name.clone().unwrap_or_else(|| h.user_id.to_string()))
					.collect::<Vec<_>>()
					.join(", ");
				let last = heroes[0]
					.name
					.clone()
					.unwrap_or_else(|| heroes[0].user_id.to_string());
				Some(format!("{firsts} and {last}"))
			},
			Ordering::Equal => Some(
				heroes[0]
					.name
					.clone()
					.unwrap_or_else(|| heroes[0].user_id.to_string()),
			),
			Ordering::Less => None,
		};

		let heroes_avatar = if heroes.len() == 1 {
			heroes[0].avatar.clone()
		} else {
			None
		};

		rooms.insert(
			room_id.clone(),
			sync_events::v4::SlidingSyncRoom {
				name: services.rooms.state_accessor.get_name(room_id)?.or(name),
				avatar: if let Some(heroes_avatar) = heroes_avatar {
					ruma::JsOption::Some(heroes_avatar)
				} else {
					match services.rooms.state_accessor.get_avatar(room_id)? {
						ruma::JsOption::Some(avatar) => ruma::JsOption::from_option(avatar.url),
						ruma::JsOption::Null => ruma::JsOption::Null,
						ruma::JsOption::Undefined => ruma::JsOption::Undefined,
					}
				},
				initial: Some(roomsince == &0),
				is_dm: None,
				invite_state,
				unread_notifications: UnreadNotificationsCount {
					highlight_count: Some(
						services
							.rooms
							.user
							.highlight_count(&sender_user, room_id)?
							.try_into()
							.expect("notification count can't go that high"),
					),
					notification_count: Some(
						services
							.rooms
							.user
							.notification_count(&sender_user, room_id)?
							.try_into()
							.expect("notification count can't go that high"),
					),
				},
				timeline: room_events,
				required_state,
				prev_batch,
				limited,
				joined_count: Some(
					services
						.rooms
						.state_cache
						.room_joined_count(room_id)?
						.unwrap_or(0)
						.try_into()
						.unwrap_or_else(|_| uint!(0)),
				),
				invited_count: Some(
					services
						.rooms
						.state_cache
						.room_invited_count(room_id)?
						.unwrap_or(0)
						.try_into()
						.unwrap_or_else(|_| uint!(0)),
				),
				num_live: None, // Count events in timeline greater than global sync counter
				heroes: Some(heroes),
			},
		);
	}

	if rooms
		.iter()
		.all(|(_, r)| r.timeline.is_empty() && r.required_state.is_empty())
	{
		// Hang a few seconds so requests are not spammed
		// Stop hanging if new info arrives
		let default = Duration::from_secs(30);
		let duration = cmp::min(body.timeout.unwrap_or(default), default);
		_ = tokio::time::timeout(duration, watcher).await;
	}

	Ok(sync_events::v4::Response {
		initial: globalsince == 0,
		txn_id: body.txn_id.clone(),
		pos: next_batch.to_string(),
		lists,
		rooms,
		extensions: sync_events::v4::Extensions {
			to_device: if body.extensions.to_device.enabled.unwrap_or(false) {
				Some(sync_events::v4::ToDevice {
					events: services
						.users
						.get_to_device_events(&sender_user, &sender_device)?,
					next_batch: next_batch.to_string(),
				})
			} else {
				None
			},
			e2ee: sync_events::v4::E2EE {
				device_lists: DeviceLists {
					changed: device_list_changes.into_iter().collect(),
					left: device_list_left.into_iter().collect(),
				},
				device_one_time_keys_count: services
					.users
					.count_one_time_keys(&sender_user, &sender_device)?,
				// Fallback keys are not yet supported
				device_unused_fallback_key_types: None,
			},
			account_data,
			receipts: sync_events::v4::Receipts {
				rooms: BTreeMap::new(),
			},
			typing: sync_events::v4::Typing {
				rooms: BTreeMap::new(),
			},
		},
		delta_token: None,
	})
}

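/// Filters `rooms` by room type: with `negate` the filter acts as an
/// exclusion list, otherwise as an inclusion list; a `None` entry matches
/// rooms that have no room type set.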
fn filter_rooms(
	rooms: &[OwnedRoomId], State(services): State<crate::State>, filter: &[Option<RoomType>], negate: bool,
) -> Vec<OwnedRoomId> {
	rooms
		.iter()
		.filter(|r| {
			match services.rooms.state_accessor.get_room_type(r) {
				Err(e) => {
					warn!("Requested room type for {}, but could not retrieve it: {}", r, e);
					false
				},
				Ok(None) => {
					// For rooms which do not have a room type, use 'null' to include them
					if negate {
						!filter.contains(&None)
					} else {
						filter.contains(&None)
					}
				},
				Ok(Some(room_type)) => {
					if negate {
						!filter.contains(&Some(room_type))
					} else {
						filter.is_empty() || filter.contains(&Some(room_type))
					}
				},
			}
		})
		.cloned()
		.collect()
}