Merge branch 'main' into feat-ui-roomlist

This commit is contained in:
Ivan Enderlin 2023-06-05 19:22:33 +02:00
commit 08559b58b6
No known key found for this signature in database
63 changed files with 1870 additions and 5263 deletions

158
Cargo.lock generated
View File

@ -538,7 +538,6 @@ dependencies = [
"matrix-sdk",
"matrix-sdk-base",
"matrix-sdk-crypto",
"matrix-sdk-sled",
"matrix-sdk-sqlite",
"matrix-sdk-test",
"pprof",
@ -1114,16 +1113,6 @@ dependencies = [
"typenum",
]
[[package]]
name = "ctor"
version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
dependencies = [
"quote",
"syn 1.0.109",
]
[[package]]
name = "ctor"
version = "0.2.0"
@ -1167,7 +1156,7 @@ dependencies = [
"hashbrown 0.12.3",
"lock_api",
"once_cell",
"parking_lot_core 0.9.7",
"parking_lot_core",
]
[[package]]
@ -1646,16 +1635,6 @@ version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0845fa252299212f0389d64ba26f34fa32cfe41588355f21ed507c59a0f64541"
[[package]]
name = "fs2"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "fs_extra"
version = "1.3.0"
@ -1782,15 +1761,6 @@ dependencies = [
"slab",
]
[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
"byteorder",
]
[[package]]
name = "generic-array"
version = "0.14.7"
@ -2475,11 +2445,10 @@ dependencies = [
[[package]]
name = "log"
version = "0.4.17"
version = "0.4.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
checksum = "518ef76f2f87365916b142844c16d8fefd85039bc5699050210a7778ee1cd1de"
dependencies = [
"cfg-if",
"value-bag",
]
@ -2569,7 +2538,7 @@ dependencies = [
"backoff",
"bytes",
"bytesize",
"ctor 0.2.0",
"ctor",
"dashmap",
"dirs",
"event-listener",
@ -2644,7 +2613,7 @@ dependencies = [
"assign",
"async-trait",
"bitflags 2.3.0",
"ctor 0.2.0",
"ctor",
"dashmap",
"eyeball",
"futures-executor",
@ -2697,7 +2666,7 @@ dependencies = [
"byteorder",
"cbc",
"cfg-if",
"ctor 0.2.0",
"ctor",
"ctr",
"dashmap",
"eyeball",
@ -2791,7 +2760,6 @@ dependencies = [
"http",
"matrix-sdk-common",
"matrix-sdk-crypto",
"matrix-sdk-sled",
"matrix-sdk-sqlite",
"napi",
"napi-build",
@ -2874,7 +2842,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"assign",
"ctor 0.2.0",
"ctor",
"matrix-sdk",
"once_cell",
"tempfile",
@ -2895,38 +2863,13 @@ dependencies = [
"vodozemac",
]
[[package]]
name = "matrix-sdk-sled"
version = "0.2.0"
dependencies = [
"assert_matches",
"async-trait",
"fs_extra",
"futures-core",
"futures-util",
"glob",
"matrix-sdk-base",
"matrix-sdk-crypto",
"matrix-sdk-store-encryption",
"matrix-sdk-test",
"once_cell",
"ruma",
"serde",
"serde_json",
"sled",
"tempfile",
"thiserror",
"tokio",
"tracing",
]
[[package]]
name = "matrix-sdk-sqlite"
version = "0.1.0"
dependencies = [
"assert_matches",
"async-trait",
"ctor 0.2.0",
"ctor",
"deadpool-sqlite",
"glob",
"matrix-sdk-base",
@ -2999,7 +2942,7 @@ dependencies = [
"async-stream",
"async-trait",
"chrono",
"ctor 0.2.0",
"ctor",
"eyeball",
"eyeball-im",
"eyeball-im-util",
@ -3016,6 +2959,7 @@ dependencies = [
"ruma",
"serde",
"serde_json",
"stream_assert",
"thiserror",
"tokio",
"tracing",
@ -3138,7 +3082,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49ac8112fe5998579b22e29903c7b277fc7f91c7860c0236f35792caf8156e18"
dependencies = [
"bitflags 2.3.0",
"ctor 0.2.0",
"ctor",
"napi-derive",
"napi-sys",
"once_cell",
@ -3531,17 +3475,6 @@ version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e"
[[package]]
name = "parking_lot"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
dependencies = [
"instant",
"lock_api",
"parking_lot_core 0.8.6",
]
[[package]]
name = "parking_lot"
version = "0.12.1"
@ -3549,21 +3482,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
dependencies = [
"lock_api",
"parking_lot_core 0.9.7",
]
[[package]]
name = "parking_lot_core"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
dependencies = [
"cfg-if",
"instant",
"libc",
"redox_syscall 0.2.16",
"smallvec",
"winapi",
"parking_lot_core",
]
[[package]]
@ -3853,7 +3772,7 @@ dependencies = [
"log",
"nix",
"once_cell",
"parking_lot 0.12.1",
"parking_lot",
"smallvec",
"symbolic-demangle",
"tempfile",
@ -4320,7 +4239,7 @@ dependencies = [
[[package]]
name = "ruma"
version = "0.8.2"
source = "git+https://github.com/ruma/ruma?rev=854d8076eff1c5b6454c42e4309b1b070b815893#854d8076eff1c5b6454c42e4309b1b070b815893"
source = "git+https://github.com/bnjbvr/ruma?rev=97fc09cd81ab32a2f1f6178b4996b3855ada565d#97fc09cd81ab32a2f1f6178b4996b3855ada565d"
dependencies = [
"assign",
"js_int",
@ -4335,7 +4254,7 @@ dependencies = [
[[package]]
name = "ruma-appservice-api"
version = "0.8.1"
source = "git+https://github.com/ruma/ruma?rev=854d8076eff1c5b6454c42e4309b1b070b815893#854d8076eff1c5b6454c42e4309b1b070b815893"
source = "git+https://github.com/bnjbvr/ruma?rev=97fc09cd81ab32a2f1f6178b4996b3855ada565d#97fc09cd81ab32a2f1f6178b4996b3855ada565d"
dependencies = [
"js_int",
"ruma-common",
@ -4346,7 +4265,7 @@ dependencies = [
[[package]]
name = "ruma-client-api"
version = "0.16.2"
source = "git+https://github.com/ruma/ruma?rev=854d8076eff1c5b6454c42e4309b1b070b815893#854d8076eff1c5b6454c42e4309b1b070b815893"
source = "git+https://github.com/bnjbvr/ruma?rev=97fc09cd81ab32a2f1f6178b4996b3855ada565d#97fc09cd81ab32a2f1f6178b4996b3855ada565d"
dependencies = [
"assign",
"bytes",
@ -4363,7 +4282,7 @@ dependencies = [
[[package]]
name = "ruma-common"
version = "0.11.3"
source = "git+https://github.com/ruma/ruma?rev=854d8076eff1c5b6454c42e4309b1b070b815893#854d8076eff1c5b6454c42e4309b1b070b815893"
source = "git+https://github.com/bnjbvr/ruma?rev=97fc09cd81ab32a2f1f6178b4996b3855ada565d#97fc09cd81ab32a2f1f6178b4996b3855ada565d"
dependencies = [
"base64 0.21.0",
"bytes",
@ -4396,7 +4315,7 @@ dependencies = [
[[package]]
name = "ruma-federation-api"
version = "0.7.1"
source = "git+https://github.com/ruma/ruma?rev=854d8076eff1c5b6454c42e4309b1b070b815893#854d8076eff1c5b6454c42e4309b1b070b815893"
source = "git+https://github.com/bnjbvr/ruma?rev=97fc09cd81ab32a2f1f6178b4996b3855ada565d#97fc09cd81ab32a2f1f6178b4996b3855ada565d"
dependencies = [
"js_int",
"ruma-common",
@ -4407,7 +4326,7 @@ dependencies = [
[[package]]
name = "ruma-identifiers-validation"
version = "0.9.1"
source = "git+https://github.com/ruma/ruma?rev=854d8076eff1c5b6454c42e4309b1b070b815893#854d8076eff1c5b6454c42e4309b1b070b815893"
source = "git+https://github.com/bnjbvr/ruma?rev=97fc09cd81ab32a2f1f6178b4996b3855ada565d#97fc09cd81ab32a2f1f6178b4996b3855ada565d"
dependencies = [
"js_int",
"thiserror",
@ -4416,7 +4335,7 @@ dependencies = [
[[package]]
name = "ruma-macros"
version = "0.11.3"
source = "git+https://github.com/ruma/ruma?rev=854d8076eff1c5b6454c42e4309b1b070b815893#854d8076eff1c5b6454c42e4309b1b070b815893"
source = "git+https://github.com/bnjbvr/ruma?rev=97fc09cd81ab32a2f1f6178b4996b3855ada565d#97fc09cd81ab32a2f1f6178b4996b3855ada565d"
dependencies = [
"once_cell",
"proc-macro-crate",
@ -4431,7 +4350,7 @@ dependencies = [
[[package]]
name = "ruma-push-gateway-api"
version = "0.7.1"
source = "git+https://github.com/ruma/ruma?rev=854d8076eff1c5b6454c42e4309b1b070b815893#854d8076eff1c5b6454c42e4309b1b070b815893"
source = "git+https://github.com/bnjbvr/ruma?rev=97fc09cd81ab32a2f1f6178b4996b3855ada565d#97fc09cd81ab32a2f1f6178b4996b3855ada565d"
dependencies = [
"js_int",
"ruma-common",
@ -4809,22 +4728,6 @@ dependencies = [
"autocfg",
]
[[package]]
name = "sled"
version = "0.34.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f96b4737c2ce5987354855aed3797279def4ebf734436c6aa4552cf8e169935"
dependencies = [
"crc32fast",
"crossbeam-epoch",
"crossbeam-utils",
"fs2",
"fxhash",
"libc",
"log",
"parking_lot 0.11.2",
]
[[package]]
name = "sliding-sync-integration-test"
version = "0.1.0"
@ -4889,6 +4792,15 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9091b6114800a5f2141aee1d1b9d6ca3592ac062dc5decb3764ec5895a47b4eb"
[[package]]
name = "stream_assert"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58491272dc90918dba713fd9e3556a67a09e067621143f3616754788286489f1"
dependencies = [
"futures-util",
]
[[package]]
name = "string_cache"
version = "0.8.7"
@ -4897,7 +4809,7 @@ checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b"
dependencies = [
"new_debug_unreachable",
"once_cell",
"parking_lot 0.12.1",
"parking_lot",
"phf_shared 0.10.0",
"precomputed-hash",
"serde",
@ -5703,13 +5615,9 @@ checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
[[package]]
name = "value-bag"
version = "1.0.0-alpha.9"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2209b78d1249f7e6f3293657c9779fe31ced465df091bbd433a1cf88e916ec55"
dependencies = [
"ctor 0.1.26",
"version_check",
]
checksum = "a4d330786735ea358f3bc09eea4caa098569c1c93f342d9aca0514915022fe7e"
[[package]]
name = "vcpkg"

View File

@ -35,8 +35,8 @@ futures-core = "0.3.28"
futures-executor = "0.3.21"
futures-util = { version = "0.3.26", default-features = false, features = ["alloc"] }
http = "0.2.6"
ruma = { git = "https://github.com/ruma/ruma", rev = "854d8076eff1c5b6454c42e4309b1b070b815893", features = ["client-api-c", "compat-user-id"] }
ruma-common = { git = "https://github.com/ruma/ruma", rev = "854d8076eff1c5b6454c42e4309b1b070b815893" }
ruma = { git = "https://github.com/bnjbvr/ruma", rev = "97fc09cd81ab32a2f1f6178b4996b3855ada565d", features = ["client-api-c", "compat-user-id"] }
ruma-common = { git = "https://github.com/bnjbvr/ruma", rev = "97fc09cd81ab32a2f1f6178b4996b3855ada565d" }
once_cell = "1.16.0"
serde = "1.0.151"
serde_html_form = "0.2.0"

View File

@ -12,7 +12,6 @@ criterion = { version = "0.4.0", features = ["async", "async_tokio", "html_repor
matrix-sdk-base = { path = "../crates/matrix-sdk-base" }
matrix-sdk-crypto = { path = "../crates/matrix-sdk-crypto", version = "0.6.0"}
matrix-sdk-sqlite = { path = "../crates/matrix-sdk-sqlite", version = "0.1.0", default-features = false, features = ["crypto-store"] }
matrix-sdk-sled = { path = "../crates/matrix-sdk-sled", version = "0.2.0", features = ["crypto-store"] }
matrix-sdk-test = { path = "../testing/matrix-sdk-test", version = "0.6.0"}
matrix-sdk = { path = "../crates/matrix-sdk" }
ruma = { workspace = true }

View File

@ -2,7 +2,6 @@ use std::{ops::Deref, sync::Arc};
use criterion::*;
use matrix_sdk_crypto::{EncryptionSettings, OlmMachine};
use matrix_sdk_sled::SledCryptoStore;
use matrix_sdk_sqlite::SqliteCryptoStore;
use matrix_sdk_test::response_from_file;
use ruma::{
@ -90,18 +89,6 @@ pub fn keys_query(c: &mut Criterion) {
drop(machine);
}
// Benchmark (deprecated) sled store.
let dir = tempfile::tempdir().unwrap();
let store = Arc::new(runtime.block_on(SledCryptoStore::open(dir.path(), None)).unwrap());
let machine =
runtime.block_on(OlmMachine::with_store(alice_id(), alice_device_id(), store)).unwrap();
group.bench_with_input(BenchmarkId::new("sled store", &name), &response, |b, response| {
b.to_async(&runtime)
.iter(|| async { machine.mark_request_as_sent(&txn_id, response).await.unwrap() })
});
group.finish()
}
@ -164,28 +151,6 @@ pub fn keys_claiming(c: &mut Criterion) {
)
});
group.bench_with_input(BenchmarkId::new("sled store", &name), &response, |b, response| {
b.iter_batched(
|| {
let dir = tempfile::tempdir().unwrap();
let store =
Arc::new(runtime.block_on(SledCryptoStore::open(dir.path(), None)).unwrap());
let machine = runtime
.block_on(OlmMachine::with_store(alice_id(), alice_device_id(), store))
.unwrap();
runtime
.block_on(machine.mark_request_as_sent(&txn_id, &keys_query_response))
.unwrap();
(machine, &runtime, &txn_id)
},
move |(machine, runtime, txn_id)| {
runtime.block_on(machine.mark_request_as_sent(txn_id, response)).unwrap()
},
BatchSize::SmallInput,
)
});
group.finish()
}
@ -269,37 +234,6 @@ pub fn room_key_sharing(c: &mut Criterion) {
drop(machine);
}
// Benchmark (deprecated) sled store.
let dir = tempfile::tempdir().unwrap();
let store = Arc::new(runtime.block_on(SledCryptoStore::open(dir.path(), None)).unwrap());
let machine =
runtime.block_on(OlmMachine::with_store(alice_id(), alice_device_id(), store)).unwrap();
runtime.block_on(machine.mark_request_as_sent(&txn_id, &keys_query_response)).unwrap();
runtime.block_on(machine.mark_request_as_sent(&txn_id, &response)).unwrap();
group.bench_function(BenchmarkId::new("sled store", &name), |b| {
b.to_async(&runtime).iter(|| async {
let requests = machine
.share_room_key(
room_id,
users.iter().map(Deref::deref),
EncryptionSettings::default(),
)
.await
.unwrap();
assert!(!requests.is_empty());
for request in requests {
machine.mark_request_as_sent(&request.txn_id, &to_device_response).await.unwrap();
}
machine.invalidate_group_session(room_id).await.unwrap();
})
});
group.finish()
}
@ -349,22 +283,6 @@ pub fn devices_missing_sessions_collecting(c: &mut Criterion) {
drop(machine);
}
// Benchmark (deprecated) sled store.
let dir = tempfile::tempdir().unwrap();
let store = Arc::new(runtime.block_on(SledCryptoStore::open(dir.path(), None)).unwrap());
let machine =
runtime.block_on(OlmMachine::with_store(alice_id(), alice_device_id(), store)).unwrap();
runtime.block_on(machine.mark_request_as_sent(&txn_id, &response)).unwrap();
group.bench_function(BenchmarkId::new("sled store", &name), |b| {
b.to_async(&runtime).iter(|| async {
machine.get_missing_sessions(users.iter().map(Deref::deref)).await.unwrap()
})
});
group.finish()
}

View File

@ -1,7 +1,6 @@
use criterion::*;
use matrix_sdk::{config::StoreConfig, Client, RoomInfo, RoomState, Session, StateChanges};
use matrix_sdk_base::{store::MemoryStore, StateStore as _};
use matrix_sdk_sled::SledStateStore;
use matrix_sdk_sqlite::SqliteStateStore;
use ruma::{device_id, user_id, RoomId};
use tokio::runtime::Builder;
@ -74,36 +73,6 @@ pub fn restore_session(c: &mut Criterion) {
for encryption_password in [None, Some("hunter2")] {
let encrypted_suffix = if encryption_password.is_some() { "encrypted" } else { "clear" };
// Sled
let sled_path = tempfile::tempdir().unwrap().path().to_path_buf();
let mut sled_store_builder = SledStateStore::builder().path(sled_path);
if let Some(password) = encryption_password {
sled_store_builder = sled_store_builder.passphrase(password.to_owned());
}
let sled_store = sled_store_builder.build().expect("Can't create sled store");
runtime
.block_on(sled_store.save_changes(&changes))
.expect("initial filling of sled failed");
group.bench_with_input(
BenchmarkId::new(format!("sled store {encrypted_suffix}"), NAME),
&sled_store,
|b, store| {
b.to_async(&runtime).iter(|| async {
let client = Client::builder()
.homeserver_url("https://matrix.example.com")
.store_config(StoreConfig::new().state_store(store.clone()))
.build()
.await
.expect("Can't build client");
client
.restore_session(session.clone())
.await
.expect("couldn't restore session");
})
},
);
// Sqlite
let sqlite_dir = tempfile::tempdir().unwrap();
let sqlite_store = runtime

View File

@ -142,8 +142,8 @@ impl OlmMachine {
/// * `path` - The path where the state of the machine should be persisted.
///
/// * `passphrase` - The passphrase that should be used to encrypt the data
/// at rest in the Sled store. **Warning**, if no passphrase is given, the
/// store and all its data will remain unencrypted.
/// at rest in the crypto store. **Warning**, if no passphrase is given,
/// the store and all its data will remain unencrypted.
#[uniffi::constructor]
pub fn new(
user_id: String,

View File

@ -25,7 +25,6 @@ tracing = ["dep:tracing-subscriber"]
[dependencies]
matrix-sdk-common = { version = "0.6.0", path = "../../crates/matrix-sdk-common", features = ["js"] }
matrix-sdk-sled = { version = "0.2.0", path = "../../crates/matrix-sdk-sled", default-features = false, features = ["crypto-store"] }
matrix-sdk-sqlite = { version = "0.1.0", path = "../../crates/matrix-sdk-sqlite", features = ["crypto-store"] }
ruma = { workspace = true, features = ["rand"] }
napi = { version = "2.9.1", default-features = false, features = ["napi6", "tokio_rt"] }

View File

@ -59,11 +59,8 @@ impl Deref for OlmMachineInner {
#[derive(Default)]
#[napi]
pub enum StoreType {
/// Use `matrix-sdk-sled`.
#[default]
Sled,
/// Use `matrix-sdk-sqlite`.
#[default]
Sqlite,
}
@ -123,21 +120,6 @@ impl OlmMachine {
inner: OlmMachineInner::Opened(ManuallyDrop::new(match store_path {
Some(store_path) => {
let machine = match store_type.unwrap_or_default() {
StoreType::Sled => {
matrix_sdk_crypto::OlmMachine::with_store(
user_id,
device_id,
matrix_sdk_sled::SledCryptoStore::open(
store_path,
store_passphrase.as_deref(),
)
.await
.map(Arc::new)
.map_err(into_err)?,
)
.await
}
StoreType::Sqlite => {
matrix_sdk_crypto::OlmMachine::with_store(
user_id,

View File

@ -25,8 +25,7 @@ const fs = require("fs/promises");
describe("StoreType", () => {
test("has the correct variant values", () => {
expect(StoreType.Sled).toStrictEqual(0);
expect(StoreType.Sqlite).toStrictEqual(1);
expect(StoreType.Sqlite).toStrictEqual(0);
});
});
@ -53,7 +52,6 @@ describe(OlmMachine.name, () => {
describe("can be instantiated with a store", () => {
for (const [store_type, store_name] of [
[StoreType.Sled, "sled"],
[StoreType.Sqlite, "sqlite"],
[null, "default"],
]) {

View File

@ -1,3 +1,5 @@
use std::fmt::Display;
use matrix_sdk::{self, encryption::CryptoStoreError, HttpError, IdParseError, StoreError};
#[derive(Debug, thiserror::Error)]
@ -6,57 +8,63 @@ pub enum ClientError {
Generic { msg: String },
}
impl ClientError {
fn new<E: Display>(error: E) -> Self {
Self::Generic { msg: error.to_string() }
}
}
impl From<anyhow::Error> for ClientError {
fn from(e: anyhow::Error) -> ClientError {
ClientError::Generic { msg: e.to_string() }
ClientError::Generic { msg: format!("{e:#}") }
}
}
impl From<matrix_sdk::Error> for ClientError {
fn from(e: matrix_sdk::Error) -> Self {
anyhow::Error::from(e).into()
Self::new(e)
}
}
impl From<StoreError> for ClientError {
fn from(e: StoreError) -> Self {
anyhow::Error::from(e).into()
Self::new(e)
}
}
impl From<CryptoStoreError> for ClientError {
fn from(e: CryptoStoreError) -> Self {
anyhow::Error::from(e).into()
Self::new(e)
}
}
impl From<HttpError> for ClientError {
fn from(e: HttpError) -> Self {
anyhow::Error::from(e).into()
Self::new(e)
}
}
impl From<IdParseError> for ClientError {
fn from(e: IdParseError) -> Self {
anyhow::Error::from(e).into()
Self::new(e)
}
}
impl From<serde_json::Error> for ClientError {
fn from(e: serde_json::Error) -> Self {
anyhow::Error::from(e).into()
Self::new(e)
}
}
impl From<url::ParseError> for ClientError {
fn from(e: url::ParseError) -> Self {
anyhow::Error::from(e).into()
Self::new(e)
}
}
impl From<mime::FromStrError> for ClientError {
fn from(e: mime::FromStrError) -> Self {
anyhow::Error::from(e).into()
Self::new(e)
}
}

View File

@ -2,7 +2,7 @@ use std::sync::{Arc, RwLock};
use anyhow::Context;
use eyeball_im::VectorDiff;
use futures_util::{future::join4, pin_mut, StreamExt};
use futures_util::{future::join3, pin_mut, StreamExt};
pub use matrix_sdk::{
ruma::api::client::sync::sync_events::v4::SyncRequestListFilters, Client as MatrixClient,
LoopCtrl, RoomListEntry as MatrixRoomEntry, SlidingSyncBuilder as MatrixSlidingSyncBuilder,
@ -254,14 +254,6 @@ impl SlidingSyncRoom {
}
};
let handle_sync_gap = {
let gap_broadcast_rx = self.client.inner.subscribe_sync_gap(self.inner.room_id());
let timeline = timeline.to_owned();
async move {
gap_broadcast_rx.for_each(|_| timeline.clear()).await;
}
};
// This in the future could be removed, and the rx handling could be moved
// inside handle_sliding_sync_reset since we want to reset the sliding
// sync for ignore user list events
@ -276,13 +268,7 @@ impl SlidingSyncRoom {
let items = timeline_items.into_iter().map(TimelineItem::from_arc).collect();
let task_handle = TaskHandle::new(RUNTIME.spawn(async move {
join4(
handle_events,
handle_sliding_sync_reset,
handle_sync_gap,
handle_ignore_user_list_changes,
)
.await;
join3(handle_events, handle_sliding_sync_reset, handle_ignore_user_list_changes).await;
}));
Ok((items, task_handle))
@ -560,6 +546,14 @@ impl SlidingSyncListBuilder {
);
Arc::new(builder)
}
pub fn bump_event_types(self: Arc<Self>, bump_event_types: Vec<String>) -> Arc<Self> {
let mut builder = unwrap_or_clone_arc(self);
builder.inner = builder.inner.bump_event_types(
bump_event_types.into_iter().map(Into::into).collect::<Vec<_>>().as_slice(),
);
Arc::new(builder)
}
}
pub trait SlidingSyncListOnceBuilt: Sync + Send {
@ -818,10 +812,10 @@ impl SlidingSyncBuilder {
Ok(Arc::new(builder))
}
pub fn storage_key(self: Arc<Self>, name: Option<String>) -> Arc<Self> {
pub fn enable_caching(self: Arc<Self>) -> Result<Arc<Self>, ClientError> {
let mut builder = unwrap_or_clone_arc(self);
builder.inner = builder.inner.storage_key(name);
Arc::new(builder)
builder.inner = builder.inner.enable_caching()?;
Ok(Arc::new(builder))
}
pub fn add_list(self: Arc<Self>, list_builder: Arc<SlidingSyncListBuilder>) -> Arc<Self> {
@ -883,14 +877,6 @@ impl SlidingSyncBuilder {
Arc::new(builder)
}
pub fn bump_event_types(self: Arc<Self>, bump_event_types: Vec<String>) -> Arc<Self> {
let mut builder = unwrap_or_clone_arc(self);
builder.inner = builder.inner.bump_event_types(
bump_event_types.into_iter().map(Into::into).collect::<Vec<_>>().as_slice(),
);
Arc::new(builder)
}
pub fn build(self: Arc<Self>) -> Result<Arc<SlidingSync>, ClientError> {
let builder = unwrap_or_clone_arc(self);
RUNTIME.block_on(async move {
@ -901,8 +887,11 @@ impl SlidingSyncBuilder {
#[uniffi::export]
impl Client {
pub fn sliding_sync(&self) -> Arc<SlidingSyncBuilder> {
let mut inner = self.inner.sliding_sync();
/// Creates a new Sliding Sync instance with the given identifier.
///
/// Note: the identifier must be less than 16 chars long.
pub fn sliding_sync(&self, id: String) -> Result<Arc<SlidingSyncBuilder>, ClientError> {
let mut inner = self.inner.sliding_sync(id)?;
if let Some(sliding_sync_proxy) = self
.sliding_sync_proxy
.read()
@ -912,6 +901,6 @@ impl Client {
{
inner = inner.homeserver(sliding_sync_proxy);
}
Arc::new(SlidingSyncBuilder { inner, client: self.clone() })
Ok(Arc::new(SlidingSyncBuilder { inner, client: self.clone() }))
}
}

View File

@ -13,7 +13,6 @@ coverage:
- "crates/matrix-sdk-common/"
- "crates/matrix-sdk-crypto/"
- "crates/matrix-sdk-qrcode/"
- "crates/matrix-sdk-sled/"
- "crates/matrix-sdk-sqlite/"
- "crates/matrix-sdk-store-encryption/"
# Coverage of wasm tests isn't supported at the moment,

View File

@ -17,7 +17,11 @@
use std::{collections::BTreeMap, fmt};
pub use matrix_sdk_common::debug::*;
use ruma::{api::client::push::get_notifications::v3::Notification, serde::Raw, OwnedRoomId};
use ruma::{
api::client::{push::get_notifications::v3::Notification, sync::sync_events::v3::InvitedRoom},
serde::Raw,
OwnedRoomId,
};
/// A wrapper around a slice of `Raw` events that implements `Debug` in a way
/// that only prints the event type of each item.
@ -71,3 +75,28 @@ impl<'a> fmt::Debug for DebugNotification<'a> {
.finish()
}
}
/// A wrapper around an invited room as found in `/sync` responses that
/// implements `Debug` in a way that only prints the event ID and event type for
/// the raw events contained in `invite_state`.
pub struct DebugInvitedRoom<'a>(pub &'a InvitedRoom);
#[cfg(not(tarpaulin_include))]
impl<'a> fmt::Debug for DebugInvitedRoom<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("InvitedRoom")
.field("invite_state", &DebugListOfRawEvents(&self.0.invite_state.events))
.finish()
}
}
pub(crate) struct DebugListOfRawEvents<'a, T>(pub &'a [Raw<T>]);
#[cfg(not(tarpaulin_include))]
impl<'a, T> fmt::Debug for DebugListOfRawEvents<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut list = f.debug_list();
list.entries(self.0.iter().map(DebugRawEvent));
list.finish()
}
}

View File

@ -14,13 +14,18 @@
//! SDK-specific variations of response types from Ruma.
use std::collections::BTreeMap;
use std::{collections::BTreeMap, fmt};
pub use matrix_sdk_common::deserialized_responses::*;
use ruma::{
events::room::member::{
MembershipState, RoomMemberEvent, RoomMemberEventContent, StrippedRoomMemberEvent,
SyncRoomMemberEvent,
events::{
room::{
member::{MembershipState, RoomMemberEvent, RoomMemberEventContent},
power_levels::{RoomPowerLevels, RoomPowerLevelsEventContent},
},
AnyStrippedStateEvent, AnySyncStateEvent, EventContentFromType,
PossiblyRedactedStateEventContent, RedactContent, RedactedStateEventContent,
StateEventContent, StaticStateEventContent, StrippedStateEvent, SyncStateEvent,
},
serde::Raw,
EventId, MilliSecondsSinceUnixEpoch, OwnedEventId, OwnedRoomId, OwnedUserId, UserId,
@ -61,69 +66,196 @@ pub struct MembersResponse {
pub ambiguity_changes: AmbiguityChanges,
}
/// Raw version of [`MemberEvent`].
#[derive(Debug, Serialize)]
/// Wrapper around both versions of any raw state event.
#[derive(Clone, Debug, Serialize)]
#[serde(untagged)]
pub enum RawMemberEvent {
/// A member event from a room in joined or left state.
Sync(Raw<SyncRoomMemberEvent>),
/// A member event from a room in invited state.
Stripped(Raw<StrippedRoomMemberEvent>),
pub enum RawAnySyncOrStrippedState {
/// An event from a room in joined or left state.
Sync(Raw<AnySyncStateEvent>),
/// An event from a room in invited state.
Stripped(Raw<AnyStrippedStateEvent>),
}
impl RawMemberEvent {
impl RawAnySyncOrStrippedState {
/// Try to deserialize the inner JSON as the expected type.
pub fn deserialize(&self) -> serde_json::Result<MemberEvent> {
pub fn deserialize(&self) -> serde_json::Result<AnySyncOrStrippedState> {
match self {
Self::Sync(e) => Ok(MemberEvent::Sync(e.deserialize()?)),
Self::Stripped(e) => Ok(MemberEvent::Stripped(e.deserialize()?)),
Self::Sync(raw) => Ok(AnySyncOrStrippedState::Sync(raw.deserialize()?)),
Self::Stripped(raw) => Ok(AnySyncOrStrippedState::Stripped(raw.deserialize()?)),
}
}
/// Turns this `RawAnySyncOrStrippedState` into `RawSyncOrStrippedState<C>`
/// without changing the underlying JSON.
pub fn cast<C>(self) -> RawSyncOrStrippedState<C>
where
C: StaticStateEventContent + RedactContent,
C::Redacted: RedactedStateEventContent,
{
match self {
Self::Sync(raw) => RawSyncOrStrippedState::Sync(raw.cast()),
Self::Stripped(raw) => RawSyncOrStrippedState::Stripped(raw.cast()),
}
}
}
/// Wrapper around both MemberEvent-Types
/// Wrapper around both versions of any state event.
#[derive(Clone, Debug)]
pub enum MemberEvent {
/// A member event from a room in joined or left state.
Sync(SyncRoomMemberEvent),
/// A member event from a room in invited state.
Stripped(StrippedRoomMemberEvent),
pub enum AnySyncOrStrippedState {
/// An event from a room in joined or left state.
Sync(AnySyncStateEvent),
/// An event from a room in invited state.
Stripped(AnyStrippedStateEvent),
}
impl MemberEvent {
/// The inner Content of the wrapped Event
pub fn original_content(&self) -> Option<&RoomMemberEventContent> {
impl AnySyncOrStrippedState {
/// If this is an `AnySyncStateEvent`, return a reference to the inner
/// event.
pub fn as_sync(&self) -> Option<&AnySyncStateEvent> {
match self {
MemberEvent::Sync(e) => e.as_original().map(|e| &e.content),
MemberEvent::Stripped(e) => Some(&e.content),
Self::Sync(ev) => Some(ev),
Self::Stripped(_) => None,
}
}
/// If this is an `AnyStrippedStateEvent`, return a reference to the inner
/// event.
pub fn as_stripped(&self) -> Option<&AnyStrippedStateEvent> {
match self {
Self::Sync(_) => None,
Self::Stripped(ev) => Some(ev),
}
}
}
/// Wrapper around both versions of a raw state event.
#[derive(Clone, Debug, Serialize)]
#[serde(untagged)]
pub enum RawSyncOrStrippedState<C>
where
C: StaticStateEventContent + RedactContent,
C::Redacted: RedactedStateEventContent,
{
/// An event from a room in joined or left state.
Sync(Raw<SyncStateEvent<C>>),
/// An event from a room in invited state.
Stripped(Raw<StrippedStateEvent<C::PossiblyRedacted>>),
}
impl<C> RawSyncOrStrippedState<C>
where
C: StaticStateEventContent + RedactContent,
C::Redacted: RedactedStateEventContent + fmt::Debug + Clone,
{
/// Try to deserialize the inner JSON as the expected type.
pub fn deserialize(&self) -> serde_json::Result<SyncOrStrippedState<C>>
where
C: StaticStateEventContent + EventContentFromType + RedactContent,
C::Redacted: RedactedStateEventContent<StateKey = C::StateKey> + EventContentFromType,
C::PossiblyRedacted: PossiblyRedactedStateEventContent + EventContentFromType,
{
match self {
Self::Sync(ev) => Ok(SyncOrStrippedState::Sync(ev.deserialize()?)),
Self::Stripped(ev) => Ok(SyncOrStrippedState::Stripped(ev.deserialize()?)),
}
}
}
/// Raw version of [`MemberEvent`].
pub type RawMemberEvent = RawSyncOrStrippedState<RoomMemberEventContent>;
/// Wrapper around both versions of a state event.
#[derive(Clone, Debug)]
pub enum SyncOrStrippedState<C>
where
C: StaticStateEventContent + RedactContent,
C::Redacted: RedactedStateEventContent + fmt::Debug + Clone,
{
/// An event from a room in joined or left state.
Sync(SyncStateEvent<C>),
/// An event from a room in invited state.
Stripped(StrippedStateEvent<C::PossiblyRedacted>),
}
impl<C> SyncOrStrippedState<C>
where
C: StaticStateEventContent + RedactContent,
C::Redacted: RedactedStateEventContent<StateKey = C::StateKey> + fmt::Debug + Clone,
C::PossiblyRedacted: PossiblyRedactedStateEventContent<StateKey = C::StateKey>,
{
/// If this is a `SyncStateEvent`, return a reference to the inner event.
pub fn as_sync(&self) -> Option<&SyncStateEvent<C>> {
match self {
Self::Sync(ev) => Some(ev),
Self::Stripped(_) => None,
}
}
/// If this is a `StrippedStateEvent`, return a reference to the inner
/// event.
pub fn as_stripped(&self) -> Option<&StrippedStateEvent<C::PossiblyRedacted>> {
match self {
Self::Sync(_) => None,
Self::Stripped(ev) => Some(ev),
}
}
/// The sender of this event.
pub fn sender(&self) -> &UserId {
match self {
MemberEvent::Sync(e) => e.sender(),
MemberEvent::Stripped(e) => &e.sender,
Self::Sync(e) => e.sender(),
Self::Stripped(e) => &e.sender,
}
}
/// The ID of this event.
pub fn event_id(&self) -> Option<&EventId> {
match self {
MemberEvent::Sync(e) => Some(e.event_id()),
MemberEvent::Stripped(_) => None,
Self::Sync(e) => Some(e.event_id()),
Self::Stripped(_) => None,
}
}
/// The Server Timestamp of this event.
/// The server timestamp of this event.
pub fn origin_server_ts(&self) -> Option<MilliSecondsSinceUnixEpoch> {
match self {
MemberEvent::Sync(e) => Some(e.origin_server_ts()),
MemberEvent::Stripped(_) => None,
Self::Sync(e) => Some(e.origin_server_ts()),
Self::Stripped(_) => None,
}
}
/// The membership state of the user
/// The state key associated to this state event.
pub fn state_key(&self) -> &C::StateKey {
match self {
Self::Sync(e) => e.state_key(),
Self::Stripped(e) => &e.state_key,
}
}
}
impl<C> SyncOrStrippedState<C>
where
C: StaticStateEventContent<PossiblyRedacted = C>
+ RedactContent
+ PossiblyRedactedStateEventContent,
C::Redacted: RedactedStateEventContent<StateKey = <C as StateEventContent>::StateKey>
+ fmt::Debug
+ Clone,
{
/// The inner content of the wrapped event.
pub fn original_content(&self) -> Option<&C> {
match self {
Self::Sync(e) => e.as_original().map(|e| &e.content),
Self::Stripped(e) => Some(&e.content),
}
}
}
/// Wrapper around both MemberEvent-Types
pub type MemberEvent = SyncOrStrippedState<RoomMemberEventContent>;
impl MemberEvent {
/// The membership state of the user.
pub fn membership(&self) -> &MembershipState {
match self {
MemberEvent::Sync(e) => e.membership(),
@ -131,11 +263,18 @@ impl MemberEvent {
}
}
/// The user id associated to this member event
/// The user id associated to this member event.
pub fn user_id(&self) -> &UserId {
self.state_key()
}
}
impl SyncOrStrippedState<RoomPowerLevelsEventContent> {
/// The power levels of the event.
pub fn power_levels(&self) -> RoomPowerLevels {
match self {
MemberEvent::Sync(e) => e.state_key(),
MemberEvent::Stripped(e) => &e.state_key,
Self::Sync(e) => e.power_levels(),
Self::Stripped(e) => e.power_levels(),
}
}
}

View File

@ -19,14 +19,17 @@ use ruma::{
presence::PresenceEvent,
room::{
member::MembershipState,
power_levels::{PowerLevelAction, RoomPowerLevels, SyncRoomPowerLevelsEvent},
power_levels::{PowerLevelAction, RoomPowerLevels, RoomPowerLevelsEventContent},
},
MessageLikeEventType, StateEventType,
},
MxcUri, UserId,
};
use crate::{deserialized_responses::MemberEvent, MinimalRoomMemberEvent};
use crate::{
deserialized_responses::{MemberEvent, SyncOrStrippedState},
MinimalRoomMemberEvent,
};
/// A member of a room.
#[derive(Clone, Debug)]
@ -38,7 +41,7 @@ pub struct RoomMember {
pub(crate) profile: Arc<Option<MinimalRoomMemberEvent>>,
#[allow(dead_code)]
pub(crate) presence: Arc<Option<PresenceEvent>>,
pub(crate) power_levels: Arc<Option<SyncRoomPowerLevelsEvent>>,
pub(crate) power_levels: Arc<Option<SyncOrStrippedState<RoomPowerLevelsEventContent>>>,
pub(crate) max_power_level: i64,
pub(crate) is_room_creator: bool,
pub(crate) display_name_ambiguous: bool,

View File

@ -17,12 +17,12 @@ use ruma::{
SyncRoomMemberEvent,
},
power_levels::RoomPowerLevelsEventContent,
topic::{OriginalRoomTopicEvent, RedactedRoomTopicEvent, RoomTopicEventContent},
topic::RoomTopicEventContent,
MediaSource,
},
AnyEphemeralRoomEventContent, AnyGlobalAccountDataEvent, AnyRoomAccountDataEvent,
AnyStrippedStateEvent, AnySyncEphemeralRoomEvent, AnySyncStateEvent,
GlobalAccountDataEventType, RoomAccountDataEventType, StateEventType,
GlobalAccountDataEventType, RoomAccountDataEventType, StateEventType, SyncStateEvent,
},
mxc_uri, room_id,
serde::Raw,
@ -266,8 +266,12 @@ impl StateStoreIntegrationTests for DynStateStore {
self.get_state_event_static::<RoomTopicEventContent>(room_id)
.await?
.expect("room topic found before redaction")
.deserialize_as::<OriginalRoomTopicEvent>()
.deserialize()
.expect("can deserialize room topic before redaction")
.as_sync()
.expect("room topic is a sync state event")
.as_original()
.expect("room topic is not redacted yet")
.content
.topic,
"😀"
@ -283,23 +287,15 @@ impl StateStoreIntegrationTests for DynStateStore {
changes.add_redaction(room_id, &redacted_event_id, redaction_evt);
self.save_changes(&changes).await?;
match self
.get_state_event_static::<RoomTopicEventContent>(room_id)
.await?
.expect("room topic found before redaction")
.deserialize_as::<OriginalRoomTopicEvent>()
{
Err(_) => {} // as expected
Ok(_) => panic!("Topic has not been redacted"),
}
let _ = self
let redacted_event = self
.get_state_event_static::<RoomTopicEventContent>(room_id)
.await?
.expect("room topic found after redaction")
.deserialize_as::<RedactedRoomTopicEvent>()
.deserialize()
.expect("can deserialize room topic after redaction");
assert_matches!(redacted_event.as_sync(), Some(SyncStateEvent::Redacted(_)));
Ok(())
}

View File

@ -37,8 +37,9 @@ use tracing::{debug, warn};
use super::{Result, RoomInfo, StateChanges, StateStore, StoreError};
use crate::{
deserialized_responses::RawMemberEvent, media::MediaRequest, MinimalRoomMemberEvent,
RoomMemberships, StateStoreDataKey, StateStoreDataValue,
deserialized_responses::{RawAnySyncOrStrippedState, RawMemberEvent},
media::MediaRequest,
MinimalRoomMemberEvent, RoomMemberships, StateStoreDataKey, StateStoreDataValue,
};
/// In-Memory, non-persistent implementation of the `StateStore`
@ -376,25 +377,48 @@ impl MemoryStore {
room_id: &RoomId,
event_type: StateEventType,
state_key: &str,
) -> Result<Option<Raw<AnySyncStateEvent>>> {
Ok(self
) -> Result<Option<RawAnySyncOrStrippedState>> {
if let Some(e) = self
.stripped_room_state
.get(room_id)
.as_ref()
.and_then(|events| events.get(&event_type))
.and_then(|m| m.get(state_key).map(|m| m.clone()))
{
Ok(Some(RawAnySyncOrStrippedState::Stripped(e)))
} else if let Some(e) = self
.room_state
.get(room_id)
.and_then(|e| e.get(&event_type).and_then(|s| s.get(state_key).map(|e| e.clone()))))
.as_ref()
.and_then(|events| events.get(&event_type))
.and_then(|m| m.get(state_key).map(|m| m.clone()))
{
Ok(Some(RawAnySyncOrStrippedState::Sync(e)))
} else {
Ok(None)
}
}
async fn get_state_events(
&self,
room_id: &RoomId,
event_type: StateEventType,
) -> Result<Vec<Raw<AnySyncStateEvent>>> {
Ok(self
.room_state
.get(room_id)
.and_then(|e| {
e.get(&event_type).map(|s| s.iter().map(|e| e.clone()).collect::<Vec<_>>())
) -> Result<Vec<RawAnySyncOrStrippedState>> {
if let Some(v) = self.stripped_room_state.get(room_id).as_ref().and_then(|events| {
events.get(&event_type).map(|s| {
s.iter().map(|e| RawAnySyncOrStrippedState::Stripped(e.clone())).collect::<Vec<_>>()
})
.unwrap_or_default())
}) {
Ok(v)
} else if let Some(v) = self.room_state.get(room_id).as_ref().and_then(|events| {
events.get(&event_type).map(|s| {
s.iter().map(|e| RawAnySyncOrStrippedState::Sync(e.clone())).collect::<Vec<_>>()
})
}) {
Ok(v)
} else {
Ok(Vec::new())
}
}
async fn get_profile(
@ -410,25 +434,9 @@ impl MemoryStore {
room_id: &RoomId,
state_key: &UserId,
) -> Result<Option<RawMemberEvent>> {
if let Some(e) = self
.stripped_room_state
.get(room_id)
.as_ref()
.and_then(|events| events.get(&StateEventType::RoomMember))
.and_then(|m| m.get(state_key.as_str()).map(|m| m.clone().cast()))
{
Ok(Some(RawMemberEvent::Stripped(e)))
} else if let Some(e) = self
.room_state
.get(room_id)
.as_ref()
.and_then(|events| events.get(&StateEventType::RoomMember))
.and_then(|m| m.get(state_key.as_str()).map(|m| m.clone().cast()))
{
Ok(Some(RawMemberEvent::Sync(e)))
} else {
Ok(None)
}
self.get_state_event(room_id, StateEventType::RoomMember, state_key.as_str())
.await
.map(|opt| opt.map(|raw| raw.cast()))
}
/// Get the user IDs for the given room with the given memberships and
@ -588,7 +596,7 @@ impl StateStore for MemoryStore {
room_id: &RoomId,
event_type: StateEventType,
state_key: &str,
) -> Result<Option<Raw<AnySyncStateEvent>>> {
) -> Result<Option<RawAnySyncOrStrippedState>> {
self.get_state_event(room_id, event_type, state_key).await
}
@ -596,7 +604,7 @@ impl StateStore for MemoryStore {
&self,
room_id: &RoomId,
event_type: StateEventType,
) -> Result<Vec<Raw<AnySyncStateEvent>>> {
) -> Result<Vec<RawAnySyncOrStrippedState>> {
self.get_state_events(room_id, event_type).await
}

View File

@ -20,11 +20,10 @@ use ruma::{
events::{
presence::PresenceEvent,
receipt::{Receipt, ReceiptThread, ReceiptType},
AnyGlobalAccountDataEvent, AnyRoomAccountDataEvent, AnySyncStateEvent, EmptyStateKey,
GlobalAccountDataEvent, GlobalAccountDataEventContent, GlobalAccountDataEventType,
RedactContent, RedactedStateEventContent, RoomAccountDataEvent,
RoomAccountDataEventContent, RoomAccountDataEventType, StateEventType, StaticEventContent,
StaticStateEventContent, SyncStateEvent,
AnyGlobalAccountDataEvent, AnyRoomAccountDataEvent, EmptyStateKey, GlobalAccountDataEvent,
GlobalAccountDataEventContent, GlobalAccountDataEventType, RedactContent,
RedactedStateEventContent, RoomAccountDataEvent, RoomAccountDataEventContent,
RoomAccountDataEventType, StateEventType, StaticEventContent, StaticStateEventContent,
},
serde::Raw,
EventId, MxcUri, OwnedEventId, OwnedUserId, RoomId, UserId,
@ -32,8 +31,9 @@ use ruma::{
use super::{StateChanges, StoreError};
use crate::{
deserialized_responses::RawMemberEvent, media::MediaRequest, MinimalRoomMemberEvent, RoomInfo,
RoomMemberships,
deserialized_responses::{RawAnySyncOrStrippedState, RawMemberEvent, RawSyncOrStrippedState},
media::MediaRequest,
MinimalRoomMemberEvent, RoomInfo, RoomMemberships,
};
/// An abstract state store trait that can be used to implement different stores
@ -102,7 +102,7 @@ pub trait StateStore: AsyncTraitDeps {
room_id: &RoomId,
event_type: StateEventType,
state_key: &str,
) -> Result<Option<Raw<AnySyncStateEvent>>, Self::Error>;
) -> Result<Option<RawAnySyncOrStrippedState>, Self::Error>;
/// Get a list of state events for a given room and `StateEventType`.
///
@ -115,7 +115,7 @@ pub trait StateStore: AsyncTraitDeps {
&self,
room_id: &RoomId,
event_type: StateEventType,
) -> Result<Vec<Raw<AnySyncStateEvent>>, Self::Error>;
) -> Result<Vec<RawAnySyncOrStrippedState>, Self::Error>;
/// Get the current profile for the given user in the given room.
///
@ -370,7 +370,7 @@ impl<T: StateStore> StateStore for EraseStateStoreError<T> {
room_id: &RoomId,
event_type: StateEventType,
state_key: &str,
) -> Result<Option<Raw<AnySyncStateEvent>>, Self::Error> {
) -> Result<Option<RawAnySyncOrStrippedState>, Self::Error> {
self.0.get_state_event(room_id, event_type, state_key).await.map_err(Into::into)
}
@ -378,7 +378,7 @@ impl<T: StateStore> StateStore for EraseStateStoreError<T> {
&self,
room_id: &RoomId,
event_type: StateEventType,
) -> Result<Vec<Raw<AnySyncStateEvent>>, Self::Error> {
) -> Result<Vec<RawAnySyncOrStrippedState>, Self::Error> {
self.0.get_state_events(room_id, event_type).await.map_err(Into::into)
}
@ -530,12 +530,12 @@ pub trait StateStoreExt: StateStore {
async fn get_state_event_static<C>(
&self,
room_id: &RoomId,
) -> Result<Option<Raw<SyncStateEvent<C>>>, Self::Error>
) -> Result<Option<RawSyncOrStrippedState<C>>, Self::Error>
where
C: StaticEventContent + StaticStateEventContent<StateKey = EmptyStateKey> + RedactContent,
C::Redacted: RedactedStateEventContent,
{
Ok(self.get_state_event(room_id, C::TYPE.into(), "").await?.map(Raw::cast))
Ok(self.get_state_event(room_id, C::TYPE.into(), "").await?.map(|raw| raw.cast()))
}
/// Get a specific state event of statically-known type.
@ -547,14 +547,17 @@ pub trait StateStoreExt: StateStore {
&self,
room_id: &RoomId,
state_key: &K,
) -> Result<Option<Raw<SyncStateEvent<C>>>, Self::Error>
) -> Result<Option<RawSyncOrStrippedState<C>>, Self::Error>
where
C: StaticEventContent + StaticStateEventContent + RedactContent,
C::StateKey: Borrow<K>,
C::Redacted: RedactedStateEventContent,
K: AsRef<str> + ?Sized + Sync,
{
Ok(self.get_state_event(room_id, C::TYPE.into(), state_key.as_ref()).await?.map(Raw::cast))
Ok(self
.get_state_event(room_id, C::TYPE.into(), state_key.as_ref())
.await?
.map(|raw| raw.cast()))
}
/// Get a list of state events of a statically-known type for a given room.
@ -565,7 +568,7 @@ pub trait StateStoreExt: StateStore {
async fn get_state_events_static<C>(
&self,
room_id: &RoomId,
) -> Result<Vec<Raw<SyncStateEvent<C>>>, Self::Error>
) -> Result<Vec<RawSyncOrStrippedState<C>>, Self::Error>
where
C: StaticEventContent + StaticStateEventContent + RedactContent,
C::Redacted: RedactedStateEventContent,
@ -575,7 +578,7 @@ pub trait StateStoreExt: StateStore {
.get_state_events(room_id, C::TYPE.into())
.await?
.into_iter()
.map(Raw::cast)
.map(|raw| raw.cast())
.collect())
}

View File

@ -16,7 +16,7 @@
use std::{collections::BTreeMap, fmt};
use matrix_sdk_common::{debug::DebugRawEvent, deserialized_responses::SyncTimelineEvent};
use matrix_sdk_common::deserialized_responses::SyncTimelineEvent;
use ruma::{
api::client::{
push::get_notifications::v3::Notification,
@ -34,7 +34,9 @@ use ruma::{
use serde::{Deserialize, Serialize};
use crate::{
debug::{DebugListOfRawEventsNoId, DebugNotificationMap},
debug::{
DebugInvitedRoom, DebugListOfRawEvents, DebugListOfRawEventsNoId, DebugNotificationMap,
},
deserialized_responses::AmbiguityChanges,
};
@ -228,25 +230,3 @@ impl<'a> fmt::Debug for DebugInvitedRooms<'a> {
f.debug_map().entries(self.0.iter().map(|(k, v)| (k, DebugInvitedRoom(v)))).finish()
}
}
struct DebugInvitedRoom<'a>(&'a InvitedRoom);
#[cfg(not(tarpaulin_include))]
impl<'a> fmt::Debug for DebugInvitedRoom<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("InvitedRoom")
.field("invite_state", &DebugListOfRawEvents(&self.0.invite_state.events))
.finish()
}
}
struct DebugListOfRawEvents<'a, T>(&'a [Raw<T>]);
#[cfg(not(tarpaulin_include))]
impl<'a, T> fmt::Debug for DebugListOfRawEvents<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut list = f.debug_list();
list.entries(self.0.iter().map(DebugRawEvent));
list.finish()
}
}

View File

@ -25,7 +25,7 @@ use std::{
#[cfg(target_arch = "wasm32")]
use futures_util::{future::RemoteHandle, FutureExt};
#[cfg(not(target_arch = "wasm32"))]
pub use tokio::spawn;
pub use tokio::task::{spawn, JoinHandle};
#[cfg(target_arch = "wasm32")]
pub fn spawn<F, T>(future: F) -> JoinHandle<T>

View File

@ -25,7 +25,7 @@ use super::{
compat::{Error as DecryptionError, Message, PkDecryption},
MegolmV1BackupKey,
};
use crate::{store::RecoveryKey, utilities::encode};
use crate::store::RecoveryKey;
/// Error type for the decoding of a RecoveryKey.
#[derive(Debug, Error)]
@ -203,13 +203,19 @@ impl RecoveryKey {
let message = Message::from_base64(ciphertext, mac, ephemeral_key)?;
let pk = self.get_pk_decrytpion();
pk.decrypt(&message).map(encode)
let decrypted = pk.decrypt(&message)?;
Ok(String::from_utf8_lossy(&decrypted).to_string())
}
}
#[cfg(test)]
mod tests {
use ruma::api::client::backup::KeyBackupData;
use serde_json::json;
use super::{DecodeError, RecoveryKey};
use crate::olm::BackedUpRoomKey;
const TEST_KEY: [u8; 32] = [
0x77, 0x07, 0x6D, 0x0A, 0x73, 0x18, 0xA5, 0x7D, 0x3C, 0x16, 0xC1, 0x72, 0x51, 0xB2, 0x66,
@ -260,14 +266,6 @@ mod tests {
Ok(())
}
}
#[cfg(test)]
mod test {
use ruma::api::client::backup::KeyBackupData;
use serde_json::json;
use super::*;
#[test]
fn test_decrypt_key() {
@ -299,8 +297,11 @@ mod test {
let ciphertext = key_backup_data.session_data.ciphertext.encode();
let mac = key_backup_data.session_data.mac.encode();
let _ = recovery_key
let decrypted = recovery_key
.decrypt_v1(&ephemeral, &mac, &ciphertext)
.expect("The backed up key should be decrypted successfully");
let _: BackedUpRoomKey = serde_json::from_str(&decrypted)
.expect("The decrypted payload should contain valid JSON");
}
}

View File

@ -27,9 +27,9 @@ pub(crate) use account::{Account, OlmDecryptionInfo, SessionType};
pub use account::{OlmMessageHash, PickledAccount, ReadOnlyAccount};
pub(crate) use group_sessions::ShareState;
pub use group_sessions::{
EncryptionSettings, ExportedRoomKey, InboundGroupSession, OutboundGroupSession,
PickledInboundGroupSession, PickledOutboundGroupSession, SessionCreationError,
SessionExportError, SessionKey, ShareInfo,
BackedUpRoomKey, EncryptionSettings, ExportedRoomKey, InboundGroupSession,
OutboundGroupSession, PickledInboundGroupSession, PickledOutboundGroupSession,
SessionCreationError, SessionExportError, SessionKey, ShareInfo,
};
pub use session::{PickledSession, Session};
pub use signing::{CrossSigningStatus, PickledCrossSigningIdentity, PrivateCrossSigningIdentity};

View File

@ -22,7 +22,7 @@ use async_trait::async_trait;
use gloo_utils::format::JsValueSerdeExt;
use indexed_db_futures::prelude::*;
use matrix_sdk_base::{
deserialized_responses::RawMemberEvent,
deserialized_responses::{RawAnySyncOrStrippedState, RawMemberEvent},
media::{MediaRequest, UniqueKey},
store::{StateChanges, StateStore, StoreError},
MinimalStateEvent, RoomInfo, RoomMemberships, StateStoreDataKey, StateStoreDataValue,
@ -398,7 +398,7 @@ impl IndexeddbStateStore {
// the wasm target (which would disable many other parts of the codebase).
#[cfg(target_arch = "wasm32")]
macro_rules! impl_state_store {
( $($body:tt)* ) => {
({ $($body:tt)* }) => {
#[async_trait(?Send)]
impl StateStore for IndexeddbStateStore {
type Error = IndexeddbStateStoreError;
@ -410,18 +410,15 @@ macro_rules! impl_state_store {
#[cfg(not(target_arch = "wasm32"))]
macro_rules! impl_state_store {
( $($body:tt)* ) => {
({ $($body:tt)* }) => {
impl IndexeddbStateStore {
$($body)*
}
};
}
impl_state_store! {
async fn get_kv_data(
&self,
key: StateStoreDataKey<'_>,
) -> Result<Option<StateStoreDataValue>> {
impl_state_store!({
async fn get_kv_data(&self, key: StateStoreDataKey<'_>) -> Result<Option<StateStoreDataValue>> {
let encoded_key = self.encode_kv_data_key(key);
let value = self
@ -453,17 +450,14 @@ impl_state_store! {
StateStoreDataKey::SyncToken => {
value.into_sync_token().expect("Session data not a sync token")
}
StateStoreDataKey::Filter(_) => {
value.into_filter().expect("Session data not a filter")
}
StateStoreDataKey::Filter(_) => value.into_filter().expect("Session data not a filter"),
StateStoreDataKey::UserAvatarUrl(_) => {
value.into_user_avatar_url().expect("Session data not an user avatar url")
}
};
let tx = self
.inner
.transaction_on_one_with_mode(keys::KV, IdbTransactionMode::Readwrite)?;
let tx =
self.inner.transaction_on_one_with_mode(keys::KV, IdbTransactionMode::Readwrite)?;
let obj = tx.object_store(keys::KV)?;
@ -477,9 +471,8 @@ impl_state_store! {
async fn remove_kv_data(&self, key: StateStoreDataKey<'_>) -> Result<()> {
let encoded_key = self.encode_kv_data_key(key);
let tx = self
.inner
.transaction_on_one_with_mode(keys::KV, IdbTransactionMode::Readwrite)?;
let tx =
self.inner.transaction_on_one_with_mode(keys::KV, IdbTransactionMode::Readwrite)?;
let obj = tx.object_store(keys::KV)?;
obj.delete(&encoded_key)?;
@ -522,10 +515,7 @@ impl_state_store! {
}
if !changes.stripped_state.is_empty() {
stores.extend([
keys::STRIPPED_ROOM_STATE,
keys::STRIPPED_USER_IDS,
]);
stores.extend([keys::STRIPPED_ROOM_STATE, keys::STRIPPED_USER_IDS]);
}
if !changes.receipts.is_empty() {
@ -612,11 +602,13 @@ impl_state_store! {
.delete(&self.encode_key(keys::STRIPPED_USER_IDS, key))?;
user_ids.put_key_val_owned(
&self.encode_key(keys::USER_IDS, key),
&self.serialize_event(&RoomMember::from(&event))?,
)?;
&self.encode_key(keys::USER_IDS, key),
&self.serialize_event(&RoomMember::from(&event))?,
)?;
if let Some(profile) = profile_changes.and_then(|p| p.get(event.state_key())) {
if let Some(profile) =
profile_changes.and_then(|p| p.get(event.state_key()))
{
profiles.put_key_val_owned(
&self.encode_key(keys::PROFILES, key),
&self.serialize_event(&profile)?,
@ -674,12 +666,16 @@ impl_state_store! {
store.put_key_val(&key, &self.serialize_event(&raw_event)?)?;
if *event_type == StateEventType::RoomMember {
let event = match raw_event.deserialize_as::<StrippedRoomMemberEvent>() {
let event = match raw_event.deserialize_as::<StrippedRoomMemberEvent>()
{
Ok(ev) => ev,
Err(e) => {
let event_id: Option<String> =
raw_event.get_field("event_id").ok().flatten();
debug!(event_id, "Failed to deserialize stripped member event: {e}");
debug!(
event_id,
"Failed to deserialize stripped member event: {e}"
);
continue;
}
};
@ -805,7 +801,7 @@ impl_state_store! {
tx.await.into_result().map_err(|e| e.into())
}
async fn get_presence_event(&self, user_id: &UserId) -> Result<Option<Raw<PresenceEvent>>> {
async fn get_presence_event(&self, user_id: &UserId) -> Result<Option<Raw<PresenceEvent>>> {
self.inner
.transaction_on_one_with_mode(keys::PRESENCE, IdbTransactionMode::Readonly)?
.object_store(keys::PRESENCE)?
@ -815,26 +811,60 @@ impl_state_store! {
.transpose()
}
async fn get_state_event(
async fn get_state_event(
&self,
room_id: &RoomId,
event_type: StateEventType,
state_key: &str,
) -> Result<Option<Raw<AnySyncStateEvent>>> {
self.inner
) -> Result<Option<RawAnySyncOrStrippedState>> {
if let Some(e) = self
.inner
.transaction_on_one_with_mode(keys::STRIPPED_ROOM_STATE, IdbTransactionMode::Readonly)?
.object_store(keys::STRIPPED_ROOM_STATE)?
.get(&self.encode_key(keys::STRIPPED_ROOM_STATE, (room_id, &event_type, state_key)))?
.await?
.map(|f| self.deserialize_event(&f))
.transpose()?
{
Ok(Some(RawAnySyncOrStrippedState::Stripped(e)))
} else if let Some(e) = self
.inner
.transaction_on_one_with_mode(keys::ROOM_STATE, IdbTransactionMode::Readonly)?
.object_store(keys::ROOM_STATE)?
.get(&self.encode_key(keys::ROOM_STATE, (room_id, event_type, state_key)))?
.await?
.map(|f| self.deserialize_event(&f))
.transpose()
.transpose()?
{
Ok(Some(RawAnySyncOrStrippedState::Sync(e)))
} else {
Ok(None)
}
}
async fn get_state_events(
async fn get_state_events(
&self,
room_id: &RoomId,
event_type: StateEventType,
) -> Result<Vec<Raw<AnySyncStateEvent>>> {
) -> Result<Vec<RawAnySyncOrStrippedState>> {
let stripped_range =
self.encode_to_range(keys::STRIPPED_ROOM_STATE, (room_id, &event_type))?;
let stripped_events = self
.inner
.transaction_on_one_with_mode(keys::STRIPPED_ROOM_STATE, IdbTransactionMode::Readonly)?
.object_store(keys::STRIPPED_ROOM_STATE)?
.get_all_with_key(&stripped_range)?
.await?
.iter()
.filter_map(|f| {
self.deserialize_event(&f).ok().map(RawAnySyncOrStrippedState::Stripped)
})
.collect::<Vec<_>>();
if !stripped_events.is_empty() {
return Ok(stripped_events);
}
let range = self.encode_to_range(keys::ROOM_STATE, (room_id, event_type))?;
Ok(self
.inner
@ -843,11 +873,11 @@ impl_state_store! {
.get_all_with_key(&range)?
.await?
.iter()
.filter_map(|f| self.deserialize_event(&f).ok())
.filter_map(|f| self.deserialize_event(&f).ok().map(RawAnySyncOrStrippedState::Sync))
.collect::<Vec<_>>())
}
async fn get_profile(
async fn get_profile(
&self,
room_id: &RoomId,
user_id: &UserId,
@ -861,7 +891,7 @@ impl_state_store! {
.transpose()
}
async fn get_member_event(
async fn get_member_event(
&self,
room_id: &RoomId,
state_key: &UserId,
@ -870,7 +900,10 @@ impl_state_store! {
.inner
.transaction_on_one_with_mode(keys::STRIPPED_ROOM_STATE, IdbTransactionMode::Readonly)?
.object_store(keys::STRIPPED_ROOM_STATE)?
.get(&self.encode_key(keys::STRIPPED_ROOM_STATE, (room_id, StateEventType::RoomMember, state_key)))?
.get(&self.encode_key(
keys::STRIPPED_ROOM_STATE,
(room_id, StateEventType::RoomMember, state_key),
))?
.await?
.map(|f| self.deserialize_event(&f))
.transpose()?
@ -880,7 +913,10 @@ impl_state_store! {
.inner
.transaction_on_one_with_mode(keys::ROOM_STATE, IdbTransactionMode::Readonly)?
.object_store(keys::ROOM_STATE)?
.get(&self.encode_key(keys::ROOM_STATE, (room_id, StateEventType::RoomMember, state_key)))?
.get(
&self
.encode_key(keys::ROOM_STATE, (room_id, StateEventType::RoomMember, state_key)),
)?
.await?
.map(|f| self.deserialize_event(&f))
.transpose()?
@ -891,7 +927,7 @@ impl_state_store! {
}
}
async fn get_room_infos(&self) -> Result<Vec<RoomInfo>> {
async fn get_room_infos(&self) -> Result<Vec<RoomInfo>> {
let entries: Vec<_> = self
.inner
.transaction_on_one_with_mode(keys::ROOM_INFOS, IdbTransactionMode::Readonly)?
@ -905,7 +941,7 @@ impl_state_store! {
Ok(entries)
}
async fn get_stripped_room_infos(&self) -> Result<Vec<RoomInfo>> {
async fn get_stripped_room_infos(&self) -> Result<Vec<RoomInfo>> {
let entries = self
.inner
.transaction_on_one_with_mode(keys::STRIPPED_ROOM_INFOS, IdbTransactionMode::Readonly)?
@ -919,7 +955,7 @@ impl_state_store! {
Ok(entries)
}
async fn get_users_with_display_name(
async fn get_users_with_display_name(
&self,
room_id: &RoomId,
display_name: &str,
@ -933,7 +969,7 @@ impl_state_store! {
.unwrap_or_else(|| Ok(Default::default()))
}
async fn get_account_data_event(
async fn get_account_data_event(
&self,
event_type: GlobalAccountDataEventType,
) -> Result<Option<Raw<AnyGlobalAccountDataEvent>>> {
@ -946,7 +982,7 @@ impl_state_store! {
.transpose()
}
async fn get_room_account_data_event(
async fn get_room_account_data_event(
&self,
room_id: &RoomId,
event_type: RoomAccountDataEventType,
@ -1131,7 +1167,11 @@ impl_state_store! {
tx.await.into_result().map_err(|e| e.into())
}
async fn get_user_ids(&self, room_id: &RoomId, memberships: RoomMemberships) -> Result<Vec<OwnedUserId>> {
async fn get_user_ids(
&self,
room_id: &RoomId,
memberships: RoomMemberships,
) -> Result<Vec<OwnedUserId>> {
let ids = self.get_user_ids_inner(room_id, memberships, true).await?;
if !ids.is_empty() {
return Ok(ids);
@ -1146,7 +1186,7 @@ impl_state_store! {
async fn get_joined_user_ids(&self, room_id: &RoomId) -> Result<Vec<OwnedUserId>> {
self.get_user_ids(room_id, RoomMemberships::JOIN).await
}
}
});
/// A room member.
#[derive(Debug, Serialize, Deserialize)]

View File

@ -1,54 +0,0 @@
[package]
name = "matrix-sdk-sled"
version = "0.2.0"
edition = "2021"
authors = ["Damir Jelić <poljar@termina.org.uk>"]
repository = "https://github.com/matrix-org/matrix-rust-sdk"
description = "Sled Storage backend for matrix-sdk for native environments"
license = "Apache-2.0"
rust-version = { workspace = true }
readme = "README.md"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[features]
default = ["state-store"]
state-store = ["dep:matrix-sdk-base"]
crypto-store = [
"dep:matrix-sdk-base",
"dep:matrix-sdk-crypto",
"matrix-sdk-base?/e2e-encryption",
]
docsrs = [
"crypto-store",
]
[dependencies]
async-trait = { workspace = true }
fs_extra = "1.2.0"
futures-core = { workspace = true }
futures-util = { workspace = true }
matrix-sdk-base = { version = "0.6.0", path = "../matrix-sdk-base", optional = true }
matrix-sdk-crypto = { version = "0.6.0", path = "../matrix-sdk-crypto", optional = true }
matrix-sdk-store-encryption = { version = "0.2.0", path = "../matrix-sdk-store-encryption" }
ruma = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
sled = "0.34.7"
thiserror = { workspace = true }
tokio = { workspace = true, features = ["fs"] }
tracing = { workspace = true }
[dev-dependencies]
assert_matches = { workspace = true }
glob = "0.3.0"
matrix-sdk-base = { path = "../matrix-sdk-base", features = ["testing"] }
matrix-sdk-crypto = { path = "../matrix-sdk-crypto", features = ["testing"] }
matrix-sdk-test = { path = "../../testing/matrix-sdk-test" }
once_cell = { workspace = true }
tempfile = "3.3.0"
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }

View File

@ -1,25 +0,0 @@
# matrix-sdk-sled
This crate implements a storage backend using [sled][sled] for native and mobile environments using the matrix-sdk-base primitives. When using **matrix-sdk** this is included by default.
_Note_: the future of [sled][sled] is unclear. While it is currently the default for mobile and native environments for matrix-rust-sdk, [we are actively looking at replacing it with a different storage backend](https://github.com/matrix-org/matrix-rust-sdk/issues/294).
## Crate Feature Flags
The following crate feature flags are available:
* `state-store`: (on by default) Enables the state store
* `crypto-store`: Enables the store for end-to-end encrypted data.
## Minimum Supported Rust Version (MSRV)
These crates are built with the Rust language version 2021 and require a minimum compiler version of `1.62`.
## License
[Apache-2.0](https://www.apache.org/licenses/LICENSE-2.0)
[sled]: https://sled.rs/

View File

@ -1,17 +0,0 @@
use std::{env, process};
fn main() {
let target_arch = env::var_os("CARGO_CFG_TARGET_ARCH");
if target_arch.map_or(false, |arch| arch == "wasm32") {
let err = "this crate does not support the target arch 'wasm32'";
eprintln!(
"\n\
{pad}\n\
error: {err} \n\
{pad}\n\
",
pad = "".repeat(err.len()),
);
process::exit(1);
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,290 +0,0 @@
use std::{borrow::Cow, ops::Deref};
use matrix_sdk_store_encryption::StoreCipher;
use ruma::{
events::{
receipt::ReceiptType, secret::request::SecretName, GlobalAccountDataEventType,
RoomAccountDataEventType, StateEventType,
},
DeviceId, EventId, MxcUri, OwnedEventId, OwnedRoomId, OwnedUserId, RoomId, TransactionId,
UserId,
};
/// Hold any data to be used as an encoding key
/// without checking for the existence of `ENCODE_SEPARATOR` within
pub struct EncodeUnchecked<'a>(&'a [u8]);
#[cfg(feature = "state-store")]
impl<'a> EncodeUnchecked<'a> {
/// Wrap any `[u8]`
pub fn from(bytes: &'a [u8]) -> Self {
EncodeUnchecked(bytes)
}
}
impl<'a> EncodeKey for EncodeUnchecked<'a> {
fn encode_as_bytes(&self) -> Cow<'a, [u8]> {
(self.0).into()
}
}
pub const ENCODE_SEPARATOR: u8 = 0xff;
pub trait EncodeKey {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
unimplemented!()
}
fn encode(&self) -> Vec<u8> {
[self.encode_as_bytes().deref(), &[ENCODE_SEPARATOR]].concat()
}
fn encode_secure(&self, table_name: &str, store_cipher: &StoreCipher) -> Vec<u8> {
let key = store_cipher.hash_key(table_name, &self.encode_as_bytes());
[key.as_slice(), &[ENCODE_SEPARATOR]].concat()
}
}
impl<T: EncodeKey + ?Sized> EncodeKey for &T {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
T::encode_as_bytes(self)
}
fn encode(&self) -> Vec<u8> {
T::encode(self)
}
fn encode_secure(&self, table_name: &str, store_cipher: &StoreCipher) -> Vec<u8> {
T::encode_secure(self, table_name, store_cipher)
}
}
impl EncodeKey for str {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
self.as_bytes().into()
}
}
impl EncodeKey for String {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
self.as_str().as_bytes().into()
}
}
impl EncodeKey for DeviceId {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
self.as_str().as_bytes().into()
}
}
impl EncodeKey for EventId {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
self.as_str().as_bytes().into()
}
}
impl EncodeKey for OwnedEventId {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
self.as_str().as_bytes().into()
}
}
impl EncodeKey for RoomId {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
self.as_str().as_bytes().into()
}
}
impl EncodeKey for OwnedRoomId {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
self.as_str().as_bytes().into()
}
}
impl EncodeKey for TransactionId {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
self.as_str().as_bytes().into()
}
}
impl EncodeKey for MxcUri {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
let s: &str = self.as_ref();
s.as_bytes().into()
}
}
impl EncodeKey for SecretName {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
let s: &str = self.as_ref();
s.as_bytes().into()
}
}
impl EncodeKey for ReceiptType {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
let s: &str = self.as_ref();
s.as_bytes().into()
}
}
impl EncodeKey for RoomAccountDataEventType {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
self.to_string().as_bytes().to_vec().into()
}
}
impl EncodeKey for UserId {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
self.as_str().as_bytes().into()
}
}
impl EncodeKey for OwnedUserId {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
self.as_str().as_bytes().into()
}
}
impl EncodeKey for StateEventType {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
self.to_string().as_bytes().to_vec().into()
}
}
impl EncodeKey for GlobalAccountDataEventType {
fn encode_as_bytes(&self) -> Cow<'_, [u8]> {
self.to_string().as_bytes().to_vec().into()
}
}
impl<A, B> EncodeKey for (A, B)
where
A: EncodeKey,
B: EncodeKey,
{
fn encode(&self) -> Vec<u8> {
[
self.0.encode_as_bytes().deref(),
&[ENCODE_SEPARATOR],
self.1.encode_as_bytes().deref(),
&[ENCODE_SEPARATOR],
]
.concat()
}
fn encode_secure(&self, table_name: &str, store_cipher: &StoreCipher) -> Vec<u8> {
[
store_cipher.hash_key(table_name, &self.0.encode_as_bytes()).as_slice(),
&[ENCODE_SEPARATOR],
store_cipher.hash_key(table_name, &self.1.encode_as_bytes()).as_slice(),
&[ENCODE_SEPARATOR],
]
.concat()
}
}
/// A triple is encoded as each component's encoding, each followed by
/// `ENCODE_SEPARATOR`.
impl<A, B, C> EncodeKey for (A, B, C)
where
    A: EncodeKey,
    B: EncodeKey,
    C: EncodeKey,
{
    fn encode(&self) -> Vec<u8> {
        let (a, b, c) =
            (self.0.encode_as_bytes(), self.1.encode_as_bytes(), self.2.encode_as_bytes());
        let mut key = Vec::with_capacity(a.len() + b.len() + c.len() + 3);
        key.extend_from_slice(&a);
        key.push(ENCODE_SEPARATOR);
        key.extend_from_slice(&b);
        key.push(ENCODE_SEPARATOR);
        key.extend_from_slice(&c);
        key.push(ENCODE_SEPARATOR);
        key
    }
    fn encode_secure(&self, table_name: &str, store_cipher: &StoreCipher) -> Vec<u8> {
        // Same layout as `encode`, but every component is hashed first.
        let a = store_cipher.hash_key(table_name, &self.0.encode_as_bytes());
        let b = store_cipher.hash_key(table_name, &self.1.encode_as_bytes());
        let c = store_cipher.hash_key(table_name, &self.2.encode_as_bytes());
        let mut key = Vec::with_capacity(a.len() + b.len() + c.len() + 3);
        key.extend_from_slice(&a);
        key.push(ENCODE_SEPARATOR);
        key.extend_from_slice(&b);
        key.push(ENCODE_SEPARATOR);
        key.extend_from_slice(&c);
        key.push(ENCODE_SEPARATOR);
        key
    }
}
/// A 4-tuple is encoded as each component's encoding, each followed by
/// `ENCODE_SEPARATOR`.
impl<A, B, C, D> EncodeKey for (A, B, C, D)
where
    A: EncodeKey,
    B: EncodeKey,
    C: EncodeKey,
    D: EncodeKey,
{
    fn encode(&self) -> Vec<u8> {
        let parts = [
            self.0.encode_as_bytes(),
            self.1.encode_as_bytes(),
            self.2.encode_as_bytes(),
            self.3.encode_as_bytes(),
        ];
        let total: usize = parts.iter().map(|p| p.len() + 1).sum();
        let mut key = Vec::with_capacity(total);
        for part in &parts {
            key.extend_from_slice(part);
            key.push(ENCODE_SEPARATOR);
        }
        key
    }
    fn encode_secure(&self, table_name: &str, store_cipher: &StoreCipher) -> Vec<u8> {
        // Same layout as `encode`, but every component is hashed first.
        let parts = [
            store_cipher.hash_key(table_name, &self.0.encode_as_bytes()),
            store_cipher.hash_key(table_name, &self.1.encode_as_bytes()),
            store_cipher.hash_key(table_name, &self.2.encode_as_bytes()),
            store_cipher.hash_key(table_name, &self.3.encode_as_bytes()),
        ];
        let total: usize = parts.iter().map(|p| p.len() + 1).sum();
        let mut key = Vec::with_capacity(total);
        for part in &parts {
            key.extend_from_slice(part);
            key.push(ENCODE_SEPARATOR);
        }
        key
    }
}
/// A 5-tuple is encoded as each component's encoding, each followed by
/// `ENCODE_SEPARATOR`.
impl<A, B, C, D, E> EncodeKey for (A, B, C, D, E)
where
    A: EncodeKey,
    B: EncodeKey,
    C: EncodeKey,
    D: EncodeKey,
    E: EncodeKey,
{
    fn encode(&self) -> Vec<u8> {
        let parts = [
            self.0.encode_as_bytes(),
            self.1.encode_as_bytes(),
            self.2.encode_as_bytes(),
            self.3.encode_as_bytes(),
            self.4.encode_as_bytes(),
        ];
        let total: usize = parts.iter().map(|p| p.len() + 1).sum();
        let mut key = Vec::with_capacity(total);
        for part in &parts {
            key.extend_from_slice(part);
            key.push(ENCODE_SEPARATOR);
        }
        key
    }
    fn encode_secure(&self, table_name: &str, store_cipher: &StoreCipher) -> Vec<u8> {
        // Same layout as `encode`, but every component is hashed first.
        let parts = [
            store_cipher.hash_key(table_name, &self.0.encode_as_bytes()),
            store_cipher.hash_key(table_name, &self.1.encode_as_bytes()),
            store_cipher.hash_key(table_name, &self.2.encode_as_bytes()),
            store_cipher.hash_key(table_name, &self.3.encode_as_bytes()),
            store_cipher.hash_key(table_name, &self.4.encode_as_bytes()),
        ];
        let total: usize = parts.iter().map(|p| p.len() + 1).sum();
        let mut key = Vec::with_capacity(total);
        for part in &parts {
            key.extend_from_slice(part);
            key.push(ENCODE_SEPARATOR);
        }
        key
    }
}

View File

@ -1,92 +0,0 @@
#[cfg(any(feature = "state-store", feature = "crypto-store"))]
use matrix_sdk_base::store::StoreConfig;
#[cfg(feature = "state-store")]
use matrix_sdk_base::store::StoreError;
#[cfg(feature = "crypto-store")]
use matrix_sdk_crypto::store::CryptoStoreError;
use sled::Error as SledError;
use thiserror::Error;
#[cfg(feature = "crypto-store")]
mod crypto_store;
mod encode_key;
#[cfg(feature = "state-store")]
mod state_store;
#[cfg(feature = "crypto-store")]
pub use crypto_store::SledCryptoStore;
#[cfg(feature = "state-store")]
pub use state_store::{MigrationConflictStrategy, SledStateStore, SledStateStoreBuilder};
/// All the errors that can occur when opening a sled store.
///
/// Returned by [`make_store_config`] and the individual store constructors.
#[derive(Error, Debug)]
#[non_exhaustive]
pub enum OpenStoreError {
    /// An error occurred with the state store implementation.
    #[cfg(feature = "state-store")]
    #[error(transparent)]
    State(#[from] StoreError),
    /// An error occurred with the crypto store implementation.
    #[cfg(feature = "crypto-store")]
    #[error(transparent)]
    Crypto(#[from] CryptoStoreError),
    /// An error occurred with sled.
    #[error(transparent)]
    Sled(#[from] SledError),
}
/// Create a [`StoreConfig`] with an opened [`SledStateStore`] that uses the
/// given path and passphrase.
///
/// If the `crypto-store` Cargo feature is enabled, a [`SledCryptoStore`] with
/// the same parameters is also opened.
///
/// [`StoreConfig`]: matrix_sdk_base::store::StoreConfig
#[cfg(any(feature = "state-store", feature = "crypto-store"))]
pub async fn make_store_config(
    path: impl AsRef<std::path::Path>,
    passphrase: Option<&str>,
) -> Result<StoreConfig, OpenStoreError> {
    // Both stores requested: share a single sled database between them.
    #[cfg(all(feature = "crypto-store", feature = "state-store"))]
    {
        let (state_store, crypto_store) = open_stores_with_path(path, passphrase).await?;
        Ok(StoreConfig::new().state_store(state_store).crypto_store(crypto_store))
    }
    // Crypto store only.
    #[cfg(all(feature = "crypto-store", not(feature = "state-store")))]
    {
        let crypto_store = SledCryptoStore::open(path, passphrase).await?;
        Ok(StoreConfig::new().crypto_store(crypto_store))
    }
    // State store only (the `crypto-store` feature is disabled).
    #[cfg(not(feature = "crypto-store"))]
    {
        let mut store_builder = SledStateStore::builder().path(path.as_ref().to_path_buf());
        if let Some(passphrase) = passphrase {
            store_builder = store_builder.passphrase(passphrase.to_owned());
        }
        let state_store = store_builder.build().map_err(StoreError::backend)?;
        Ok(StoreConfig::new().state_store(state_store))
    }
}
/// Create a [`SledStateStore`] and a [`SledCryptoStore`] that use the same
/// database and passphrase.
///
/// The crypto store is derived from the state store so both share the same
/// underlying sled database and cipher.
#[cfg(all(feature = "state-store", feature = "crypto-store"))]
async fn open_stores_with_path(
    path: impl AsRef<std::path::Path>,
    passphrase: Option<&str>,
) -> Result<(SledStateStore, SledCryptoStore), OpenStoreError> {
    let mut store_builder = SledStateStore::builder().path(path.as_ref().to_path_buf());
    if let Some(passphrase) = passphrase {
        store_builder = store_builder.passphrase(passphrase.to_owned());
    }
    let state_store = store_builder.build().map_err(StoreError::backend)?;
    let crypto_store = state_store.open_crypto_store().await?;
    Ok((state_store, crypto_store))
}

View File

@ -1,822 +0,0 @@
// Copyright 2021 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use matrix_sdk_base::{
store::{Result as StoreResult, StoreError},
RoomInfo, StateStoreDataKey,
};
use ruma::{
events::{
room::member::{StrippedRoomMemberEvent, SyncRoomMemberEvent},
StateEventType,
},
serde::Raw,
};
use serde_json::value::{RawValue as RawJsonValue, Value as JsonValue};
use sled::{transaction::TransactionError, Batch, Transactional, Tree};
use tracing::debug;
use super::{keys, Result, RoomMember, SledStateStore, SledStoreError};
use crate::encode_key::EncodeKey;
// Current schema version of the Sled state store; `upgrade` migrates to this.
const DATABASE_VERSION: u8 = 7;
// Database key under which the schema version byte is stored.
const VERSION_KEY: &str = "state-store-version";
/// Sometimes Migrations can't proceed without having to drop existing
/// data. This allows you to configure, how these cases should be handled.
///
/// Consulted when `SledStateStore::upgrade` hits a migration conflict.
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum MigrationConflictStrategy {
    /// Just drop the data, we don't care that we have to sync again
    Drop,
    /// Raise a `SledStoreError::MigrationConflict` error with the path to the
    /// DB in question. The caller then has to take care about what they want
    /// to do and try again after.
    Raise,
    /// _Default_: The _entire_ database is backed up under
    /// `$path.$timestamp.backup` (this includes the crypto store if they
    /// are linked), before the state tables are dropped.
    BackupAndDrop,
}
impl SledStateStore {
    /// Bring the on-disk schema up to [`DATABASE_VERSION`], running every
    /// intermediate migration step that applies.
    ///
    /// Returns `SledStoreError::MigrationConflict` when the data cannot be
    /// migrated automatically; the builder then acts according to the
    /// configured [`MigrationConflictStrategy`].
    pub(super) fn upgrade(&mut self) -> Result<()> {
        let old_version = self.db_version()?;
        if old_version == 0 {
            // we are fresh, let's write the current version
            return self.set_db_version(DATABASE_VERSION);
        }
        if old_version == DATABASE_VERSION {
            // current, we don't have to do anything
            return Ok(());
        };
        debug!(old_version, new_version = DATABASE_VERSION, "Upgrading the Sled state store");
        if old_version == 1 && self.store_cipher.is_some() {
            // we stored some fields un-encrypted. Drop them to force re-creation
            return Err(SledStoreError::MigrationConflict {
                path: self.path.take().expect("Path must exist for a migration to fail"),
                old_version: old_version.into(),
                new_version: DATABASE_VERSION.into(),
            });
        }
        if old_version < 3 {
            self.migrate_to_v3()?;
        }
        if old_version < 4 {
            self.migrate_to_v4()?;
        }
        if old_version < 5 {
            self.migrate_to_v5()?;
            // NOTE(review): this early return leaves the DB at version 5; the
            // v7 migration only runs on the *next* open — confirm intentional.
            return Ok(());
        }
        // Version 6 was dropped and migration is similar to v7.
        if old_version < 7 {
            self.migrate_to_v7()?;
            return Ok(());
        }
        // FUTURE UPGRADE CODE GOES HERE
        // can't upgrade from that version to the new one
        Err(SledStoreError::MigrationConflict {
            path: self.path.take().expect("Path must exist for a migration to fail"),
            old_version: old_version.into(),
            new_version: DATABASE_VERSION.into(),
        })
    }

    /// Get the version of the database.
    ///
    /// Returns `0` for a new database.
    fn db_version(&self) -> Result<u8> {
        Ok(self
            .inner
            .get(VERSION_KEY)?
            .map(|v| {
                // Only the first byte of the stored value carries the version.
                let (version_bytes, _) = v.split_at(std::mem::size_of::<u8>());
                u8::from_be_bytes(version_bytes.try_into().unwrap_or_default())
            })
            .unwrap_or_default())
    }

    /// Persist `version` and flush so the marker survives a crash.
    fn set_db_version(&self, version: u8) -> Result<()> {
        self.inner.insert(VERSION_KEY, version.to_be_bytes().as_ref())?;
        self.inner.flush()?;
        Ok(())
    }

    /// Drop every tree of the v1 schema plus the stored version marker.
    pub fn drop_v1_tables(self) -> StoreResult<()> {
        for name in V1_DB_STORES {
            self.inner.drop_tree(name).map_err(StoreError::backend)?;
        }
        self.inner.remove(VERSION_KEY).map_err(StoreError::backend)?;
        Ok(())
    }

    /// Queue fixes into `batch` for events stored with `"content": null`,
    /// replacing the null content with an empty JSON object.
    fn v3_fix_tree(&self, tree: &Tree, batch: &mut Batch) -> Result<()> {
        // Returns the repaired JSON, or `None` when the value needs no fix.
        fn maybe_fix_json(raw_json: &RawJsonValue) -> Result<Option<JsonValue>> {
            let json = raw_json.get();
            if json.contains(r#""content":null"#) {
                let mut value: JsonValue = serde_json::from_str(json)?;
                if let Some(content) = value.get_mut("content") {
                    if matches!(content, JsonValue::Null) {
                        *content = JsonValue::Object(Default::default());
                        return Ok(Some(value));
                    }
                }
            }
            Ok(None)
        }
        for entry in tree.iter() {
            let (key, value) = entry?;
            let raw_json: Box<RawJsonValue> = self.deserialize_value(&value)?;
            if let Some(fixed_json) = maybe_fix_json(&raw_json)? {
                batch.insert(key, self.serialize_value(&fixed_json)?);
            }
        }
        Ok(())
    }

    /// v3: repair room-info and room-state events whose `content` was `null`.
    fn migrate_to_v3(&self) -> Result<()> {
        let mut room_info_batch = sled::Batch::default();
        self.v3_fix_tree(&self.room_info, &mut room_info_batch)?;
        let mut room_state_batch = sled::Batch::default();
        self.v3_fix_tree(&self.room_state, &mut room_state_batch)?;
        // Apply both batches in a single sled transaction.
        let ret: Result<(), TransactionError<SledStoreError>> = (&self.room_info, &self.room_state)
            .transaction(|(room_info, room_state)| {
                room_info.apply_batch(&room_info_batch)?;
                room_state.apply_batch(&room_state_batch)?;
                Ok(())
            });
        ret?;
        self.set_db_version(3u8)
    }

    /// Replace the SYNC_TOKEN and SESSION trees by KV.
    fn migrate_to_v4(&self) -> Result<()> {
        {
            let session = &self.inner.open_tree(old_keys::SESSION)?;
            let mut batch = sled::Batch::default();
            // Sync token
            let sync_token = session.get(StateStoreDataKey::SYNC_TOKEN.encode())?;
            if let Some(sync_token) = sync_token {
                batch.insert(StateStoreDataKey::SYNC_TOKEN.encode(), sync_token);
            }
            // Filters
            let key = self.encode_key(keys::SESSION, StateStoreDataKey::FILTER);
            for res in session.scan_prefix(key) {
                let (key, value) = res?;
                batch.insert(key, value);
            }
            self.kv.apply_batch(batch)?;
        }
        // This was unused so we can just drop it.
        self.inner.drop_tree(old_keys::SYNC_TOKEN)?;
        self.inner.drop_tree(old_keys::SESSION)?;
        self.set_db_version(4)
    }

    /// Move the member events with the other state events.
    fn migrate_to_v5(&self) -> Result<()> {
        {
            // Copy each (stripped) member event into the corresponding
            // (stripped) state tree, keyed as an m.room.member state event.
            let members = &self.inner.open_tree(old_keys::MEMBER)?;
            let mut state_batch = sled::Batch::default();
            for room_info in
                self.room_info.iter().map(|r| self.deserialize_value::<RoomInfo>(&r?.1))
            {
                let room_info = room_info?;
                let room_id = room_info.room_id();
                let prefix = self.encode_key(old_keys::MEMBER, room_id);
                for entry in members.scan_prefix(prefix) {
                    let (_, value) = entry?;
                    let raw_member_event =
                        self.deserialize_value::<Raw<SyncRoomMemberEvent>>(&value)?;
                    let state_key =
                        raw_member_event.get_field::<String>("state_key")?.unwrap_or_default();
                    let key = self.encode_key(
                        keys::ROOM_STATE,
                        (room_id, StateEventType::RoomMember, state_key),
                    );
                    state_batch.insert(key, value);
                }
            }
            let stripped_members = &self.inner.open_tree(old_keys::STRIPPED_ROOM_MEMBER)?;
            let mut stripped_state_batch = sled::Batch::default();
            for room_info in
                self.stripped_room_infos.iter().map(|r| self.deserialize_value::<RoomInfo>(&r?.1))
            {
                let room_info = room_info?;
                let room_id = room_info.room_id();
                let prefix = self.encode_key(old_keys::STRIPPED_ROOM_MEMBER, room_id);
                for entry in stripped_members.scan_prefix(prefix) {
                    let (_, value) = entry?;
                    let raw_member_event =
                        self.deserialize_value::<Raw<StrippedRoomMemberEvent>>(&value)?;
                    let state_key =
                        raw_member_event.get_field::<String>("state_key")?.unwrap_or_default();
                    let key = self.encode_key(
                        keys::STRIPPED_ROOM_STATE,
                        (room_id, StateEventType::RoomMember, state_key),
                    );
                    stripped_state_batch.insert(key, value);
                }
            }
            // Apply both batches atomically.
            let ret: Result<(), TransactionError<SledStoreError>> =
                (&self.room_state, &self.stripped_room_state).transaction(
                    |(room_state, stripped_room_state)| {
                        room_state.apply_batch(&state_batch)?;
                        stripped_room_state.apply_batch(&stripped_state_batch)?;
                        Ok(())
                    },
                );
            ret?;
        }
        self.inner.drop_tree(old_keys::MEMBER)?;
        self.inner.drop_tree(old_keys::STRIPPED_ROOM_MEMBER)?;
        self.set_db_version(5)
    }

    /// Remove the old user IDs stores and populate the new ones.
    fn migrate_to_v7(&self) -> Result<()> {
        {
            // Reset v6 stores.
            self.user_ids.clear()?;
            self.stripped_user_ids.clear()?;
            // We only have joined and invited user IDs in the old stores, so instead we
            // use the room member events to populate the new stores.
            let state = &self.inner.open_tree(keys::ROOM_STATE)?;
            let mut user_ids_batch = sled::Batch::default();
            for room_info in
                self.room_info.iter().map(|r| self.deserialize_value::<RoomInfo>(&r?.1))
            {
                let room_info = room_info?;
                let room_id = room_info.room_id();
                let prefix =
                    self.encode_key(keys::ROOM_STATE, (room_id, StateEventType::RoomMember));
                for entry in state.scan_prefix(prefix) {
                    let (_, value) = entry?;
                    let member_event = self
                        .deserialize_value::<Raw<SyncRoomMemberEvent>>(&value)?
                        .deserialize()?;
                    let key = self.encode_key(keys::USER_ID, (room_id, member_event.state_key()));
                    let value = self.serialize_value(&RoomMember::from(&member_event))?;
                    user_ids_batch.insert(key, value);
                }
            }
            let stripped_state = &self.inner.open_tree(keys::STRIPPED_ROOM_STATE)?;
            let mut stripped_user_ids_batch = sled::Batch::default();
            for room_info in
                self.stripped_room_infos.iter().map(|r| self.deserialize_value::<RoomInfo>(&r?.1))
            {
                let room_info = room_info?;
                let room_id = room_info.room_id();
                let prefix = self
                    .encode_key(keys::STRIPPED_ROOM_STATE, (room_id, StateEventType::RoomMember));
                for entry in stripped_state.scan_prefix(prefix) {
                    let (_, value) = entry?;
                    let stripped_member_event = self
                        .deserialize_value::<Raw<StrippedRoomMemberEvent>>(&value)?
                        .deserialize()?;
                    let key = self.encode_key(
                        keys::STRIPPED_USER_ID,
                        (room_id, &stripped_member_event.state_key),
                    );
                    let value = self.serialize_value(&RoomMember::from(&stripped_member_event))?;
                    stripped_user_ids_batch.insert(key, value);
                }
            }
            // Apply both batches atomically.
            let ret: Result<(), TransactionError<SledStoreError>> =
                (&self.user_ids, &self.stripped_user_ids).transaction(
                    |(user_ids, stripped_user_ids)| {
                        user_ids.apply_batch(&user_ids_batch)?;
                        stripped_user_ids.apply_batch(&stripped_user_ids_batch)?;
                        Ok(())
                    },
                );
            ret?;
        }
        self.inner.drop_tree(old_keys::JOINED_USER_ID)?;
        self.inner.drop_tree(old_keys::INVITED_USER_ID)?;
        self.inner.drop_tree(old_keys::STRIPPED_JOINED_USER_ID)?;
        self.inner.drop_tree(old_keys::STRIPPED_INVITED_USER_ID)?;
        self.set_db_version(7)
    }
}
mod old_keys {
    /// Old stores.
    ///
    /// Tree names that only exist in pre-v7 schemas; the migrations above
    /// drain them and drop the trees.
    pub const SYNC_TOKEN: &str = "sync_token";
    pub const SESSION: &str = "session";
    pub const MEMBER: &str = "member";
    pub const STRIPPED_ROOM_MEMBER: &str = "stripped-room-member";
    pub const INVITED_USER_ID: &str = "invited-user-id";
    pub const JOINED_USER_ID: &str = "joined-user-id";
    pub const STRIPPED_INVITED_USER_ID: &str = "stripped-invited-user-id";
    pub const STRIPPED_JOINED_USER_ID: &str = "stripped-joined-user-id";
}
/// Every tree that made up the v1 schema; iterated by `drop_v1_tables`.
pub const V1_DB_STORES: &[&str] = &[
    keys::ACCOUNT_DATA,
    old_keys::SYNC_TOKEN,
    keys::DISPLAY_NAME,
    old_keys::INVITED_USER_ID,
    old_keys::JOINED_USER_ID,
    keys::MEDIA,
    old_keys::MEMBER,
    keys::PRESENCE,
    keys::PROFILE,
    keys::ROOM_ACCOUNT_DATA,
    keys::ROOM_EVENT_RECEIPT,
    keys::ROOM_INFO,
    keys::ROOM_STATE,
    keys::ROOM_USER_RECEIPT,
    keys::ROOM,
    old_keys::SESSION,
    old_keys::STRIPPED_INVITED_USER_ID,
    old_keys::STRIPPED_JOINED_USER_ID,
    keys::STRIPPED_ROOM_INFO,
    old_keys::STRIPPED_ROOM_MEMBER,
    keys::STRIPPED_ROOM_STATE,
    keys::CUSTOM,
];
/// Tests covering each schema-migration step of the Sled state store.
#[cfg(test)]
mod test {
    use assert_matches::assert_matches;
    use matrix_sdk_base::{
        deserialized_responses::RawMemberEvent, RoomInfo, RoomMemberships, RoomState,
        StateStoreDataKey,
    };
    use matrix_sdk_test::{async_test, test_json};
    use ruma::{
        events::{
            room::member::{StrippedRoomMemberEvent, SyncRoomMemberEvent},
            AnySyncStateEvent, StateEventType,
        },
        room_id,
        serde::Raw,
        user_id,
    };
    use serde_json::json;
    use tempfile::TempDir;
    use super::{old_keys, MigrationConflictStrategy};
    use crate::{
        encode_key::EncodeKey,
        state_store::{keys, Result, SledStateStore, SledStoreError},
    };

    // A v1 *unencrypted* store upgrades transparently on reopen.
    #[async_test]
    pub async fn migrating_v1_to_2_plain() -> Result<()> {
        let folder = TempDir::new()?;
        let store = SledStateStore::builder().path(folder.path().to_path_buf()).build()?;
        store.set_db_version(1u8)?;
        drop(store);
        // this transparently migrates to the latest version
        let _store = SledStateStore::builder().path(folder.path().to_path_buf()).build()?;
        Ok(())
    }

    // A v1 *encrypted* store with the default strategy is backed up, then
    // recreated (two directory entries: the backup and the fresh DB).
    #[async_test]
    pub async fn migrating_v1_to_2_with_pw_backed_up() -> Result<()> {
        let folder = TempDir::new()?;
        let store = SledStateStore::builder()
            .path(folder.path().to_path_buf())
            .passphrase("something".to_owned())
            .build()?;
        store.set_db_version(1u8)?;
        drop(store);
        // this transparently creates a backup and a fresh db
        let _store = SledStateStore::builder()
            .path(folder.path().to_path_buf())
            .passphrase("something".to_owned())
            .build()?;
        assert_eq!(std::fs::read_dir(folder.path())?.count(), 2);
        Ok(())
    }

    // With `Drop`, the conflicting data is discarded in place: no backup.
    #[async_test]
    pub async fn migrating_v1_to_2_with_pw_drop() -> Result<()> {
        let folder = TempDir::new()?;
        let store = SledStateStore::builder()
            .path(folder.path().to_path_buf())
            .passphrase("other thing".to_owned())
            .build()?;
        store.set_db_version(1u8)?;
        drop(store);
        // this transparently creates a backup and a fresh db
        let _store = SledStateStore::builder()
            .path(folder.path().to_path_buf())
            .passphrase("other thing".to_owned())
            .migration_conflict_strategy(MigrationConflictStrategy::Drop)
            .build()?;
        assert_eq!(std::fs::read_dir(folder.path())?.count(), 1);
        Ok(())
    }

    // With `Raise`, the builder surfaces `MigrationConflict` to the caller.
    #[async_test]
    pub async fn migrating_v1_to_2_with_pw_raises() -> Result<()> {
        let folder = TempDir::new()?;
        let store = SledStateStore::builder()
            .path(folder.path().to_path_buf())
            .passphrase("secret".to_owned())
            .build()?;
        store.set_db_version(1u8)?;
        drop(store);
        // this transparently creates a backup and a fresh db
        let res = SledStateStore::builder()
            .path(folder.path().to_path_buf())
            .passphrase("secret".to_owned())
            .migration_conflict_strategy(MigrationConflictStrategy::Raise)
            .build();
        if let Err(SledStoreError::MigrationConflict { .. }) = res {
            // all good
        } else {
            panic!("Didn't raise the expected error: {res:?}");
        }
        assert_eq!(std::fs::read_dir(folder.path())?.count(), 1);
        Ok(())
    }

    // The v3 migration must repair a stored event whose `content` is `null`.
    #[async_test]
    pub async fn migrating_v2_to_v3() {
        // An event that fails to deserialize.
        let wrong_redacted_state_event = json!({
            "content": null,
            "event_id": "$wrongevent",
            "origin_server_ts": 1673887516047_u64,
            "sender": "@example:localhost",
            "state_key": "",
            "type": "m.room.topic",
            "unsigned": {
                "redacted_because": {
                    "type": "m.room.redaction",
                    "sender": "@example:localhost",
                    "content": {},
                    "redacts": "$wrongevent",
                    "origin_server_ts": 1673893816047_u64,
                    "unsigned": {},
                    "event_id": "$redactionevent",
                },
            },
        });
        serde_json::from_value::<AnySyncStateEvent>(wrong_redacted_state_event.clone())
            .unwrap_err();
        let room_id = room_id!("!some_room:localhost");
        let folder = TempDir::new().unwrap();
        let store = SledStateStore::builder()
            .path(folder.path().to_path_buf())
            .passphrase("secret".to_owned())
            .build()
            .unwrap();
        store
            .room_state
            .insert(
                store.encode_key(keys::ROOM_STATE, (room_id, StateEventType::RoomTopic, "")),
                store.serialize_value(&wrong_redacted_state_event).unwrap(),
            )
            .unwrap();
        store.set_db_version(2u8).unwrap();
        drop(store);
        let store = SledStateStore::builder()
            .path(folder.path().to_path_buf())
            .passphrase("secret".to_owned())
            .build()
            .unwrap();
        // After migration the event must deserialize cleanly.
        let event =
            store.get_state_event(room_id, StateEventType::RoomTopic, "").await.unwrap().unwrap();
        event.deserialize().unwrap();
    }

    // The v4 migration moves sync token and filters from SESSION into the KV
    // tree without losing any values.
    #[async_test]
    pub async fn migrating_v3_to_v4() {
        let sync_token = "a_very_unique_string";
        let filter_1 = "filter_1";
        let filter_1_id = "filter_1_id";
        let filter_2 = "filter_2";
        let filter_2_id = "filter_2_id";
        let folder = TempDir::new().unwrap();
        let store = SledStateStore::builder()
            .path(folder.path().to_path_buf())
            .passphrase("secret".to_owned())
            .build()
            .unwrap();
        let session = store.inner.open_tree(old_keys::SESSION).unwrap();
        let mut batch = sled::Batch::default();
        batch.insert(
            StateStoreDataKey::SYNC_TOKEN.encode(),
            store.serialize_value(&sync_token).unwrap(),
        );
        batch.insert(
            store.encode_key(keys::SESSION, (StateStoreDataKey::FILTER, filter_1)),
            store.serialize_value(&filter_1_id).unwrap(),
        );
        batch.insert(
            store.encode_key(keys::SESSION, (StateStoreDataKey::FILTER, filter_2)),
            store.serialize_value(&filter_2_id).unwrap(),
        );
        session.apply_batch(batch).unwrap();
        store.set_db_version(3).unwrap();
        drop(session);
        drop(store);
        let store = SledStateStore::builder()
            .path(folder.path().to_path_buf())
            .passphrase("secret".to_owned())
            .build()
            .unwrap();
        let stored_sync_token = store
            .get_kv_data(StateStoreDataKey::SyncToken)
            .await
            .unwrap()
            .unwrap()
            .into_sync_token()
            .unwrap();
        assert_eq!(stored_sync_token, sync_token);
        let stored_filter_1_id = store
            .get_kv_data(StateStoreDataKey::Filter(filter_1))
            .await
            .unwrap()
            .unwrap()
            .into_filter()
            .unwrap();
        assert_eq!(stored_filter_1_id, filter_1_id);
        let stored_filter_2_id = store
            .get_kv_data(StateStoreDataKey::Filter(filter_2))
            .await
            .unwrap()
            .unwrap()
            .into_filter()
            .unwrap();
        assert_eq!(stored_filter_2_id, filter_2_id);
    }

    // The v5 migration moves (stripped) member events into the (stripped)
    // state trees; they must remain retrievable afterwards.
    #[async_test]
    pub async fn migrating_v4_to_v5() {
        let room_id = room_id!("!room:localhost");
        let member_event =
            Raw::new(&*test_json::MEMBER_INVITE).unwrap().cast::<SyncRoomMemberEvent>();
        let user_id = user_id!("@invited:localhost");
        let stripped_room_id = room_id!("!stripped_room:localhost");
        let stripped_member_event =
            Raw::new(&*test_json::MEMBER_STRIPPED).unwrap().cast::<StrippedRoomMemberEvent>();
        let stripped_user_id = user_id!("@example:localhost");
        let folder = TempDir::new().unwrap();
        {
            let store = SledStateStore::builder()
                .path(folder.path().to_path_buf())
                .passphrase("secret".to_owned())
                .build()
                .unwrap();
            let members = store.inner.open_tree(old_keys::MEMBER).unwrap();
            members
                .insert(
                    store.encode_key(old_keys::MEMBER, (room_id, user_id)),
                    store.serialize_value(&member_event).unwrap(),
                )
                .unwrap();
            let room_infos = store.inner.open_tree(keys::ROOM_INFO).unwrap();
            let room_info = RoomInfo::new(room_id, RoomState::Joined);
            room_infos
                .insert(
                    store.encode_key(keys::ROOM_INFO, room_id),
                    store.serialize_value(&room_info).unwrap(),
                )
                .unwrap();
            let stripped_members = store.inner.open_tree(old_keys::STRIPPED_ROOM_MEMBER).unwrap();
            stripped_members
                .insert(
                    store.encode_key(
                        old_keys::STRIPPED_ROOM_MEMBER,
                        (stripped_room_id, stripped_user_id),
                    ),
                    store.serialize_value(&stripped_member_event).unwrap(),
                )
                .unwrap();
            let stripped_room_infos = store.inner.open_tree(keys::STRIPPED_ROOM_INFO).unwrap();
            let stripped_room_info = RoomInfo::new(stripped_room_id, RoomState::Invited);
            stripped_room_infos
                .insert(
                    store.encode_key(keys::STRIPPED_ROOM_INFO, stripped_room_id),
                    store.serialize_value(&stripped_room_info).unwrap(),
                )
                .unwrap();
            store.set_db_version(4).unwrap();
        }
        let store = SledStateStore::builder()
            .path(folder.path().to_path_buf())
            .passphrase("secret".to_owned())
            .build()
            .unwrap();
        let stored_member_event = assert_matches!(
            store.get_member_event(room_id, user_id).await,
            Ok(Some(RawMemberEvent::Sync(e))) => e
        );
        assert_eq!(stored_member_event.json().get(), member_event.json().get());
        let stored_stripped_member_event = assert_matches!(
            store.get_member_event(stripped_room_id, stripped_user_id).await,
            Ok(Some(RawMemberEvent::Stripped(e))) => e
        );
        assert_eq!(stored_stripped_member_event.json().get(), stripped_member_event.json().get());
    }

    // The v7 migration rebuilds the user-ID stores from the member events;
    // membership queries must reflect the migrated data.
    #[async_test]
    pub async fn migrating_v5_to_v7() {
        let room_id = room_id!("!room:localhost");
        let invite_member_event =
            Raw::new(&*test_json::MEMBER_INVITE).unwrap().cast::<SyncRoomMemberEvent>();
        let invite_user_id = user_id!("@invited:localhost");
        let ban_member_event =
            Raw::new(&*test_json::MEMBER_BAN).unwrap().cast::<SyncRoomMemberEvent>();
        let ban_user_id = user_id!("@banned:localhost");
        let stripped_room_id = room_id!("!stripped_room:localhost");
        let stripped_member_event =
            Raw::new(&*test_json::MEMBER_STRIPPED).unwrap().cast::<StrippedRoomMemberEvent>();
        let stripped_user_id = user_id!("@example:localhost");
        let folder = TempDir::new().unwrap();
        {
            let store = SledStateStore::builder()
                .path(folder.path().to_path_buf())
                .passphrase("secret".to_owned())
                .build()
                .unwrap();
            let state = store.inner.open_tree(keys::ROOM_STATE).unwrap();
            state
                .insert(
                    store.encode_key(
                        keys::ROOM_STATE,
                        (room_id, StateEventType::RoomMember, invite_user_id),
                    ),
                    store.serialize_value(&invite_member_event).unwrap(),
                )
                .unwrap();
            state
                .insert(
                    store.encode_key(
                        keys::ROOM_STATE,
                        (room_id, StateEventType::RoomMember, ban_user_id),
                    ),
                    store.serialize_value(&ban_member_event).unwrap(),
                )
                .unwrap();
            let room_infos = store.inner.open_tree(keys::ROOM_INFO).unwrap();
            let room_info = RoomInfo::new(room_id, RoomState::Joined);
            room_infos
                .insert(
                    store.encode_key(keys::ROOM_INFO, room_id),
                    store.serialize_value(&room_info).unwrap(),
                )
                .unwrap();
            let stripped_state = store.inner.open_tree(keys::STRIPPED_ROOM_STATE).unwrap();
            stripped_state
                .insert(
                    store.encode_key(
                        keys::STRIPPED_ROOM_STATE,
                        (stripped_room_id, StateEventType::RoomMember, stripped_user_id),
                    ),
                    store.serialize_value(&stripped_member_event).unwrap(),
                )
                .unwrap();
            let stripped_room_infos = store.inner.open_tree(keys::STRIPPED_ROOM_INFO).unwrap();
            let stripped_room_info = RoomInfo::new(stripped_room_id, RoomState::Invited);
            stripped_room_infos
                .insert(
                    store.encode_key(keys::STRIPPED_ROOM_INFO, stripped_room_id),
                    store.serialize_value(&stripped_room_info).unwrap(),
                )
                .unwrap();
            store.set_db_version(5).unwrap();
        }
        let store = SledStateStore::builder()
            .path(folder.path().to_path_buf())
            .passphrase("secret".to_owned())
            .build()
            .unwrap();
        assert_eq!(
            store.get_user_ids(room_id, RoomMemberships::JOIN, false).await.unwrap().len(),
            0
        );
        assert_eq!(
            store.get_user_ids(room_id, RoomMemberships::INVITE, false).await.unwrap().as_slice(),
            [invite_user_id.to_owned()]
        );
        let user_ids = store.get_user_ids(room_id, RoomMemberships::empty(), false).await.unwrap();
        assert_eq!(user_ids.len(), 2);
        assert!(user_ids.contains(&invite_user_id.to_owned()));
        assert!(user_ids.contains(&ban_user_id.to_owned()));
        assert_eq!(
            store
                .get_user_ids(stripped_room_id, RoomMemberships::JOIN, true)
                .await
                .unwrap()
                .as_slice(),
            [stripped_user_id.to_owned()]
        );
        assert_eq!(
            store
                .get_user_ids(stripped_room_id, RoomMemberships::INVITE, true)
                .await
                .unwrap()
                .len(),
            0
        );
        assert_eq!(
            store
                .get_user_ids(stripped_room_id, RoomMemberships::empty(), true)
                .await
                .unwrap()
                .as_slice(),
            [stripped_user_id.to_owned()]
        );
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -9,7 +9,7 @@ use std::{
use async_trait::async_trait;
use deadpool_sqlite::{Object as SqliteConn, Pool as SqlitePool, Runtime};
use matrix_sdk_base::{
deserialized_responses::RawMemberEvent,
deserialized_responses::{RawAnySyncOrStrippedState, RawMemberEvent},
media::{MediaRequest, UniqueKey},
RoomInfo, RoomMemberships, RoomState, StateChanges, StateStore, StateStoreDataKey,
StateStoreDataValue,
@ -547,24 +547,6 @@ trait SqliteObjectStateStoreExt: SqliteObjectExt {
.await?)
}
async fn get_state_event(
&self,
room_id: Key,
event_type: Key,
state_key: Key,
stripped: bool,
) -> Result<Option<Vec<u8>>> {
Ok(self
.query_row(
"SELECT data FROM state_event
WHERE room_id = ? AND event_type = ? AND state_key = ? AND stripped = ?",
(room_id, event_type, state_key, stripped),
|row| row.get(0),
)
.await
.optional()?)
}
async fn get_maybe_stripped_state_event(
&self,
room_id: Key,
@ -582,12 +564,20 @@ trait SqliteObjectStateStoreExt: SqliteObjectExt {
.optional()?)
}
async fn get_state_events(&self, room_id: Key, event_type: Key) -> Result<Vec<Vec<u8>>> {
async fn get_maybe_stripped_state_events(
&self,
room_id: Key,
event_type: Key,
) -> Result<Vec<(bool, Vec<u8>)>> {
Ok(self
.prepare(
"SELECT data FROM state_event
WHERE room_id = ? AND event_type = ? AND stripped = FALSE",
|mut stmt| stmt.query((room_id, event_type))?.mapped(|row| row.get(0)).collect(),
"SELECT stripped, data FROM state_event
WHERE room_id = ? AND event_type = ?",
|mut stmt| {
stmt.query((room_id, event_type))?
.mapped(|row| Ok((row.get(0)?, row.get(1)?)))
.collect()
},
)
.await?)
}
@ -1080,15 +1070,23 @@ impl StateStore for SqliteStateStore {
room_id: &RoomId,
event_type: StateEventType,
state_key: &str,
) -> Result<Option<Raw<AnySyncStateEvent>>> {
) -> Result<Option<RawAnySyncOrStrippedState>> {
let room_id = self.encode_key(keys::STATE_EVENT, room_id);
let event_type = self.encode_key(keys::STATE_EVENT, event_type.to_string());
let state_key = self.encode_key(keys::STATE_EVENT, state_key);
self.acquire()
.await?
.get_state_event(room_id, event_type, state_key, false)
.get_maybe_stripped_state_event(room_id, event_type, state_key)
.await?
.map(|data| self.deserialize_json(&data))
.map(|(stripped, data)| {
let ev = if stripped {
RawAnySyncOrStrippedState::Stripped(self.deserialize_json(&data)?)
} else {
RawAnySyncOrStrippedState::Sync(self.deserialize_json(&data)?)
};
Ok(ev)
})
.transpose()
}
@ -1096,15 +1094,23 @@ impl StateStore for SqliteStateStore {
&self,
room_id: &RoomId,
event_type: StateEventType,
) -> Result<Vec<Raw<AnySyncStateEvent>>> {
) -> Result<Vec<RawAnySyncOrStrippedState>> {
let room_id = self.encode_key(keys::STATE_EVENT, room_id);
let event_type = self.encode_key(keys::STATE_EVENT, event_type.to_string());
self.acquire()
.await?
.get_state_events(room_id, event_type)
.get_maybe_stripped_state_events(room_id, event_type)
.await?
.iter()
.map(|data| self.deserialize_json(data))
.into_iter()
.map(|(stripped, data)| {
let ev = if stripped {
RawAnySyncOrStrippedState::Stripped(self.deserialize_json(&data)?)
} else {
RawAnySyncOrStrippedState::Sync(self.deserialize_json(&data)?)
};
Ok(ev)
})
.collect()
}

View File

@ -45,6 +45,7 @@ assert-json-diff = "2.0"
assert_matches = { workspace = true }
ctor = { workspace = true }
matrix-sdk-test = { version = "0.6.0", path = "../../testing/matrix-sdk-test" }
stream_assert = "0.1.0"
tracing-subscriber = { version = "0.3.11", features = ["env-filter"] }
wiremock = "0.5.13"

View File

@ -96,8 +96,10 @@ impl RoomList {
/// already pre-configured.
pub async fn new(client: Client) -> Result<Self, Error> {
let sliding_sync = client
.sliding_sync()
.storage_key(Some("matrix-sdk-ui-roomlist".to_string()))
.sliding_sync("room-list")
.map_err(Error::SlidingSync)?
.enable_caching()
.map_err(Error::SlidingSync)?
.add_cached_list(
SlidingSyncList::builder(ALL_ROOMS_LIST_NAME)
.sync_mode(SlidingSyncMode::new_selective().add_range(0..=19))

View File

@ -16,19 +16,15 @@ use std::sync::Arc;
use imbl::Vector;
use matrix_sdk::{
deserialized_responses::{EncryptionInfo, SyncTimelineEvent},
room,
deserialized_responses::SyncTimelineEvent, executor::spawn, room, sync::RoomUpdate,
};
use ruma::{
events::receipt::{ReceiptThread, ReceiptType, SyncReceiptEvent},
push::Action,
};
use tokio::sync::Mutex;
use tracing::error;
use ruma::events::receipt::{ReceiptThread, ReceiptType};
use tokio::sync::{broadcast, Mutex};
use tracing::{error, warn};
#[cfg(feature = "e2e-encryption")]
use super::to_device::{handle_forwarded_room_key_event, handle_room_key_event};
use super::{inner::TimelineInner, Timeline, TimelineEventHandlerHandles};
use super::{inner::TimelineInner, Timeline, TimelineDropHandle};
/// Builder that allows creating and configuring various parts of a
/// [`Timeline`].
@ -119,17 +115,40 @@ impl TimelineBuilder {
if has_events {
inner.add_initial_events(events).await;
}
if track_read_marker_and_receipts {
inner.load_fully_read_event().await;
}
let inner = Arc::new(inner);
let room = inner.room();
let client = room.client();
let timeline_event_handle = room.add_event_handler({
let mut room_update_rx = room.subscribe_to_updates();
let room_update_join_handle = spawn({
let inner = inner.clone();
move |event, encryption_info: Option<EncryptionInfo>, push_actions: Vec<Action>| {
let inner = inner.clone();
async move {
inner.handle_live_event(event, encryption_info, push_actions).await;
async move {
loop {
let update = match room_update_rx.recv().await {
Ok(up) => up,
Err(broadcast::error::RecvError::Closed) => break,
Err(broadcast::error::RecvError::Lagged(_)) => {
warn!("Lagged behind sync responses, resetting timeline");
inner.clear().await;
continue;
}
};
match update {
RoomUpdate::Left { updates, .. } => {
inner.handle_sync_timeline(updates.timeline).await;
}
RoomUpdate::Joined { updates, .. } => {
inner.handle_joined_room_update(updates).await;
}
RoomUpdate::Invited { .. } => {
warn!("Room is in invited state, can't build or update its timeline");
}
}
}
}
});
@ -146,45 +165,22 @@ impl TimelineBuilder {
room.room_id().to_owned(),
));
let mut handles = vec![
timeline_event_handle,
let handles = vec![
#[cfg(feature = "e2e-encryption")]
room_key_handle,
#[cfg(feature = "e2e-encryption")]
forwarded_room_key_handle,
];
if track_read_marker_and_receipts {
inner.load_fully_read_event().await;
let fully_read_handle = room.add_event_handler({
let inner = inner.clone();
move |event| {
let inner = inner.clone();
async move {
inner.handle_fully_read(event).await;
}
}
});
handles.push(fully_read_handle);
let read_receipts_handle = room.add_event_handler({
let inner = inner.clone();
move |read_receipts: SyncReceiptEvent| {
let inner = inner.clone();
async move {
inner.handle_read_receipts(read_receipts.content).await;
}
}
});
handles.push(read_receipts_handle);
}
let timeline = Timeline {
inner,
start_token: Mutex::new(prev_token),
_end_token: Mutex::new(None),
event_handler_handles: Arc::new(TimelineEventHandlerHandles { client, handles }),
drop_handle: Arc::new(TimelineDropHandle {
client,
event_handler_handles: handles,
room_update_join_handle,
}),
};
#[cfg(feature = "e2e-encryption")]

View File

@ -23,18 +23,23 @@ use indexmap::{IndexMap, IndexSet};
#[cfg(all(test, feature = "e2e-encryption"))]
use matrix_sdk::crypto::OlmMachine;
use matrix_sdk::{
deserialized_responses::{EncryptionInfo, SyncTimelineEvent, TimelineEvent},
room, Error, Result,
deserialized_responses::{SyncTimelineEvent, TimelineEvent},
room,
sync::{JoinedRoom, Timeline},
Error, Result,
};
#[cfg(test)]
use ruma::events::receipt::ReceiptEventContent;
#[cfg(all(test, feature = "e2e-encryption"))]
use ruma::RoomId;
use ruma::{
api::client::receipt::create_receipt::v3::ReceiptType as SendReceiptType,
events::{
fully_read::FullyReadEvent,
receipt::{Receipt, ReceiptEventContent, ReceiptThread, ReceiptType},
receipt::{Receipt, ReceiptThread, ReceiptType},
relation::Annotation,
AnyMessageLikeEventContent, AnySyncTimelineEvent,
AnyMessageLikeEventContent, AnyRoomAccountDataEvent, AnySyncEphemeralRoomEvent,
AnySyncTimelineEvent,
},
push::{Action, PushConditionRoomCtx, Ruleset},
serde::Raw,
@ -52,10 +57,6 @@ use super::{
update_read_marker, Flow, HandleEventResult, TimelineEventHandler, TimelineEventKind,
TimelineEventMetadata, TimelineItemPosition,
},
read_receipts::{
handle_explicit_read_receipts, latest_user_read_receipt, load_read_receipts_for_event,
user_receipt,
},
rfind_event_by_id, rfind_event_item, EventSendState, EventTimelineItem, InReplyToDetails,
Message, Profile, RelativePosition, RepliedToEvent, TimelineDetails, TimelineItem,
TimelineItemContent,
@ -148,50 +149,76 @@ impl<P: RoomDataProvider> TimelineInner<P> {
debug!("Adding {} initial events", events.len());
let state = self.state.get_mut();
for event in events {
handle_remote_event(
event.event,
event.encryption_info,
event.push_actions,
TimelineItemPosition::End { from_cache: true },
state,
state
.handle_remote_event(
event,
TimelineItemPosition::End { from_cache: true },
&self.room_data_provider,
self.track_read_receipts,
)
.await;
}
}
pub(super) async fn clear(&self) {
trace!("Clearing timeline");
self.state.lock().await.clear();
}
pub(super) async fn handle_joined_room_update(&self, update: JoinedRoom) {
let mut state = self.state.lock().await;
state
.handle_sync_timeline(
update.timeline,
&self.room_data_provider,
self.track_read_receipts,
)
.await;
for raw_event in update.account_data {
match raw_event.deserialize() {
Ok(AnyRoomAccountDataEvent::FullyRead(ev)) => {
state.set_fully_read_event(ev.content.event_id)
}
Ok(_) => {}
Err(e) => {
warn!("Failed to deserialize account data: {e}");
}
}
}
if !update.ephemeral.is_empty() {
let own_user_id = self.room_data_provider.own_user_id();
for raw_event in update.ephemeral {
match raw_event.deserialize() {
Ok(AnySyncEphemeralRoomEvent::Receipt(ev)) => {
state.handle_explicit_read_receipts(ev.content, own_user_id);
}
Ok(_) => {}
Err(e) => {
warn!("Failed to deserialize ephemeral event: {e}");
}
}
}
}
}
#[cfg(feature = "experimental-sliding-sync")]
pub(super) async fn clear(&self) {
trace!("Clearing timeline");
let mut state = self.state.lock().await;
state.items.clear();
state.reaction_map.clear();
state.fully_read_event = None;
state.event_should_update_fully_read_marker = false;
pub(super) async fn handle_sync_timeline(&self, timeline: Timeline) {
self.state
.lock()
.await
.handle_sync_timeline(timeline, &self.room_data_provider, self.track_read_receipts)
.await;
}
#[instrument(skip_all)]
pub(super) async fn handle_live_event(
&self,
raw: Raw<AnySyncTimelineEvent>,
encryption_info: Option<EncryptionInfo>,
push_actions: Vec<Action>,
) {
let mut state = self.state.lock().await;
handle_remote_event(
raw,
encryption_info,
push_actions,
TimelineItemPosition::End { from_cache: false },
&mut state,
&self.room_data_provider,
self.track_read_receipts,
)
.await;
#[cfg(test)]
pub(super) async fn handle_live_event(&self, event: SyncTimelineEvent) {
self.state
.lock()
.await
.handle_live_event(event, &self.room_data_provider, self.track_read_receipts)
.await;
}
/// Handle the creation of a new local event.
@ -254,6 +281,32 @@ impl<P: RoomDataProvider> TimelineInner<P> {
let Some(local_item) = item.as_local() else {
// Remote echo already received. This is very unlikely.
trace!("Remote echo received before send-event response");
let local_echo = rfind_event_item(&state.items, |it| {
it.transaction_id() == Some(txn_id)
});
// If there's both the remote echo and a local echo, that means the
// remote echo was received before the response *and* contained no
// transaction ID (and thus duplicated the local echo).
if let Some((idx, _)) = local_echo {
warn!("Message echo got duplicated, removing the local one");
state.items.remove(idx);
if idx == 0 {
error!("Inconsistent state: Local echo was not preceded by day divider");
return;
}
if idx == state.items.len() {
error!("Inconsistent state: Echo was duplicated but local echo was last");
return;
}
if state.items[idx - 1].is_day_divider() && state.items[idx].is_day_divider() {
// If local echo was the only event from that day, remove day divider.
state.items.remove(idx - 1);
}
}
return;
};
@ -276,17 +329,16 @@ impl<P: RoomDataProvider> TimelineInner<P> {
&self,
event: TimelineEvent,
) -> HandleEventResult {
let mut state = self.state.lock().await;
handle_remote_event(
event.event.cast(),
event.encryption_info,
event.push_actions,
TimelineItemPosition::Start,
&mut state,
&self.room_data_provider,
self.track_read_receipts,
)
.await
self.state
.lock()
.await
.handle_remote_event(
event.into(),
TimelineItemPosition::Start,
&self.room_data_provider,
self.track_read_receipts,
)
.await
}
#[instrument(skip_all)]
@ -317,36 +369,8 @@ impl<P: RoomDataProvider> TimelineInner<P> {
}
}
#[instrument(skip_all)]
pub(super) async fn handle_fully_read(&self, raw: Raw<FullyReadEvent>) {
let fully_read_event_id = match raw.deserialize() {
Ok(ev) => ev.content.event_id,
Err(e) => {
error!("Failed to deserialize fully-read account data: {e}");
return;
}
};
self.set_fully_read_event(fully_read_event_id).await;
}
#[instrument(skip_all)]
pub(super) async fn set_fully_read_event(&self, fully_read_event_id: OwnedEventId) {
let mut state = self.state.lock().await;
// A similar event has been handled already. We can ignore it.
if state.fully_read_event.as_ref().map_or(false, |id| *id == fully_read_event_id) {
return;
}
state.fully_read_event = Some(fully_read_event_id);
let state = &mut *state;
update_read_marker(
&mut state.items,
state.fully_read_event.as_deref(),
&mut state.event_should_update_fully_read_marker,
);
self.state.lock().await.set_fully_read_event(fully_read_event_id)
}
#[cfg(feature = "e2e-encryption")]
@ -438,28 +462,26 @@ impl<P: RoomDataProvider> TimelineInner<P> {
// another one.
let mut idx = 0;
while let Some(item) = state.items.get(idx) {
let Some(event) = retry_one(item.clone()).await else {
let Some(mut event) = retry_one(item.clone()).await else {
idx += 1;
continue;
};
let push_actions = push_rules_context
event.push_actions = push_rules_context
.as_ref()
.map(|(push_rules, push_context)| {
push_rules.get_actions(&event.event, push_context).to_owned()
})
.unwrap_or_default();
let result = handle_remote_event(
event.event.cast(),
event.encryption_info,
push_actions,
TimelineItemPosition::Update(idx),
&mut state,
&self.room_data_provider,
self.track_read_receipts,
)
.await;
let result = state
.handle_remote_event(
event.into(),
TimelineItemPosition::Update(idx),
&self.room_data_provider,
self.track_read_receipts,
)
.await;
// If the UTD was removed rather than updated, run the loop again
// with the same index.
@ -525,11 +547,10 @@ impl<P: RoomDataProvider> TimelineInner<P> {
}
}
#[cfg(test)]
pub(super) async fn handle_read_receipts(&self, receipt_event_content: ReceiptEventContent) {
let mut state = self.state.lock().await;
let own_user_id = self.room_data_provider.own_user_id();
handle_explicit_read_receipts(receipt_event_content, own_user_id, &mut state)
self.state.lock().await.handle_explicit_read_receipts(receipt_event_content, own_user_id);
}
}
@ -637,7 +658,7 @@ impl TimelineInner {
let state = self.state.lock().await;
let room = self.room();
latest_user_read_receipt(user_id, &state, room).await
state.latest_user_read_receipt(user_id, room).await
}
/// Check whether the given receipt should be sent.
@ -661,7 +682,7 @@ impl TimelineInner {
match receipt_type {
SendReceiptType::Read => {
if let Some((old_pub_read, _)) =
user_receipt(own_user_id, ReceiptType::Read, &state, room).await
state.user_receipt(own_user_id, ReceiptType::Read, room).await
{
if let Some(relative_pos) =
compare_events_positions(&old_pub_read, event_id, &state.items)
@ -674,7 +695,7 @@ impl TimelineInner {
// doesn't make sense to have a private read receipt behind a public one.
SendReceiptType::ReadPrivate => {
if let Some((old_priv_read, _)) =
latest_user_read_receipt(own_user_id, &state, room).await
state.latest_user_read_receipt(own_user_id, room).await
{
if let Some(relative_pos) =
compare_events_positions(&old_priv_read, event_id, &state.items)
@ -702,6 +723,127 @@ impl TimelineInner {
}
}
impl TimelineInnerState {
#[instrument(skip_all)]
pub(super) async fn handle_sync_timeline<P: RoomDataProvider>(
&mut self,
timeline: Timeline,
room_data_provider: &P,
track_read_receipts: bool,
) {
if timeline.limited {
debug!("Got limited sync response, resetting timeline");
self.clear();
}
for event in timeline.events {
self.handle_live_event(event, room_data_provider, track_read_receipts).await;
}
}
/// Handle a live remote event.
///
/// Shorthand for `handle_remote_event` with a `position` of
/// `TimelineItemPosition::End { from_cache: false }`.
async fn handle_live_event<P: RoomDataProvider>(
&mut self,
event: SyncTimelineEvent,
room_data_provider: &P,
track_read_receipts: bool,
) -> HandleEventResult {
self.handle_remote_event(
event,
TimelineItemPosition::End { from_cache: false },
room_data_provider,
track_read_receipts,
)
.await
}
/// Handle a remote event.
///
/// Returns the number of timeline updates that were made.
async fn handle_remote_event<P: RoomDataProvider>(
&mut self,
event: SyncTimelineEvent,
position: TimelineItemPosition,
room_data_provider: &P,
track_read_receipts: bool,
) -> HandleEventResult {
let raw = event.event;
let (event_id, sender, timestamp, txn_id, event_kind) = match raw.deserialize() {
Ok(event) => (
event.event_id().to_owned(),
event.sender().to_owned(),
event.origin_server_ts(),
event.transaction_id().map(ToOwned::to_owned),
event.into(),
),
Err(e) => match raw.deserialize_as::<SyncTimelineEventWithoutContent>() {
Ok(event) => (
event.event_id().to_owned(),
event.sender().to_owned(),
event.origin_server_ts(),
event.transaction_id().map(ToOwned::to_owned),
TimelineEventKind::failed_to_parse(event, e),
),
Err(e) => {
let event_type: Option<String> = raw.get_field("type").ok().flatten();
let event_id: Option<String> = raw.get_field("event_id").ok().flatten();
warn!(event_type, event_id, "Failed to deserialize timeline event: {e}");
return HandleEventResult::default();
}
},
};
let is_own_event = sender == room_data_provider.own_user_id();
let encryption_info = event.encryption_info;
let sender_profile = room_data_provider.profile(&sender).await;
let read_receipts = if track_read_receipts {
self.load_read_receipts_for_event(&event_id, room_data_provider).await
} else {
Default::default()
};
let is_highlighted = event.push_actions.iter().any(Action::is_highlight);
let event_meta = TimelineEventMetadata {
sender,
sender_profile,
timestamp,
is_own_event,
encryption_info,
read_receipts,
is_highlighted,
};
let flow = Flow::Remote { event_id, raw_event: raw, txn_id, position };
TimelineEventHandler::new(event_meta, flow, self, track_read_receipts)
.handle_event(event_kind)
}
pub(super) fn clear(&mut self) {
self.items.clear();
self.reaction_map.clear();
self.fully_read_event = None;
self.event_should_update_fully_read_marker = false;
}
#[instrument(skip_all)]
fn set_fully_read_event(&mut self, fully_read_event_id: OwnedEventId) {
// A similar event has been handled already. We can ignore it.
if self.fully_read_event.as_ref().map_or(false, |id| *id == fully_read_event_id) {
return;
}
self.fully_read_event = Some(fully_read_event_id);
update_read_marker(
&mut self.items,
self.fully_read_event.as_deref(),
&mut self.event_should_update_fully_read_marker,
);
}
}
async fn fetch_replied_to_event(
mut state: MutexGuard<'_, TimelineInnerState>,
index: usize,
@ -812,66 +954,6 @@ impl RoomDataProvider for room::Common {
}
}
/// Handle a remote event.
///
/// Returns the number of timeline updates that were made.
async fn handle_remote_event<P: RoomDataProvider>(
raw: Raw<AnySyncTimelineEvent>,
encryption_info: Option<EncryptionInfo>,
push_actions: Vec<Action>,
position: TimelineItemPosition,
timeline_state: &mut TimelineInnerState,
room_data_provider: &P,
track_read_receipts: bool,
) -> HandleEventResult {
let (event_id, sender, timestamp, txn_id, event_kind) = match raw.deserialize() {
Ok(event) => (
event.event_id().to_owned(),
event.sender().to_owned(),
event.origin_server_ts(),
event.transaction_id().map(ToOwned::to_owned),
event.into(),
),
Err(e) => match raw.deserialize_as::<SyncTimelineEventWithoutContent>() {
Ok(event) => (
event.event_id().to_owned(),
event.sender().to_owned(),
event.origin_server_ts(),
event.transaction_id().map(ToOwned::to_owned),
TimelineEventKind::failed_to_parse(event, e),
),
Err(e) => {
let event_type: Option<String> = raw.get_field("type").ok().flatten();
let event_id: Option<String> = raw.get_field("event_id").ok().flatten();
warn!(event_type, event_id, "Failed to deserialize timeline event: {e}");
return HandleEventResult::default();
}
},
};
let is_own_event = sender == room_data_provider.own_user_id();
let sender_profile = room_data_provider.profile(&sender).await;
let read_receipts = if track_read_receipts {
load_read_receipts_for_event(&event_id, timeline_state, room_data_provider).await
} else {
Default::default()
};
let is_highlighted = push_actions.iter().any(Action::is_highlight);
let event_meta = TimelineEventMetadata {
sender,
sender_profile,
timestamp,
is_own_event,
encryption_info,
read_receipts,
is_highlighted,
};
let flow = Flow::Remote { event_id, raw_event: raw, txn_id, position };
TimelineEventHandler::new(event_meta, flow, timeline_state, track_read_receipts)
.handle_event(event_kind)
}
// Internal helper to make most of retry_event_decryption independent of a room
// object, which is annoying to create for testing and not really needed
#[async_trait]

View File

@ -25,6 +25,7 @@ use imbl::Vector;
use matrix_sdk::{
attachment::AttachmentConfig,
event_handler::EventHandlerHandle,
executor::JoinHandle,
room::{self, MessagesOptions, Receipts, Room},
Client, Result,
};
@ -88,7 +89,7 @@ pub struct Timeline {
inner: Arc<TimelineInner<room::Common>>,
start_token: Mutex<Option<String>>,
_end_token: Mutex<Option<String>>,
event_handler_handles: Arc<TimelineEventHandlerHandles>,
drop_handle: Arc<TimelineDropHandle>,
}
impl Timeline {
@ -239,7 +240,7 @@ impl Timeline {
&self,
) -> (Vector<Arc<TimelineItem>>, impl Stream<Item = VectorDiff<Arc<TimelineItem>>>) {
let (items, stream) = self.inner.subscribe().await;
let stream = TimelineStream::new(stream, self.event_handler_handles.clone());
let stream = TimelineStream::new(stream, self.drop_handle.clone());
(items, stream)
}
@ -474,16 +475,18 @@ impl Timeline {
}
#[derive(Debug)]
struct TimelineEventHandlerHandles {
struct TimelineDropHandle {
client: Client,
handles: Vec<EventHandlerHandle>,
event_handler_handles: Vec<EventHandlerHandle>,
room_update_join_handle: JoinHandle<()>,
}
impl Drop for TimelineEventHandlerHandles {
impl Drop for TimelineDropHandle {
fn drop(&mut self) {
for handle in self.handles.drain(..) {
for handle in self.event_handler_handles.drain(..) {
self.client.remove_event_handler(handle);
}
self.room_update_join_handle.abort();
}
}
@ -491,14 +494,14 @@ pin_project! {
struct TimelineStream {
#[pin]
inner: VectorSubscriber<Arc<TimelineItem>>,
event_handler_handles: Arc<TimelineEventHandlerHandles>,
event_handler_handles: Arc<TimelineDropHandle>,
}
}
impl TimelineStream {
fn new(
inner: VectorSubscriber<Arc<TimelineItem>>,
event_handler_handles: Arc<TimelineEventHandlerHandles>,
event_handler_handles: Arc<TimelineDropHandle>,
) -> Self {
Self { inner, event_handler_handles }
}

View File

@ -37,61 +37,161 @@ struct FullReceipt<'a> {
receipt: &'a Receipt,
}
pub(super) fn handle_explicit_read_receipts(
receipt_event_content: ReceiptEventContent,
own_user_id: &UserId,
timeline_state: &mut TimelineInnerState,
) {
for (event_id, receipt_types) in receipt_event_content.0 {
for (receipt_type, receipts) in receipt_types {
// We only care about read receipts here.
if !matches!(receipt_type, ReceiptType::Read | ReceiptType::ReadPrivate) {
continue;
}
impl TimelineInnerState {
/// Update the new item pointed to by the user's read receipt.
fn add_read_receipt(
&mut self,
receipt_item_pos: Option<usize>,
user_id: OwnedUserId,
receipt: Receipt,
) {
let Some(pos) = receipt_item_pos else { return };
let Some(mut event_item) = self.items[pos].as_event().cloned() else { return };
for (user_id, receipt) in receipts {
if receipt.thread != ReceiptThread::Unthreaded {
event_item.as_remote_mut().unwrap().add_read_receipt(user_id, receipt);
self.items.set(pos, Arc::new(event_item.into()));
}
pub(super) fn handle_explicit_read_receipts(
&mut self,
receipt_event_content: ReceiptEventContent,
own_user_id: &UserId,
) {
for (event_id, receipt_types) in receipt_event_content.0 {
for (receipt_type, receipts) in receipt_types {
// We only care about read receipts here.
if !matches!(receipt_type, ReceiptType::Read | ReceiptType::ReadPrivate) {
continue;
}
let receipt_item_pos =
rfind_event_by_id(&timeline_state.items, &event_id).map(|(pos, _)| pos);
let is_own_user_id = user_id == own_user_id;
let full_receipt = FullReceipt {
event_id: &event_id,
user_id: &user_id,
receipt_type: receipt_type.clone(),
receipt: &receipt,
};
for (user_id, receipt) in receipts {
if receipt.thread != ReceiptThread::Unthreaded {
continue;
}
let read_receipt_updated = maybe_update_read_receipt(
full_receipt,
receipt_item_pos,
is_own_user_id,
&mut timeline_state.items,
&mut timeline_state.users_read_receipts,
);
let receipt_item_pos =
rfind_event_by_id(&self.items, &event_id).map(|(pos, _)| pos);
let is_own_user_id = user_id == own_user_id;
let full_receipt = FullReceipt {
event_id: &event_id,
user_id: &user_id,
receipt_type: receipt_type.clone(),
receipt: &receipt,
};
if read_receipt_updated && !is_own_user_id {
add_read_receipt(receipt_item_pos, user_id, receipt, timeline_state);
let read_receipt_updated = maybe_update_read_receipt(
full_receipt,
receipt_item_pos,
is_own_user_id,
&mut self.items,
&mut self.users_read_receipts,
);
if read_receipt_updated && !is_own_user_id {
self.add_read_receipt(receipt_item_pos, user_id, receipt);
}
}
}
}
}
}
/// Update the new item pointed to by the user's read receipt.
fn add_read_receipt(
receipt_item_pos: Option<usize>,
user_id: OwnedUserId,
receipt: Receipt,
timeline_state: &mut TimelineInnerState,
) {
let Some(pos) = receipt_item_pos else { return };
let Some(mut event_item) = timeline_state.items[pos].as_event().cloned() else { return };
/// Load the read receipts from the store for the given event ID.
pub(super) async fn load_read_receipts_for_event<P: RoomDataProvider>(
&mut self,
event_id: &EventId,
room_data_provider: &P,
) -> IndexMap<OwnedUserId, Receipt> {
let read_receipts = room_data_provider.read_receipts_for_event(event_id).await;
event_item.as_remote_mut().unwrap().add_read_receipt(user_id, receipt);
timeline_state.items.set(pos, Arc::new(event_item.into()));
// Filter out receipts for our own user.
let own_user_id = room_data_provider.own_user_id();
let read_receipts: IndexMap<OwnedUserId, Receipt> =
read_receipts.into_iter().filter(|(user_id, _)| user_id != own_user_id).collect();
// Keep track of the user's read receipt.
for (user_id, receipt) in read_receipts.clone() {
// Only insert the read receipt if the user is not known to avoid conflicts with
// `TimelineInner::handle_read_receipts`.
if !self.users_read_receipts.contains_key(&user_id) {
self.users_read_receipts
.entry(user_id)
.or_default()
.insert(ReceiptType::Read, (event_id.to_owned(), receipt));
}
}
read_receipts
}
/// Get the unthreaded receipt of the given type for the given user in the
/// timeline.
pub(super) async fn user_receipt(
&self,
user_id: &UserId,
receipt_type: ReceiptType,
room: &room::Common,
) -> Option<(OwnedEventId, Receipt)> {
if let Some(receipt) = self
.users_read_receipts
.get(user_id)
.and_then(|user_map| user_map.get(&receipt_type))
.cloned()
{
return Some(receipt);
}
room.user_receipt(receipt_type.clone(), ReceiptThread::Unthreaded, user_id)
.await
.unwrap_or_else(|e| {
error!("Could not get user read receipt of type {receipt_type:?}: {e}");
None
})
}
/// Get the latest read receipt for the given user.
///
/// Useful to get the latest read receipt, whether it's private or public.
pub(super) async fn latest_user_read_receipt(
&self,
user_id: &UserId,
room: &room::Common,
) -> Option<(OwnedEventId, Receipt)> {
let public_read_receipt = self.user_receipt(user_id, ReceiptType::Read, room).await;
let private_read_receipt = self.user_receipt(user_id, ReceiptType::ReadPrivate, room).await;
// If we only have one, return it.
let Some((pub_event_id, pub_receipt)) = &public_read_receipt else {
return private_read_receipt;
};
let Some((priv_event_id, priv_receipt)) = &private_read_receipt else {
return public_read_receipt;
};
// Compare by position in the timeline.
if let Some(relative_pos) =
compare_events_positions(pub_event_id, priv_event_id, &self.items)
{
if relative_pos == RelativePosition::After {
return private_read_receipt;
}
return public_read_receipt;
}
// Compare by timestamp.
if let Some((pub_ts, priv_ts)) = pub_receipt.ts.zip(priv_receipt.ts) {
if priv_ts > pub_ts {
return private_read_receipt;
}
return public_read_receipt;
}
// As a fallback, let's assume that a private read receipt should be more recent
// than a public read receipt, otherwise there's no point in the private read
// receipt.
private_read_receipt
}
}
/// Add an implicit read receipt to the given event item, if it is more recent
@ -205,103 +305,3 @@ fn maybe_update_read_receipt(
true
}
/// Load the read receipts from the store for the given event ID.
pub(super) async fn load_read_receipts_for_event<P: RoomDataProvider>(
event_id: &EventId,
timeline_state: &mut TimelineInnerState,
room_data_provider: &P,
) -> IndexMap<OwnedUserId, Receipt> {
let read_receipts = room_data_provider.read_receipts_for_event(event_id).await;
// Filter out receipts for our own user.
let own_user_id = room_data_provider.own_user_id();
let read_receipts: IndexMap<OwnedUserId, Receipt> =
read_receipts.into_iter().filter(|(user_id, _)| user_id != own_user_id).collect();
// Keep track of the user's read receipt.
for (user_id, receipt) in read_receipts.clone() {
// Only insert the read receipt if the user is not known to avoid conflicts with
// `TimelineInner::handle_read_receipts`.
if !timeline_state.users_read_receipts.contains_key(&user_id) {
timeline_state
.users_read_receipts
.entry(user_id)
.or_default()
.insert(ReceiptType::Read, (event_id.to_owned(), receipt));
}
}
read_receipts
}
/// Get the unthreaded receipt of the given type for the given user in the
/// timeline.
pub(super) async fn user_receipt(
user_id: &UserId,
receipt_type: ReceiptType,
timeline_state: &TimelineInnerState,
room: &room::Common,
) -> Option<(OwnedEventId, Receipt)> {
if let Some(receipt) = timeline_state
.users_read_receipts
.get(user_id)
.and_then(|user_map| user_map.get(&receipt_type))
.cloned()
{
return Some(receipt);
}
room.user_receipt(receipt_type.clone(), ReceiptThread::Unthreaded, user_id)
.await
.unwrap_or_else(|e| {
error!("Could not get user read receipt of type {receipt_type:?}: {e}");
None
})
}
/// Get the latest read receipt for the given user.
///
/// Useful to get the latest read receipt, whether it's private or public.
pub(super) async fn latest_user_read_receipt(
user_id: &UserId,
timeline_state: &TimelineInnerState,
room: &room::Common,
) -> Option<(OwnedEventId, Receipt)> {
let public_read_receipt = user_receipt(user_id, ReceiptType::Read, timeline_state, room).await;
let private_read_receipt =
user_receipt(user_id, ReceiptType::ReadPrivate, timeline_state, room).await;
// If we only have one, return it.
let Some((pub_event_id, pub_receipt)) = &public_read_receipt else {
return private_read_receipt;
};
let Some((priv_event_id, priv_receipt)) = &private_read_receipt else {
return public_read_receipt;
};
// Compare by position in the timeline.
if let Some(relative_pos) =
compare_events_positions(pub_event_id, priv_event_id, &timeline_state.items)
{
if relative_pos == RelativePosition::After {
return private_read_receipt;
}
return public_read_receipt;
}
// Compare by timestamp.
if let Some((pub_ts, priv_ts)) = pub_receipt.ts.zip(priv_receipt.ts) {
if priv_ts > pub_ts {
return private_read_receipt;
}
return public_read_receipt;
}
// As a fallback, let's assume that a private read receipt should be more recent
// than a public read receipt, otherwise there's no point in the private read
// receipt.
private_read_receipt
}

View File

@ -26,7 +26,7 @@ use async_trait::async_trait;
use eyeball_im::VectorDiff;
use futures_core::Stream;
use indexmap::IndexMap;
use matrix_sdk::deserialized_responses::TimelineEvent;
use matrix_sdk::deserialized_responses::{SyncTimelineEvent, TimelineEvent};
use once_cell::sync::Lazy;
use ruma::{
events::{
@ -156,8 +156,9 @@ impl TestTimeline {
self.handle_live_event(raw).await;
}
async fn handle_live_event(&self, raw: Raw<AnySyncTimelineEvent>) {
self.inner.handle_live_event(raw, None, vec![]).await
async fn handle_live_event(&self, event: Raw<AnySyncTimelineEvent>) {
let event = SyncTimelineEvent { event, encryption_info: None, push_actions: vec![] };
self.inner.handle_live_event(event).await
}
async fn handle_local_event(&self, content: AnyMessageLikeEventContent) -> OwnedTransactionId {

View File

@ -1,3 +1,17 @@
// Copyright 2023 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use matrix_sdk::{config::RequestConfig, Client, ClientBuilder, Session};
use matrix_sdk_test::test_json;
use ruma::{api::MatrixVersion, device_id, user_id};

View File

@ -0,0 +1,211 @@
// Copyright 2023 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::{sync::Arc, time::Duration};
use assert_matches::assert_matches;
use eyeball_im::VectorDiff;
use futures_util::StreamExt;
use matrix_sdk::{config::SyncSettings, executor::spawn, ruma::MilliSecondsSinceUnixEpoch};
use matrix_sdk_test::{async_test, EventBuilder, JoinedRoomBuilder, TimelineTestEvent};
use matrix_sdk_ui::timeline::{
EventSendState, RoomExt, TimelineItem, TimelineItemContent, VirtualTimelineItem,
};
use ruma::{
event_id,
events::room::message::{MessageType, RoomMessageEventContent},
room_id, uint, TransactionId,
};
use serde_json::json;
use stream_assert::assert_next_matches;
use wiremock::{
matchers::{header, method, path_regex},
Mock, ResponseTemplate,
};
use crate::{logged_in_client, mock_encryption_state, mock_sync};
// Sending a message shows an immediate local echo in the timeline. Once the
// server acknowledges the send and the event comes back down via sync
// (carrying our transaction ID in `unsigned`), the local echo is replaced by
// the remote echo.
#[async_test]
async fn echo() {
    let room_id = room_id!("!a98sd12bjh:example.org");
    let (client, server) = logged_in_client().await;
    let sync_settings = SyncSettings::new().timeout(Duration::from_millis(3000));
    // Initial sync so the client knows about the (empty) joined room.
    let mut ev_builder = EventBuilder::new();
    ev_builder.add_joined_room(JoinedRoomBuilder::new(room_id));
    mock_sync(&server, ev_builder.build_json_sync_response(), None).await;
    let _response = client.sync_once(sync_settings.clone()).await.unwrap();
    server.reset().await;
    let room = client.get_room(room_id).unwrap();
    let timeline = Arc::new(room.timeline().await);
    let (_, mut timeline_stream) = timeline.subscribe().await;
    let event_id = event_id!("$wWgymRfo7ri1uQx0NXO40vLJ");
    let txn_id: &TransactionId = "my-txn-id".into();
    // Room is unencrypted; the send endpoint succeeds with the event ID above.
    mock_encryption_state(&server, false).await;
    Mock::given(method("PUT"))
        .and(path_regex(r"^/_matrix/client/r0/rooms/.*/send/.*"))
        .and(header("authorization", "Bearer 1234"))
        .respond_with(ResponseTemplate::new(200).set_body_json(&json!({ "event_id": event_id })))
        .mount(&server)
        .await;
    // Don't move the original timeline, it must live until the end of the test
    let timeline = timeline.clone();
    #[allow(unknown_lints, clippy::redundant_async_block)] // false positive
    let send_hdl = spawn(async move {
        timeline
            .send(RoomMessageEventContent::text_plain("Hello, World!").into(), Some(txn_id))
            .await
    });
    // First stream items: a day divider, then the local echo in `NotSentYet`
    // state with the message body we sent.
    let _day_divider = assert_matches!(timeline_stream.next().await, Some(VectorDiff::PushBack { value }) => value);
    let local_echo = assert_matches!(timeline_stream.next().await, Some(VectorDiff::PushBack { value }) => value);
    let item = local_echo.as_event().unwrap();
    assert_matches!(item.send_state(), Some(EventSendState::NotSentYet));
    let msg = assert_matches!(item.content(), TimelineItemContent::Message(msg) => msg);
    let text = assert_matches!(msg.msgtype(), MessageType::Text(text) => text);
    assert_eq!(text.body, "Hello, World!");
    // Wait for the sending to finish and assert everything was successful
    send_hdl.await.unwrap();
    // The local echo (index 1, right after the day divider) is updated in
    // place to the `Sent` state.
    let sent_confirmation = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::Set { index: 1, value }) => value
    );
    let item = sent_confirmation.as_event().unwrap();
    assert_matches!(item.send_state(), Some(EventSendState::Sent { .. }));
    // Deliver the remote echo via sync; its `unsigned.transaction_id` matches
    // our send, so the timeline can recognize it as the echo of that send.
    ev_builder.add_joined_room(JoinedRoomBuilder::new(room_id).add_timeline_event(
        TimelineTestEvent::Custom(json!({
            "content": {
                "body": "Hello, World!",
                "msgtype": "m.text",
            },
            "event_id": "$7at8sd:localhost",
            "origin_server_ts": 152038280,
            "sender": "@example:localhost",
            "type": "m.room.message",
            "unsigned": { "transaction_id": txn_id, },
        })),
    ));
    mock_sync(&server, ev_builder.build_json_sync_response(), None).await;
    let _response = client.sync_once(sync_settings.clone()).await.unwrap();
    server.reset().await;
    // Local echo is removed
    assert_matches!(timeline_stream.next().await, Some(VectorDiff::Remove { index: 1 }));
    // Local echo day divider is removed
    assert_matches!(timeline_stream.next().await, Some(VectorDiff::Remove { index: 0 }));
    // New day divider is added
    let new_item = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::PushBack { value }) => value
    );
    assert_matches!(&*new_item, TimelineItem::Virtual(VirtualTimelineItem::DayDivider(_)));
    // Remote echo is added
    let remote_echo = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::PushBack { value }) => value
    );
    let item = remote_echo.as_event().unwrap();
    assert!(item.is_own());
    // The remote echo carries the server-assigned timestamp from the event.
    assert_eq!(item.timestamp(), MilliSecondsSinceUnixEpoch(uint!(152038280)));
}
// The remote echo can arrive via sync *before* the send request completes
// (simulated here by delaying the send response by 100 ms). The remote event
// carries no transaction ID, so matching falls back to the event ID: after
// the send finishes, the now-duplicate local echo (and its day divider) must
// be removed.
#[async_test]
async fn dedup_by_event_id_late() {
    let room_id = room_id!("!a98sd12bjh:example.org");
    let (client, server) = logged_in_client().await;
    let sync_settings = SyncSettings::new().timeout(Duration::from_millis(3000));
    // Initial sync so the client knows about the (empty) joined room.
    let mut ev_builder = EventBuilder::new();
    ev_builder.add_joined_room(JoinedRoomBuilder::new(room_id));
    mock_sync(&server, ev_builder.build_json_sync_response(), None).await;
    let _response = client.sync_once(sync_settings.clone()).await.unwrap();
    server.reset().await;
    let room = client.get_room(room_id).unwrap();
    let timeline = Arc::new(room.timeline().await);
    let (_, mut timeline_stream) = timeline.subscribe().await;
    let event_id = event_id!("$wWgymRfo7ri1uQx0NXO40vLJ");
    let txn_id: &TransactionId = "my-txn-id".into();
    // Room is unencrypted; the send endpoint answers with `event_id`, but only
    // after a delay so the sync below can win the race.
    mock_encryption_state(&server, false).await;
    Mock::given(method("PUT"))
        .and(path_regex(r"^/_matrix/client/r0/rooms/.*/send/.*"))
        .and(header("authorization", "Bearer 1234"))
        .respond_with(
            ResponseTemplate::new(200)
                .set_body_json(&json!({ "event_id": event_id }))
                // Not great to use a timer for this, but it's what wiremock gives us right now.
                // Ideally we'd wait on a channel to produce a value or sth. like that.
                .set_delay(Duration::from_millis(100)),
        )
        .mount(&server)
        .await;
    let send_hdl = spawn(async move {
        timeline
            .send(RoomMessageEventContent::text_plain("Hello, World!").into(), Some(txn_id))
            .await
    });
    assert_matches!(timeline_stream.next().await, Some(VectorDiff::PushBack { .. })); // day divider
    let local_echo = assert_matches!(timeline_stream.next().await, Some(VectorDiff::PushBack { value }) => value);
    let item = local_echo.as_event().unwrap();
    assert_matches!(item.send_state(), Some(EventSendState::NotSentYet));
    // Deliver the remote event via sync while the send request is still
    // pending. It has the same event ID the send will return, but no
    // transaction ID, so it cannot be matched to the local echo yet.
    ev_builder.add_joined_room(JoinedRoomBuilder::new(room_id).add_timeline_event(
        TimelineTestEvent::Custom(json!({
            "content": {
                "body": "Hello, World!",
                "msgtype": "m.text",
            },
            "event_id": event_id,
            "origin_server_ts": 123456,
            "sender": "@example:localhost",
            "type": "m.room.message",
            // no transaction ID
        })),
    ));
    mock_sync(&server, ev_builder.build_json_sync_response(), None).await;
    let _response = client.sync_once(sync_settings.clone()).await.unwrap();
    // The remote event is appended (with its own day divider) while the local
    // echo is still present.
    assert_next_matches!(timeline_stream, VectorDiff::PushBack { .. }); // day divider
    let remote_echo =
        assert_next_matches!(timeline_stream, VectorDiff::PushBack { value } => value);
    let item = remote_echo.as_event().unwrap();
    assert_eq!(item.event_id(), Some(event_id));
    // Wait for the sending to finish
    send_hdl.await.unwrap();
    // Local echo and its day divider are removed.
    assert_next_matches!(timeline_stream, VectorDiff::Remove { index: 1 });
    assert_next_matches!(timeline_stream, VectorDiff::Remove { index: 0 });
}

View File

@ -1,36 +1,44 @@
use std::{sync::Arc, time::Duration};
// Copyright 2023 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::time::Duration;
use assert_matches::assert_matches;
use eyeball_im::VectorDiff;
use futures_util::StreamExt;
use matrix_sdk::{config::SyncSettings, executor::spawn, ruma::MilliSecondsSinceUnixEpoch};
use matrix_sdk::{config::SyncSettings, ruma::MilliSecondsSinceUnixEpoch};
use matrix_sdk_test::{
async_test, test_json, EventBuilder, JoinedRoomBuilder, RoomAccountDataTestEvent,
StateTestEvent, TimelineTestEvent,
async_test, EventBuilder, JoinedRoomBuilder, RoomAccountDataTestEvent, StateTestEvent,
TimelineTestEvent,
};
use matrix_sdk_ui::timeline::{
AnyOtherFullStateEventContent, Error as TimelineError, EventSendState, PaginationOptions,
RoomExt, TimelineDetails, TimelineItem, TimelineItemContent, VirtualTimelineItem,
};
use ruma::{
event_id,
events::{
room::message::{MessageType, RoomMessageEventContent},
FullStateEventContent,
},
room_id, uint, user_id, TransactionId,
Error as TimelineError, RoomExt, TimelineDetails, TimelineItemContent, VirtualTimelineItem,
};
use ruma::{event_id, events::room::message::MessageType, room_id, uint, user_id};
use serde_json::json;
use wiremock::{
matchers::{header, method, path_regex},
Mock, ResponseTemplate,
};
mod echo;
mod pagination;
mod read_receipts;
#[cfg(feature = "experimental-sliding-sync")]
pub(crate) mod sliding_sync;
use crate::{logged_in_client, mock_encryption_state, mock_sync};
use crate::{logged_in_client, mock_sync};
#[async_test]
async fn edit() {
@ -146,216 +154,6 @@ async fn edit() {
assert!(edited.is_edited());
}
#[async_test]
async fn echo() {
let room_id = room_id!("!a98sd12bjh:example.org");
let (client, server) = logged_in_client().await;
let sync_settings = SyncSettings::new().timeout(Duration::from_millis(3000));
let mut ev_builder = EventBuilder::new();
ev_builder.add_joined_room(JoinedRoomBuilder::new(room_id));
mock_sync(&server, ev_builder.build_json_sync_response(), None).await;
let _response = client.sync_once(sync_settings.clone()).await.unwrap();
server.reset().await;
let room = client.get_room(room_id).unwrap();
let timeline = Arc::new(room.timeline().await);
let (_, mut timeline_stream) = timeline.subscribe().await;
let event_id = event_id!("$wWgymRfo7ri1uQx0NXO40vLJ");
let txn_id: &TransactionId = "my-txn-id".into();
mock_encryption_state(&server, false).await;
Mock::given(method("PUT"))
.and(path_regex(r"^/_matrix/client/r0/rooms/.*/send/.*"))
.and(header("authorization", "Bearer 1234"))
.respond_with(ResponseTemplate::new(200).set_body_json(&json!({ "event_id": event_id })))
.mount(&server)
.await;
// Don't move the original timeline, it must live until the end of the test
let timeline = timeline.clone();
#[allow(unknown_lints, clippy::redundant_async_block)] // false positive
let send_hdl = spawn(async move {
timeline
.send(RoomMessageEventContent::text_plain("Hello, World!").into(), Some(txn_id))
.await
});
let _day_divider = assert_matches!(timeline_stream.next().await, Some(VectorDiff::PushBack { value }) => value);
let local_echo = assert_matches!(timeline_stream.next().await, Some(VectorDiff::PushBack { value }) => value);
let item = local_echo.as_event().unwrap();
assert_matches!(item.send_state(), Some(EventSendState::NotSentYet));
let msg = assert_matches!(item.content(), TimelineItemContent::Message(msg) => msg);
let text = assert_matches!(msg.msgtype(), MessageType::Text(text) => text);
assert_eq!(text.body, "Hello, World!");
// Wait for the sending to finish and assert everything was successful
send_hdl.await.unwrap();
let sent_confirmation = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::Set { index: 1, value }) => value
);
let item = sent_confirmation.as_event().unwrap();
assert_matches!(item.send_state(), Some(EventSendState::Sent { .. }));
ev_builder.add_joined_room(JoinedRoomBuilder::new(room_id).add_timeline_event(
TimelineTestEvent::Custom(json!({
"content": {
"body": "Hello, World!",
"msgtype": "m.text",
},
"event_id": "$7at8sd:localhost",
"origin_server_ts": 152038280,
"sender": "@example:localhost",
"type": "m.room.message",
"unsigned": { "transaction_id": txn_id, },
})),
));
mock_sync(&server, ev_builder.build_json_sync_response(), None).await;
let _response = client.sync_once(sync_settings.clone()).await.unwrap();
server.reset().await;
// Local echo is removed
assert_matches!(timeline_stream.next().await, Some(VectorDiff::Remove { index: 1 }));
// Local echo day divider is removed
assert_matches!(timeline_stream.next().await, Some(VectorDiff::Remove { index: 0 }));
// New day divider is added
let new_item = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::PushBack { value }) => value
);
assert_matches!(&*new_item, TimelineItem::Virtual(VirtualTimelineItem::DayDivider(_)));
// Remote echo is added
let remote_echo = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::PushBack { value }) => value
);
let item = remote_echo.as_event().unwrap();
assert!(item.is_own());
assert_eq!(item.timestamp(), MilliSecondsSinceUnixEpoch(uint!(152038280)));
}
#[async_test]
async fn back_pagination() {
let room_id = room_id!("!a98sd12bjh:example.org");
let (client, server) = logged_in_client().await;
let sync_settings = SyncSettings::new().timeout(Duration::from_millis(3000));
let mut ev_builder = EventBuilder::new();
ev_builder.add_joined_room(JoinedRoomBuilder::new(room_id));
mock_sync(&server, ev_builder.build_json_sync_response(), None).await;
let _response = client.sync_once(sync_settings.clone()).await.unwrap();
server.reset().await;
let room = client.get_room(room_id).unwrap();
let timeline = Arc::new(room.timeline().await);
let (_, mut timeline_stream) = timeline.subscribe().await;
Mock::given(method("GET"))
.and(path_regex(r"^/_matrix/client/r0/rooms/.*/messages$"))
.and(header("authorization", "Bearer 1234"))
.respond_with(ResponseTemplate::new(200).set_body_json(&*test_json::ROOM_MESSAGES_BATCH_1))
.expect(1)
.named("messages_batch_1")
.mount(&server)
.await;
timeline.paginate_backwards(PaginationOptions::single_request(10)).await.unwrap();
server.reset().await;
let loading = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::PushFront { value }) => value
);
assert_matches!(loading.as_virtual().unwrap(), VirtualTimelineItem::LoadingIndicator);
let day_divider = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::Insert { index: 1, value }) => value
);
assert_matches!(day_divider.as_virtual().unwrap(), VirtualTimelineItem::DayDivider(_));
let message = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::Insert { index: 2, value }) => value
);
let msg = assert_matches!(
message.as_event().unwrap().content(),
TimelineItemContent::Message(msg) => msg
);
let text = assert_matches!(msg.msgtype(), MessageType::Text(text) => text);
assert_eq!(text.body, "hello world");
let message = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::Insert { index: 2, value }) => value
);
let msg = assert_matches!(
message.as_event().unwrap().content(),
TimelineItemContent::Message(msg) => msg
);
let text = assert_matches!(msg.msgtype(), MessageType::Text(text) => text);
assert_eq!(text.body, "the world is big");
let message = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::Insert { index: 2, value }) => value
);
let state = assert_matches!(
message.as_event().unwrap().content(),
TimelineItemContent::OtherState(state) => state
);
assert_eq!(state.state_key(), "");
let (content, prev_content) = assert_matches!(
state.content(),
AnyOtherFullStateEventContent::RoomName(
FullStateEventContent::Original { content, prev_content }
) => (content, prev_content)
);
assert_eq!(content.name.as_ref().unwrap(), "New room name");
assert_eq!(prev_content.as_ref().unwrap().name.as_ref().unwrap(), "Old room name");
// Removal of the loading indicator
assert_matches!(timeline_stream.next().await, Some(VectorDiff::PopFront));
Mock::given(method("GET"))
.and(path_regex(r"^/_matrix/client/r0/rooms/.*/messages$"))
.and(header("authorization", "Bearer 1234"))
.respond_with(ResponseTemplate::new(200).set_body_json(json!({
// Usually there would be a few events here, but we just want to test
// that the timeline start item is added when there is no end token
"chunk": [],
"start": "t47409-4357353_219380_26003_2269"
})))
.expect(1)
.named("messages_batch_1")
.mount(&server)
.await;
timeline.paginate_backwards(PaginationOptions::single_request(10)).await.unwrap();
let loading = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::PushFront { value }) => value
);
assert_matches!(loading.as_virtual().unwrap(), VirtualTimelineItem::LoadingIndicator);
let loading = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::Set { index: 0, value }) => value
);
assert_matches!(loading.as_virtual().unwrap(), VirtualTimelineItem::TimelineStart);
}
#[async_test]
async fn reaction() {
let room_id = room_id!("!a98sd12bjh:example.org");
@ -825,99 +623,3 @@ async fn sync_highlighted() {
// `m.room.tombstone` should be highlighted by default.
assert!(remote_event.is_highlighted());
}
#[async_test]
async fn back_pagination_highlighted() {
let room_id = room_id!("!a98sd12bjh:example.org");
let (client, server) = logged_in_client().await;
let sync_settings = SyncSettings::new().timeout(Duration::from_millis(3000));
let mut ev_builder = EventBuilder::new();
ev_builder
// We need the member event and power levels locally so the push rules processor works.
.add_joined_room(
JoinedRoomBuilder::new(room_id)
.add_state_event(StateTestEvent::Member)
.add_state_event(StateTestEvent::PowerLevels),
);
mock_sync(&server, ev_builder.build_json_sync_response(), None).await;
let _response = client.sync_once(sync_settings.clone()).await.unwrap();
server.reset().await;
let room = client.get_room(room_id).unwrap();
let timeline = Arc::new(room.timeline().await);
let (_, mut timeline_stream) = timeline.subscribe().await;
let response_json = json!({
"chunk": [
{
"content": {
"body": "hello",
"msgtype": "m.text",
},
"event_id": "$msda7m0df9E9op3",
"origin_server_ts": 152037280,
"sender": "@example:localhost",
"type": "m.room.message",
"room_id": room_id,
},
{
"content": {
"body": "This room has been replaced",
"replacement_room": "!newroom:localhost",
},
"event_id": "$foun39djjod0f",
"origin_server_ts": 152039280,
"sender": "@bob:localhost",
"state_key": "",
"type": "m.room.tombstone",
"room_id": room_id,
},
],
"end": "t47409-4357353_219380_26003_2269",
"start": "t392-516_47314_0_7_1_1_1_11444_1"
});
Mock::given(method("GET"))
.and(path_regex(r"^/_matrix/client/r0/rooms/.*/messages$"))
.and(header("authorization", "Bearer 1234"))
.respond_with(ResponseTemplate::new(200).set_body_json(response_json))
.expect(1)
.named("messages_batch_1")
.mount(&server)
.await;
timeline.paginate_backwards(PaginationOptions::single_request(10)).await.unwrap();
server.reset().await;
let loading = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::PushFront { value }) => value
);
assert_matches!(loading.as_virtual().unwrap(), VirtualTimelineItem::LoadingIndicator);
let day_divider = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::Insert { index: 1, value }) => value
);
assert_matches!(day_divider.as_virtual().unwrap(), VirtualTimelineItem::DayDivider(_));
let first = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::Insert { index: 2, value }) => value
);
let remote_event = first.as_event().unwrap();
// Own events don't trigger push rules.
assert!(!remote_event.is_highlighted());
let second = assert_matches!(
timeline_stream.next().await,
Some(VectorDiff::Insert { index: 2, value }) => value
);
let remote_event = second.as_event().unwrap();
// `m.room.tombstone` should be highlighted by default.
assert!(remote_event.is_highlighted());
// Removal of the loading indicator
assert_matches!(timeline_stream.next().await, Some(VectorDiff::PopFront));
}

View File

@ -0,0 +1,245 @@
// Copyright 2023 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::{sync::Arc, time::Duration};
use assert_matches::assert_matches;
use eyeball_im::VectorDiff;
use futures_util::StreamExt;
use matrix_sdk::config::SyncSettings;
use matrix_sdk_test::{async_test, test_json, EventBuilder, JoinedRoomBuilder, StateTestEvent};
use matrix_sdk_ui::timeline::{
AnyOtherFullStateEventContent, PaginationOptions, RoomExt, TimelineItemContent,
VirtualTimelineItem,
};
use ruma::{
events::{room::message::MessageType, FullStateEventContent},
room_id,
};
use serde_json::json;
use wiremock::{
matchers::{header, method, path_regex},
Mock, ResponseTemplate,
};
use crate::{logged_in_client, mock_sync};
// Backwards pagination: a loading indicator is pushed to the front, then a
// day divider and the paginated events are inserted behind it. A second
// pagination whose response has no `end` token turns the loading indicator
// into a timeline-start item.
#[async_test]
async fn back_pagination() {
    let room_id = room_id!("!a98sd12bjh:example.org");
    let (client, server) = logged_in_client().await;
    let sync_settings = SyncSettings::new().timeout(Duration::from_millis(3000));
    // Initial sync so the client knows about the (empty) joined room.
    let mut ev_builder = EventBuilder::new();
    ev_builder.add_joined_room(JoinedRoomBuilder::new(room_id));
    mock_sync(&server, ev_builder.build_json_sync_response(), None).await;
    let _response = client.sync_once(sync_settings.clone()).await.unwrap();
    server.reset().await;
    let room = client.get_room(room_id).unwrap();
    let timeline = Arc::new(room.timeline().await);
    let (_, mut timeline_stream) = timeline.subscribe().await;
    // First /messages request returns the canned batch fixture.
    Mock::given(method("GET"))
        .and(path_regex(r"^/_matrix/client/r0/rooms/.*/messages$"))
        .and(header("authorization", "Bearer 1234"))
        .respond_with(ResponseTemplate::new(200).set_body_json(&*test_json::ROOM_MESSAGES_BATCH_1))
        .expect(1)
        .named("messages_batch_1")
        .mount(&server)
        .await;
    timeline.paginate_backwards(PaginationOptions::single_request(10)).await.unwrap();
    server.reset().await;
    let loading = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::PushFront { value }) => value
    );
    assert_matches!(loading.as_virtual().unwrap(), VirtualTimelineItem::LoadingIndicator);
    let day_divider = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::Insert { index: 1, value }) => value
    );
    assert_matches!(day_divider.as_virtual().unwrap(), VirtualTimelineItem::DayDivider(_));
    // Each paginated event is inserted at index 2 (just behind the loading
    // indicator and day divider), so later insertions land *before*
    // previously inserted ones.
    let message = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::Insert { index: 2, value }) => value
    );
    let msg = assert_matches!(
        message.as_event().unwrap().content(),
        TimelineItemContent::Message(msg) => msg
    );
    let text = assert_matches!(msg.msgtype(), MessageType::Text(text) => text);
    assert_eq!(text.body, "hello world");
    let message = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::Insert { index: 2, value }) => value
    );
    let msg = assert_matches!(
        message.as_event().unwrap().content(),
        TimelineItemContent::Message(msg) => msg
    );
    let text = assert_matches!(msg.msgtype(), MessageType::Text(text) => text);
    assert_eq!(text.body, "the world is big");
    // The batch also contains a room-name state event, surfaced as an
    // `OtherState` item with both current and previous content.
    let message = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::Insert { index: 2, value }) => value
    );
    let state = assert_matches!(
        message.as_event().unwrap().content(),
        TimelineItemContent::OtherState(state) => state
    );
    assert_eq!(state.state_key(), "");
    let (content, prev_content) = assert_matches!(
        state.content(),
        AnyOtherFullStateEventContent::RoomName(
            FullStateEventContent::Original { content, prev_content }
        ) => (content, prev_content)
    );
    assert_eq!(content.name.as_ref().unwrap(), "New room name");
    assert_eq!(prev_content.as_ref().unwrap().name.as_ref().unwrap(), "Old room name");
    // Removal of the loading indicator
    assert_matches!(timeline_stream.next().await, Some(VectorDiff::PopFront));
    Mock::given(method("GET"))
        .and(path_regex(r"^/_matrix/client/r0/rooms/.*/messages$"))
        .and(header("authorization", "Bearer 1234"))
        .respond_with(ResponseTemplate::new(200).set_body_json(json!({
            // Usually there would be a few events here, but we just want to test
            // that the timeline start item is added when there is no end token
            "chunk": [],
            "start": "t47409-4357353_219380_26003_2269"
        })))
        .expect(1)
        .named("messages_batch_1")
        .mount(&server)
        .await;
    timeline.paginate_backwards(PaginationOptions::single_request(10)).await.unwrap();
    let loading = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::PushFront { value }) => value
    );
    assert_matches!(loading.as_virtual().unwrap(), VirtualTimelineItem::LoadingIndicator);
    // No `end` token in the response: the loading indicator at index 0 is
    // replaced by the timeline-start marker instead of being popped.
    let loading = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::Set { index: 0, value }) => value
    );
    assert_matches!(loading.as_virtual().unwrap(), VirtualTimelineItem::TimelineStart);
}
// Push-rule highlighting is computed for back-paginated events: an own
// message must not be highlighted, while an `m.room.tombstone` from another
// user is highlighted by default.
#[async_test]
async fn back_pagination_highlighted() {
    let room_id = room_id!("!a98sd12bjh:example.org");
    let (client, server) = logged_in_client().await;
    let sync_settings = SyncSettings::new().timeout(Duration::from_millis(3000));
    let mut ev_builder = EventBuilder::new();
    ev_builder
        // We need the member event and power levels locally so the push rules processor works.
        .add_joined_room(
            JoinedRoomBuilder::new(room_id)
                .add_state_event(StateTestEvent::Member)
                .add_state_event(StateTestEvent::PowerLevels),
        );
    mock_sync(&server, ev_builder.build_json_sync_response(), None).await;
    let _response = client.sync_once(sync_settings.clone()).await.unwrap();
    server.reset().await;
    let room = client.get_room(room_id).unwrap();
    let timeline = Arc::new(room.timeline().await);
    let (_, mut timeline_stream) = timeline.subscribe().await;
    // /messages batch: a text message from our own user (@example), then a
    // tombstone state event from @bob.
    let response_json = json!({
        "chunk": [
          {
            "content": {
                "body": "hello",
                "msgtype": "m.text",
            },
            "event_id": "$msda7m0df9E9op3",
            "origin_server_ts": 152037280,
            "sender": "@example:localhost",
            "type": "m.room.message",
            "room_id": room_id,
          },
          {
            "content": {
                "body": "This room has been replaced",
                "replacement_room": "!newroom:localhost",
            },
            "event_id": "$foun39djjod0f",
            "origin_server_ts": 152039280,
            "sender": "@bob:localhost",
            "state_key": "",
            "type": "m.room.tombstone",
            "room_id": room_id,
          },
        ],
        "end": "t47409-4357353_219380_26003_2269",
        "start": "t392-516_47314_0_7_1_1_1_11444_1"
    });
    Mock::given(method("GET"))
        .and(path_regex(r"^/_matrix/client/r0/rooms/.*/messages$"))
        .and(header("authorization", "Bearer 1234"))
        .respond_with(ResponseTemplate::new(200).set_body_json(response_json))
        .expect(1)
        .named("messages_batch_1")
        .mount(&server)
        .await;
    timeline.paginate_backwards(PaginationOptions::single_request(10)).await.unwrap();
    server.reset().await;
    // Pagination first pushes a loading indicator, then inserts a day divider
    // and the events behind it (each at index 2, newest-inserted first).
    let loading = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::PushFront { value }) => value
    );
    assert_matches!(loading.as_virtual().unwrap(), VirtualTimelineItem::LoadingIndicator);
    let day_divider = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::Insert { index: 1, value }) => value
    );
    assert_matches!(day_divider.as_virtual().unwrap(), VirtualTimelineItem::DayDivider(_));
    let first = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::Insert { index: 2, value }) => value
    );
    let remote_event = first.as_event().unwrap();
    // Own events don't trigger push rules.
    assert!(!remote_event.is_highlighted());
    let second = assert_matches!(
        timeline_stream.next().await,
        Some(VectorDiff::Insert { index: 2, value }) => value
    );
    let remote_event = second.as_event().unwrap();
    // `m.room.tombstone` should be highlighted by default.
    assert!(remote_event.is_highlighted());
    // Removal of the loading indicator
    assert_matches!(timeline_stream.next().await, Some(VectorDiff::PopFront));
}

View File

@ -1,3 +1,17 @@
// Copyright 2023 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::time::Duration;
use assert_matches::assert_matches;

View File

@ -1,3 +1,17 @@
// Copyright 2023 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::{pin::Pin, sync::Arc};
use anyhow::{Context, Result};
@ -160,7 +174,7 @@ pub(crate) use assert_timeline_stream;
async fn new_sliding_sync(lists: Vec<SlidingSyncListBuilder>) -> Result<(MockServer, SlidingSync)> {
let (client, server) = logged_in_client().await;
let mut sliding_sync_builder = client.sliding_sync();
let mut sliding_sync_builder = client.sliding_sync("integration-test")?;
for list in lists {
sliding_sync_builder = sliding_sync_builder.add_list(list);

View File

@ -9,7 +9,10 @@
results by any membership state.
- `Common::active_members(_no_sync)` and `Common::joined_members(_no_sync)` are deprecated.
- `matrix-sdk-sqlite` is the new default store implementation outside of WASM, behind the `sqlite` feature.
- The `sled` feature was removed. It is still possible to use `matrix-sdk-sled` as a custom store.
- The `sled` feature was removed. The `matrix-sdk-sled` crate is deprecated and no longer maintained.
- The `Common` methods to retrieve state events can now return a sync or stripped event, so it can be used
for invited rooms too.
- Add `Client::subscribe_to_room_updates` and `room::Common::subscribe_to_updates`
# 0.6.2

View File

@ -415,6 +415,7 @@ impl ClientBuilder {
typing_notice_times: Default::default(),
event_handlers: Default::default(),
notification_handlers: Default::default(),
room_update_channels: Default::default(),
sync_gap_broadcast_txs: Default::default(),
appservice_mode: self.appservice_mode,
respect_login_well_known: self.respect_login_well_known,

View File

@ -15,7 +15,7 @@
// limitations under the License.
use std::{
collections::BTreeMap,
collections::{btree_map, BTreeMap},
fmt::{self, Debug},
future::Future,
pin::Pin,
@ -53,7 +53,7 @@ use ruma::{
session::{
get_login_types, login, logout, refresh_token, sso_login, sso_login_with_provider,
},
sync::sync_events,
sync::sync_events::{self},
uiaa::{AuthData, UserIdentifier},
user_directory::search_users,
},
@ -80,7 +80,7 @@ use crate::{
},
http_client::HttpClient,
room,
sync::SyncResponse,
sync::{RoomUpdate, SyncResponse},
Account, Error, Media, RefreshTokenError, Result, RumaApiError,
};
@ -164,6 +164,7 @@ pub(crate) struct ClientInner {
pub(crate) event_handlers: EventHandlerStore,
/// Notification handlers. See `register_notification_handler`.
notification_handlers: RwLock<Vec<NotificationHandlerFn>>,
pub(crate) room_update_channels: StdMutex<BTreeMap<OwnedRoomId, broadcast::Sender<RoomUpdate>>>,
pub(crate) sync_gap_broadcast_txs: StdMutex<BTreeMap<OwnedRoomId, Observable<()>>>,
/// Whether the client should operate in application service style mode.
/// This is low-level functionality. For an high-level API check the
@ -845,6 +846,21 @@ impl Client {
self
}
/// Subscribe to all updates for the room with the given ID.
///
/// The returned receiver will receive a new message for each sync response
/// that contains updates for that room.
pub fn subscribe_to_room_updates(&self, room_id: &RoomId) -> broadcast::Receiver<RoomUpdate> {
match self.inner.room_update_channels.lock().unwrap().entry(room_id.to_owned()) {
btree_map::Entry::Vacant(entry) => {
let (tx, rx) = broadcast::channel(8);
entry.insert(tx);
rx
}
btree_map::Entry::Occupied(entry) => entry.get().subscribe(),
}
}
pub(crate) async fn notification_handlers(
&self,
) -> RwLockReadGuard<'_, Vec<NotificationHandlerFn>> {

View File

@ -328,7 +328,7 @@ impl Client {
pub(crate) async fn handle_sync_events<T>(
&self,
kind: HandlerKind,
room: &Option<room::Room>,
room: Option<&room::Room>,
events: &[Raw<T>],
) -> serde_json::Result<()> {
#[derive(Deserialize)]
@ -347,7 +347,7 @@ impl Client {
pub(crate) async fn handle_sync_state_events(
&self,
room: &Option<room::Room>,
room: Option<&room::Room>,
state_events: &[Raw<AnySyncStateEvent>],
) -> serde_json::Result<()> {
#[derive(Deserialize)]
@ -375,7 +375,7 @@ impl Client {
pub(crate) async fn handle_sync_timeline_events(
&self,
room: &Option<room::Room>,
room: Option<&room::Room>,
timeline_events: &[SyncTimelineEvent],
) -> serde_json::Result<()> {
#[derive(Deserialize)]
@ -441,14 +441,14 @@ impl Client {
#[instrument(skip_all, fields(?event_kind, ?event_type, room_id))]
async fn call_event_handlers(
&self,
room: &Option<room::Room>,
room: Option<&room::Room>,
raw: &RawJsonValue,
event_kind: HandlerKind,
event_type: &str,
encryption_info: Option<&EncryptionInfo>,
push_actions: &[Action],
) {
let room_id = room.as_ref().map(|r| r.room_id());
let room_id = room.map(|r| r.room_id());
if let Some(room_id) = room_id {
tracing::Span::current().record("room_id", debug(room_id));
}
@ -464,7 +464,7 @@ impl Client {
.map(|(handle, handler_fn)| {
let data = EventHandlerData {
client: self.clone(),
room: room.clone(),
room: room.cloned(),
raw,
encryption_info,
push_actions,

View File

@ -1,7 +1,10 @@
use std::{borrow::Borrow, collections::BTreeMap, fmt, ops::Deref, sync::Arc};
use matrix_sdk_base::{
deserialized_responses::{MembersResponse, TimelineEvent},
deserialized_responses::{
MembersResponse, RawAnySyncOrStrippedState, RawSyncOrStrippedState, SyncOrStrippedState,
TimelineEvent,
},
store::StateStoreExt,
RoomMemberships, StateChanges,
};
@ -35,10 +38,9 @@ use ruma::{
MediaSource,
},
tag::{TagInfo, TagName},
AnyRoomAccountDataEvent, AnyStateEvent, AnySyncStateEvent, EmptyStateKey, RedactContent,
AnyRoomAccountDataEvent, AnyStateEvent, EmptyStateKey, RedactContent,
RedactedStateEventContent, RoomAccountDataEvent, RoomAccountDataEventContent,
RoomAccountDataEventType, StateEventType, StaticEventContent, StaticStateEventContent,
SyncStateEvent,
},
push::{Action, PushConditionRoomCtx},
serde::Raw,
@ -46,7 +48,7 @@ use ruma::{
UInt, UserId,
};
use serde::de::DeserializeOwned;
use tokio::sync::Mutex;
use tokio::sync::{broadcast, Mutex};
use tracing::{debug, instrument};
use super::Joined;
@ -54,6 +56,7 @@ use crate::{
event_handler::{EventHandler, EventHandlerHandle, SyncEvent},
media::{MediaFormat, MediaRequest},
room::{Left, RoomMember, RoomState},
sync::RoomUpdate,
BaseRoom, Client, Error, HttpError, HttpResult, Result,
};
@ -267,6 +270,14 @@ impl Common {
self.client.add_room_event_handler(self.room_id(), handler)
}
/// Subscribe to all updates for this room.
///
/// The returned receiver will receive a new message for each sync response
/// that contains updates for this room.
pub fn subscribe_to_updates(&self) -> broadcast::Receiver<RoomUpdate> {
self.client.subscribe_to_room_updates(self.room_id())
}
/// Fetch the event with the given `EventId` in this room.
pub async fn event(&self, event_id: &EventId) -> Result<TimelineEvent> {
let request =
@ -539,7 +550,7 @@ impl Common {
pub async fn get_state_events(
&self,
event_type: StateEventType,
) -> Result<Vec<Raw<AnySyncStateEvent>>> {
) -> Result<Vec<RawAnySyncOrStrippedState>> {
self.client.store().get_state_events(self.room_id(), event_type).await.map_err(Into::into)
}
@ -551,15 +562,15 @@ impl Common {
/// # async {
/// # let room: matrix_sdk::room::Common = todo!();
/// use matrix_sdk::ruma::{
/// events::room::member::SyncRoomMemberEvent, serde::Raw,
/// events::room::member::RoomMemberEventContent, serde::Raw,
/// };
///
/// let room_members: Vec<Raw<SyncRoomMemberEvent>> =
/// room.get_state_events_static().await?;
/// let room_members =
/// room.get_state_events_static::<RoomMemberEventContent>().await?;
/// # anyhow::Ok(())
/// # };
/// ```
pub async fn get_state_events_static<C>(&self) -> Result<Vec<Raw<SyncStateEvent<C>>>>
pub async fn get_state_events_static<C>(&self) -> Result<Vec<RawSyncOrStrippedState<C>>>
where
C: StaticEventContent + StaticStateEventContent + RedactContent,
C::Redacted: RedactedStateEventContent,
@ -572,7 +583,7 @@ impl Common {
&self,
event_type: StateEventType,
state_key: &str,
) -> Result<Option<Raw<AnySyncStateEvent>>> {
) -> Result<Option<RawAnySyncOrStrippedState>> {
self.client
.store()
.get_state_event(self.room_id(), event_type, state_key)
@ -588,17 +599,17 @@ impl Common {
/// ```no_run
/// # async {
/// # let room: matrix_sdk::room::Common = todo!();
/// use matrix_sdk::ruma::events::room::power_levels::SyncRoomPowerLevelsEvent;
/// use matrix_sdk::ruma::events::room::power_levels::RoomPowerLevelsEventContent;
///
/// let power_levels: SyncRoomPowerLevelsEvent = room
/// .get_state_event_static()
/// let power_levels = room
/// .get_state_event_static::<RoomPowerLevelsEventContent>()
/// .await?
/// .expect("every room has a power_levels event")
/// .deserialize()?;
/// # anyhow::Ok(())
/// # };
/// ```
pub async fn get_state_event_static<C>(&self) -> Result<Option<Raw<SyncStateEvent<C>>>>
pub async fn get_state_event_static<C>(&self) -> Result<Option<RawSyncOrStrippedState<C>>>
where
C: StaticEventContent + StaticStateEventContent<StateKey = EmptyStateKey> + RedactContent,
C::Redacted: RedactedStateEventContent,
@ -614,11 +625,13 @@ impl Common {
/// # async {
/// # let room: matrix_sdk::room::Common = todo!();
/// use matrix_sdk::ruma::{
/// events::room::member::SyncRoomMemberEvent, serde::Raw, user_id,
/// events::room::member::RoomMemberEventContent, serde::Raw, user_id,
/// };
///
/// let member_event: Option<Raw<SyncRoomMemberEvent>> = room
/// .get_state_event_static_for_key(user_id!("@alice:example.org"))
/// let member_event = room
/// .get_state_event_static_for_key::<RoomMemberEventContent, _>(user_id!(
/// "@alice:example.org"
/// ))
/// .await?;
/// # anyhow::Ok(())
/// # };
@ -626,7 +639,7 @@ impl Common {
pub async fn get_state_event_static_for_key<C, K>(
&self,
state_key: &K,
) -> Result<Option<Raw<SyncStateEvent<C>>>>
) -> Result<Option<RawSyncOrStrippedState<C>>>
where
C: StaticEventContent + StaticStateEventContent + RedactContent,
C::StateKey: Borrow<K>,
@ -839,7 +852,10 @@ impl Common {
.get_state_event_static::<RoomServerAclEventContent>()
.await?
.and_then(|ev| ev.deserialize().ok());
let acl = acl_ev.as_ref().and_then(|ev| ev.as_original()).map(|ev| &ev.content);
let acl = acl_ev.as_ref().and_then(|ev| match ev {
SyncOrStrippedState::Sync(ev) => ev.as_original().map(|ev| &ev.content),
SyncOrStrippedState::Stripped(ev) => Some(&ev.content),
});
// Filter out server names that:
// - Are blocked due to server ACLs

View File

@ -27,12 +27,16 @@ To create a new Sliding Sync session, one must query an existing
[`Client::sliding_sync`](`super::Client::sliding_sync`). The
[`SlidingSyncBuilder`] is the baseline configuration to create a
[`SlidingSync`] session by calling `.build()` once everything is ready.
Typically one configures the custom homeserver endpoint, although it's
automatically detected using the `.well-known` endpoint, if configured.
At the time of writing, no Matrix server natively supports Sliding Sync;
a sidecar called the [Sliding Sync Proxy][proxy] is needed. As that
typically runs on a separate domain, it can be configured on the
[`SlidingSyncBuilder`]:
[`SlidingSyncBuilder`].
A unique identifier, at most 16 chars long, is required for each instance
of Sliding Sync, and must be provided when getting a builder:
```rust,no_run
# use matrix_sdk::Client;
@ -41,7 +45,7 @@ typically runs on a separate domain, it can be configured on the
# let homeserver = Url::parse("http://example.com")?;
# let client = Client::new(homeserver).await?;
let sliding_sync_builder = client
.sliding_sync()
.sliding_sync("main-sync")?
.homeserver(Url::parse("http://sliding-sync.example.org")?);
# anyhow::Ok(())
@ -268,7 +272,7 @@ In full, this typically looks like this:
# let homeserver = Url::parse("http://example.com")?;
# let client = Client::new(homeserver).await?;
let sliding_sync = client
.sliding_sync()
.sliding_sync("main-sync")?
// any lists you want are added here.
.build()
.await?;
@ -351,22 +355,25 @@ timeline events as well as all list `room_lists` and
out).
This is a purely in-memory cache layer though. If one wants Sliding Sync to
persist and load from cold (storage) cache, one needs to set its key with
[`storage_key(name)`][`SlidingSyncBuilder::storage_key`] and for each list
present at `.build()`[`SlidingSyncBuilder::build`] sliding sync will attempt
to load their latest cached version from storage, as well as some overall
information of Sliding Sync. If that succeeded the lists `state` has been
set to [`Preloaded`][SlidingSyncState::Preloaded]. Only room data of rooms
present in one of the lists is loaded from storage.
persist and load from cold (storage) cache, one needs to explicitly
[`enable_caching()`][`SlidingSyncBuilder::enable_caching`]. This will reload the
Sliding Sync state from the storage, namely since tokens.
Notice that lists added after Sliding Sync has been built **will not be
loaded from cache** regardless of their settings (as this could lead to
inconsistencies between lists). The same goes for any extension: some
extension data (like the to-device-message position) are stored to storage,
but only retrieved upon `build()` of the `SlidingSyncBuilder`. So if one
only adds them later, they will not be reading the data from storage (to
avoid inconsistencies) and might require more data to be sent in their first
request than if they were loaded form cold-cache.
Caching for lists can be enabled independently, using the
[`add_cached_list`][`SlidingSyncBuilder::add_cached_list`] method, assuming
caching has been enabled before. In this case, during
`.build()`[`SlidingSyncBuilder::build`] sliding sync will attempt to load their
latest cached version from storage, as well as some overall information of
Sliding Sync. If that succeeded the lists `state` has been set to
[`Preloaded`][SlidingSyncState::Preloaded]. Only room data of rooms present in
one of the lists is loaded from storage.
Any extension data will not be loaded from the cache, if added after Sliding
Sync has been built: some extension data (like the to-device-message position)
are stored to storage, but only retrieved upon `build()` of the
`SlidingSyncBuilder`. So if one only adds them later, they will not be reading
the data from storage (to avoid inconsistencies) and might require more data to
be sent in their first request than if they were loaded from a cold cache.
When loading from storage `room_list` entries found are set to
`Invalidated` — the initial setting here is communicated as a single
@ -411,10 +418,10 @@ use std::future::ready;
let full_sync_list_name = "full-sync".to_owned();
let active_list_name = "active-list".to_owned();
let sliding_sync_builder = client
.sliding_sync()
.sliding_sync("main-sync")?
.homeserver(Url::parse("http://sliding-sync.example.org")?) // our proxy server
.with_common_extensions() // we want the e2ee and to-device enabled, please
.storage_key(Some("example-cache".to_owned())); // we want these to be loaded from and stored into the persistent storage
.enable_caching()?; // we want these to be loaded from and stored into the persistent storage
let full_sync_list = SlidingSyncList::builder(&full_sync_list_name)
.sync_mode(SlidingSyncMode::Growing { batch_size: 50, maximum_number_of_rooms_to_fetch: Some(500) }) // sync up by growing the window

View File

@ -5,15 +5,15 @@ use ruma::{
self, AccountDataConfig, E2EEConfig, ExtensionsConfig, ReceiptsConfig, ToDeviceConfig,
TypingConfig,
},
events::TimelineEventType,
OwnedRoomId,
};
use tokio::sync::{broadcast::channel, RwLock as AsyncRwLock};
use url::Url;
use super::{
cache::restore_sliding_sync_state, SlidingSync, SlidingSyncInner, SlidingSyncListBuilder,
SlidingSyncPositionMarkers, SlidingSyncRoom,
cache::{format_storage_key_prefix, restore_sliding_sync_state},
Error, SlidingSync, SlidingSyncInner, SlidingSyncListBuilder, SlidingSyncPositionMarkers,
SlidingSyncRoom,
};
use crate::{Client, Result};
@ -23,34 +23,45 @@ use crate::{Client, Result};
/// [`crate::SlidingSync::builder`].
#[derive(Debug, Clone)]
pub struct SlidingSyncBuilder {
id: String,
storage_key: Option<String>,
homeserver: Option<Url>,
client: Client,
lists: Vec<SlidingSyncListBuilder>,
bump_event_types: Vec<TimelineEventType>,
extensions: Option<ExtensionsConfig>,
subscriptions: BTreeMap<OwnedRoomId, v4::RoomSubscription>,
rooms: BTreeMap<OwnedRoomId, SlidingSyncRoom>,
}
impl SlidingSyncBuilder {
pub(super) fn new(client: Client) -> Self {
Self {
storage_key: None,
homeserver: None,
client,
lists: Vec::new(),
bump_event_types: Vec::new(),
extensions: None,
subscriptions: BTreeMap::new(),
rooms: BTreeMap::new(),
pub(super) fn new(id: String, client: Client) -> Result<Self, Error> {
if id.len() > 16 {
Err(Error::InvalidSlidingSyncIdentifier)
} else {
Ok(Self {
id,
storage_key: None,
homeserver: None,
client,
lists: Vec::new(),
extensions: None,
subscriptions: BTreeMap::new(),
rooms: BTreeMap::new(),
})
}
}
/// Set the storage key to keep this cache at and load it from.
pub fn storage_key(mut self, value: Option<String>) -> Self {
self.storage_key = value;
self
/// Enable caching for the given sliding sync.
///
/// This will cause lists and the sliding sync tokens to be saved into and
/// restored from the cache.
pub fn enable_caching(mut self) -> Result<Self> {
// Compute the final storage key now.
self.storage_key = Some(format_storage_key_prefix(
&self.id,
self.client.user_id().ok_or(super::Error::UnauthenticatedUser)?,
));
Ok(self)
}
/// Set the homeserver for sliding sync only.
@ -70,13 +81,14 @@ impl SlidingSyncBuilder {
/// Enroll the list in caching, reloads it from the cache if possible, and
/// adds it to the list of lists.
///
/// This will raise an error if a [`storage_key()`][Self::storage_key] was
/// not set, or if there was a I/O error reading from the cache.
/// This will raise an error if caching wasn't enabled with
/// [`enable_caching`][Self::enable_caching], or if there was an I/O error
/// reading from the cache.
///
/// Replace any list with the same name.
pub async fn add_cached_list(mut self, mut list: SlidingSyncListBuilder) -> Result<Self> {
let Some(ref storage_key) = self.storage_key else {
return Err(super::error::Error::MissingStorageKeyForCaching.into());
return Err(super::error::Error::CacheDisabled.into());
};
let reloaded_rooms = list.set_cached_and_reload(&self.client, storage_key).await?;
@ -205,17 +217,6 @@ impl SlidingSyncBuilder {
self
}
/// Allowlist of event types which should be considered recent activity
/// when sorting `by_recency`. By omitting event types, clients can ensure
/// that uninteresting events (e.g. a profile rename) do not cause a
/// room to jump to the top of its list(s). Empty or
/// omitted `bump_event_types` have no effect: all events in a room will
/// be considered recent activity.
pub fn bump_event_types(mut self, bump_event_types: &[TimelineEventType]) -> Self {
self.bump_event_types = bump_event_types.to_vec();
self
}
/// Build the Sliding Sync.
///
/// If `self.storage_key` is `Some(_)`, load the cached data from cold
@ -252,13 +253,13 @@ impl SlidingSyncBuilder {
let lists = AsyncRwLock::new(lists);
Ok(SlidingSync::new(SlidingSyncInner {
_id: Some(self.id),
homeserver: self.homeserver,
client,
storage_key: self.storage_key,
lists,
rooms,
bump_event_types: self.bump_event_types,
extensions: self.extensions.unwrap_or_default(),
reset_counter: Default::default(),

View File

@ -7,21 +7,28 @@
use std::collections::BTreeMap;
use matrix_sdk_base::{StateStore, StoreError};
use ruma::UserId;
use tracing::{trace, warn};
use super::{FrozenSlidingSync, FrozenSlidingSyncList, SlidingSync, SlidingSyncList};
use crate::{sliding_sync::SlidingSyncListCachePolicy, Client, Result};
/// Be careful: as this is used as a storage key; changing it requires migrating
/// data!
pub(super) fn format_storage_key_prefix(id: &str, user_id: &UserId) -> String {
    // Build the per-instance, per-user prefix under which all sliding sync
    // cache entries are stored. Uses inline captured identifiers; `user_id`
    // is rendered through its `Display` impl, same as positional `{}` args.
    format!("sliding_sync_store::{id}::{user_id}")
}
/// Be careful: as this is used as a storage key; changing it requires migrating
/// data!
// NOTE(review): two `format!` expressions appear back to back below — the old
// `sliding_sync_store::{storage_key}` form and the new `{storage_key}::instance`
// form. This looks like diff/merge residue (only one return expression can be
// intended); confirm the `{storage_key}::instance` variant is the one to keep,
// since `storage_key` is now the full prefix from `format_storage_key_prefix`.
fn format_storage_key_for_sliding_sync(storage_key: &str) -> String {
format!("sliding_sync_store::{storage_key}")
format!("{storage_key}::instance")
}
/// Be careful: as this is used as a storage key; changing it requires migrating
/// data!
// NOTE(review): same diff residue as above — two alternative key formats; the
// `{storage_key}::list::{list_name}` form matches the new prefix scheme.
fn format_storage_key_for_sliding_sync_list(storage_key: &str, list_name: &str) -> String {
format!("sliding_sync_store::{storage_key}::{list_name}")
format!("{storage_key}::list::{list_name}")
}
/// Invalidate a single [`SlidingSyncList`] cache entry by removing it from the
@ -202,26 +209,22 @@ mod tests {
use futures_executor::block_on;
use futures_util::StreamExt;
use url::Url;
use super::*;
use crate::{Client, Result};
use crate::{test_utils::logged_in_client, Result};
#[test]
fn test_cannot_cache_without_a_storage_key() -> Result<()> {
block_on(async {
let homeserver = Url::parse("https://foo.bar")?;
let client = Client::new(homeserver).await?;
let client = logged_in_client(Some("https://foo.bar".to_owned())).await;
let err = client
.sliding_sync()
.sliding_sync("test")?
.add_cached_list(SlidingSyncList::builder("list_foo"))
.await
.unwrap_err();
assert!(matches!(
err,
crate::Error::SlidingSync(
crate::sliding_sync::error::Error::MissingStorageKeyForCaching
)
crate::Error::SlidingSync(crate::sliding_sync::error::Error::CacheDisabled)
));
Ok(())
})
@ -230,8 +233,7 @@ mod tests {
#[allow(clippy::await_holding_lock)]
#[tokio::test]
async fn test_sliding_sync_can_be_stored_and_restored() -> Result<()> {
let homeserver = Url::parse("https://foo.bar")?;
let client = Client::new(homeserver).await?;
let client = logged_in_client(Some("https://foo.bar".to_owned())).await;
let store = client.store();
@ -256,10 +258,12 @@ mod tests {
.is_none());
// Create a new `SlidingSync` instance, and store it.
{
let storage_key = {
let sync_id = "test-sync-id";
let storage_key = format_storage_key_prefix(sync_id, client.user_id().unwrap());
let sliding_sync = client
.sliding_sync()
.storage_key(Some("hello".to_owned()))
.sliding_sync(sync_id)?
.enable_caching()?
.add_cached_list(SlidingSyncList::builder("list_foo"))
.await?
.add_list(SlidingSyncList::builder("list_bar"))
@ -268,7 +272,7 @@ mod tests {
// Modify both lists, so we can check expected caching behavior later.
{
let lists = sliding_sync.inner.lists.write().await;
let lists = sliding_sync.inner.lists.write().unwrap();
let list_foo = lists.get("list_foo").unwrap();
list_foo.set_maximum_number_of_rooms(Some(42));
@ -278,17 +282,18 @@ mod tests {
}
assert!(sliding_sync.cache_to_storage().await.is_ok());
}
storage_key
};
// Store entries now exist for the sliding sync object and list_foo.
assert!(store
.get_custom_value(format_storage_key_for_sliding_sync("hello").as_bytes())
.get_custom_value(format_storage_key_for_sliding_sync(&storage_key).as_bytes())
.await?
.is_some());
assert!(store
.get_custom_value(
format_storage_key_for_sliding_sync_list("hello", "list_foo").as_bytes()
format_storage_key_for_sliding_sync_list(&storage_key, "list_foo").as_bytes()
)
.await?
.is_some());
@ -296,18 +301,20 @@ mod tests {
// But not for list_bar.
assert!(store
.get_custom_value(
format_storage_key_for_sliding_sync_list("hello", "list_bar").as_bytes()
format_storage_key_for_sliding_sync_list(&storage_key, "list_bar").as_bytes()
)
.await?
.is_none());
// Create a new `SlidingSync`, and it should be read from the cache.
{
let storage_key = {
let sync_id = "test-sync-id";
let storage_key = format_storage_key_prefix(sync_id, client.user_id().unwrap());
let max_number_of_room_stream = Arc::new(RwLock::new(None));
let cloned_stream = max_number_of_room_stream.clone();
let sliding_sync = client
.sliding_sync()
.storage_key(Some("hello".to_owned()))
.sliding_sync(sync_id)?
.enable_caching()?
.add_cached_list(SlidingSyncList::builder("list_foo").once_built(move |list| {
// In the `once_built()` handler, nothing has been read from the cache yet.
assert_eq!(list.maximum_number_of_rooms(), None);
@ -323,7 +330,7 @@ mod tests {
// Check the list' state.
{
let lists = sliding_sync.inner.lists.write().await;
let lists = sliding_sync.inner.lists.write().unwrap();
// This one was cached.
let list_foo = lists.get("list_foo").unwrap();
@ -345,27 +352,26 @@ mod tests {
}
// Clean the cache.
let lists = sliding_sync.inner.lists.read().await;
clean_storage(&client, "hello", &lists).await;
}
clean_storage(&client, &storage_key, &sliding_sync.inner.lists.read().unwrap()).await;
storage_key
};
// Store entries don't exist.
assert!(store
.get_custom_value(format_storage_key_for_sliding_sync("hello").as_bytes())
.get_custom_value(format_storage_key_for_sliding_sync(&storage_key).as_bytes())
.await?
.is_none());
assert!(store
.get_custom_value(
format_storage_key_for_sliding_sync_list("hello", "list_foo").as_bytes()
format_storage_key_for_sliding_sync_list(&storage_key, "list_foo").as_bytes()
)
.await?
.is_none());
assert!(store
.get_custom_value(
format_storage_key_for_sliding_sync_list("hello", "list_bar").as_bytes()
format_storage_key_for_sliding_sync_list(&storage_key, "list_bar").as_bytes()
)
.await?
.is_none());

View File

@ -6,9 +6,12 @@ use super::{SlidingSync, SlidingSyncBuilder};
use crate::{Client, Result};
impl Client {
/// Create a [`SlidingSyncBuilder`] tied to this client.
pub fn sliding_sync(&self) -> SlidingSyncBuilder {
SlidingSync::builder(self.clone())
/// Create a [`SlidingSyncBuilder`] tied to this client, with the given
/// identifier.
///
/// Note: the identifier must not be more than 16 chars long!
pub fn sliding_sync(&self, id: impl Into<String>) -> Result<SlidingSyncBuilder> {
Ok(SlidingSync::builder(id.into(), self.clone())?)
}
#[instrument(skip(self, response))]

View File

@ -30,10 +30,20 @@ pub enum Error {
/// Missing storage key when asking to deserialize some sub-state of sliding
/// sync.
#[error("A caching request was made but a storage key is missing in sliding sync")]
MissingStorageKeyForCaching,
#[error(
"A caching request was made but caching was not enabled in this instance of sliding sync"
)]
CacheDisabled,
/// We tried to read the user id of a client but it was missing.
#[error("Unauthenticated user in sliding sync")]
UnauthenticatedUser,
/// The internal channel of `SlidingSync` seems to be broken.
#[error("SlidingSync's internal channel is broken")]
InternalChannelIsBroken,
/// The name of the Sliding Sync instance is too long.
#[error("The Sliding Sync instance's identifier must be less than 16 chars long")]
InvalidSlidingSyncIdentifier,
}

View File

@ -10,7 +10,11 @@ use std::{
use eyeball::unique::Observable;
use eyeball_im::ObservableVector;
use imbl::Vector;
use ruma::{api::client::sync::sync_events::v4, events::StateEventType, OwnedRoomId};
use ruma::{
api::client::sync::sync_events::v4,
events::{StateEventType, TimelineEventType},
OwnedRoomId,
};
use tokio::sync::broadcast::Sender;
use super::{
@ -53,6 +57,8 @@ pub struct SlidingSyncListBuilder {
reloaded_cached_data: Option<SlidingSyncListCachedData>,
once_built: Arc<Box<dyn Fn(SlidingSyncList) -> SlidingSyncList + Send + Sync>>,
bump_event_types: Vec<TimelineEventType>,
}
// Print debug values for the builder, except `once_built` which is ignored.
@ -66,6 +72,7 @@ impl fmt::Debug for SlidingSyncListBuilder {
.field("filters", &self.filters)
.field("timeline_limit", &self.timeline_limit)
.field("name", &self.name)
.field("bump_event_types", &self.bump_event_types)
.finish_non_exhaustive()
}
}
@ -85,6 +92,7 @@ impl SlidingSyncListBuilder {
reloaded_cached_data: None,
cache_policy: SlidingSyncListCachePolicy::Disabled,
once_built: Arc::new(Box::new(identity)),
bump_event_types: Vec::new(),
}
}
@ -166,6 +174,19 @@ impl SlidingSyncListBuilder {
}
}
/// Allowlist of event types which should be considered recent activity
/// when sorting `by_recency`.
///
/// By omitting event types, clients can ensure
/// that uninteresting events (e.g. a profile rename) do not cause a
/// room to jump to the top of its list(s). Empty or
/// omitted `bump_event_types` have no effect: all events in a room will
/// be considered recent activity.
pub fn bump_event_types(mut self, bump_event_types: &[TimelineEventType]) -> Self {
self.bump_event_types = bump_event_types.to_vec();
self
}
/// Build the list.
pub(in super::super) fn build(
self,
@ -183,6 +204,7 @@ impl SlidingSyncListBuilder {
timeline_limit: StdRwLock::new(self.timeline_limit),
name: self.name,
cache_policy: self.cache_policy,
bump_event_types: self.bump_event_types,
// Computed from the builder.
request_generator: StdRwLock::new(SlidingSyncListRequestGenerator::new(

View File

@ -20,7 +20,12 @@ use futures_core::Stream;
use imbl::Vector;
pub(super) use request_generator::*;
pub use room_list_entry::RoomListEntry;
use ruma::{api::client::sync::sync_events::v4, assign, events::StateEventType, OwnedRoomId};
use ruma::{
api::client::sync::sync_events::v4,
assign,
events::{StateEventType, TimelineEventType},
OwnedRoomId,
};
use serde::{Deserialize, Serialize};
use tokio::sync::broadcast::Sender;
use tracing::{instrument, warn};
@ -299,6 +304,10 @@ pub(super) struct SlidingSyncListInner {
/// [`SlidingSyncInner::internal_channel`] to learn more.
sliding_sync_internal_channel_sender: Sender<SlidingSyncInternalMessage>,
/// The `bump_event_types` field. See
/// [`SlidingSyncListBuilder::bump_event_types`] to learn more.
bump_event_types: Vec<TimelineEventType>,
#[cfg(any(test, feature = "testing"))]
sync_mode: StdRwLock<SlidingSyncMode>,
}
@ -369,6 +378,7 @@ impl SlidingSyncListInner {
}),
sort,
filters,
bump_event_types: self.bump_event_types.clone(),
})
}

View File

@ -45,9 +45,7 @@ use ruma::{
error::ErrorKind,
sync::sync_events::v4::{self, ExtensionsConfig},
},
assign,
events::TimelineEventType,
OwnedRoomId, RoomId,
assign, OwnedRoomId, RoomId,
};
use serde::{Deserialize, Serialize};
use tokio::{
@ -82,6 +80,11 @@ pub struct SlidingSync {
#[derive(Debug)]
pub(super) struct SlidingSyncInner {
/// A unique identifier for this instance of sliding sync.
///
/// Used to distinguish different connections to the sliding sync proxy.
_id: Option<String>,
/// Customize the homeserver for sliding sync only
homeserver: Option<Url>,
@ -100,10 +103,6 @@ pub(super) struct SlidingSyncInner {
/// The rooms details
rooms: AsyncRwLock<BTreeMap<OwnedRoomId, SlidingSyncRoom>>,
/// The `bump_event_types` field. See
/// [`SlidingSyncBuilder::bump_event_types`] to learn more.
bump_event_types: Vec<TimelineEventType>,
/// Room subscriptions, i.e. rooms that may be out-of-scope of all lists but
/// one wants to receive updates.
room_subscriptions: StdRwLock<BTreeMap<OwnedRoomId, v4::RoomSubscription>>,
@ -133,8 +132,8 @@ impl SlidingSync {
}
/// Create a new [`SlidingSyncBuilder`].
pub fn builder(client: Client) -> SlidingSyncBuilder {
SlidingSyncBuilder::new(client)
pub fn builder(id: String, client: Client) -> Result<SlidingSyncBuilder, Error> {
SlidingSyncBuilder::new(id, client)
}
/// Subscribe to a given room.
@ -227,7 +226,7 @@ impl SlidingSync {
mut list_builder: SlidingSyncListBuilder,
) -> Result<Option<SlidingSyncList>> {
let Some(ref storage_key) = self.inner.storage_key else {
return Err(error::Error::MissingStorageKeyForCaching.into());
return Err(error::Error::CacheDisabled.into());
};
let reloaded_rooms =
@ -419,11 +418,11 @@ impl SlidingSync {
(
// Build the request itself.
assign!(v4::Request::new(), {
// conn_id: self.inner.id.clone(),
pos,
delta_token,
timeout: Some(timeout),
lists: requests_lists,
bump_event_types: self.inner.bump_event_types.clone(),
room_subscriptions,
unsubscribe_rooms: room_unsubscriptions.iter().cloned().collect(),
extensions,
@ -729,7 +728,7 @@ mod tests {
let server = MockServer::start().await;
let client = logged_in_client(Some(server.uri())).await;
let sync = client.sliding_sync().build().await?;
let sync = client.sliding_sync("test-slidingsync")?.build().await?;
let extensions = sync.prepare_extension_config(None);
// If the user doesn't provide any extension config, we enable to-device and
@ -770,7 +769,7 @@ mod tests {
let server = MockServer::start().await;
let client = logged_in_client(Some(server.uri())).await;
let mut sliding_sync_builder = client.sliding_sync();
let mut sliding_sync_builder = client.sliding_sync("test-slidingsync")?;
for list in lists {
sliding_sync_builder = sliding_sync_builder.add_list(list);

View File

@ -14,12 +14,16 @@
//! The SDK's representation of the result of a `/sync` request.
use std::{collections::BTreeMap, fmt, time::Duration};
use std::{
collections::{btree_map, BTreeMap},
fmt,
time::Duration,
};
use eyeball::unique::Observable;
pub use matrix_sdk_base::sync::*;
use matrix_sdk_base::{
debug::{DebugListOfRawEventsNoId, DebugNotificationMap},
debug::{DebugInvitedRoom, DebugListOfRawEventsNoId, DebugNotificationMap},
deserialized_responses::AmbiguityChanges,
instant::Instant,
sync::SyncResponse as BaseSyncResponse,
@ -27,7 +31,7 @@ use matrix_sdk_base::{
use ruma::{
api::client::{
push::get_notifications::v3::Notification,
sync::sync_events::{self, DeviceLists},
sync::sync_events::{self, v3::InvitedRoom, DeviceLists},
},
events::{presence::PresenceEvent, AnyGlobalAccountDataEvent, AnyToDeviceEvent},
serde::Raw,
@ -35,7 +39,7 @@ use ruma::{
};
use tracing::{debug, error, warn};
use crate::{event_handler::HandlerKind, Client, Result};
use crate::{event_handler::HandlerKind, room, Client, Result};
/// The processed response of a `/sync` request.
#[derive(Clone, Default)]
@ -106,6 +110,50 @@ impl fmt::Debug for SyncResponse {
}
}
/// A batch of updates to a room.
///
/// One message of this type is delivered, for each sync response that
/// contains updates for the room, to receivers obtained via
/// `Client::subscribe_to_room_updates` (or `room::Common::subscribe_to_updates`).
/// The variant reflects the user's membership in the room at sync time.
#[derive(Clone)]
pub enum RoomUpdate {
/// Updates to a room the user is no longer in.
Left {
/// Room object with general information on the room.
room: room::Left,
/// Updates to the room.
updates: LeftRoom,
},
/// Updates to a room the user is currently in.
Joined {
/// Room object with general information on the room.
room: room::Joined,
/// Updates to the room.
updates: JoinedRoom,
},
/// Updates to a room the user is invited to.
Invited {
/// Room object with general information on the room.
room: room::Invited,
/// Updates to the room.
updates: InvitedRoom,
},
}
// Manual `Debug` impl: `InvitedRoom` has no useful `Debug` of its own, so the
// invited variant goes through the `DebugInvitedRoom` wrapper; the other two
// variants debug-print their fields directly. Output is identical in shape
// for all three variants: a struct with `room` and `updates` fields.
impl fmt::Debug for RoomUpdate {
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Left { room, updates } => formatter
                .debug_struct("Left")
                .field("room", room)
                .field("updates", updates)
                .finish(),
            Self::Joined { room, updates } => formatter
                .debug_struct("Joined")
                .field("room", room)
                .field("updates", updates)
                .finish(),
            Self::Invited { room, updates } => formatter
                .debug_struct("Invited")
                .field("room", room)
                .field("updates", &DebugInvitedRoom(updates))
                .finish(),
        }
    }
}
/// Internal functionality related to getting events from the server
/// (`sync_events` endpoint)
impl Client {
@ -132,30 +180,36 @@ impl Client {
} = response;
let now = Instant::now();
self.handle_sync_events(HandlerKind::GlobalAccountData, &None, account_data).await?;
self.handle_sync_events(HandlerKind::Presence, &None, presence).await?;
self.handle_sync_events(HandlerKind::ToDevice, &None, to_device).await?;
self.handle_sync_events(HandlerKind::GlobalAccountData, None, account_data).await?;
self.handle_sync_events(HandlerKind::Presence, None, presence).await?;
self.handle_sync_events(HandlerKind::ToDevice, None, to_device).await?;
for (room_id, room_info) in &rooms.join {
if room_info.timeline.limited {
self.notify_sync_gap(room_id);
}
let room = self.get_room(room_id);
if room.is_none() {
let Some(room) = self.get_joined_room(room_id) else {
error!(?room_id, "Can't call event handler, room not found");
continue;
}
};
self.send_room_update(room_id, || RoomUpdate::Joined {
room: room.clone(),
updates: room_info.clone(),
});
let JoinedRoom { unread_notifications: _, timeline, state, account_data, ephemeral } =
room_info;
self.handle_sync_events(HandlerKind::RoomAccountData, &room, account_data).await?;
self.handle_sync_state_events(&room, state).await?;
self.handle_sync_timeline_events(&room, &timeline.events).await?;
let room = room::Room::Joined(room);
let room = Some(&room);
self.handle_sync_events(HandlerKind::RoomAccountData, room, account_data).await?;
self.handle_sync_state_events(room, state).await?;
self.handle_sync_timeline_events(room, &timeline.events).await?;
// Handle ephemeral events after timeline, read receipts in here
// could refer to timeline events from the same response.
self.handle_sync_events(HandlerKind::EphemeralRoomData, &room, ephemeral).await?;
self.handle_sync_events(HandlerKind::EphemeralRoomData, room, ephemeral).await?;
}
for (room_id, room_info) in &rooms.leave {
@ -163,33 +217,41 @@ impl Client {
self.notify_sync_gap(room_id);
}
let room = self.get_room(room_id);
if room.is_none() {
let Some(room) = self.get_left_room(room_id) else {
error!(?room_id, "Can't call event handler, room not found");
continue;
}
};
self.send_room_update(room_id, || RoomUpdate::Left {
room: room.clone(),
updates: room_info.clone(),
});
let LeftRoom { timeline, state, account_data } = room_info;
self.handle_sync_events(HandlerKind::RoomAccountData, &room, account_data).await?;
self.handle_sync_state_events(&room, state).await?;
self.handle_sync_timeline_events(&room, &timeline.events).await?;
let left = room::Room::Left(room);
let room = Some(&left);
self.handle_sync_events(HandlerKind::RoomAccountData, room, account_data).await?;
self.handle_sync_state_events(room, state).await?;
self.handle_sync_timeline_events(room, &timeline.events).await?;
}
for (room_id, room_info) in &rooms.invite {
let room = self.get_room(room_id);
if room.is_none() {
let Some(room) = self.get_invited_room(room_id) else {
error!(?room_id, "Can't call event handler, room not found");
continue;
}
};
self.send_room_update(room_id, || RoomUpdate::Invited {
room: room.clone(),
updates: room_info.clone(),
});
// FIXME: Destructure room_info
self.handle_sync_events(
HandlerKind::StrippedState,
&room,
&room_info.invite_state.events,
)
.await?;
let invited = room::Room::Invited(room);
let room = Some(&invited);
let invite_state = &room_info.invite_state.events;
self.handle_sync_events(HandlerKind::StrippedState, room, invite_state).await?;
}
debug!("Ran event handlers in {:?}", now.elapsed());
@ -222,6 +284,19 @@ impl Client {
Ok(())
}
fn send_room_update(&self, room_id: &RoomId, make_msg: impl FnOnce() -> RoomUpdate) {
if let btree_map::Entry::Occupied(entry) =
self.inner.room_update_channels.lock().unwrap().entry(room_id.to_owned())
{
let tx = entry.get();
if tx.receiver_count() == 0 {
entry.remove();
} else {
_ = tx.send(make_msg());
}
}
}
async fn sleep() {
#[cfg(target_arch = "wasm32")]
gloo_timers::future::TimeoutFuture::new(1_000).await;

View File

@ -1,8 +1,11 @@
use std::{collections::BTreeMap, str::FromStr, time::Duration};
use assert_matches::assert_matches;
use futures_util::FutureExt;
use matrix_sdk::{
config::SyncSettings,
media::{MediaFormat, MediaRequest, MediaThumbnailSize},
sync::RoomUpdate,
RumaApiError, Session,
};
use matrix_sdk_test::{async_test, test_json};
@ -630,3 +633,28 @@ fn serialize_session() {
})
);
}
#[async_test]
async fn room_update_channel() {
    let (client, server) = logged_in_client().await;

    // Subscribe before syncing so the update produced by the sync below is
    // captured by the channel.
    let mut updates_rx =
        client.subscribe_to_room_updates(room_id!("!SVkFJHzfwvuaIEawgC:localhost"));

    mock_sync(&server, &*test_json::SYNC, None).await;
    client
        .sync_once(SyncSettings::new().timeout(Duration::from_millis(3000)))
        .await
        .unwrap();

    // The update must already be waiting in the channel — `now_or_never`
    // asserts that no further polling is required to receive it.
    let update = updates_rx.recv().now_or_never().unwrap().unwrap();
    let updates = assert_matches!(update, RoomUpdate::Joined { updates, .. } => updates);

    // Counts below mirror the fixture in `test_json::SYNC`.
    assert_eq!(updates.account_data.len(), 1);
    assert_eq!(updates.ephemeral.len(), 1);
    assert_eq!(updates.state.len(), 9);
    assert!(updates.timeline.limited);
    assert_eq!(updates.timeline.events.len(), 1);
    assert_eq!(updates.timeline.prev_batch, Some("t392-516_47314_0_7_1_1_1_11444_1".to_owned()));
    assert_eq!(updates.unread_notifications.highlight_count, 0);
    assert_eq!(updates.unread_notifications.notification_count, 11);
}

View File

@ -171,7 +171,10 @@ async fn test_state_event_getting() {
.deserialize()
.unwrap();
assert_matches::assert_matches!(encryption_event, AnySyncStateEvent::RoomEncryption(_));
assert_matches::assert_matches!(
encryption_event.as_sync(),
Some(AnySyncStateEvent::RoomEncryption(_))
);
}
#[async_test]

View File

@ -4,10 +4,7 @@ use matrix_sdk::{
config::SyncSettings,
ruma::{
api::client::room::create_room::v3::Request as CreateRoomRequest,
events::{
room::name::{RoomNameEventContent, SyncRoomNameEvent},
StateEventType,
},
events::{room::name::RoomNameEventContent, StateEventType},
},
Client,
};
@ -64,13 +61,14 @@ async fn test_redacting_name() -> Result<()> {
let raw_event =
room.get_state_event(StateEventType::RoomName, "").await?.expect("Room Name not found");
let room_name_event: SyncRoomNameEvent = raw_event.deserialize_as()?;
let room_name_event = raw_event.cast::<RoomNameEventContent>().deserialize()?;
let sync_room_name_event = room_name_event.as_sync().expect("event is sync event");
assert!(
room_name_event.as_original().expect("event exists").content.name.is_some(),
sync_room_name_event.as_original().expect("event exists").content.name.is_some(),
"Event not found"
);
room.redact(room_name_event.event_id(), None, None).await?;
room.redact(sync_room_name_event.event_id(), None, None).await?;
// sync up.
for _ in 0..=10 {
// we call sync up to ten times to give the server time to flush other
@ -84,10 +82,17 @@ async fn test_redacting_name() -> Result<()> {
let raw_event =
room.get_state_event(StateEventType::RoomName, "").await?.expect("Room Name not found");
let room_name_event: SyncRoomNameEvent = raw_event.deserialize_as()?;
let room_name_event = raw_event.cast::<RoomNameEventContent>().deserialize()?;
// Name content has been redacted
assert!(
room_name_event.as_original().expect("event exists").content.name.is_none(),
room_name_event
.as_sync()
.expect("event is sync event")
.as_original()
.expect("event exists")
.content
.name
.is_none(),
"Event hasn't been redacted"
);
@ -132,14 +137,18 @@ async fn test_redacting_name_static() -> Result<()> {
// check state event.
let room_name_event: SyncRoomNameEvent =
room.get_state_event_static().await?.expect("Room Name not found").deserialize()?;
let room_name_event = room
.get_state_event_static::<RoomNameEventContent>()
.await?
.expect("Room Name not found")
.deserialize()?;
let sync_room_name_event = room_name_event.as_sync().expect("event is sync event");
assert!(
room_name_event.as_original().expect("event exists").content.name.is_some(),
sync_room_name_event.as_original().expect("event exists").content.name.is_some(),
"Event not found"
);
room.redact(room_name_event.event_id(), None, None).await?;
room.redact(sync_room_name_event.event_id(), None, None).await?;
// we sync up.
for _ in 0..=10 {
// we call sync up to ten times to give the server time to flush other
@ -151,11 +160,21 @@ async fn test_redacting_name_static() -> Result<()> {
}
}
let room_name_event: SyncRoomNameEvent =
room.get_state_event_static().await?.expect("Room Name not found").deserialize()?;
let room_name_event = room
.get_state_event_static::<RoomNameEventContent>()
.await?
.expect("Room Name not found")
.deserialize()?;
// Name content has been redacted
assert!(
room_name_event.as_original().expect("event exists").content.name.is_none(),
room_name_event
.as_sync()
.expect("event is sync event")
.as_original()
.expect("event exists")
.content
.name
.is_none(),
"Event hasn't been redacted"
);

View File

@ -12,8 +12,10 @@ async fn setup(
let sliding_sync_proxy_url =
option_env!("SLIDING_SYNC_PROXY_URL").unwrap_or("http://localhost:8338").to_owned();
let client = get_client_for_user(name, use_sqlite_store).await?;
let sliding_sync_builder =
client.sliding_sync().homeserver(sliding_sync_proxy_url.parse()?).with_common_extensions();
let sliding_sync_builder = client
.sliding_sync("test-slidingsync")?
.homeserver(sliding_sync_proxy_url.parse()?)
.with_common_extensions();
Ok((client, sliding_sync_builder))
}